From b306fbe361c8162d571fb5b41f88c4732df72f54 Mon Sep 17 00:00:00 2001 From: Axmouth Date: Mon, 20 Jun 2022 01:24:37 +0300 Subject: [PATCH 1/5] Improving testability and fix a bug --- .github/workflows/build-deploy.yml | 68 ++-- .github/workflows/test.yml | 66 ++-- .gitignore | 20 +- Cargo.lock | 175 ++++++--- Cargo.toml | 53 +-- README.md | 2 +- bench_flamegraph.sh | 4 +- benchmarks/Cargo.toml | 32 +- benchmarks/src/bin/inserts.rs | 30 +- benchmarks/src/bin/lex.rs | 70 ++-- benchmarks/src/bin/select.rs | 46 +-- benchmarks/src/lib.rs | 46 +-- engine/Cargo.toml | 55 +-- engine/src/ast.rs | 99 +++-- engine/src/backend.rs | 391 +++++++++---------- engine/src/backend_memory/memory_store.rs | 34 +- engine/src/backend_memory/mod.rs | 18 +- engine/src/lexer/mod.rs | 14 +- engine/src/lib.rs | 74 ++-- engine/src/parser/mod.rs | 253 +++++------- engine/src/sql_types/mod.rs | 74 +--- engine/src/test_impls/mod.rs | 391 +++++++++++++++++-- repl/Cargo.toml | 27 +- repl/src/main.rs | 439 +++++++++++---------- server/Cargo.toml | 22 +- server/src/main.rs | 172 ++++---- test-macros/Cargo.toml | 20 +- test-macros/src/lib.rs | 6 +- test-test/Cargo.toml | 16 +- test-test/src/main.rs | 58 +-- test-util/Cargo.toml | 24 +- test-util/src/lib.rs | 456 ++++++++++++---------- tests/Cargo.toml | 26 +- tests/acceptance/memory1/query2 | 18 +- tests/acceptance/memory1/query3 | 2 +- tests/integration/test1/test.toml | 26 +- tests/src/lib.rs | 74 ++-- tests/unit/parser1/query1 | 165 ++------ tests/unit/parser1/query1.sql | 8 +- tests/unit/parser2/query1 | 141 +++++++ tests/unit/parser2/query1.sql | 10 + tests/unit/parser2/test.toml | 3 + update-gh-pages.sh | 14 +- wasm-repl/Cargo.toml | 34 +- wasm-repl/index.html | 28 +- wasm-repl/main.css | 372 +++++++++--------- wasm-repl/src/components/repl.rs | 4 +- wasm-repl/src/components/results_table.rs | 102 ++--- wasm-repl/src/services/mod.rs | 2 +- wasm-repl/src/services/sqlo2_service.rs | 42 +- wire-protocol/Cargo.toml | 14 +- wire-protocol/src/lib.rs | 100 ++--- 52 files changed, 2488 insertions(+), 1952 deletions(-) create mode 100644 tests/unit/parser2/query1 create mode 100644 tests/unit/parser2/query1.sql create mode 100644 tests/unit/parser2/test.toml diff --git a/.github/workflows/build-deploy.yml b/.github/workflows/build-deploy.yml index f942085..9be2e9d 100644 --- a/.github/workflows/build-deploy.yml +++ b/.github/workflows/build-deploy.yml @@ -1,34 +1,34 @@ -name: Build and Deploy - -on: - push: - branches: - - master -jobs: - build-and-deploy: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v1 - - uses: actions/cache@v1 - with: - path: ~/.cargo - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - restore-keys: | - ${{ runner.os }}-cargo- - - name: Install trunk - uses: actions-rs/install@v0.1 - with: - crate: trunk - version: latest - use-tool-cache: true - - name: Build - run: | - chmod +x ./update-gh-pages.sh - ./update-gh-pages.sh - - name: Deploy - uses: JamesIves/github-pages-deploy-action@releases/v3 - with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - BRANCH: gh-pages - FOLDER: docs +name: Build and Deploy + +on: + push: + branches: + - master +jobs: + build-and-deploy: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v1 + - uses: actions/cache@v1 + with: + path: ~/.cargo + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + - name: Install trunk + uses: actions-rs/install@v0.1 + with: + crate: trunk + version: latest + 
use-tool-cache: true + - name: Build + run: | + chmod +x ./update-gh-pages.sh + ./update-gh-pages.sh + - name: Deploy + uses: JamesIves/github-pages-deploy-action@releases/v3 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BRANCH: gh-pages + FOLDER: docs diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index aaa87bc..e4c450c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,33 +1,33 @@ -name: Run tests - -on: [push, pull_request] - -jobs: - run-tests: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v1 - - - uses: actions/cache@v1 - with: - path: ~/.cargo - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - restore-keys: | - ${{ runner.os }}-cargo- - - - uses: actions/checkout@v1 - - name: Check formatting - uses: icepuma/rust-action@master - with: - args: cargo fmt -- --check - - - name: Run tests - uses: icepuma/rust-action@master - with: - args: cargo test --workspace - - - name: Check for clippy issues - uses: icepuma/rust-action@master - with: - args: cargo clippy --all-targets --workspace -- -Dwarnings +name: Run tests + +on: [push, pull_request] + +jobs: + run-tests: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v1 + + - uses: actions/cache@v1 + with: + path: ~/.cargo + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - uses: actions/checkout@v1 + - name: Check formatting + uses: icepuma/rust-action@master + with: + args: cargo fmt -- --check + + - name: Run tests + uses: icepuma/rust-action@master + with: + args: cargo test --workspace + + - name: Check for clippy issues + uses: icepuma/rust-action@master + with: + args: cargo clippy --all-targets --workspace -- -Dwarnings diff --git a/.gitignore b/.gitignore index 959ed62..6daee7b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,10 @@ -target -*flamegraph.svg -*flamegraph*.svg -*.data -*.data.old -*history.txt -/docs -.idea - -*.test_diff \ No newline at end of file +target +*flamegraph.svg +*flamegraph*.svg +*.data +*.data.old +*history.txt +/docs +.idea + +*.actual \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index b32cbad..c14a286 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -178,6 +178,32 @@ dependencies = [ "winapi", ] +[[package]] +name = "colored" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd" +dependencies = [ + "atty", + "lazy_static", + "winapi", +] + +[[package]] +name = "console" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28b32d32ca44b70c3e4acd7db1babf555fa026e385fb95f18028f88848b3c31" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "regex", + "terminal_size", + "unicode-width", + "winapi", +] + [[package]] name = "console_error_panic_hook" version = "0.1.7" @@ -304,9 +330,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.21" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa" +checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484" dependencies = [ "quote", "syn", @@ -491,7 +517,7 @@ checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" dependencies = [ "cfg-if", "libc", - "wasi 0.10.2+wasi-snapshot-preview1", + "wasi 0.10.0+wasi-snapshot-preview1", ] 
[[package]] @@ -615,6 +641,12 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +[[package]] +name = "heck" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" + [[package]] name = "hermit-abi" version = "0.1.19" @@ -706,9 +738,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.56" +version = "0.3.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +checksum = "671a26f820db17c2a2750743f1dd03bafd15b98c9f30c7c2628c024c05d73397" dependencies = [ "wasm-bindgen", ] @@ -727,9 +759,9 @@ checksum = "565dbd88872dbe4cc8a46e527f26483c1d1f7afa6b884a3bd6cd893d4f98da74" [[package]] name = "libmimalloc-sys" -version = "0.1.23" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9636c194f9db483f4d0adf2f99a65011a99f904bd222bbd67fb4df4f37863c30" +checksum = "11ca136052550448f55df7898c6dbe651c6b574fe38a0d9ea687a9f8088a2e2c" dependencies = [ "cc", ] @@ -742,9 +774,9 @@ checksum = "95f5690fef754d905294c56f7ac815836f2513af966aa47f2e07ac79be07827f" [[package]] name = "log" -version = "0.4.14" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if", ] @@ -772,9 +804,9 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.27" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf5f78c1d9892fb5677a8b2f543f967ab891ac0f71feecd961435b74f877283a" +checksum = "2f64ad83c969af2e732e907564deb0d0ed393cec4af80776f77dd77a1a427698" dependencies = [ "libmimalloc-sys", ] @@ -892,9 +924,9 @@ dependencies = [ [[package]] name = "pretty_assertions" -version = "1.0.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0cfe1b2403f172ba0f234e500906ee0a3e493fb81092dac23ebefe129301cc" +checksum = "c89f989ac94207d048d92db058e4f6ec7342b0971fc58d1271ca148b799b3563" dependencies = [ "ansi_term", "ctor", @@ -951,9 +983,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.15" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1" dependencies = [ "proc-macro2", ] @@ -1031,9 +1063,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.4" +version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" dependencies = [ "aho-corasick", "memchr", @@ -1136,6 +1168,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "rustyline-derive" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb35a55ab810b5c0fe31606fe9b47d1354e4dc519bec0a102655f78ea2b38057" +dependencies = [ + "quote", + "syn", +] + [[package]] name = "ryu" version = "1.0.9" @@ -1186,9 +1228,9 @@ checksum = 
"388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.136" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" dependencies = [ "serde_derive", ] @@ -1217,9 +1259,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.136" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" dependencies = [ "proc-macro2", "quote", @@ -1239,9 +1281,9 @@ dependencies = [ [[package]] name = "serde_qs" -version = "0.8.5" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" +checksum = "6af4cee6cd4b23b45e6709150d1e9af5c748131de7e3316a7c2b3008051ed725" dependencies = [ "percent-encoding", "serde", @@ -1275,6 +1317,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" +[[package]] +name = "similar" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3" + [[package]] name = "slab" version = "0.4.5" @@ -1303,6 +1351,7 @@ dependencies = [ "serde", "test-macros", "test-util", + "tree-display", ] [[package]] @@ -1320,11 +1369,14 @@ dependencies = [ name = "sqlo2_repl" version = "0.1.0" dependencies = [ + "colored", "prettytable-rs", "rustc_version_runtime", "rustyline", + "rustyline-derive", "sqlo2", "sysinfo", + "termcolor", ] [[package]] @@ -1422,9 +1474,9 @@ checksum = "d44a3643b4ff9caf57abcee9c2c621d6c03d9135e0d8b589bd9afb5992cb176a" [[package]] name = "syn" -version = "1.0.86" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +checksum = "a07e33e919ebcd69113d5be0e4d70c5707004ff45188910106854f38b960df4a" dependencies = [ "proc-macro2", "quote", @@ -1433,9 +1485,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.23.0" +version = "0.23.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e757000a4bed2b1be9be65a3f418b9696adf30bb419214c73997422de73a591" +checksum = "56b1e20ee77901236c389ff74618a899ff5fd34719a7ff0fd1d64f0acca5179a" dependencies = [ "cfg-if", "core-foundation-sys", @@ -1459,26 +1511,40 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" dependencies = [ "winapi-util", ] +[[package]] +name = "terminal_size" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "test-macros" version = "0.1.0" dependencies = [ "lazy_static", + "quote", + "syn", ] [[package]] name = "test-util" version = "0.1.0" dependencies = [ + "console", "pretty_assertions", 
"serde", + "similar", "toml", ] @@ -1540,13 +1606,32 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" dependencies = [ "serde", ] +[[package]] +name = "tree-display" +version = "0.1.0" +source = "git+https://github.com/Axmouth/tree-display-rs?branch=main#2219e96dd713f223816b891951a22e1f2f3ddaf3" +dependencies = [ + "tree-display-macros", +] + +[[package]] +name = "tree-display-macros" +version = "0.1.0" +source = "git+https://github.com/Axmouth/tree-display-rs?branch=main#2219e96dd713f223816b891951a22e1f2f3ddaf3" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "unicode-segmentation" version = "1.8.0" @@ -1596,15 +1681,15 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" -version = "0.10.2+wasi-snapshot-preview1" +version = "0.10.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" [[package]] name = "wasm-bindgen" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +checksum = "27370197c907c55e3f1a9fbe26f44e937fe6451368324e009cba39e139dc08ad" dependencies = [ "cfg-if", "serde", @@ -1614,9 +1699,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +checksum = "53e04185bfa3a779273da532f5025e33398409573f348985af9a1cbf3774d3f4" dependencies = [ "bumpalo", "lazy_static", @@ -1641,9 +1726,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +checksum = "17cae7ff784d7e83a2fe7611cfe766ecf034111b49deb850a3dc7699c08251f5" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1651,9 +1736,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b" dependencies = [ "proc-macro2", "quote", @@ -1664,9 +1749,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" +checksum = "d554b7f530dee5964d9a9468d95c1f8b8acae4f282807e7d27d4b03099a46744" [[package]] name = "wasm-logger" @@ -1681,9 +1766,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.56" +version = "0.3.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" +checksum = 
"7b17e741662c70c8bd24ac5c5b18de314a2c26c32bf8346ee1e6f53de919c283" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml index c03f097..bd79915 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,25 +1,28 @@ -[workspace] - -members = [ - "engine", - "benchmarks", - "repl", - "server", - "test-macros", - "test-test", - "test-util", - "tests", - "wasm-repl", - "wire-protocol" -] - -[profile.bench] -debug = true -lto = "thin" - -[profile.test] -debug = true - -[profile.release] -debug = false -lto = "thin" +[workspace] + +members = [ + "engine", + "benchmarks", + "repl", + "server", + "test-macros", + "test-test", + "test-util", + "tests", + "wasm-repl", + "wire-protocol" +] + +[profile.bench] +debug = true +lto = "thin" + +[profile.test] +debug = true + +[profile.release] +debug = false +lto = "thin" + +# [patch."https://github.com/Axmouth/tree-display-rs"] +# tree-display = { path = "../tree-display/tree-display" } \ No newline at end of file diff --git a/README.md b/README.md index 00d7bdd..236d407 100644 --- a/README.md +++ b/README.md @@ -1 +1 @@ -WIP +WIP diff --git a/bench_flamegraph.sh b/bench_flamegraph.sh index e59de54..5230e37 100644 --- a/bench_flamegraph.sh +++ b/bench_flamegraph.sh @@ -1,3 +1,3 @@ - -CARGO_PROFILE_RELEASE_DEBUG=true cargo flamegraph --bin==select + +CARGO_PROFILE_RELEASE_DEBUG=true cargo flamegraph --bin==select env PERF=/usr/lib/linux-tools-5.4.0-84/perf flamegraph target/release/select \ No newline at end of file diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index 4493137..314b304 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -1,17 +1,17 @@ -[package] -name = "sqlo2_benchmarks" -version = "0.1.0" -authors = ["Axmouth "] -edition = "2021" - -[dependencies] -sqlo2 = { path = "../engine" } -criterion = "=0.3" -instant = { version = "=0.1", features = [ "now" ] } -alloc_counter = "=0.0.4" -sysinfo = "=0.23.0" - - -[[bench]] -name = "bench_main" +[package] +name = "sqlo2_benchmarks" +version = "0.1.0" +authors = ["Axmouth "] +edition = "2021" + +[dependencies] +sqlo2 = { path = "../engine" } +criterion = "=0.3" +instant = { version = "=0.1", features = [ "now" ] } +alloc_counter = "=0.0.4" +sysinfo = "=0.23.12" + + +[[bench]] +name = "bench_main" harness = false \ No newline at end of file diff --git a/benchmarks/src/bin/inserts.rs b/benchmarks/src/bin/inserts.rs index 770b878..d4d3d25 100644 --- a/benchmarks/src/bin/inserts.rs +++ b/benchmarks/src/bin/inserts.rs @@ -1,15 +1,15 @@ -use sqlo2::*; - -fn main() { - let mut db = backend_memory::MemoryBackend::new(); - db.eval_query( - "CREATE TABLE people (id INT, name TEXT);" - .to_owned() - .as_str(), - ) - .unwrap(); - for i in 0..1000000 { - db.eval_query(&format!("INSERT INTO people VALUES ({i}, 'Baam{i}');")) - .unwrap(); - } -} +use sqlo2::*; + +fn main() { + let mut db = backend_memory::MemoryBackend::new(); + db.eval_query( + "CREATE TABLE people (id INT, name TEXT);" + .to_owned() + .as_str(), + ) + .unwrap(); + for i in 0..1000000 { + db.eval_query(&format!("INSERT INTO people VALUES ({i}, 'Baam{i}');")) + .unwrap(); + } +} diff --git a/benchmarks/src/bin/lex.rs b/benchmarks/src/bin/lex.rs index a1c5e2b..5c1f832 100644 --- a/benchmarks/src/bin/lex.rs +++ b/benchmarks/src/bin/lex.rs @@ -1,35 +1,35 @@ -use criterion::black_box; -use instant::Instant; -use sqlo2::lexer; - -fn lex_benchmark() { - let lexer = lexer::Lexer::new(); - lexer.lex(black_box(" - CREATE TABLE people (id INT PRIMARY KEY, name TEXT); INSERT INTO people VALUES (1, 'Baam'); INSERT INTO 
people VALUES (2, 'Rachel'); INSERT INTO people VALUES (3, 'Rak WraithKaiser'); INSERT INTO people VALUES (4, 'Khun Aguero Agnes'); - SELECT id, name FROM people; - SELECT id, name FROM people where id != 3; - SELECT id, name FROM people where name = 'Rachel';".to_owned().as_str())).unwrap(); -} - -fn lex_select_benchmark() { - let lexer = lexer::Lexer::new(); - lexer.lex(black_box(" - SELECT id, name FROM people; - SELECT id, name FROM people where id != 3; - SELECT id, name FROM people where name = 'Rachel'; - SELECT id, age, role, job, position, country, address from people WHERE country = 'GR' AND age > 17 - SELECT id, age, role, job, position, country, address from people WHERE country = 'GR' AND age > 17 INNER LEFT JOIN ON jobs".to_owned().as_str())).unwrap(); -} - -fn main() { - let before = Instant::now(); - for _ in 0..100000 { - lex_benchmark(); - } - for _ in 0..100000 { - lex_select_benchmark(); - } - let after = before.elapsed(); - let avg = after / 200000; - println!("Average time: {:.2?}", avg); -} +use criterion::black_box; +use instant::Instant; +use sqlo2::lexer; + +fn lex_benchmark() { + let lexer = lexer::Lexer::new(); + lexer.lex(black_box(" + CREATE TABLE people (id INT PRIMARY KEY, name TEXT); INSERT INTO people VALUES (1, 'Baam'); INSERT INTO people VALUES (2, 'Rachel'); INSERT INTO people VALUES (3, 'Rak WraithKaiser'); INSERT INTO people VALUES (4, 'Khun Aguero Agnes'); + SELECT id, name FROM people; + SELECT id, name FROM people where id != 3; + SELECT id, name FROM people where name = 'Rachel';".to_owned().as_str())).unwrap(); +} + +fn lex_select_benchmark() { + let lexer = lexer::Lexer::new(); + lexer.lex(black_box(" + SELECT id, name FROM people; + SELECT id, name FROM people where id != 3; + SELECT id, name FROM people where name = 'Rachel'; + SELECT id, age, role, job, position, country, address from people WHERE country = 'GR' AND age > 17 + SELECT id, age, role, job, position, country, address from people WHERE country = 'GR' AND age > 17 INNER LEFT JOIN ON jobs".to_owned().as_str())).unwrap(); +} + +fn main() { + let before = Instant::now(); + for _ in 0..100000 { + lex_benchmark(); + } + for _ in 0..100000 { + lex_select_benchmark(); + } + let after = before.elapsed(); + let avg = after / 200000; + println!("Average time: {:.2?}", avg); +} diff --git a/benchmarks/src/bin/select.rs b/benchmarks/src/bin/select.rs index 3f27629..c306682 100644 --- a/benchmarks/src/bin/select.rs +++ b/benchmarks/src/bin/select.rs @@ -1,23 +1,23 @@ -use sqlo2::*; - -fn run_load() { - let mut db = backend_memory::MemoryBackend::new(); - db.eval_query( - "CREATE TABLE people (id INT, name TEXT);" - .to_owned() - .as_str(), - ) - .unwrap(); - for i in 0..1000000 { - db.eval_query(&format!("INSERT INTO people VALUES ({i}, 'Baam{i}');")) - .unwrap(); - } - for _ in 0..100 { - db.eval_query("SELECT * FROM people WHERE id = 999999;") - .unwrap(); - } -} - -fn main() { - run_load(); -} +use sqlo2::*; + +fn run_load() { + let mut db = backend_memory::MemoryBackend::new(); + db.eval_query( + "CREATE TABLE people (id INT, name TEXT);" + .to_owned() + .as_str(), + ) + .unwrap(); + for i in 0..1000000 { + db.eval_query(&format!("INSERT INTO people VALUES ({i}, 'Baam{i}');")) + .unwrap(); + } + for _ in 0..100 { + db.eval_query("SELECT * FROM people WHERE id = 999999;") + .unwrap(); + } +} + +fn main() { + run_load(); +} diff --git a/benchmarks/src/lib.rs b/benchmarks/src/lib.rs index c018e8c..42ea3c0 100644 --- a/benchmarks/src/lib.rs +++ b/benchmarks/src/lib.rs @@ -1,23 +1,23 @@ 
-#[cfg(test)] -mod tests { - - use alloc_counter::{count_alloc, AllocCounterSystem}; - - #[global_allocator] - static A: AllocCounterSystem = AllocCounterSystem; - - #[test] - fn count_alloc_works() { - let ((allocations, reallocations, deallocations), _) = count_alloc(|| { - let a = "dfdddfdf"; - let _ = a.as_bytes().iter().copied().collect::>(); - }); - println!("Allocations : {allocations}"); - println!("Rellocations : {reallocations}"); - println!("Dellocations : {deallocations}"); - - assert_eq!(allocations, 1); - assert_eq!(reallocations, 0); - assert_eq!(deallocations, 1); - } -} +#[cfg(test)] +mod tests { + + use alloc_counter::{count_alloc, AllocCounterSystem}; + + #[global_allocator] + static A: AllocCounterSystem = AllocCounterSystem; + + #[test] + fn count_alloc_works() { + let ((allocations, reallocations, deallocations), _) = count_alloc(|| { + let a = "dfdddfdf"; + let _ = a.as_bytes().to_vec(); + }); + println!("Allocations : {allocations}"); + println!("Rellocations : {reallocations}"); + println!("Dellocations : {deallocations}"); + + assert_eq!(allocations, 1); + assert_eq!(reallocations, 0); + assert_eq!(deallocations, 1); + } +} diff --git a/engine/Cargo.toml b/engine/Cargo.toml index 2dd0500..303899e 100644 --- a/engine/Cargo.toml +++ b/engine/Cargo.toml @@ -1,27 +1,28 @@ -[package] -name = "sqlo2" -version = "0.1.0" -authors = ["Axmouth "] -edition = "2021" - -[features] -default = [] -stdweb = [ "instant/stdweb" ] -wasm-bindgen = [ "instant/wasm-bindgen" ] -wasm = ["stdweb", "wasm-bindgen"] - -[dependencies] -byteorder = "=1.4.3" -bytes = "=1.1.0" -regex = "=1.5.4" -lazy_static = "=1.4.0" -instant = { version = "=0.1.12", features = [ "now" ] } -serde = { version = "=1.0.136", features = ["derive"] } -jemallocator = {version = "=0.3.2", optional = true } -mimalloc = { version = "=0.1.27", default-features = false, optional = true } -alloc_counter = {version = "=0.0.4", optional = true } -test-util = { path = "../test-util" } - -[dev-dependencies] -pretty_assertions = "=1.0.0" -test-macros = { path = "../test-macros" } \ No newline at end of file +[package] +name = "sqlo2" +version = "0.1.0" +authors = ["Axmouth "] +edition = "2021" + +[features] +default = [] +stdweb = [ "instant/stdweb" ] +wasm-bindgen = [ "instant/wasm-bindgen" ] +wasm = ["stdweb", "wasm-bindgen"] + +[dependencies] +byteorder = "=1.4.3" +bytes = "=1.1.0" +regex = "=1.5.5" +lazy_static = "=1.4.0" +instant = { version = "=0.1.12", features = [ "now" ] } +serde = { version = "=1.0.137", features = ["derive"] } +jemallocator = {version = "=0.3.2", optional = true } +mimalloc = { version = "=0.1.29", default-features = false, optional = true } +alloc_counter = {version = "=0.0.4", optional = true } +test-util = { path = "../test-util" } +test-macros = { path = "../test-macros" } +tree-display = { git = "https://github.com/Axmouth/tree-display-rs", branch = "main" } + +[dev-dependencies] +pretty_assertions = "=1.2.1" \ No newline at end of file diff --git a/engine/src/ast.rs b/engine/src/ast.rs index ad534b3..5dfc02e 100644 --- a/engine/src/ast.rs +++ b/engine/src/ast.rs @@ -1,13 +1,23 @@ +use std::fmt::Display; + +use tree_display::{tree_display_macros::TreeDisplay, TreeDisplay}; + use crate::{parser::ParsingError, sql_types::SqlType}; use super::lexer::*; -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] pub struct Ast { pub statements: Vec, } -#[derive(Clone, Eq, PartialEq, Debug)] +impl Display for Ast { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> 
std::fmt::Result { + self.tree_fmt(f, Default::default(), Default::default()) + } +} + +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] pub enum Statement { SelectStatement(SelectStatement), CreateTableStatement(CreateTableStatement), @@ -16,20 +26,22 @@ pub enum Statement { InsertStatement(InsertStatement), } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct OrderByClause { pub asc: bool, pub exp: Expression, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct JoinClause { pub kind: JoinKind, pub source: RowDataSource, pub on: Expression, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] pub enum JoinKind { Inner, FullOuter, @@ -37,60 +49,79 @@ pub enum JoinKind { RightOuter, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub enum RowDataSource { SubSelect { select: SelectStatement, as_clause: String, + #[tree_display(skip_if_empty)] joins: Vec, }, Table { table_name: String, + #[tree_display(skip_if_none)] as_clause: Option, + #[tree_display(skip_if_empty)] joins: Vec, }, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct TableColumn { pub col_name: String, + #[tree_display(skip_if_none)] pub table_name: Option, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct ProcessedTableColumn { pub col_name: Option, pub col_idx: usize, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct InsertStatement { pub table: String, pub values: Vec, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct CreateTableStatement { pub name: String, + #[tree_display(skip_if_empty)] pub cols: Vec, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct ColumnDefinition { pub name: String, pub data_type: SqlType, + #[tree_display(skip_if_false)] pub is_primary_key: bool, } -#[derive(Clone, Eq, PartialEq, Debug, Default)] +#[derive(Clone, Eq, PartialEq, Debug, Default, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct SelectStatement { - pub items: Vec, + #[tree_display(skip_if_empty)] pub from: Vec, + #[tree_display(skip_if_empty)] pub where_clause: Expression, + #[tree_display(skip_if_false, rename = "Distinct")] pub is_distinct: bool, + #[tree_display(skip_if_none)] pub order_by: Option, + #[tree_display(skip_if_none)] pub limit: Option, + #[tree_display(skip_if_none)] pub offset: Option, + pub items: Vec, } impl SelectStatement { @@ -107,7 +138,8 @@ impl SelectStatement { } } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct CreateIndexStatement { pub name: String, pub is_unique: bool, @@ -128,14 +160,16 @@ impl CreateIndexStatement { } } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct CreateConstraintStatement { pub name: String, pub constraint: ConstraintType, pub table: 
String, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub enum ConstraintType { Foreign { references: Vec<(String, String)> }, Check { expression: Expression }, @@ -153,12 +187,14 @@ impl CreateConstraintStatement { } } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct DropTableStatement { pub name: String, } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub enum Expression { Literal(LiteralExpression), Binary(BinaryExpression), @@ -215,28 +251,24 @@ impl Expression { } } - #[inline] pub fn is_unary(&self) -> bool { matches!(self, Expression::Unary(_)) } - #[inline] pub fn is_binary(&self) -> bool { matches!(self, Expression::Binary(_)) } - #[inline] pub fn is_literal(&self) -> bool { matches!(self, Expression::Literal(_)) } - #[inline] pub fn is_empty(&self) -> bool { matches!(self, Expression::Empty) } } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] pub enum LiteralExpression { String(String), Identifier(String), @@ -277,7 +309,7 @@ impl LiteralExpression { } } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] pub enum Operand { Add, Subtract, @@ -418,7 +450,8 @@ impl Operand { } } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct BinaryExpression { pub first: Box, pub second: Box, @@ -436,7 +469,8 @@ impl BinaryExpression { } } -#[derive(Clone, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct UnaryExpression { pub first: Box, pub operand: Operand, @@ -458,10 +492,13 @@ impl Token<'_> { } } -#[derive(Clone, Eq, PartialEq, Debug, Default)] +#[derive(Clone, Eq, PartialEq, Debug, Default, TreeDisplay)] +#[tree_display(rename_all_pascal)] pub struct SelectItem { pub expression: Expression, + #[tree_display(skip_if_none)] pub as_clause: Option, + #[tree_display(skip_if_false)] pub asterisk: bool, } @@ -480,6 +517,14 @@ mod ast_tests { use super::super::ast::*; use super::super::parser::*; + #[test] + fn test_ast_fmt() { + let sql = "SELECT * FROM table1"; + let parser = Parser::new(); + let ast = parser.parse(sql).unwrap(); + eprintln!("{}", ast); + } + struct ParseTest { ast: Ast, input: &'static str, diff --git a/engine/src/backend.rs b/engine/src/backend.rs index e2d7765..3e0c793 100644 --- a/engine/src/backend.rs +++ b/engine/src/backend.rs @@ -1,201 +1,190 @@ -use byteorder::{BigEndian, ReadBytesExt}; - -use crate::sql_types::{SqlType, SqlValue}; - -use super::ast::*; -use serde::{Deserialize, Serialize}; -use std::{io::Read, time::Duration}; - -pub trait Cell { - fn as_text(&self) -> Result; - fn as_int(&self) -> Result; - fn as_num(&self, typ: SqlType) -> Result; - fn as_bool(&self) -> Result; - fn equals(&self, other: Self) -> bool; -} -#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] -pub struct ResultColumn { - pub col_type: SqlType, - pub name: String, -} - -impl From for String { - fn from(col_type: SqlType) -> Self { - match col_type { - SqlType::Char => "Char".to_string(), - SqlType::Text => "Text".to_string(), - SqlType::VarChar => "Varchar".to_string(), - SqlType::SmallInt => "Smallint".to_string(), - SqlType::Int => "Int".to_string(), - SqlType::BigInt => 
"Bigint".to_string(), - SqlType::Real => "Real".to_string(), - SqlType::DoublePrecision => "Double Precision".to_string(), - SqlType::Boolean => "Bool".to_string(), - SqlType::Null => "Null".to_string(), - SqlType::Type => "Type".to_string(), - } - } -} - -impl From<&SqlType> for String { - fn from(col_type: &SqlType) -> Self { - match col_type { - SqlType::Char => "Char".to_string(), - SqlType::Text => "Text".to_string(), - SqlType::VarChar => "Varchar".to_string(), - SqlType::SmallInt => "Smallint".to_string(), - SqlType::Int => "Int".to_string(), - SqlType::BigInt => "Bigint".to_string(), - SqlType::Real => "Real".to_string(), - SqlType::DoublePrecision => "Double Precision".to_string(), - SqlType::Boolean => "Bool".to_string(), - SqlType::Null => "Null".to_string(), - SqlType::Type => "Type".to_string(), - } - } -} - -impl std::fmt::Display for SqlType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(String::from(self).as_str()) - } -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub enum EvalResult { - Select { - results: QueryResults, - time: Duration, - }, - Insert { - success: bool, - time: Duration, - }, - CreateTable { - success: bool, - time: Duration, - }, - DropTable { - success: bool, - time: Duration, - }, -} - -impl EvalResult { - pub fn zero_time(&mut self) { - match self { - EvalResult::Select { time, .. } => *time = Duration::new(0, 0), - EvalResult::Insert { time, .. } => *time = Duration::new(0, 0), - EvalResult::CreateTable { time, .. } => *time = Duration::new(0, 0), - EvalResult::DropTable { time, .. } => *time = Duration::new(0, 0), - } - } -} - -impl PartialEq for EvalResult -where - C: PartialEq, -{ - fn eq(&self, other: &Self) -> bool { - match (self, other) { - ( - EvalResult::Select { results, time: _ }, - EvalResult::Select { - results: other_results, - time: _, - }, - ) => results == other_results, - ( - EvalResult::Insert { success, time: _ }, - EvalResult::Insert { - success: other_success, - time: _, - }, - ) => success == other_success, - ( - EvalResult::CreateTable { success, time: _ }, - EvalResult::CreateTable { - success: other_success, - time: _, - }, - ) => success == other_success, - ( - EvalResult::DropTable { success, time: _ }, - EvalResult::DropTable { - success: other_success, - time: _, - }, - ) => success == other_success, - _ => false, - } - } -} - -pub type ResultColumns = Vec; - -#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Default)] -pub struct QueryResults { - pub columns: ResultColumns, - pub rows: Vec>, -} - -pub const ERR_TABLE_DOES_NOT_EXIST: &str = "Table does not exist."; -pub const ERR_COLUMN_DOES_NOT_EXIST: &str = "Column does not exist."; -pub const ERR_INVALID_SELECT_ITEM: &str = "Select item is not valid."; -pub const ERR_INVALID_DATA_TYPE: &str = "Invalid data type."; -pub const ERR_MISSING_VALUES: &str = "Missing values."; - -pub trait Backend { - fn create_table(_: CreateTableStatement) -> Result; - fn insert(_: InsertStatement) -> Result; - fn select(_: SelectStatement) -> Result, String>; - fn eval_query(query: String) -> Result>, String>; -} - -pub type MemoryCellData = Vec; - -#[derive(Clone, Eq, PartialEq, Debug)] -pub struct MemoryCell { - pub bytes: MemoryCellData, -} - -impl Cell for MemoryCell { - fn as_int(&self) -> Result { - let mut rdr = std::io::Cursor::new(&self.bytes); - match rdr.read_i32::() { - Ok(result) => Ok(result), - Err(_err) => Err("Failed to parse bytes to int32."), - } - } - - fn as_num(&self, typ: SqlType) -> Result { - let text = match 
SqlValue::decode_type(self, typ) { - Ok(val) => val.to_string(), - Err(_) => { - return Err("Failed to parse bytes to double precision."); - } - }; - match text.parse::() { - Ok(val) => Ok(val), - Err(_) => Err("Failed to parse bytes to double precision."), - } - } - - fn as_bool(&self) -> Result { - Ok(self.bytes != vec![0]) - } - - fn as_text(&self) -> Result { - let mut rdr = std::io::Cursor::new(&self.bytes); - - let mut text = "".to_owned(); - match rdr.read_to_string(&mut text) { - Ok(_) => Ok(text), - Err(_err) => Err("Failed to parse bytes to String."), - } - } - - fn equals(&self, other: Self) -> bool { - self.bytes == other.bytes - } -} +use byteorder::{BigEndian, ReadBytesExt}; + +use crate::sql_types::{SqlType, SqlValue}; + +use super::ast::*; +use serde::{Deserialize, Serialize}; +use std::{io::Read, time::Duration}; + +pub trait Cell { + fn as_text(&self) -> Result; + fn as_int(&self) -> Result; + fn as_num(&self, typ: SqlType) -> Result; + fn as_bool(&self) -> Result; + fn equals(&self, other: Self) -> bool; +} +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct ResultColumn { + pub col_type: SqlType, + pub name: String, +} + +impl From for String { + fn from(col_type: SqlType) -> Self { + match col_type { + SqlType::Char => "Char".to_string(), + SqlType::Text => "Text".to_string(), + SqlType::VarChar => "Varchar".to_string(), + SqlType::SmallInt => "Smallint".to_string(), + SqlType::Int => "Int".to_string(), + SqlType::BigInt => "Bigint".to_string(), + SqlType::Real => "Real".to_string(), + SqlType::DoublePrecision => "Double Precision".to_string(), + SqlType::Boolean => "Bool".to_string(), + SqlType::Null => "Null".to_string(), + SqlType::Type => "Type".to_string(), + } + } +} + +impl From<&SqlType> for String { + fn from(col_type: &SqlType) -> Self { + match col_type { + SqlType::Char => "Char".to_string(), + SqlType::Text => "Text".to_string(), + SqlType::VarChar => "Varchar".to_string(), + SqlType::SmallInt => "Smallint".to_string(), + SqlType::Int => "Int".to_string(), + SqlType::BigInt => "Bigint".to_string(), + SqlType::Real => "Real".to_string(), + SqlType::DoublePrecision => "Double Precision".to_string(), + SqlType::Boolean => "Bool".to_string(), + SqlType::Null => "Null".to_string(), + SqlType::Type => "Type".to_string(), + } + } +} + +impl std::fmt::Display for SqlType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(String::from(self).as_str()) + } +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub enum EvalResult { + Select { + results: QueryResults, + time: Duration, + }, + Insert { + success: bool, + time: Duration, + }, + CreateTable { + success: bool, + time: Duration, + }, + DropTable { + success: bool, + time: Duration, + }, +} + +impl PartialEq for EvalResult +where + C: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + match (self, other) { + ( + EvalResult::Select { results, time: _ }, + EvalResult::Select { + results: other_results, + time: _, + }, + ) => results == other_results, + ( + EvalResult::Insert { success, time: _ }, + EvalResult::Insert { + success: other_success, + time: _, + }, + ) => success == other_success, + ( + EvalResult::CreateTable { success, time: _ }, + EvalResult::CreateTable { + success: other_success, + time: _, + }, + ) => success == other_success, + ( + EvalResult::DropTable { success, time: _ }, + EvalResult::DropTable { + success: other_success, + time: _, + }, + ) => success == other_success, + _ => false, + } + } +} + +pub type ResultColumns = Vec; 
+ +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Default)] +pub struct QueryResults { + pub columns: ResultColumns, + pub rows: Vec>, +} + +pub const ERR_TABLE_DOES_NOT_EXIST: &str = "Table does not exist."; +pub const ERR_COLUMN_DOES_NOT_EXIST: &str = "Column does not exist."; +pub const ERR_INVALID_SELECT_ITEM: &str = "Select item is not valid."; +pub const ERR_INVALID_DATA_TYPE: &str = "Invalid data type."; +pub const ERR_MISSING_VALUES: &str = "Missing values."; + +pub trait Backend { + fn create_table(_: CreateTableStatement) -> Result; + fn insert(_: InsertStatement) -> Result; + fn select(_: SelectStatement) -> Result, String>; + fn eval_query(query: String) -> Result>, String>; +} + +pub type MemoryCellData = Vec; + +#[derive(Clone, Eq, PartialEq, Debug)] +pub struct MemoryCell { + pub bytes: MemoryCellData, +} + +impl Cell for MemoryCell { + fn as_int(&self) -> Result { + let mut rdr = std::io::Cursor::new(&self.bytes); + match rdr.read_i32::() { + Ok(result) => Ok(result), + Err(_err) => Err("Failed to parse bytes to int32."), + } + } + + fn as_num(&self, typ: SqlType) -> Result { + let text = match SqlValue::decode_type(self, typ) { + Ok(val) => val.to_string(), + Err(_) => { + return Err("Failed to parse bytes to double precision."); + } + }; + match text.parse::() { + Ok(val) => Ok(val), + Err(_) => Err("Failed to parse bytes to double precision."), + } + } + + fn as_bool(&self) -> Result { + Ok(self.bytes != vec![0]) + } + + fn as_text(&self) -> Result { + let mut rdr = std::io::Cursor::new(&self.bytes); + + let mut text = "".to_owned(); + match rdr.read_to_string(&mut text) { + Ok(_) => Ok(text), + Err(_err) => Err("Failed to parse bytes to String."), + } + } + + fn equals(&self, other: Self) -> bool { + self.bytes == other.bytes + } +} diff --git a/engine/src/backend_memory/memory_store.rs b/engine/src/backend_memory/memory_store.rs index 9037567..4a8418a 100644 --- a/engine/src/backend_memory/memory_store.rs +++ b/engine/src/backend_memory/memory_store.rs @@ -1,13 +1,15 @@ -#[derive(Debug, Default, Clone)] -pub struct MemorySection { - data: Vec, +#[derive(Debug, Clone)] +pub struct MemorySection { + data: [u8; N], } -impl MemorySection { - pub fn new(size: usize) -> Self { - Self { - data: vec![0; size], - } +impl MemorySection { + pub fn new() -> Self { + Self { data: [0u8; N] } + } + + pub fn page_size() -> usize { + N } pub fn len(&self) -> usize { @@ -27,12 +29,18 @@ impl MemorySection { } } +impl Default for MemorySection { + fn default() -> Self { + Self::new() + } +} + #[derive(Debug, Default, Clone)] -pub struct MemoryStore { - sections: Vec, +pub struct MemoryStore { + sections: Vec>, } -impl MemoryStore { +impl MemoryStore { pub fn new() -> Self { Self { sections: vec![] } } @@ -44,4 +52,8 @@ impl MemoryStore { pub fn is_empty(&self) -> bool { self.sections.is_empty() } + + pub fn section(&self, index: usize) -> Option<&MemorySection> { + self.sections.get(index) + } } diff --git a/engine/src/backend_memory/mod.rs b/engine/src/backend_memory/mod.rs index 99cabb2..6b5889a 100644 --- a/engine/src/backend_memory/mod.rs +++ b/engine/src/backend_memory/mod.rs @@ -17,6 +17,7 @@ use crate::{ use instant::Instant; use std::collections::HashMap; use test_util::TestSubjectExt; +use tree_display::TreeDisplay; const ERR_INVALID_CELL: &str = "Invalid Cell"; //TODO: @@ -209,7 +210,6 @@ impl From> for Table { } impl Table { - #[inline] pub fn evaluate_literal_cell( &self, row_index: usize, @@ -298,7 +298,6 @@ impl Table { } } - #[inline] pub fn 
evaluate_binary_cell( &self, row_index: usize, @@ -465,7 +464,6 @@ impl Table { } } - #[inline] pub fn evaluate_cell( &self, row_index: usize, @@ -732,14 +730,13 @@ impl MemoryBackend { return Err(ERR_TABLE_DOES_NOT_EXIST.to_string()); } Some(table) => { - let mut new_table; let from_name = if let Some(from_name) = as_clause { from_name.clone() } else { from_name.clone() }; - new_table = TableContainer::Concrete(table); + let mut new_table = TableContainer::Concrete(table); for (index, exp) in table.get_applicable_indexes(Some(&select_statement.where_clause))? { @@ -788,14 +785,13 @@ impl MemoryBackend { return Err(ERR_TABLE_DOES_NOT_EXIST.to_string()); } Some(table) => { - let mut new_table; let from_name = if let Some(from_name) = as_clause { from_name.clone() } else { from_name.clone() }; - new_table = TableContainer::Concrete(table); + let mut new_table = TableContainer::Concrete(table); for (index, exp) in table.get_applicable_indexes(Some(&select_statement.where_clause))? { @@ -1108,7 +1104,12 @@ impl MemoryBackend { Err(err) => return Err(err.to_string()), }; - let mut eval_results = vec![]; + println!( + "Ast\n{}", + ast.tree_print(Default::default(), Default::default()) + ); + + let mut eval_results = Vec::new(); for statement in ast.statements { match statement { @@ -1214,7 +1215,6 @@ pub fn linearize_expressions( } } -#[inline] pub fn literal_to_memory_cell(literal: &LiteralExpression) -> Result { match literal { LiteralExpression::Numeric(value) => { diff --git a/engine/src/lexer/mod.rs b/engine/src/lexer/mod.rs index cace5c0..44561e7 100644 --- a/engine/src/lexer/mod.rs +++ b/engine/src/lexer/mod.rs @@ -266,14 +266,16 @@ impl<'a> Token<'_> { Token::Modulo => 6, Token::Exponentiation => 6, - // Unary ops + // Prefix Unary ops Token::SquareRoot => 7, Token::CubeRoot => 7, - Token::Factorial => 7, Token::FactorialPrefix => 7, + // Postfix Unary ops + Token::Factorial => 8, + // Cast - Token::TypeCast => 8, + Token::TypeCast => 9, _ => 0, } @@ -610,12 +612,10 @@ static KEYWORDS: &[&str] = &[ ]; impl TokenContainer<'_> { - #[inline] pub fn equals(&self, other: &Self) -> bool { self.token == other.token } - #[inline] pub fn binding_power(&self) -> u32 { self.token.binding_power() } @@ -1284,22 +1284,18 @@ pub fn get_location_from_cursor(source: &str, cursor: usize) -> TokenLocation { } } -#[inline] fn get_chat_at(source: &str, position: usize) -> Option { source[position..(position + 1)].chars().next() } -#[inline] fn is_char_alphabetical(c: char) -> bool { ('A'..='Z').contains(&c) || ('a'..='z').contains(&c) } -#[inline] fn is_char_digit(c: char) -> bool { ('0'..='9').contains(&c) } -#[inline] fn is_char_valid_for_identifier(c: char) -> bool { is_char_alphabetical(c) || is_char_digit(c) || c == '$' || c == '_' } diff --git a/engine/src/lib.rs b/engine/src/lib.rs index f24c7be..52a80ff 100644 --- a/engine/src/lib.rs +++ b/engine/src/lib.rs @@ -1,37 +1,37 @@ -#[cfg(not(target_env = "msvc"))] -#[cfg(feature = "jemallocator")] -extern crate jemallocator; - -#[cfg(not(target_env = "msvc"))] -#[cfg(feature = "jemallocator")] -#[global_allocator] -static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; - -#[cfg(feature = "mimalloc")] -use mimalloc::MiMalloc; - -#[cfg(feature = "mimalloc")] -#[global_allocator] -static GLOBAL: MiMalloc = MiMalloc; - -#[cfg(feature = "tcmalloc")] -extern crate tcmalloc; - -#[cfg(feature = "tcmalloc")] -use tcmalloc::TCMalloc; - -#[cfg(feature = "tcmalloc")] -#[global_allocator] -static GLOBAL: TCMalloc = TCMalloc; - -pub mod ast; -pub mod backend; 
-pub mod backend_memory; -pub mod lexer; -pub mod parser; -pub mod sql_types; - -pub mod test_impls; - -#[cfg(test)] -mod tests {} +#[cfg(not(target_env = "msvc"))] +#[cfg(feature = "jemallocator")] +extern crate jemallocator; + +#[cfg(not(target_env = "msvc"))] +#[cfg(feature = "jemallocator")] +#[global_allocator] +static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; + +#[cfg(feature = "mimalloc")] +use mimalloc::MiMalloc; + +#[cfg(feature = "mimalloc")] +#[global_allocator] +static GLOBAL: MiMalloc = MiMalloc; + +#[cfg(feature = "tcmalloc")] +extern crate tcmalloc; + +#[cfg(feature = "tcmalloc")] +use tcmalloc::TCMalloc; + +#[cfg(feature = "tcmalloc")] +#[global_allocator] +static GLOBAL: TCMalloc = TCMalloc; + +pub mod ast; +pub mod backend; +pub mod backend_memory; +pub mod lexer; +pub mod parser; +pub mod sql_types; + +pub mod test_impls; + +#[cfg(test)] +mod tests {} diff --git a/engine/src/parser/mod.rs b/engine/src/parser/mod.rs index 854a0d0..e7ede0b 100644 --- a/engine/src/parser/mod.rs +++ b/engine/src/parser/mod.rs @@ -95,18 +95,11 @@ impl Parser { ); } } - match parse_statement(&tokens, cursor, Token::Semicolon) { - Ok((statement, new_cursor)) => { - cursor = new_cursor; - - ast.statements.push(statement); - first_statement = false; - } + let (statement, new_cursor) = parse_statement(&tokens, cursor, Token::Semicolon)?; + cursor = new_cursor; + ast.statements.push(statement); + first_statement = false; - Err(err) => { - return Err(err); - } - } if cursor == tokens.len() - 1 { break; } @@ -147,7 +140,6 @@ impl From for ParsingError { } } -#[inline] fn expect_token(tokens: &[TokenContainer], cursor: usize, token: Token) -> bool { let current_token = match tokens.get(cursor) { Some(value) => value, @@ -158,7 +150,6 @@ fn expect_token(tokens: &[TokenContainer], cursor: usize, token: Token) -> bool token == current_token.token } -#[inline] fn help_message(token: Option<&TokenContainer>, cursor: usize, msg: &str) -> String { if let Some(token) = token { format!( @@ -450,17 +441,16 @@ fn parse_create_index_statement<'a>( } else { parse_err!(tokens, cursor, "Not a Create Index Statement"); } - let name; - if let Some(TokenContainer { + let name = if let Some(TokenContainer { loc: _, token: Token::IdentifierValue { value }, }) = tokens.get(cursor) { cursor += 1; - name = value; + value } else { parse_err!(tokens, cursor, "Expected Index Name"); - } + }; if let Some(TokenContainer { loc: _, token: Token::On, @@ -470,24 +460,17 @@ fn parse_create_index_statement<'a>( } else { parse_err!(tokens, cursor, "Expected ON Keyword"); } - let table; - if let Some(TokenContainer { + let table = if let Some(TokenContainer { loc: _, token: Token::IdentifierValue { value }, }) = tokens.get(cursor) { cursor += 1; - table = value; + value } else { parse_err!(tokens, cursor, "Expected Table Name"); - } - let (expression, cursor) = match parse_expression(tokens, cursor, &[delimiter], 0, true, false) - { - Ok(value) => value, - Err(_) => { - parse_err!(tokens, cursor, "Expected Index Expressions"); - } }; + let (expression, cursor) = parse_expression(tokens, cursor, &[delimiter], 0, true, false)?; Ok(( CreateIndexStatement { @@ -536,20 +519,14 @@ fn parse_expressions( } // Look for expression - let (expression, new_cursor); - if let Ok((expression_, new_cursor_)) = parse_expression( + let (expression, new_cursor) = parse_expression( tokens, cursor, &[Token::Comma, Token::RightParenthesis], tokens[cursor].binding_power(), true, false, - ) { - expression = expression_; - new_cursor = new_cursor_; 
- } else { - parse_err!(tokens, cursor, "Expected Expression"); - } + )?; cursor = new_cursor; expressions.push(expression); } @@ -584,26 +561,14 @@ fn parse_expression<'a>( expression = Expression::SubSelect(Box::new(select_statement)); cursor = new_cursor; } else { - match parse_expression( + (expression, cursor) = parse_expression( tokens, cursor, &[Token::RightParenthesis], min_binding_power, true, false, - ) { - Ok((expression_, cursor_)) => { - expression = expression_; - cursor = cursor_; - } - Err(_) => { - parse_err!( - tokens, - cursor, - "Expected Expression after opening Parenthesis" - ); - } - }; + )?; } if let Some(TokenContainer { @@ -616,9 +581,8 @@ fn parse_expression<'a>( parse_err!(tokens, cursor, "Expected closing Parenthesis"); } } else if cursor < tokens.len() && UNARY_OPERATORS.contains(&tokens[cursor].token) { - let operand; let token = &tokens[cursor]; - operand = token.token.clone(); + let operand = token.token.clone(); cursor += 1; let mut nested_un_ops = vec![operand]; let mut inner_exp; @@ -630,44 +594,42 @@ fn parse_expression<'a>( break; } } - if let Ok((expression_, cursor_)) = parse_literal_expression(tokens, cursor) { - inner_exp = expression_; - cursor = cursor_; - } else if let Some(TokenContainer { - token: Token::LeftParenthesis, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - if let Ok((expression_, cursor_)) = parse_expression( - tokens, - cursor, - &[Token::RightParenthesis], - min_binding_power, - true, - takes_as_clause, - ) { + match parse_literal_expression(tokens, cursor) { + Ok((expression_, cursor_)) => { inner_exp = expression_; cursor = cursor_; - } else { - parse_err!( - tokens, - cursor, - "Expected Expression after opening Parenthesis" - ); } + Err(err) => { + if let Some(TokenContainer { + token: Token::LeftParenthesis, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + let (expression_, cursor_) = parse_expression( + tokens, + cursor, + &[Token::RightParenthesis], + min_binding_power, + true, + takes_as_clause, + )?; + inner_exp = expression_; + cursor = cursor_; - if let Some(TokenContainer { - loc: _, - token: Token::RightParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected closing Parenthesis"); + if let Some(TokenContainer { + loc: _, + token: Token::RightParenthesis, + }) = tokens.get(cursor) + { + cursor += 1; + } else { + parse_err!(tokens, cursor, "Expected closing Parenthesis"); + } + } else { + return Err(err); + } } - } else { - parse_err!(tokens, cursor, "Expected Expression After unary Operator"); } if let Some(operand) = nested_un_ops.pop() { @@ -729,18 +691,30 @@ fn parse_expression<'a>( break; } } - let mut operand = Token::Empty; + let mut operand_tok = Token::Empty; if BINARY_OPERATORS.contains(token) { - operand = token.clone(); + operand_tok = token.clone(); cursor += 1; } - if operand == Token::TypeCast { + if operand_tok == Token::TypeCast { if let Some(TokenContainer { token: op, loc: _ }) = tokens.get(cursor) { if op.is_datatype() { - expression = Expression::Cast { - data: Box::new(expression), - typ: SqlType::try_from((op, cursor))?, - }; + // Make sure expression is cast before applying unary operator + if let Expression::Unary(UnaryExpression { first, operand }) = expression { + expression = Expression::Unary(UnaryExpression { + first: Box::from(Expression::Cast { + data: first, + typ: SqlType::try_from((op, cursor))?, + }), + operand, + }); + } else { + expression = Expression::Cast { + data: Box::new(expression), + typ: 
SqlType::try_from((op, cursor))?, + }; + } + cursor += 1; continue; } else { @@ -750,39 +724,42 @@ fn parse_expression<'a>( parse_err!(tokens, cursor, "Expected Type Name after Type Cast"); } } - if operand == Token::Empty { + if operand_tok == Token::Empty { parse_err!(tokens, cursor, "Expected Binary Operator"); } - let binding_power = operand.binding_power(); + let binding_power = operand_tok.binding_power(); if binding_power < min_binding_power { cursor = last_cursor; break; } - let (second_expression, new_cursor) = match parse_expression( + let (mut second_expression, new_cursor) = parse_expression( tokens, cursor, delimiters, binding_power, false, takes_as_clause, - ) { - Err(_) => { - parse_err!( - tokens, - cursor, - &format!("Expected Expression after bBnary Operator {:?}", operand) - ); + )?; + let operand = Operand::from_token(&operand_tok, cursor)?; + cursor = new_cursor; + + if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { + if UNARY_POSTFIX_OPERATORS.contains(token) { + cursor += 1; + second_expression = Expression::Unary(UnaryExpression { + first: Box::from(second_expression), + operand: Operand::from_token(token, cursor)?, + }); } - Ok(value) => value, - }; + } + expression = Expression::Binary(BinaryExpression { first: Box::from(expression), second: Box::from(second_expression), - operand: Operand::from_token(&operand, cursor)?, + operand, }); - cursor = new_cursor; last_cursor = cursor; } @@ -849,7 +826,9 @@ fn parse_literal_expression( cursor, )) } - _ => parse_err!(tokens, cursor, "Expected Literal"), + _ => { + parse_err!(tokens, cursor, "Expected Literal") + } } } else { parse_err!(tokens, cursor, "Expected Literal Expression"); @@ -904,12 +883,7 @@ fn parse_insert_statement( } // Look for expression list - let (values, new_cursor) = match parse_expressions(tokens, cursor, &[Token::RightParenthesis]) { - Err(_) => { - parse_err!(tokens, cursor, "Expected Value Expressions"); - } - Ok(value) => value, - }; + let (values, new_cursor) = parse_expressions(tokens, cursor, &[Token::RightParenthesis])?; cursor = new_cursor; @@ -957,17 +931,16 @@ fn parse_drop_table_statement<'a>( } else { parse_err!(tokens, cursor, "Not a Drop Table Statement"); } - let name; - if let Some(TokenContainer { + let name = if let Some(TokenContainer { loc: _, token: Token::IdentifierValue { value }, }) = tokens.get(cursor) { cursor += 1; - name = value; + value } else { parse_err!(tokens, cursor, "Not a Drop Table Statement"); - } + }; Ok(( DropTableStatement { @@ -1035,12 +1008,7 @@ fn parse_select_items<'a>( select_item.asterisk = true; } else { let (expression, new_cursor) = - match parse_expression(tokens, cursor, &delimiters_plus, 0, true, true) { - Err(_) => { - parse_err!(tokens, cursor, "Expected Expression"); - } - Ok(value) => value, - }; + parse_expression(tokens, cursor, &delimiters_plus, 0, true, true)?; cursor = new_cursor; select_item.expression = expression; @@ -1111,7 +1079,7 @@ fn parse_select_statement<'a>( offset: None, }; - let (select_items, new_cursor) = match parse_select_items( + let (select_items, new_cursor) = parse_select_items( tokens, cursor, &[ @@ -1121,12 +1089,7 @@ fn parse_select_statement<'a>( Token::Offset, delimiter.clone(), ], - ) { - Err(err) => { - return Err(err); - } - Ok(value) => value, - }; + )?; cursor = new_cursor; select.items = select_items; @@ -1179,7 +1142,7 @@ fn parse_select_statement<'a>( }) = tokens.get(cursor) { cursor += 1; - let (where_clause, new_cursor) = match parse_expression( + let (where_clause, new_cursor) = 
parse_expression( tokens, cursor, &[ @@ -1191,12 +1154,7 @@ fn parse_select_statement<'a>( 0, true, false, - ) { - Err(_) => { - parse_err!(tokens, cursor, "Expected WHERE Conditionals"); - } - Ok(value) => value, - }; + )?; cursor = new_cursor; select.where_clause = where_clause; @@ -1209,7 +1167,7 @@ fn parse_select_statement<'a>( { cursor += 1; - let (exp, new_cursor) = match parse_expression( + let (exp, new_cursor) = parse_expression( tokens, cursor, &[ @@ -1222,12 +1180,7 @@ fn parse_select_statement<'a>( 0, true, true, - ) { - Err(_) => { - parse_err!(tokens, cursor, "Expected WHERE Conditionals"); - } - Ok(value) => value, - }; + )?; cursor = new_cursor; let mut order_by_clause = OrderByClause { asc: true, exp }; @@ -1425,12 +1378,7 @@ fn parse_joins<'a>( } else { parse_err!(tokens, cursor, "No ON keyword in Join Expression"); } - let (col1, new_cursor) = match parse_table_column(tokens, cursor) { - Ok(val) => val, - Err(_) => { - parse_err!(tokens, cursor, "Failed to parse Column in Join Expression"); - } - }; + let (col1, new_cursor) = parse_table_column(tokens, cursor)?; cursor = new_cursor; let operand_token = if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { cursor += 1; @@ -1442,12 +1390,7 @@ fn parse_joins<'a>( } else { parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); }; - let (col2, new_cursor) = match parse_table_column(tokens, cursor) { - Ok(val) => val, - Err(_) => { - parse_err!(tokens, cursor, "Failed to parse Column in Join Expression"); - } - }; + let (col2, new_cursor) = parse_table_column(tokens, cursor)?; cursor = new_cursor; let operand = if let Ok(o) = Operand::from_token(&operand_token, cursor) { diff --git a/engine/src/sql_types/mod.rs b/engine/src/sql_types/mod.rs index 0787545..28bed0a 100644 --- a/engine/src/sql_types/mod.rs +++ b/engine/src/sql_types/mod.rs @@ -10,8 +10,11 @@ use crate::{ lexer::Token, }; use serde::{Deserialize, Serialize, Serializer}; +use tree_display::tree_display_macros::TreeDisplay; -#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] +#[derive( + Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, TreeDisplay, +)] pub enum SqlType { SmallInt, Int, @@ -71,7 +74,6 @@ impl TryFrom<(&TokenContainer<'_>, usize)> for SqlType { } impl SqlType { - #[inline] pub fn from_token( token_container: &TokenContainer, cursor: usize, @@ -93,7 +95,6 @@ impl SqlType { } } - #[inline] pub fn order(&self) -> i32 { match self { SqlType::SmallInt => 1, @@ -111,7 +112,7 @@ impl SqlType { } } -#[derive(Debug, Clone, PartialEq, PartialOrd)] +#[derive(Debug, Clone, PartialEq, PartialOrd, TreeDisplay)] pub enum SqlTypeError { ConversionError(String), ParseError(String), @@ -136,7 +137,7 @@ impl ToString for SqlTypeError { } } -#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Deserialize)] +#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Deserialize, TreeDisplay)] pub enum SqlValue { Null, Text(SqlText), @@ -179,7 +180,8 @@ impl Serialize for SqlValue { } } } -#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Deserialize, Serialize)] + +#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Deserialize, Serialize, TreeDisplay)] pub enum SqlNumeric { SmallInt { value: i16 }, Int { value: i32 }, @@ -201,7 +203,7 @@ impl Ord for SqlNumeric { } } -#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Deserialize, Serialize)] +#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Deserialize, Serialize, TreeDisplay)] pub enum SqlText { Char { value: String, 
@@ -217,7 +219,6 @@ pub enum SqlText { }, } -#[inline] fn factorial(num: i64) -> Result { if num < 0 { return Err(SqlTypeError::OperationError( @@ -239,12 +240,10 @@ fn factorial(num: i64) -> Result { } impl SqlValue { - #[inline] pub fn is_numeric(&self) -> bool { matches!(self, SqlValue::Numeric(_)) } - #[inline] pub fn is_int(&self) -> bool { match self { SqlValue::Numeric(num) => matches!( @@ -257,7 +256,6 @@ impl SqlValue { } } - #[inline] pub fn is_float(&self) -> bool { match self { SqlValue::Numeric(num) => matches!( @@ -268,22 +266,18 @@ impl SqlValue { } } - #[inline] pub fn is_null(&self) -> bool { matches!(self, SqlValue::Null) } - #[inline] pub fn is_text(&self) -> bool { matches!(self, SqlValue::Text(_)) } - #[inline] pub fn is_bool(&self) -> bool { matches!(self, SqlValue::Boolean(_)) } - #[inline] pub fn implicist_cast_to_matching_types( &self, b: &SqlValue, @@ -432,7 +426,6 @@ impl SqlValue { } } - #[inline] pub fn decode_type(data: &MemoryCell, typ: SqlType) -> Result { if data.bytes.is_empty() { return Ok(SqlValue::Null); @@ -454,7 +447,6 @@ impl SqlValue { } } - #[inline] pub fn from_token(token: &Token) -> Result { match token { Token::StringValue { value } => Ok(SqlValue::Text(SqlText::Text { @@ -469,7 +461,6 @@ impl SqlValue { } } - #[inline] pub fn encode(&self) -> MemoryCell { match self { SqlValue::Null => MemoryCell { bytes: vec![] }, @@ -515,7 +506,6 @@ impl SqlValue { } } - #[inline] pub fn subtract(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; match (&a, &b) { @@ -572,7 +562,6 @@ impl SqlValue { } } - #[inline] pub fn add(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; match (&a, &b) { @@ -626,7 +615,6 @@ impl SqlValue { } } - #[inline] pub fn multiply(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; match (&a, &b) { @@ -683,7 +671,6 @@ impl SqlValue { } } - #[inline] pub fn divide(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; @@ -745,7 +732,6 @@ impl SqlValue { } } - #[inline] pub fn modulo(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; @@ -807,7 +793,6 @@ impl SqlValue { } } - #[inline] pub fn exponentiation(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; @@ -874,7 +859,6 @@ impl SqlValue { } } - #[inline] pub fn bitwise_and(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; @@ -904,7 +888,6 @@ impl SqlValue { } } - #[inline] pub fn bitwise_or(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; @@ -934,7 +917,6 @@ impl SqlValue { } } - #[inline] pub fn bitwise_xor(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; @@ -964,7 +946,6 @@ impl SqlValue { } } - #[inline] pub fn bitwise_shift_left(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; @@ -994,7 +975,6 @@ impl SqlValue { } } - #[inline] pub fn bitwise_shift_right(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; @@ -1024,7 +1004,6 @@ impl SqlValue { } } - #[inline] pub fn factorial(&self) -> Result { match self { SqlValue::Numeric(num) => match num { @@ -1047,7 +1026,6 @@ impl SqlValue { } } - #[inline] pub fn square_root(&self) -> Result { match self { SqlValue::Numeric(num) => match num { @@ 
-1123,7 +1101,6 @@ impl SqlValue { } } - #[inline] pub fn cube_root(&self) -> Result { match self { SqlValue::Numeric(num) => match num { @@ -1174,7 +1151,6 @@ impl SqlValue { } } - #[inline] pub fn abs(&self) -> Result { match self { SqlValue::Numeric(num) => match num { @@ -1202,7 +1178,6 @@ impl SqlValue { } } - #[inline] pub fn minus(&self) -> Result { match self { SqlValue::Numeric(num) => match num { @@ -1230,7 +1205,6 @@ impl SqlValue { } } - #[inline] pub fn bitwise_not(&self) -> Result { match self { SqlValue::Numeric(num) => match num { @@ -1253,7 +1227,6 @@ impl SqlValue { } } - #[inline] pub fn not(&self) -> Result { match self { SqlValue::Boolean(val) => Ok(SqlValue::Boolean(!val)), @@ -1263,7 +1236,6 @@ impl SqlValue { } } - #[inline] pub fn concat(&self, b: &Self) -> Result { let (a, b) = SqlValue::implicist_cast_to_matching_types(self, b)?; match (a, b) { @@ -1283,7 +1255,6 @@ impl SqlValue { } } - #[inline] pub fn equals(&self, b: &Self) -> Result { if self.is_null() || b.is_null() { Ok(SqlValue::Null) @@ -1293,7 +1264,6 @@ impl SqlValue { } } - #[inline] pub fn not_equal(&self, b: &Self) -> Result { if self.is_null() || b.is_null() { Ok(SqlValue::Null) @@ -1303,7 +1273,6 @@ impl SqlValue { } } - #[inline] pub fn less_than(&self, b: &Self) -> Result { if self.is_null() || b.is_null() { Ok(SqlValue::Null) @@ -1313,7 +1282,6 @@ impl SqlValue { } } - #[inline] pub fn less_than_or_equals(&self, b: &Self) -> Result { if self.is_null() || b.is_null() { Ok(SqlValue::Null) @@ -1323,7 +1291,6 @@ impl SqlValue { } } - #[inline] pub fn greater_than(&self, b: &Self) -> Result { if self.is_null() || b.is_null() { Ok(SqlValue::Null) @@ -1333,7 +1300,6 @@ impl SqlValue { } } - #[inline] pub fn greater_than_or_equals(&self, b: &Self) -> Result { if self.is_null() || b.is_null() { Ok(SqlValue::Null) @@ -1343,7 +1309,6 @@ impl SqlValue { } } - #[inline] pub fn and(&self, b: &Self) -> Result { if let (SqlValue::Boolean(a), SqlValue::Boolean(b)) = (self, b) { Ok(SqlValue::Boolean(*a && *b)) @@ -1354,7 +1319,6 @@ impl SqlValue { } } - #[inline] pub fn or(&self, b: &Self) -> Result { if let (SqlValue::Boolean(a), SqlValue::Boolean(b)) = (self, b) { Ok(SqlValue::Boolean(*a || *b)) @@ -1365,7 +1329,6 @@ impl SqlValue { } } - #[inline] pub fn get_type(&self) -> SqlType { match self { SqlValue::Numeric(num) => match &num { @@ -1390,7 +1353,6 @@ impl SqlValue { } } - #[inline] pub fn explicit_cast_to_type(&self, typ: SqlType) -> Result { if self.is_null() { return Ok(SqlValue::Null); @@ -1846,7 +1808,6 @@ impl SqlValue { } } - #[inline] pub fn to_type(&self, typ: SqlType) -> Result { if self.is_null() { return Ok(SqlValue::Null); @@ -2045,7 +2006,6 @@ impl SqlValue { } impl SqlNumeric { - #[inline] pub fn parse(data: &str) -> Result { if let Ok(value) = data.parse::() { Ok(SqlNumeric::SmallInt { value }) @@ -2073,7 +2033,6 @@ impl SqlNumeric { } } - #[inline] pub fn parse_small_int(data: String) -> Result { if let Ok(value) = data.parse::() { Ok(SqlNumeric::SmallInt { value }) @@ -2084,7 +2043,6 @@ impl SqlNumeric { } } - #[inline] pub fn parse_int(data: String) -> Result { if let Ok(value) = data.parse::() { Ok(SqlNumeric::Int { value }) @@ -2095,7 +2053,6 @@ impl SqlNumeric { } } - #[inline] pub fn parse_big_int(data: String) -> Result { if let Ok(value) = data.parse::() { Ok(SqlNumeric::BigInt { value }) @@ -2106,7 +2063,6 @@ impl SqlNumeric { } } - #[inline] pub fn parse_real(data: String) -> Result { if let Ok(value) = data.parse::() { Ok(SqlNumeric::Real { value }) @@ -2117,7 +2073,6 @@ impl 
SqlNumeric { } } - #[inline] pub fn parse_double_precision(data: String) -> Result { if let Ok(value) = data.parse::() { Ok(SqlNumeric::DoublePrecision { value }) @@ -2128,7 +2083,6 @@ impl SqlNumeric { } } - #[inline] pub fn decode_small_int(data: &MemoryCell) -> Result { let mut rdr = std::io::Cursor::new(&data.bytes); @@ -2141,7 +2095,6 @@ impl SqlNumeric { } } - #[inline] pub fn decode_int(data: &MemoryCell) -> Result { let mut rdr = std::io::Cursor::new(&data.bytes); @@ -2154,7 +2107,6 @@ impl SqlNumeric { } } - #[inline] pub fn decode_big_int(data: &MemoryCell) -> Result { let mut rdr = std::io::Cursor::new(&data.bytes); if let Ok(value) = rdr.read_i64::() { @@ -2166,7 +2118,6 @@ impl SqlNumeric { } } - #[inline] pub fn decode_real(data: &MemoryCell) -> Result { let mut rdr = std::io::Cursor::new(&data.bytes); if let Ok(value) = rdr.read_f32::() { @@ -2178,7 +2129,6 @@ impl SqlNumeric { } } - #[inline] pub fn decode_double_precision(data: &MemoryCell) -> Result { let mut rdr = std::io::Cursor::new(&data.bytes); if let Ok(value) = rdr.read_f64::() { @@ -2192,12 +2142,10 @@ impl SqlNumeric { } impl SqlText { - #[inline] pub fn parse_text(data: String) -> Result { Ok(SqlText::Text { value: data }) } - #[inline] pub fn parse_varchar(data: String, maxlen: usize) -> Result { if maxlen <= data.len() { Ok(SqlText::Text { value: data }) @@ -2210,7 +2158,6 @@ impl SqlText { } } - #[inline] pub fn parse_char(data: String, len: usize) -> Result { match len.cmp(&data.len()) { Ordering::Equal => Ok(SqlText::Text { value: data }), @@ -2227,7 +2174,6 @@ impl SqlText { } } - #[inline] pub fn decode_text(data: &MemoryCell) -> Result { let mut rdr = std::io::Cursor::new(&data.bytes); @@ -2240,12 +2186,10 @@ impl SqlText { } } - #[inline] pub fn decode_varchar(data: &MemoryCell) -> Result { SqlText::decode_text(data) } - #[inline] pub fn decode_char(data: &MemoryCell) -> Result { SqlText::decode_text(data) } diff --git a/engine/src/test_impls/mod.rs b/engine/src/test_impls/mod.rs index 4d3b543..30e549e 100644 --- a/engine/src/test_impls/mod.rs +++ b/engine/src/test_impls/mod.rs @@ -1,6 +1,35 @@ -use crate::{ast::Ast, backend::EvalResult, parser::ParsingError}; +use crate::{ + ast::{ + Ast, BinaryExpression, CreateIndexStatement, CreateTableStatement, DropTableStatement, + Expression, InsertStatement, LiteralExpression, RowDataSource, SelectStatement, Statement, + TableColumn, UnaryExpression, + }, + backend::EvalResult, + parser::ParsingError, +}; use std::fmt::Write; use test_util::{TestResultExt, TestResultType, TestStringify}; +use tree_display::{Context, TreeDisplay}; + +const INDENT: &str = " "; +const INDENT_WITH_BRANCH: &str = "| "; + +#[derive(Debug, PartialEq, Eq)] +enum BranchPosition { + Last, + First, + Other, +} + +impl BranchPosition { + pub fn last(&self) -> bool { + &BranchPosition::Last == self + } + + pub fn right(&self) -> bool { + &BranchPosition::First == self + } +} pub struct VecContainer(Vec); @@ -8,10 +37,7 @@ pub trait IntoVecContainer { fn into_vec_container(self) -> VecContainer; } -impl IntoVecContainer for Vec -where - T: TestStringify, -{ +impl IntoVecContainer for Vec { fn into_vec_container(self) -> VecContainer { VecContainer(self) } @@ -19,7 +45,41 @@ where impl TestStringify for Ast { fn stringify(&self) -> String { - format!("{:#?}", self) + let mut out = "".to_string(); + let ctx = Context { + indent: " ", + ..Context::new() + }; + for statement in &self.statements { + match statement { + Statement::CreateTableStatement(create_table) => { + out.push_str("Create 
Table\n"); + out.push_str(&create_table.tree_print(ctx, Default::default())); + out.push('\n'); + } + Statement::InsertStatement(insert) => { + out.push_str("Insert\n"); + out.push_str(&insert.tree_print(ctx, Default::default())); + out.push('\n'); + } + Statement::SelectStatement(select) => { + out.push_str("Select\n"); + out.push_str(&select.tree_print(ctx, Default::default())); + out.push('\n'); + } + Statement::DropTableStatement(drop_table) => { + out.push_str("Drop Table\n"); + out.push_str(&drop_table.tree_print(ctx, Default::default())); + out.push('\n'); + } + Statement::CreateIndexStatement(create_index) => { + out.push_str("Create Index\n"); + out.push_str(&create_index.tree_print(ctx, Default::default())); + out.push('\n'); + } + } + } + out } } @@ -31,20 +91,32 @@ impl TestStringify for ParsingError { impl TestStringify for VecContainer where - T: TestStringify, + T: std::fmt::Display, { fn stringify(&self) -> String { let mut out = String::new(); for item in self.0.iter() { - writeln!(out, "{}", item.stringify()).ok(); + writeln!(out, "{}", item).ok(); } out } } +impl std::fmt::Display for VecContainer +where + T: std::fmt::Display, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for item in self.0.iter() { + writeln!(f, "{}", item)?; + } + Ok(()) + } +} + impl TestResultExt for VecContainer where - T: TestStringify, + T: std::fmt::Display, { fn result_type(&self) -> TestResultType { TestResultType::Unknown @@ -55,18 +127,20 @@ where } } -impl TestStringify for EvalResult { - fn stringify(&self) -> String { +impl std::fmt::Display for EvalResult +where + C: std::fmt::Display, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { EvalResult::CreateTable { success, .. } => { if *success { - "CreateTable: success\n".to_string() + writeln!(f, "CreateTable: success") } else { - "CreateTable: failure\n".to_string() + writeln!(f, "CreateTable: failure") } } EvalResult::Select { results, .. } => { - let mut out = String::new(); let mut col_lengths: Vec = results .columns .iter() @@ -96,14 +170,14 @@ impl TestStringify for EvalResult { .collect::>() .join(" | "); - writeln!(out, "Select:\n| {} |", columns).ok(); + writeln!(f, "Select:\n| {} |", columns)?; let columns_sep = col_lengths .iter() .map(|col_len| "-".repeat(*col_len)) .collect::>() .join("-|-"); - writeln!(out, "|-{}-|", columns_sep).ok(); + writeln!(f, "|-{}-|", columns_sep)?; for result in results.rows.iter() { let column: String = result .iter() @@ -113,24 +187,297 @@ impl TestStringify for EvalResult { }) .collect::>() .join(" | "); - writeln!(out, "| {} |", column).ok(); + writeln!(f, "| {} |", column)?; } - out + Ok(()) } EvalResult::Insert { success, .. } => { if *success { - "Insert: success\n".to_string() + writeln!(f, "Insert: success") } else { - "Insert: failure\n".to_string() + writeln!(f, "Insert: failure") } } EvalResult::DropTable { success, .. 
} => { if *success { - "DropTable: success\n".to_string() + writeln!(f, "DropTable: success") + } else { + writeln!(f, "DropTable: failure") + } + } + } + } +} + +// impl std::fmt::Display for Ast { +// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +// for statement in &self.statements { +// writeln!(f, "{}", statement)?; +// } + +// Ok(()) +// } +// } + +impl std::fmt::Display for Statement { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + format_statement(f, self, vec![]) + } +} + +fn format_statement( + f: &mut std::fmt::Formatter<'_>, + statement: &Statement, + _indent: Vec<&str>, +) -> std::fmt::Result { + match statement { + Statement::CreateTableStatement(CreateTableStatement { name, cols }) => { + writeln!(f, "Create Table")?; + writeln!(f, " ├──Name: {}", name)?; + writeln!(f, " └──Columns")?; + for (i, col) in cols.iter().enumerate() { + if i < cols.len() - 1 { + writeln!(f, " ├──{}", col.data_type)?; + } else { + writeln!(f, " └──{}", col.data_type)?; + } + } + } + Statement::InsertStatement(InsertStatement { table, values }) => { + writeln!(f, "Insert")?; + writeln!(f, " ├──Name: {}", table)?; + writeln!(f, " └──Values")?; + for (i, expr) in values.iter().enumerate() { + if i + 1 < values.len() { + format_expression(f, expr, vec![INDENT, INDENT], BranchPosition::Other)?; + } else { + format_expression(f, expr, vec![INDENT, INDENT], BranchPosition::Last)?; + } + } + } + Statement::DropTableStatement(DropTableStatement { name }) => { + writeln!(f, "Drop Table")?; + writeln!(f, " └───Name: {}", name)?; + } + Statement::SelectStatement(SelectStatement { + where_clause, + order_by, + from, + is_distinct, + items, + limit, + offset, + }) => { + writeln!(f, "Select")?; + writeln!(f, " ├──From")?; + for (i, table) in from.iter().enumerate() { + if i < from.len() - 1 { + write!(f, " | ├──")?; + } else { + write!(f, " | └──")?; + } + match table { + RowDataSource::Table { + table_name, + as_clause: _, + joins: _, + } => { + write!(f, "{}", table_name)?; + } + RowDataSource::SubSelect { + select: _, + as_clause: _, + joins: _, + } => {} + } + writeln!(f)?; + } + if where_clause != &Expression::Empty { + writeln!(f, " ├──Where")?; + format_expression( + f, + where_clause, + vec![INDENT, INDENT_WITH_BRANCH], + BranchPosition::Last, + )? 
+ }; + if let Some(order_by) = order_by { + writeln!(f, " ├──Order By")?; + if order_by.asc { + writeln!(f, " | ├──Asc")?; } else { - "DropTable: failure\n".to_string() + writeln!(f, " | ├──Desc")?; } + format_expression( + f, + &order_by.exp, + vec![INDENT, INDENT_WITH_BRANCH], + BranchPosition::Last, + )?; } + if *is_distinct { + writeln!(f, " ├──Distinct")?; + } + if let Some(limit) = limit { + writeln!(f, " ├──Limit")?; + writeln!(f, " | └──{}", limit)?; + } + if let Some(offset) = offset { + writeln!(f, " ├──Offset")?; + writeln!(f, " | └──{}", offset)?; + } + writeln!(f, " └──Items")?; + for (i, item) in items.iter().enumerate() { + if i < items.len() - 1 { + writeln!(f, " ├──Item")?; + } else { + writeln!(f, " └──Item")?; + } + if item.asterisk && item.as_clause.is_some() && item.expression != Expression::Empty + { + writeln!(f, " ├──Asterisk")?; + } else if item.asterisk { + writeln!(f, " └──Asterisk")?; + } + if let Some(as_clause) = &item.as_clause { + if item.expression != Expression::Empty { + writeln!(f, " ├──As")?; + writeln!(f, " | └──{}", as_clause)?; + } else { + writeln!(f, " └──As")?; + writeln!(f, " └──{}", as_clause)?; + } + } + if item.expression != Expression::Empty { + if i < items.len() - 1 { + writeln!(f, " | └──Expression")?; + format_expression( + f, + &item.expression, + vec![INDENT, INDENT, INDENT_WITH_BRANCH, INDENT], + BranchPosition::Last, + )?; + } else { + writeln!(f, " └──Expression")?; + format_expression( + f, + &item.expression, + vec![INDENT, INDENT, INDENT, INDENT], + BranchPosition::Last, + )?; + } + } + } + } + Statement::CreateIndexStatement(CreateIndexStatement { + table: _, + name: _, + is_unique: _, + is_primary_key: _, + expression: _, + }) => {} + } + + Ok(()) +} + +fn format_expression( + f: &mut std::fmt::Formatter<'_>, + expr: &Expression, + indent: Vec<&str>, + pos: BranchPosition, +) -> std::fmt::Result { + indent.iter().for_each(|indent| { + let _ = write!(f, "{}", indent); + }); + if pos.last() { + write!(f, "└──")?; + } else if pos.right() { + write!(f, "|└─")?; + } else { + write!(f, "├──")?; + } + match expr { + Expression::TableColumn(TableColumn { + table_name, + col_name, + }) => { + if let Some(table_name) = table_name { + write!(f, "{}.", table_name)? 
+ } + writeln!(f, "{}", col_name)?; + } + Expression::Literal(literal) => match literal { + LiteralExpression::String(s) => writeln!(f, "\"{}\"", s)?, + LiteralExpression::Numeric(s) => writeln!(f, "{}", s)?, + LiteralExpression::Bool(b) => writeln!(f, "{}", b)?, + LiteralExpression::Identifier(s) => writeln!(f, "{}", s)?, + LiteralExpression::Null => writeln!(f, "null")?, + }, + Expression::Binary(BinaryExpression { + first, + second, + operand, + }) => { + let mut indent = indent.clone(); + if pos.last() { + indent.push(INDENT); + } else { + indent.push(INDENT_WITH_BRANCH); + } + writeln!(f, "{:#?}", operand)?; + format_expression(f, first, indent.clone(), BranchPosition::First)?; + format_expression(f, second, indent, BranchPosition::Last)?; + } + Expression::Unary(UnaryExpression { first, operand }) => { + let mut indent = indent.clone(); + if pos.last() { + indent.push(INDENT); + } else { + indent.push(INDENT_WITH_BRANCH); + } + indent.iter().for_each(|indent| { + let _ = write!(f, "{}", indent); + }); + writeln!(f, "{:#?}", operand)?; + indent.iter().for_each(|indent| { + let _ = write!(f, "{}", indent); + }); + format_expression(f, first, indent, BranchPosition::Other)?; + } + Expression::Cast { data, typ } => { + let mut indent = indent.clone(); + if pos.last() { + indent.push(INDENT); + } else { + indent.push(INDENT_WITH_BRANCH); + } + writeln!(f, "Cast")?; + indent.iter().for_each(|indent| { + let _ = write!(f, "{}", indent); + }); + writeln!(f, "├──Expression")?; + let mut indent_data = indent.clone(); + indent_data.push(INDENT_WITH_BRANCH); + format_expression(f, data, indent_data.clone(), BranchPosition::Last)?; + indent.iter().for_each(|indent| { + let _ = write!(f, "{}", indent); + }); + writeln!(f, "└──Type")?; + indent.iter().for_each(|indent| { + let _ = write!(f, "{}", indent); + }); + writeln!(f, " └──{}", typ)?; + } + Expression::SubSelect(sub_select) => { + writeln!(f, "SubSelect")?; + let mut indent = indent.clone(); + indent.push(INDENT); + format_statement(f, &Statement::SelectStatement(*sub_select.clone()), indent)?; + } + _ => { + writeln!(f, "{:#?}", expr)?; } } + Ok(()) } diff --git a/repl/Cargo.toml b/repl/Cargo.toml index 08c7989..d8941d4 100644 --- a/repl/Cargo.toml +++ b/repl/Cargo.toml @@ -1,12 +1,15 @@ -[package] -name = "sqlo2_repl" -version = "0.1.0" -authors = ["Axmouth "] -edition = "2021" - -[dependencies] -sqlo2 = { path = "../engine" } -prettytable-rs = "=0.8.0" -rustc_version_runtime = "=0.2" -rustyline = "=9.1.2" -sysinfo = "=0.23.0" \ No newline at end of file +[package] +name = "sqlo2_repl" +version = "0.1.0" +authors = ["Axmouth "] +edition = "2021" + +[dependencies] +sqlo2 = { path = "../engine" } +prettytable-rs = "=0.8.0" +rustc_version_runtime = "=0.2" +rustyline = "=9.1.2" +rustyline-derive = "0.6.0" +colored = "=2.0.0" +sysinfo = "=0.23.12" +termcolor = "=1.1.3" \ No newline at end of file diff --git a/repl/src/main.rs b/repl/src/main.rs index ab24fca..0e57863 100644 --- a/repl/src/main.rs +++ b/repl/src/main.rs @@ -1,200 +1,239 @@ -use sqlo2::{self}; - -use sqlo2::backend::EvalResult; -use sqlo2::backend_memory::*; - -extern crate rustc_version_runtime; -use rustc_version_runtime::version; - -use rustyline::{error::ReadlineError, Editor}; -use std::io::{stdout, Write}; -use std::time::Duration; - -use sysinfo::{get_current_pid, ProcessExt, System, SystemExt}; - -fn main() { - const VERSION: &str = env!("CARGO_PKG_VERSION"); - - let mut mb = MemoryBackend::new(); - let mut rl = Editor::<()>::new(); - - if 
rl.load_history("history.txt").is_ok() {} - - let mut system = System::new(); - system.refresh_all(); - let rust_info = version(); - - println!(); - println!("SqlO2 {} Repl", VERSION); - println!(); - - // Display system information: - println!( - "System: {} {}", - system.name().unwrap_or_else(|| "Unknown".to_string()), - system.os_version().unwrap_or_else(|| "unknown".to_string()) - ); - println!( - "Kernel {}", - system - .kernel_version() - .unwrap_or_else(|| "Unknown".to_string()) - ); - println!( - "Rust version: {}.{}.{}", - rust_info.major, rust_info.minor, rust_info.patch - ); - if let Ok(current_pid) = get_current_pid() { - let current_process_opt = system.process(current_pid); - if let Some(current_process) = current_process_opt { - println!("Memory usage(kb): {}", current_process.memory()); - } - } - - println!(); - - loop { - match stdout().flush() { - Ok(_) => {} - Err(err) => { - eprintln!("An error occured: {}", err); - } - } - - let readline = rl.readline("SqlO2 #: "); - let input = match readline { - Ok(line) => { - rl.add_history_entry(line.as_str()); - line - } - Err(ReadlineError::Interrupted) => { - println!("CTRL-C"); - match rl.save_history("history.txt") { - Ok(_) => {} - Err(err) => { - eprintln!("An error occured: {}", err); - } - } - break; - } - Err(ReadlineError::Eof) => { - println!("CTRL-D"); - match rl.save_history("history.txt") { - Ok(_) => {} - Err(err) => { - eprintln!("An error occured: {}", err); - } - } - break; - } - Err(err) => { - println!("Error: {:?}", err); - match rl.save_history("history.txt") { - Ok(_) => {} - Err(err) => { - eprintln!("An error occured: {}", err); - } - } - break; - } - }; - - let cmd = input.trim_end().replace("\n", ""); - match cmd.as_str() { - "quit" | "exit" | "\\q" => { - break; - } - _ => { - println!("{}", repl_eval(&mut mb, cmd)); - } - } - } - rl.save_history("history.txt").unwrap(); -} - -pub fn repl_eval(mb: &mut MemoryBackend, cmd: String) -> String { - let mut output_text = String::from(""); - - let mut total_time: Duration = Duration::from_millis(0); - let mut multiple_results = false; - - match mb.eval_query(&cmd) { - Ok(eval_results) => { - if eval_results.len() > 1 { - multiple_results = true; - } - for eval_result in eval_results { - match eval_result { - EvalResult::Select { results, time } => { - let mut titles = Vec::with_capacity(10); - let mut table = prettytable::Table::new(); - for col in &results.columns { - let title = format!("{}({:?})", col.name, col.col_type); - let title_cell = prettytable::Cell::new(&title) - .with_style(prettytable::Attr::Bold) - .with_style(prettytable::Attr::ForegroundColor( - prettytable::color::GREEN, - )); - titles.push(title_cell); - } - - table.set_titles(prettytable::Row::new(titles)); - - for result in &results.rows { - let mut table_row = Vec::with_capacity(10); - for cell in result.iter() { - let s = cell.to_string(); - table_row.push(prettytable::Cell::new(&s)); - } - table.add_row(prettytable::Row::new(table_row)); - } - table - .set_format(*prettytable::format::consts::FORMAT_NO_LINESEP_WITH_TITLE); - if !results.rows.is_empty() { - output_text.push_str(table.to_string().as_str()); - } - output_text - .push_str(format!("({} Results)\n", results.rows.len()).as_str()); - - output_text.push_str("Ok!\n"); - if multiple_results { - total_time += time; - } - output_text.push_str(format!("Elapsed time : {:.2?}\n", time).as_str()); - } - EvalResult::CreateTable { success: _, time } => { - output_text.push_str("Ok!\n"); - if multiple_results { - total_time += time; - } 
- output_text.push_str(format!("Elapsed time : {:.2?}\n", time).as_str()); - } - EvalResult::Insert { success: _, time } => { - output_text.push_str("Ok!\n"); - if multiple_results { - total_time += time; - } - output_text.push_str(format!("Elapsed time : {:.2?}\n", time).as_str()); - } - EvalResult::DropTable { success: _, time } => { - output_text.push_str("Ok!\n"); - if multiple_results { - total_time += time; - } - output_text.push_str(format!("Elapsed time : {:.2?}\n", time).as_str()); - } - } - } - - if multiple_results { - output_text.push_str(format!("Total time : {:.2?}", total_time).as_str()); - } - } - - Err(err) => { - return err; - } - } - - output_text -} +use colored::*; +use rustc_version_runtime::version; +use rustyline::highlight::Highlighter; +use rustyline::{error::ReadlineError, Editor}; +use rustyline_derive::{Completer, Helper, Hinter, Validator}; +use sqlo2::backend::EvalResult; +use sqlo2::backend_memory::*; +use sqlo2::{self}; +use std::borrow::Cow; +use std::io::{stdout, Write}; +use std::time::Duration; + +use sysinfo::{get_current_pid, ProcessExt, System, SystemExt}; + +#[derive(Completer, Helper, Hinter, Validator)] +struct PromptHighligher; + +impl Highlighter for PromptHighligher { + fn highlight<'l>(&self, line: &'l str, _: usize) -> Cow<'l, str> { + Cow::Borrowed(line) + } + + fn highlight_prompt<'b, 's: 'b, 'p: 'b>(&'s self, _: &'p str, _: bool) -> Cow<'b, str> { + Cow::Owned(format!( + "{} {}", + "SqlO2".bright_cyan().bold(), + "#: ".white().bold() + )) + } + + fn highlight_char(&self, _line: &str, _pos: usize) -> bool { + true + } +} + +fn main() { + const VERSION: &str = env!("CARGO_PKG_VERSION"); + + let mut mb = MemoryBackend::new(); + let mut rl = Editor::new(); + + if rl.load_history("history.txt").is_ok() {} + + rl.set_helper(Some(PromptHighligher {})); + + let mut system = System::new(); + system.refresh_all(); + let rust_info = version(); + + println!(); + println!("SqlO2 {} Repl", VERSION); + println!(); + + // Display system information: + println!( + "System: {} {}", + system.name().unwrap_or_else(|| "Unknown".to_string()), + system.os_version().unwrap_or_else(|| "unknown".to_string()) + ); + println!( + "Kernel {}", + system + .kernel_version() + .unwrap_or_else(|| "Unknown".to_string()) + ); + println!( + "Rust version: {}.{}.{}", + rust_info.major, rust_info.minor, rust_info.patch + ); + if let Ok(current_pid) = get_current_pid() { + let current_process_opt = system.process(current_pid); + if let Some(current_process) = current_process_opt { + println!("Memory usage(kb): {}", current_process.memory()); + } + } + + println!(); + + loop { + match stdout().flush() { + Ok(_) => {} + Err(err) => { + eprintln!("An error occured: {}", err); + } + } + + let readline = rl.readline("SqlO2 #: "); + let input = match readline { + Ok(line) => { + rl.add_history_entry(line.as_str()); + line + } + Err(ReadlineError::Interrupted) => { + println!("CTRL-C"); + match rl.save_history("history.txt") { + Ok(_) => {} + Err(err) => { + eprintln!("An error occured: {}", err); + } + } + break; + } + Err(ReadlineError::Eof) => { + println!("CTRL-D"); + match rl.save_history("history.txt") { + Ok(_) => {} + Err(err) => { + eprintln!("An error occured: {}", err); + } + } + break; + } + Err(err) => { + println!("Error: {:?}", err); + match rl.save_history("history.txt") { + Ok(_) => {} + Err(err) => { + eprintln!("An error occured: {}", err); + } + } + break; + } + }; + + let cmd = input.trim_end().replace('\n', ""); + match cmd.as_str() { + "quit" | "exit" | "\\q" => 
{ + break; + } + _ => { + println!("{}", repl_eval(&mut mb, cmd)); + } + } + } + rl.save_history("history.txt").unwrap(); +} + +pub fn repl_eval(mb: &mut MemoryBackend, cmd: String) -> String { + let mut output_text = String::from(""); + + let mut total_time: Duration = Duration::from_millis(0); + let mut multiple_results = false; + + match mb.eval_query(&cmd) { + Ok(eval_results) => { + if eval_results.len() > 1 { + multiple_results = true; + } + for eval_result in eval_results { + match eval_result { + EvalResult::Select { results, time } => { + let mut titles = Vec::with_capacity(10); + let mut table = prettytable::Table::new(); + for col in &results.columns { + let title = format!("{}({:?})", col.name, col.col_type); + let title_cell = prettytable::Cell::new(&title) + .with_style(prettytable::Attr::Bold) + .with_style(prettytable::Attr::ForegroundColor( + prettytable::color::GREEN, + )); + titles.push(title_cell); + } + + table.set_titles(prettytable::Row::new(titles)); + + for result in &results.rows { + let mut table_row = Vec::with_capacity(10); + for cell in result.iter() { + let s = cell.to_string(); + table_row.push(prettytable::Cell::new(&s)); + } + table.add_row(prettytable::Row::new(table_row)); + } + table + .set_format(*prettytable::format::consts::FORMAT_NO_LINESEP_WITH_TITLE); + if !results.rows.is_empty() { + output_text.push_str(table.to_string().as_str()); + } + output_text.push_str(&format!( + "{}", + format!("({} Results)\n", results.rows.len()) + .as_str() + .dimmed() + )); + + output_text.push_str(&"Ok!\n".green().to_string()); + if multiple_results { + total_time += time; + } + output_text.push_str(&format!( + "{}", + format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() + )); + } + EvalResult::CreateTable { success: _, time } => { + output_text.push_str(&"Ok!\n".green().to_string()); + if multiple_results { + total_time += time; + } + output_text.push_str(&format!( + "{}", + format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() + )); + } + EvalResult::Insert { success: _, time } => { + output_text.push_str(&"Ok!\n".green().to_string()); + if multiple_results { + total_time += time; + } + output_text.push_str(&format!( + "{}", + format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() + )); + } + EvalResult::DropTable { success: _, time } => { + output_text.push_str(&"Ok!\n".green().to_string()); + if multiple_results { + total_time += time; + } + output_text.push_str(&format!( + "{}", + format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() + )); + } + } + } + + if multiple_results { + output_text.push_str(&format!("Total time : {:.2?}", total_time).dimmed()); + } + } + + Err(err) => { + return format!("{}", err.bright_red()); + } + } + + output_text +} diff --git a/server/Cargo.toml b/server/Cargo.toml index 490b2a6..357beb1 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -1,12 +1,12 @@ -[package] -name = "sqlo2_server" -version = "0.1.0" -edition = "2021" - -[dependencies] -sqlo2_wire-protocol = { path = "../wire-protocol" } -sqlo2 = { path = "../engine" } -byteorder = "=1.4.3" -log = "=0.4.14" -env_logger = "=0.9.0" +[package] +name = "sqlo2_server" +version = "0.1.0" +edition = "2021" + +[dependencies] +sqlo2_wire-protocol = { path = "../wire-protocol" } +sqlo2 = { path = "../engine" } +byteorder = "=1.4.3" +log = "=0.4.17" +env_logger = "=0.9.0" dotenv = "=0.15.0" \ No newline at end of file diff --git a/server/src/main.rs b/server/src/main.rs index e58373a..de49f62 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -1,86 
+1,86 @@ -use std::{ - io::{BufReader, BufWriter, Write}, - net::TcpListener, - sync::{Arc, RwLock}, - thread, -}; - -use sqlo2::backend_memory::MemoryBackend; -use sqlo2_wire_protocol::{ - messages::{ - AuthenticationOk, Bind, Describe, DeserializableMessage, Execute, Parse, Query, - ReadyForQuery, SerializableMessage, Sync, - }, - read_message, read_startup_message, -}; - -fn main() { - let _ = dotenv::dotenv().is_ok(); - env_logger::init(); - - log::info!("SqlO2 server starting.."); - - let db = Arc::new(RwLock::new(MemoryBackend::new())); - let port = 9876; - let listener = TcpListener::bind(("127.0.0.1", port)).unwrap(); - let mut last_used_session_id = 1; - - log::info!("SqlO2 server operational!"); - - for stream in listener.incoming() { - let db = db.clone(); - let stream = stream.unwrap(); - last_used_session_id += 1; - let sessid = last_used_session_id; - thread::spawn(move || { - log::info!( - "New connection from {}, session: {}", - stream.peer_addr().unwrap(), - sessid - ); - let mut sockreader = BufReader::with_capacity(16384, &stream); - let mut sockwriter = BufWriter::with_capacity(16384, &stream); - - let mut content: Vec = Vec::new(); - read_startup_message(&mut sockreader, &mut sockwriter, &mut content).unwrap(); - - let out = AuthenticationOk::default().serialize(); - sockwriter.write_all(&out).unwrap(); - - sockwriter - .write_all(&ReadyForQuery::default().serialize()) - .unwrap(); - sockwriter.flush().unwrap(); - - loop { - let msg_typ = read_message(&mut sockreader, &mut content).unwrap(); - if msg_typ == 'P' { - let parse = Parse::deserialize_content(&content).unwrap(); - log::debug!("Parse: {:?}", parse); - let parsed = db.read().unwrap().parse(&parse.query); - if let Ok(ast) = &parsed { - log::debug!("Parsed: {:?}", ast); - } else if let Err(err) = &parsed { - log::error!("{}", err); - } - } else if msg_typ == 'B' { - let bind = Bind::deserialize_content(&content).unwrap(); - log::debug!("Bind: {:?}", bind); - } else if msg_typ == 'D' { - let describe = Describe::deserialize_content(&content).unwrap(); - log::debug!("Describe: {:?}", describe); - } else if msg_typ == 'E' { - let execute = Execute::deserialize_content(&content).unwrap(); - log::debug!("Execute: {:?}", execute); - } else if msg_typ == 'S' { - let sync = Sync::deserialize_content(&content).unwrap(); - log::debug!("Sync: {:?}", sync); - } else if msg_typ == 'Q' { - let query = Query::deserialize_content(&content).unwrap(); - let _ = db.write().unwrap().eval_query(&query.q).is_ok(); - log::debug!("Query: {:?}", query); - } - } - }); - } -} +use std::{ + io::{BufReader, BufWriter, Write}, + net::TcpListener, + sync::{Arc, RwLock}, + thread, +}; + +use sqlo2::backend_memory::MemoryBackend; +use sqlo2_wire_protocol::{ + messages::{ + AuthenticationOk, Bind, Describe, DeserializableMessage, Execute, Parse, Query, + ReadyForQuery, SerializableMessage, Sync, + }, + read_message, read_startup_message, +}; + +fn main() { + let _ = dotenv::dotenv().is_ok(); + env_logger::init(); + + log::info!("SqlO2 server starting.."); + + let db = Arc::new(RwLock::new(MemoryBackend::new())); + let port = 9876; + let listener = TcpListener::bind(("127.0.0.1", port)).unwrap(); + let mut last_used_session_id = 1; + + log::info!("SqlO2 server operational!"); + + for stream in listener.incoming() { + let db = db.clone(); + let stream = stream.unwrap(); + last_used_session_id += 1; + let sessid = last_used_session_id; + thread::spawn(move || { + log::info!( + "New connection from {}, session: {}", + stream.peer_addr().unwrap(), + 
sessid + ); + let mut sockreader = BufReader::with_capacity(16384, &stream); + let mut sockwriter = BufWriter::with_capacity(16384, &stream); + + let mut content: Vec = Vec::new(); + read_startup_message(&mut sockreader, &mut sockwriter, &mut content).unwrap(); + + let out = AuthenticationOk::default().serialize(); + sockwriter.write_all(&out).unwrap(); + + sockwriter + .write_all(&ReadyForQuery::default().serialize()) + .unwrap(); + sockwriter.flush().unwrap(); + + loop { + let msg_typ = read_message(&mut sockreader, &mut content).unwrap(); + if msg_typ == 'P' { + let parse = Parse::deserialize_content(&content).unwrap(); + log::debug!("Parse: {:?}", parse); + let parsed = db.read().unwrap().parse(&parse.query); + if let Ok(ast) = &parsed { + log::debug!("Parsed: {:?}", ast); + } else if let Err(err) = &parsed { + log::error!("{}", err); + } + } else if msg_typ == 'B' { + let bind = Bind::deserialize_content(&content).unwrap(); + log::debug!("Bind: {:?}", bind); + } else if msg_typ == 'D' { + let describe = Describe::deserialize_content(&content).unwrap(); + log::debug!("Describe: {:?}", describe); + } else if msg_typ == 'E' { + let execute = Execute::deserialize_content(&content).unwrap(); + log::debug!("Execute: {:?}", execute); + } else if msg_typ == 'S' { + let sync = Sync::deserialize_content(&content).unwrap(); + log::debug!("Sync: {:?}", sync); + } else if msg_typ == 'Q' { + let query = Query::deserialize_content(&content).unwrap(); + let _ = db.write().unwrap().eval_query(&query.q).is_ok(); + log::debug!("Query: {:?}", query); + } + } + }); + } +} diff --git a/test-macros/Cargo.toml b/test-macros/Cargo.toml index 8320cf4..e232605 100644 --- a/test-macros/Cargo.toml +++ b/test-macros/Cargo.toml @@ -1,10 +1,12 @@ -[package] -name = "test-macros" -version = "0.1.0" -edition = "2021" - -[dependencies] -lazy_static = "=1.4.0" - -[lib] +[package] +name = "test-macros" +version = "0.1.0" +edition = "2021" + +[dependencies] +lazy_static = "=1.4.0" +syn = "=1.0" +quote = "=1.0" + +[lib] proc-macro = true \ No newline at end of file diff --git a/test-macros/src/lib.rs b/test-macros/src/lib.rs index 649b2d6..77c467a 100644 --- a/test-macros/src/lib.rs +++ b/test-macros/src/lib.rs @@ -32,11 +32,7 @@ pub fn test_case(attr: TokenStream, item: TokenStream) -> TokenStream { break; } } - let fn_name = if let Some(s) = fn_name_opt { - s - } else { - panic!("No function name found in test case"); - }; + let fn_name = fn_name_opt.expect("No function name found in test case"); let id: u32 = { let mut counts_guard = COUNTS.write().expect("Couldn't get write lock on counts"); let count_opt = counts_guard.get_mut(&fn_name); diff --git a/test-test/Cargo.toml b/test-test/Cargo.toml index b70c4ec..b76032c 100644 --- a/test-test/Cargo.toml +++ b/test-test/Cargo.toml @@ -1,8 +1,8 @@ -[package] -name = "test_test" -version = "0.1.0" -edition = "2021" - -[dependencies] -test-macros = { path = "../test-macros" } -pretty_assertions = "=1.0.0" \ No newline at end of file +[package] +name = "test_test" +version = "0.1.0" +edition = "2021" + +[dependencies] +test-macros = { path = "../test-macros" } +pretty_assertions = "=1.2.1" \ No newline at end of file diff --git a/test-test/src/main.rs b/test-test/src/main.rs index 9350a20..525043c 100644 --- a/test-test/src/main.rs +++ b/test-test/src/main.rs @@ -1,29 +1,29 @@ -extern crate test_macros; - -fn main() {} - -#[cfg(test)] -mod test_tests { - use pretty_assertions::{assert_eq, assert_ne}; - use test_macros::test_case; - - #[test_case([1, 2, 4], [1, 2, 4])] - 
#[test_case([2, 2, 3], [2, 2, 3])] - #[test_case([1, 3, 3], [1, 3, 3])] - #[test_case([1, 2, 3], [1, 2, 3])] - fn eqs(a: T, b: T) - where - T: Eq + std::fmt::Debug, - { - assert_eq!(a, b); - } - - #[test_case([1, 2, 3], [2, 2, 3])] - #[test_case([1, 2, 3], [2, 3, 1])] - fn not_equal(a: T, b: T) - where - T: Eq + std::fmt::Debug, - { - assert_ne!(a, b); - } -} +extern crate test_macros; + +fn main() {} + +#[cfg(test)] +mod test_tests { + use pretty_assertions::{assert_eq, assert_ne}; + use test_macros::test_case; + + #[test_case([1, 2, 4], [1, 2, 4])] + #[test_case([2, 2, 3], [2, 2, 3])] + #[test_case([1, 3, 3], [1, 3, 3])] + #[test_case([1, 2, 3], [1, 2, 3])] + fn eqs(a: T, b: T) + where + T: Eq + std::fmt::Debug, + { + assert_eq!(a, b); + } + + #[test_case([1, 2, 3], [2, 2, 3])] + #[test_case([1, 2, 3], [2, 3, 1])] + fn not_equal(a: T, b: T) + where + T: Eq + std::fmt::Debug, + { + assert_ne!(a, b); + } +} diff --git a/test-util/Cargo.toml b/test-util/Cargo.toml index 80d244a..12c7278 100644 --- a/test-util/Cargo.toml +++ b/test-util/Cargo.toml @@ -1,11 +1,13 @@ -[package] -name = "test-util" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -serde = { version = "=1.0.136", features = ["derive"] } -toml = "=0.5.8" -pretty_assertions = "=1.0.0" \ No newline at end of file +[package] +name = "test-util" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +serde = { version = "=1.0.137", features = ["derive"] } +toml = "=0.5.9" +pretty_assertions = "=1.2.1" +similar = { version = "=2.1.0", features = ["inline"] } +console = "=0.15.0" diff --git a/test-util/src/lib.rs b/test-util/src/lib.rs index efd4c45..0596aef 100644 --- a/test-util/src/lib.rs +++ b/test-util/src/lib.rs @@ -1,201 +1,255 @@ -mod impls; - -pub use impls::*; -use serde::Deserialize; -use std::fmt::Debug; - -// Implement file based tests: -// -// A test is contained in a directory with the name of the test. -// A toml file defines the test query and the expected output combinations. -// The directory contains .sql files with the sql input. -// The directory contains .out files with the test function output stringified. -// The queries contain a key named result, with values err/ok/etc. -// The queries contain an key named filename, with the name used for output and input files. -// The queries contain an optional key named name, with the name of the test. -// The queries contain an optional key named description, with an extra explanation for that query/step. -// The queries contain an optional key named query, with the query string. -// Different types have a different result key, defined with a trait impl -// When the test finishes but the file does not match the result a .test_diff is create in the same directory -// Proc macros can make this easier to write, by passing the folder path. -// The macro would execute the steps described in the test config(toml?) file in order, using the provided function. -// The macro would then use the output of said function to compare with the expected output. -// The macro would have to be able to use the same instance of the backend for the entire test. -// Maybe a type is passed to the macro, which it instantiates and passes to the function so it can be shared through the test. -// Macros for specific commonly used types could be prepared to make this easier. 
- -// Consider ways of provisioning an initial database for acceptance tests to use. - -#[derive(Debug, Deserialize, PartialEq, Eq)] -pub struct TestQuerySection { - pub name: Option, - pub description: Option, - pub query: Option, - pub result: TestResultType, - pub filename: String, -} - -#[derive(Debug, Deserialize, PartialEq, Eq)] -pub struct TestConfig { - #[serde(rename = "query")] - pub queries: Vec, -} - -pub fn compare_output( - expected_file: &str, - expected_result_type: &TestResultType, - output: T, -) -> bool -where - T: TestResultExt, -{ - let expected_output = match std::fs::read_to_string(expected_file) { - Ok(s) => Some(s), - Err(e) => { - if e.kind() == std::io::ErrorKind::NotFound { - eprintln!("Expected output file {expected_file} not found"); - None - } else { - panic!( - "Error reading expected output file {}: {}", - expected_file, e - ); - } - } - }; - - let output_stringified = output.stringified(); - - let is_expected_output = if let Some(expected_output) = expected_output { - expected_output == output_stringified - } else { - false - }; - - let is_expected_result = expected_result_type == &output.result_type(); - - let is_expected = is_expected_output && is_expected_result; - - if !is_expected_output { - let diff_file = format!("{expected_file}.test_diff"); - eprintln!("Didn't get expected output, writing to {diff_file}",); - std::fs::write(diff_file, output_stringified).expect("Unable to write file"); - } - - if !is_expected_result { - eprintln!( - "Didn't get expected result, expected {expected_result_type}, got {}", - output.result_type() - ); - } - - is_expected -} - -pub fn run_test(test: &str, test_fn: F, mut subject: T) -where - T: TestSubjectExt, - O: TestResultExt, - F: Fn(&str, &mut T) -> O, -{ - let toml_file = format!("{test}/test.toml"); - let toml_str = std::fs::read_to_string(&toml_file) - .unwrap_or_else(|_| panic!("Unable to read toml file {toml_file}")); - let test_config: TestConfig = toml::from_str(&toml_str).expect("Failed to parse toml"); - - let mut success = true; - for (i, query) in test_config.queries.iter().enumerate() { - let input_file = format!("{test}/{}.sql", query.filename); - let input = std::fs::read_to_string(&input_file) - .unwrap_or_else(|_| panic!("Unable to input file {input_file}")); - let result = test_fn(&input, &mut subject); - let expected_file = format!("{test}/{}", query.filename); - let is_expected = compare_output(&expected_file, &query.result, result); - - eprint!("Test step {i}"); - if let Some(name) = &query.name { - eprint!(": {name}"); - } - if let Some(description) = &query.description { - eprint!("({description})"); - } - eprintln!(); - - if !is_expected { - eprintln!("Test step {i} failed"); - } - - success = success && is_expected; - } - - if !success { - panic!("Test failed"); - } -} - -#[derive(Debug, Deserialize, PartialEq, Eq)] -pub enum TestResultType { - #[serde(rename = "ok")] - Ok, - #[serde(rename = "err")] - Err, - #[serde(rename = "some")] - Some, - #[serde(rename = "none")] - None, - #[serde(rename = "unknown")] - Unknown, -} - -impl std::fmt::Display for TestResultType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - TestResultType::Ok => write!(f, "ok"), - TestResultType::Err => write!(f, "err"), - TestResultType::Some => write!(f, "some"), - TestResultType::None => write!(f, "none"), - TestResultType::Unknown => write!(f, "unknown"), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use pretty_assertions::assert_eq; - - #[test] - fn read_toml() 
{
-        let toml_str = std::fs::read_to_string("../tests/integration/test1/test.toml")
-            .expect("Unable to toml file");
-        let decoded: TestConfig = toml::from_str(&toml_str).expect("Failed to parse toml");
-
-        let expected = TestConfig {
-            queries: vec![
-                TestQuerySection {
-                    name: None,
-                    description: None,
-                    query: None,
-                    result: TestResultType::Err,
-                    filename: "query1".to_string(),
-                },
-                TestQuerySection {
-                    name: None,
-                    description: None,
-                    query: None,
-                    result: TestResultType::Ok,
-                    filename: "query2".to_string(),
-                },
-                TestQuerySection {
-                    name: Some("query3 name".to_string()),
-                    description: Some("query3 description".to_string()),
-                    query: Some("query3 query".to_string()),
-                    result: TestResultType::Ok,
-                    filename: "query2".to_string(),
-                },
-            ],
-        };
-
-        assert_eq!(expected, decoded);
-    }
-}
+mod impls;
+
+use console::{style, Style};
+pub use impls::*;
+use serde::Deserialize;
+use similar::{Algorithm, ChangeTag, TextDiff};
+
+use std::fmt::{self, Debug};
+
+// Implement file based tests:
+//
+// A test is contained in a directory with the name of the test.
+// A toml file defines the test query and the expected output combinations.
+// The directory contains .sql files with the SQL input.
+// The directory contains .out files with the test function output stringified.
+// The queries contain a key named result, with values err/ok/etc.
+// The queries contain a key named filename, with the name used for output and input files.
+// The queries contain an optional key named name, with the name of the test.
+// The queries contain an optional key named description, with an extra explanation for that query/step.
+// The queries contain an optional key named query, with the query string.
+// Different types have a different result key, defined with a trait impl.
+// When the test finishes but the file does not match the result, a .actual is created in the same directory.
+// Proc macros can make this easier to write, by passing the folder path.
+// The macro would execute the steps described in the test config(toml?) file in order, using the provided function.
+// The macro would then use the output of said function to compare with the expected output.
+// The macro would have to be able to use the same instance of the backend for the entire test.
+// Maybe a type is passed to the macro, which it instantiates and passes to the function so it can be shared through the test.
+// Macros for specific commonly used types could be prepared to make this easier.
+
+// Consider ways of provisioning an initial database for acceptance tests to use.
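+
+// For illustration, assuming the conventions above and reusing the fixture names that
+// already appear under tests/ in this repository, one such test directory might look like:
+//
+//   tests/unit/parser1/
+//     test.toml      the steps, e.g. a [[query]] entry with result = "ok" and filename = "query1"
+//     query1.sql     the SQL input for that step
+//     query1         the expected stringified output for that step
+//     query1.actual  written only when the produced output does not match query1
+//
+// A test crate can then drive the whole directory through run_test, sharing a single
+// subject instance across all steps, for example (helper names taken from the tests crate):
+//
+//   run_test("../tests/unit/parser1", parser_test_fn, Parser::init());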
+ +struct Line(Option, usize); + +impl fmt::Display for Line { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.0 { + None => write!(f, "{}", " ".repeat(self.1)), + Some(idx) => write!(f, "{:, + pub description: Option, + pub query: Option, + pub result: TestResultType, + pub filename: String, +} + +#[derive(Debug, Deserialize, PartialEq, Eq)] +pub struct TestConfig { + #[serde(rename = "query")] + pub queries: Vec, +} + +pub fn compare_output( + expected_file: &str, + expected_result_type: &TestResultType, + output: T, +) -> bool +where + T: TestResultExt, +{ + let expected_output = match std::fs::read_to_string(expected_file) { + Ok(s) => Some(s.replace('\r', "")), + Err(e) => { + if e.kind() == std::io::ErrorKind::NotFound { + eprintln!("Expected output file {expected_file} not found"); + None + } else { + panic!( + "Error reading expected output file {}: {}", + expected_file, e + ); + } + } + }; + + let output_stringified = output.stringified(); + + let is_expected_output = if let Some(expected_output) = expected_output { + let is_expected = expected_output == output_stringified; + let diff = TextDiff::configure() + .algorithm(Algorithm::Patience) + .diff_lines(&expected_output, &output_stringified); + let idx_len = std::cmp::max( + expected_output.lines().count().to_string().len(), + output_stringified.lines().count().to_string().len(), + ); + + for (idx, group) in diff.grouped_ops(3).iter().enumerate() { + if idx > 0 { + println!("{:-^1$}", "-", 80); + } + for op in group { + for change in diff.iter_inline_changes(op) { + let (sign, s) = match change.tag() { + ChangeTag::Delete => ("-", Style::new().red()), + ChangeTag::Insert => ("+", Style::new().green()), + ChangeTag::Equal => (" ", Style::new().dim()), + }; + print!( + "{} {} |{}", + style(Line(change.old_index(), idx_len)).dim(), + style(Line(change.new_index(), idx_len)).dim(), + s.apply_to(sign).bold(), + ); + for (emphasized, value) in change.iter_strings_lossy() { + if emphasized { + print!("{}", s.apply_to(value).underlined().on_black()); + } else { + print!("{}", s.apply_to(value)); + } + } + if change.missing_newline() { + println!(); + } + } + } + } + + is_expected + } else { + false + }; + + let is_expected_result = expected_result_type == &output.result_type(); + + let is_expected = is_expected_output && is_expected_result; + + if !is_expected_output { + let diff_file = format!("{expected_file}.actual"); + eprintln!("Didn't get expected output, writing to {diff_file}",); + std::fs::write(diff_file, output_stringified).expect("Unable to write file"); + } + + if !is_expected_result { + eprintln!( + "Didn't get expected result, expected {expected_result_type}, got {}", + output.result_type() + ); + } + + is_expected +} + +pub fn run_test(test: &str, test_fn: F, mut subject: T) +where + T: TestSubjectExt, + O: TestResultExt, + F: Fn(&str, &mut T) -> O, +{ + let toml_file = format!("{test}/test.toml"); + let toml_str = std::fs::read_to_string(&toml_file) + .unwrap_or_else(|_| panic!("Unable to read toml file {toml_file}")); + let test_config: TestConfig = toml::from_str(&toml_str).expect("Failed to parse toml"); + + let mut success = true; + for (i, query) in test_config.queries.iter().enumerate() { + let input_file = format!("{test}/{}.sql", query.filename); + let input = std::fs::read_to_string(&input_file) + .unwrap_or_else(|_| panic!("Unable to input file {input_file}")); + let result = test_fn(&input, &mut subject); + let expected_file = format!("{test}/{}", query.filename); + let is_expected = 
compare_output(&expected_file, &query.result, result); + + eprint!("Test step {i}"); + if let Some(name) = &query.name { + eprint!(": {name}"); + } + if let Some(description) = &query.description { + eprint!("({description})"); + } + eprintln!(); + + if !is_expected { + eprintln!("Test step {i} failed"); + } + + success = success && is_expected; + } + + if !success { + panic!("Test failed"); + } +} + +#[derive(Debug, Deserialize, PartialEq, Eq)] +pub enum TestResultType { + #[serde(rename = "ok")] + Ok, + #[serde(rename = "err")] + Err, + #[serde(rename = "some")] + Some, + #[serde(rename = "none")] + None, + #[serde(rename = "unknown")] + Unknown, +} + +impl std::fmt::Display for TestResultType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TestResultType::Ok => write!(f, "ok"), + TestResultType::Err => write!(f, "err"), + TestResultType::Some => write!(f, "some"), + TestResultType::None => write!(f, "none"), + TestResultType::Unknown => write!(f, "unknown"), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + #[test] + fn read_toml() { + let toml_str = std::fs::read_to_string("../tests/integration/test1/test.toml") + .expect("Unable to toml file"); + let decoded: TestConfig = toml::from_str(&toml_str).expect("Failed to parse toml"); + + let expected = TestConfig { + queries: vec![ + TestQuerySection { + name: None, + description: None, + query: None, + result: TestResultType::Err, + filename: "query1".to_string(), + }, + TestQuerySection { + name: None, + description: None, + query: None, + result: TestResultType::Ok, + filename: "query2".to_string(), + }, + TestQuerySection { + name: Some("query3 name".to_string()), + description: Some("query3 description".to_string()), + query: Some("query3 query".to_string()), + result: TestResultType::Ok, + filename: "query2".to_string(), + }, + ], + }; + + assert_eq!(expected, decoded); + } +} diff --git a/tests/Cargo.toml b/tests/Cargo.toml index 1759d35..754baa5 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -1,13 +1,13 @@ -[package] -name = "tests" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] - -[dev-dependencies] -sqlo2 = { path = "../engine" } -test-macros = { path = "../test-macros" } -test-util = { path = "../test-util" } +[package] +name = "tests" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] + +[dev-dependencies] +sqlo2 = { path = "../engine" } +test-macros = { path = "../test-macros" } +test-util = { path = "../test-util" } diff --git a/tests/acceptance/memory1/query2 b/tests/acceptance/memory1/query2 index 34df8b6..d4887ae 100644 --- a/tests/acceptance/memory1/query2 +++ b/tests/acceptance/memory1/query2 @@ -1,9 +1,9 @@ -Select: -| id(Int) | name(Text) | character_id(Int) | role_name(Text) | -|---------|-------------------|-------------------|-----------------| -| 1 | The 25th Bam | 1 | Wave Controller | -| 1 | The 25th Bam | 1 | Fisherman | -| 3 | Rak WraithKaiser | 3 | Spear Bearer | -| 4 | Khun Aguero Agnes | 4 | Light Bearer | -| 4 | Khun Aguero Agnes | 4 | Spear Bearer | - +Select: +| id(Int) | name(Text) | character_id(Int) | role_name(Text) | +|---------|-------------------|-------------------|-----------------| +| 1 | The 25th Bam | 1 | Wave Controller | +| 1 | The 25th Bam | 1 | Fisherman | +| 3 | Rak WraithKaiser | 3 | 
Spear Bearer | +| 4 | Khun Aguero Agnes | 4 | Light Bearer | +| 4 | Khun Aguero Agnes | 4 | Spear Bearer | + diff --git a/tests/acceptance/memory1/query3 b/tests/acceptance/memory1/query3 index d2ab3bd..a1a9515 100644 --- a/tests/acceptance/memory1/query3 +++ b/tests/acceptance/memory1/query3 @@ -1 +1 @@ -Duplicate Value violates UNIQUE Constraint \ No newline at end of file +"Duplicate Value violates UNIQUE Constraint" \ No newline at end of file diff --git a/tests/integration/test1/test.toml b/tests/integration/test1/test.toml index 6af52c5..a7a2999 100644 --- a/tests/integration/test1/test.toml +++ b/tests/integration/test1/test.toml @@ -1,14 +1,14 @@ -[[query]] -result = "err" -filename = "query1" - -[[query]] -result = "ok" -filename = "query2" - -[[query]] -name = "query3 name" -description = "query3 description" -query = "query3 query" -result = "ok" +[[query]] +result = "err" +filename = "query1" + +[[query]] +result = "ok" +filename = "query2" + +[[query]] +name = "query3 name" +description = "query3 description" +query = "query3 query" +result = "ok" filename = "query2" \ No newline at end of file diff --git a/tests/src/lib.rs b/tests/src/lib.rs index 8c94d31..b48c404 100644 --- a/tests/src/lib.rs +++ b/tests/src/lib.rs @@ -1,39 +1,35 @@ -#[cfg(test)] -mod parser_tests { - use sqlo2::{ - ast::Ast, - backend::EvalResult, - backend_memory::MemoryBackend, - parser::{Parser, ParsingError}, - sql_types::SqlValue, - test_impls::{IntoVecContainer, VecContainer}, - }; - use test_macros::test_case; - use test_util::{run_test, TestSubjectExt}; - - fn parser_test_fn(sql: &str, parser: &mut Parser) -> Result { - parser.parse(sql) - } - - #[test_case("../tests/unit/parser1")] - fn parser(test: &str) { - run_test(test, parser_test_fn, Parser::init()); - } - - fn memory_backend_test_fn( - sql: &str, - backend: &mut MemoryBackend, - ) -> Result>, String> { - let mut result = backend.eval_query(sql); - result - .as_mut() - .map(|r| r.iter_mut().for_each(|r| r.zero_time())) - .ok(); - result.map(|r| r.into_vec_container()) - } - - #[test_case("../tests/acceptance/memory1")] - fn memory_backend(test: &str) { - run_test(test, memory_backend_test_fn, MemoryBackend::init()); - } -} +#[cfg(test)] +mod parser_tests { + use sqlo2::{ + ast::Ast, + backend::EvalResult, + backend_memory::MemoryBackend, + parser::{Parser, ParsingError}, + sql_types::SqlValue, + test_impls::{IntoVecContainer, VecContainer}, + }; + use test_macros::test_case; + use test_util::{run_test, TestSubjectExt}; + + fn parser_test_fn(sql: &str, parser: &mut Parser) -> Result { + parser.parse(sql) + } + + #[test_case("../tests/unit/parser1")] + #[test_case("../tests/unit/parser2")] + fn parser(test: &str) { + run_test(test, parser_test_fn, Parser::init()); + } + + fn memory_backend_test_fn( + sql: &str, + backend: &mut MemoryBackend, + ) -> Result>, String> { + backend.eval_query(sql).map(|r| r.into_vec_container()) + } + + #[test_case("../tests/acceptance/memory1")] + fn memory_backend(test: &str) { + run_test(test, memory_backend_test_fn, MemoryBackend::init()); + } +} diff --git a/tests/unit/parser1/query1 b/tests/unit/parser1/query1 index 820f912..975ca8c 100644 --- a/tests/unit/parser1/query1 +++ b/tests/unit/parser1/query1 @@ -1,123 +1,42 @@ -Ast { - statements: [ - CreateTableStatement( - CreateTableStatement { - name: "character_roles", - cols: [ - ColumnDefinition { - name: "character_id", - data_type: Int, - is_primary_key: false, - }, - ColumnDefinition { - name: "role_name", - data_type: Text, - is_primary_key: false, - }, - 
], - }, - ), - InsertStatement( - InsertStatement { - table: "character_roles", - values: [ - Literal( - Numeric( - "1", - ), - ), - Literal( - String( - "Wave Controller", - ), - ), - ], - }, - ), - InsertStatement( - InsertStatement { - table: "character_roles", - values: [ - Literal( - Numeric( - "2", - ), - ), - Literal( - String( - "Light Bearer", - ), - ), - ], - }, - ), - InsertStatement( - InsertStatement { - table: "character_roles", - values: [ - Literal( - Numeric( - "3", - ), - ), - Literal( - String( - "Spear Bearer", - ), - ), - ], - }, - ), - InsertStatement( - InsertStatement { - table: "character_roles", - values: [ - Literal( - Numeric( - "4", - ), - ), - Literal( - String( - "Light Bearer", - ), - ), - ], - }, - ), - InsertStatement( - InsertStatement { - table: "character_roles", - values: [ - Literal( - Numeric( - "1", - ), - ), - Literal( - String( - "Fisherman", - ), - ), - ], - }, - ), - InsertStatement( - InsertStatement { - table: "character_roles", - values: [ - Literal( - Numeric( - "4", - ), - ), - Literal( - String( - "Spear Bearer", - ), - ), - ], - }, - ), - ], -} \ No newline at end of file +Create Table + ├──Name: character_roles + └──Columns + ├──Int + └──Text + +Insert + ├──Name: character_roles + └──Values + ├──1 + └──"Wave Controller" + +Insert + ├──Name: character_roles + └──Values + ├──2 + └──"Light Bearer" + +Insert + ├──Name: character_roles + └──Values + ├──3 + └──"Spear Bearer" + +Insert + ├──Name: character_roles + └──Values + ├──4 + └──"Light Bearer" + +Insert + ├──Name: character_roles + └──Values + ├──1 + └──"Fisherman" + +Insert + ├──Name: character_roles + └──Values + ├──4 + └──"Spear Bearer" + diff --git a/tests/unit/parser1/query1.sql b/tests/unit/parser1/query1.sql index bdd2875..f1f2387 100644 --- a/tests/unit/parser1/query1.sql +++ b/tests/unit/parser1/query1.sql @@ -1 +1,7 @@ -CREATE TABLE character_roles (character_id INT, role_name TEXT); INSERT INTO character_roles VALUES (1, 'Wave Controller'); INSERT INTO character_roles VALUES (2, 'Light Bearer'); INSERT INTO character_roles VALUES (3, 'Spear Bearer'); INSERT INTO character_roles VALUES (4, 'Light Bearer'); INSERT INTO character_roles VALUES (1, 'Fisherman'); INSERT INTO character_roles VALUES (4, 'Spear Bearer'); \ No newline at end of file +CREATE TABLE character_roles (character_id INT, role_name TEXT); +INSERT INTO character_roles VALUES (1, 'Wave Controller'); +INSERT INTO character_roles VALUES (2, 'Light Bearer'); +INSERT INTO character_roles VALUES (3, 'Spear Bearer'); +INSERT INTO character_roles VALUES (4, 'Light Bearer'); +INSERT INTO character_roles VALUES (1, 'Fisherman'); +INSERT INTO character_roles VALUES (4, 'Spear Bearer'); \ No newline at end of file diff --git a/tests/unit/parser2/query1 b/tests/unit/parser2/query1 new file mode 100644 index 0000000..2e5674d --- /dev/null +++ b/tests/unit/parser2/query1 @@ -0,0 +1,141 @@ +Select + ├──From + └──Items + ├──Item + | └──Expression + | └──1 + └──Item + └──Expression + └──2 + +Select + ├──From + | └──characters + └──Items + ├──Item + | └──Expression + | └──id + └──Item + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Where + | └──NotEqual + | |└─id + | └──2 + └──Items + ├──Item + | └──Expression + | └──id + └──Item + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Where + | └──Equal + | |└─name + | └──"Rachel" + └──Items + ├──Item + | └──Expression + | └──id + └──Item + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Where + | └──And + | |└─NotEqual + | | 
|└─name + | | └──"Rachel" + | └──LessThan + | |└─id + | └──5 + └──Items + ├──Item + | └──Expression + | └──id + └──Item + ├──As + | └──charName + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Order By + | ├──Asc + | └──name + └──Items + └──Item + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Distinct + └──Items + └──Item + └──Expression + └──Cast + ├──Expression + | └──Divide + | |└─id + | └──2 + └──Type + └──Int + +Select + ├──From + | └──characters + ├──Where + | └──GreaterThan + | |└─id + | └──1 + ├──Order By + | ├──Desc + | └──id + ├──Limit + | └──4 + ├──Offset + | └──5 + └──Items + └──Item + ├──As + | └──name_with_id + └──Expression + └──Concat + |└─Cast + | ├──Expression + | | └──id + | └──Type + | └──Text + └──Concat + |└─" " + └──name + +Select + ├──From + | └──characters + ├──Where + | └──NotEqual + | |└─id + | └──2 + ├──Order By + | ├──Asc + | └──id + └──Items + └──Item + └──Asterisk + diff --git a/tests/unit/parser2/query1.sql b/tests/unit/parser2/query1.sql new file mode 100644 index 0000000..7d4f362 --- /dev/null +++ b/tests/unit/parser2/query1.sql @@ -0,0 +1,10 @@ + +select 1, 2; +SELECT id, name FROM characters; +SELECT id, name FROM characters WHERE id != 2; +SELECT id, name FROM characters WHERE name = 'Rachel'; +SELECT id, name as charName FROM characters WHERE name != 'Rachel' AND id < 5; +SELECT name FROM characters ORDER BY name ASC; +SELECT DISTINCT (id / 2)::int FROM characters; +SELECT id::text || ' ' || name AS name_with_id FROM characters WHERE id > 1 ORDER BY id DESC LIMIT 4 OFFSET 5; +SELECT * FROM characters INNER JOIN character_roles ON characters.id=character_roles.character_id WHERE id != 2 ORDER BY id \ No newline at end of file diff --git a/tests/unit/parser2/test.toml b/tests/unit/parser2/test.toml new file mode 100644 index 0000000..3b27896 --- /dev/null +++ b/tests/unit/parser2/test.toml @@ -0,0 +1,3 @@ +[[query]] +result = "ok" +filename = "query1" \ No newline at end of file diff --git a/update-gh-pages.sh b/update-gh-pages.sh index a3b1f7b..eb60f7d 100644 --- a/update-gh-pages.sh +++ b/update-gh-pages.sh @@ -1,8 +1,8 @@ -cargo install trunk --locked -rustup target add wasm32-unknown-unknown -cd wasm-repl -trunk build --release --public-url SqlO2/ -cd .. - -mkdir -p docs +cargo install trunk --locked +rustup target add wasm32-unknown-unknown +cd wasm-repl +trunk build --release --public-url SqlO2/ +cd .. 
+ +mkdir -p docs cp -r wasm-repl/dist/* docs \ No newline at end of file diff --git a/wasm-repl/Cargo.toml b/wasm-repl/Cargo.toml index 7c5cb01..aeabcd5 100644 --- a/wasm-repl/Cargo.toml +++ b/wasm-repl/Cargo.toml @@ -1,17 +1,17 @@ -[package] -name = "sqlo2_wasm-repl" -version = "0.1.0" -edition = "2021" - -[dependencies] -sqlo2 = { path = "../engine", features = ["stdweb", "wasm-bindgen"] } - -yew = "=0.19.3" -yew-router = "=0.16.0" -lazy_static = "=1.4.0" -serde = "=1.0.136" -serde_qs = "=0.8.5" -wasm-bindgen = { version = "=0.2.79", features = ["serde-serialize"] } -wasm-logger = "=0.2.0" -web-sys = { version = "=0.3.56", features = ["HtmlTextAreaElement"] } -log = "=0.4.14" +[package] +name = "sqlo2_wasm-repl" +version = "0.1.0" +edition = "2021" + +[dependencies] +sqlo2 = { path = "../engine", features = ["stdweb", "wasm-bindgen"] } + +yew = "=0.19.3" +yew-router = "=0.16.0" +lazy_static = "=1.4.0" +serde = "=1.0.137" +serde_qs = "=0.9.2" +wasm-bindgen = { version = "=0.2.80", features = ["serde-serialize"] } +wasm-logger = "=0.2.0" +web-sys = { version = "=0.3.57", features = ["HtmlTextAreaElement"] } +log = "=0.4.17" diff --git a/wasm-repl/index.html b/wasm-repl/index.html index 23c0585..0103424 100644 --- a/wasm-repl/index.html +++ b/wasm-repl/index.html @@ -1,14 +1,14 @@ - - - - - SqlO2 - - - - - - - - - + + + + + SqlO2 + + + + + + + + + diff --git a/wasm-repl/main.css b/wasm-repl/main.css index 8958316..17f3e8c 100644 --- a/wasm-repl/main.css +++ b/wasm-repl/main.css @@ -1,186 +1,186 @@ -html, -body { - padding: 0px; - margin: 0px; - width: 100%; - height: 100%; -} - -body { - background-color: black; - background-image: radial-gradient(rgba(0, 150, 0, 0.75), black 120%); - background-attachment: fixed; - margin: 0; - color: white; - font: 0.8rem Inconsolata, monospace; - text-shadow: 0 0 5px #c8c8c8; -} - -body::after { - content: ''; - position: fixed; - top: 0; - left: 0; - background: repeating-linear-gradient( - 0deg, - rgba(0, 0, 0, 0.15), - rgba(0, 0, 0, 0.15) 1px, - transparent 1px, - transparent 2px - ); - pointer-events: none; - width: 100%; - height: 100%; -} - -::selection { - background: #0080ff; - text-shadow: none; -} - -.repl-main { - height: 100%; - width: 100%; -} - -.table-header { - color: cyan; - text-shadow: 0 0 2px darkolivegreen; -} - -.table-header::selection { - color: darkolivegreen; - background-color: cyan; -} - -.results { - color: darkgray; - text-shadow: 0 0 5px gray; -} - -.results::selection { - color: darkolivegreen; - background-color: darkgray; -} - -table { - border-collapse: collapse; -} - -th, -td { - padding: 0.6em; - border-collapse: collapse; -} - -th::selection, -td::selection { - color: darkolivegreen; - background-color: white; -} - -.dashed-border { - border: 2px dashed #3fa535; - position: relative; -} - -.dashed-border:after { - content: ''; - position: absolute; - left: -1px; - top: -1px; - right: -1px; - bottom: -1px; - border: 1px solid transparent; -} - -.failed * { - color: orangered; - text-shadow: 0 0 5px red; - display: block; -} - -.failed *::selection { - background-color: orangered; - color: darkgreen; -} - -.ok * { - color: yellowgreen; - text-shadow: 0 0 2px darkgreen; - display: block; -} - -.ok *::selection { - color: darkgreen; - background-color: yellowgreen; -} - -.results-section { - display: block; -} - -.results-table { - margin: 0.6em; - align-self: flex-start; -} - -.terminal-input { - background: transparent; - border: none; - color: white; - width: 100%; - margin: 0; - padding: 0; - align-self: flex-start; 
- font: 0.8rem Inconsolata, monospace; - text-shadow: 0 0 2px #c8c8c8; - line-height: 18px; -} - -.executed-query { - word-break: break-all; - background: transparent; - border: none; - color: white; - font: 0.8rem Inconsolata, monospace; - text-shadow: 0 0 5px #c8c8c8; - width: 100%; - margin: 0; - padding: 0; - display: contents; -} - -.executed-query::selection { - background-color: white; - color: darkgreen; -} - -.prompt { - word-break: keep-all; - font-family: 'courier', monospace; - color: lime; - height: 1em; - font: 0.8rem Inconsolata, monospace; - text-shadow: none; -} - -.prompt::selection { - background-color: lime; - color: darkgreen; -} - -.prompt-wrapper { - display: flex; - margin-bottom: 0.4em; - margin-top: 0.2em; - width: 100%; -} - -textarea { - outline: none; -} - -textarea:focus { - outline: none; -} +html, +body { + padding: 0px; + margin: 0px; + width: 100%; + height: 100%; +} + +body { + background-color: black; + background-image: radial-gradient(rgba(0, 150, 0, 0.75), black 120%); + background-attachment: fixed; + margin: 0; + color: white; + font: 0.8rem Inconsolata, monospace; + text-shadow: 0 0 5px #c8c8c8; +} + +body::after { + content: ''; + position: fixed; + top: 0; + left: 0; + background: repeating-linear-gradient( + 0deg, + rgba(0, 0, 0, 0.15), + rgba(0, 0, 0, 0.15) 1px, + transparent 1px, + transparent 2px + ); + pointer-events: none; + width: 100%; + height: 100%; +} + +::selection { + background: #0080ff; + text-shadow: none; +} + +.repl-main { + height: 100%; + width: 100%; +} + +.table-header { + color: cyan; + text-shadow: 0 0 2px darkolivegreen; +} + +.table-header::selection { + color: darkolivegreen; + background-color: cyan; +} + +.results { + color: darkgray; + text-shadow: 0 0 5px gray; +} + +.results::selection { + color: darkolivegreen; + background-color: darkgray; +} + +table { + border-collapse: collapse; +} + +th, +td { + padding: 0.6em; + border-collapse: collapse; +} + +th::selection, +td::selection { + color: darkolivegreen; + background-color: white; +} + +.dashed-border { + border: 2px dashed #3fa535; + position: relative; +} + +.dashed-border:after { + content: ''; + position: absolute; + left: -1px; + top: -1px; + right: -1px; + bottom: -1px; + border: 1px solid transparent; +} + +.failed * { + color: orangered; + text-shadow: 0 0 5px red; + display: block; +} + +.failed *::selection { + background-color: orangered; + color: darkgreen; +} + +.ok * { + color: yellowgreen; + text-shadow: 0 0 2px darkgreen; + display: block; +} + +.ok *::selection { + color: darkgreen; + background-color: yellowgreen; +} + +.results-section { + display: block; +} + +.results-table { + margin: 0.6em; + align-self: flex-start; +} + +.terminal-input { + background: transparent; + border: none; + color: white; + width: 100%; + margin: 0; + padding: 0; + align-self: flex-start; + font: 0.8rem Inconsolata, monospace; + text-shadow: 0 0 2px #c8c8c8; + line-height: 18px; +} + +.executed-query { + word-break: break-all; + background: transparent; + border: none; + color: white; + font: 0.8rem Inconsolata, monospace; + text-shadow: 0 0 5px #c8c8c8; + width: 100%; + margin: 0; + padding: 0; + display: contents; +} + +.executed-query::selection { + background-color: white; + color: darkgreen; +} + +.prompt { + word-break: keep-all; + font-family: 'courier', monospace; + color: lime; + height: 1em; + font: 0.8rem Inconsolata, monospace; + text-shadow: none; +} + +.prompt::selection { + background-color: lime; + color: darkgreen; +} + +.prompt-wrapper { + 
display: flex; + margin-bottom: 0.4em; + margin-top: 0.2em; + width: 100%; +} + +textarea { + outline: none; +} + +textarea:focus { + outline: none; +} diff --git a/wasm-repl/src/components/repl.rs b/wasm-repl/src/components/repl.rs index 628109c..85ea036 100644 --- a/wasm-repl/src/components/repl.rs +++ b/wasm-repl/src/components/repl.rs @@ -121,14 +121,14 @@ pub fn repl() -> Html { }); let location = use_location(); - let query: Option = location.map(|loc| loc.query().ok()).flatten(); + let query: Option = location.and_then(|loc| loc.query().ok()); let state_cb = state.clone(); use_effect_with_deps( move |_| { if let Some(q) = query { if let Some(Some(true)) = q.default { - for sql in DEFAULT_QUERIES.iter().copied() { + for sql in DEFAULT_QUERIES.iter() { state_cb.dispatch(ReplAction::QuerySubmit(sql.to_string())); } } diff --git a/wasm-repl/src/components/results_table.rs b/wasm-repl/src/components/results_table.rs index 63534d3..bfd8839 100644 --- a/wasm-repl/src/components/results_table.rs +++ b/wasm-repl/src/components/results_table.rs @@ -1,51 +1,51 @@ -use sqlo2::{backend::QueryResults, sql_types::SqlValue}; -use yew::prelude::*; - -#[derive(Clone, PartialEq, Properties)] -pub struct ResultsTableProps { - pub result: QueryResults, -} - -#[function_component(ResultsTable)] -pub fn results_table(props: &ResultsTableProps) -> Html { - html! { - <> - - - { for props.result.columns.iter().map(|column| { - html! { - - } - }) - } - - - { for props.result.rows.iter().map(|row| { - html! { - - { for row.iter().map(|field| { - html! { - - } - }) - } - - } - }) - } -
{ format!("{}({})", column.name, column.col_type) }
{ field }
- { - if props.result.rows.len() == 1 { - html! { -
{"("}{ props.result.rows.len() }{" result)"}
- } - } else { - html! { -
{"("}{ props.result.rows.len() }{" results)"}
- } - } - - } - - } -} +use sqlo2::{backend::QueryResults, sql_types::SqlValue}; +use yew::prelude::*; + +#[derive(Clone, PartialEq, Properties)] +pub struct ResultsTableProps { + pub result: QueryResults, +} + +#[function_component(ResultsTable)] +pub fn results_table(props: &ResultsTableProps) -> Html { + html! { + <> + + + { for props.result.columns.iter().map(|column| { + html! { + + } + }) + } + + + { for props.result.rows.iter().map(|row| { + html! { + + { for row.iter().map(|field| { + html! { + + } + }) + } + + } + }) + } +
{ format!("{}({})", column.name, column.col_type) }
{ field }
+ { + if props.result.rows.len() == 1 { + html! { +
{"("}{ props.result.rows.len() }{" result)"}
+ } + } else { + html! { +
{"("}{ props.result.rows.len() }{" results)"}
+ } + } + + } + + } +} diff --git a/wasm-repl/src/services/mod.rs b/wasm-repl/src/services/mod.rs index cbfcdfa..abfa250 100644 --- a/wasm-repl/src/services/mod.rs +++ b/wasm-repl/src/services/mod.rs @@ -1 +1 @@ -pub mod sqlo2_service; +pub mod sqlo2_service; diff --git a/wasm-repl/src/services/sqlo2_service.rs b/wasm-repl/src/services/sqlo2_service.rs index 6beb0d6..d5a124d 100644 --- a/wasm-repl/src/services/sqlo2_service.rs +++ b/wasm-repl/src/services/sqlo2_service.rs @@ -1,21 +1,21 @@ -use std::sync::Mutex; - -use sqlo2::{backend::EvalResult, backend_memory::MemoryBackend, sql_types::SqlValue}; - -lazy_static! { - static ref BACKEND: Mutex = Mutex::new(MemoryBackend::new()); -} - -pub struct SqlO2Service; - -impl SqlO2Service { - pub fn execute(sql: &str) -> Result>, String> { - let mut backend = BACKEND.lock().unwrap(); - let result = backend.eval_query(sql); - - match result { - Ok(result) => Ok(result), - Err(err) => Err(err), - } - } -} +use std::sync::Mutex; + +use sqlo2::{backend::EvalResult, backend_memory::MemoryBackend, sql_types::SqlValue}; + +lazy_static! { + static ref BACKEND: Mutex = Mutex::new(MemoryBackend::new()); +} + +pub struct SqlO2Service; + +impl SqlO2Service { + pub fn execute(sql: &str) -> Result>, String> { + let mut backend = BACKEND.lock().unwrap(); + let result = backend.eval_query(sql); + + match result { + Ok(result) => Ok(result), + Err(err) => Err(err), + } + } +} diff --git a/wire-protocol/Cargo.toml b/wire-protocol/Cargo.toml index 5e9d70c..822b114 100644 --- a/wire-protocol/Cargo.toml +++ b/wire-protocol/Cargo.toml @@ -1,8 +1,8 @@ -[package] -name = "sqlo2_wire-protocol" -version = "0.1.0" -edition = "2021" - -[dependencies] -log = "=0.4.14" +[package] +name = "sqlo2_wire-protocol" +version = "0.1.0" +edition = "2021" + +[dependencies] +log = "=0.4.17" byteorder = "=1.4.3" \ No newline at end of file diff --git a/wire-protocol/src/lib.rs b/wire-protocol/src/lib.rs index 297a73a..04923d5 100644 --- a/wire-protocol/src/lib.rs +++ b/wire-protocol/src/lib.rs @@ -1,50 +1,50 @@ -extern crate log; - -pub mod messages; -use std::{ - io::{BufReader, BufWriter, Read, Write}, - mem::size_of, - net::TcpStream, -}; - -use byteorder::{NetworkEndian, ReadBytesExt}; - -use crate::messages::{DeserializableMessage, SSLRequest, StartupMessage}; - -pub fn read_startup_message( - stream_reader: &mut BufReader<&TcpStream>, - stream_writer: &mut BufWriter<&TcpStream>, - content: &mut Vec, -) -> std::io::Result<()> { - log::debug!("Startup Message"); - let len = stream_reader.read_i32::()?; - log::debug!("Startup Content length: {:?}", len); - content.resize(len as usize - size_of::(), 0); - stream_reader.read_exact(content.as_mut_slice())?; - - if SSLRequest::deserialize_content(content.as_slice()).is_ok() { - log::debug!("SSL Request"); - stream_writer.write_all(&[b'N']).unwrap(); - stream_writer.flush().unwrap(); - return read_startup_message(stream_reader, stream_writer, content); - } - - if let Ok(msg) = StartupMessage::deserialize_content(content.as_slice()) { - log::debug!("Received: {:?}", msg); - } - Ok(()) -} - -pub fn read_message( - stream_reader: &mut BufReader<&TcpStream>, - content: &mut Vec, -) -> std::io::Result { - let msg_typ = stream_reader.read_u8()? 
as char; - log::debug!("Received message type: {:?}", msg_typ); - let len = stream_reader.read_i32::()?; - log::debug!("Content length: {:?}", len); - content.resize(len as usize - size_of::(), 0); - stream_reader.read_exact(content.as_mut_slice())?; - log::debug!("Received: {:?}", content); - Ok(msg_typ) -} +extern crate log; + +pub mod messages; +use std::{ + io::{BufReader, BufWriter, Read, Write}, + mem::size_of, + net::TcpStream, +}; + +use byteorder::{NetworkEndian, ReadBytesExt}; + +use crate::messages::{DeserializableMessage, SSLRequest, StartupMessage}; + +pub fn read_startup_message( + stream_reader: &mut BufReader<&TcpStream>, + stream_writer: &mut BufWriter<&TcpStream>, + content: &mut Vec, +) -> std::io::Result<()> { + log::debug!("Startup Message"); + let len = stream_reader.read_i32::()?; + log::debug!("Startup Content length: {:?}", len); + content.resize(len as usize - size_of::(), 0); + stream_reader.read_exact(content.as_mut_slice())?; + + if SSLRequest::deserialize_content(content.as_slice()).is_ok() { + log::debug!("SSL Request"); + stream_writer.write_all(&[b'N']).unwrap(); + stream_writer.flush().unwrap(); + return read_startup_message(stream_reader, stream_writer, content); + } + + if let Ok(msg) = StartupMessage::deserialize_content(content.as_slice()) { + log::debug!("Received: {:?}", msg); + } + Ok(()) +} + +pub fn read_message( + stream_reader: &mut BufReader<&TcpStream>, + content: &mut Vec, +) -> std::io::Result { + let msg_typ = stream_reader.read_u8()? as char; + log::debug!("Received message type: {:?}", msg_typ); + let len = stream_reader.read_i32::()?; + log::debug!("Content length: {:?}", len); + content.resize(len as usize - size_of::(), 0); + stream_reader.read_exact(content.as_mut_slice())?; + log::debug!("Received: {:?}", content); + Ok(msg_typ) +} From d4ef41ada98bdd94cca5598db11e0e0f05df0aa7 Mon Sep 17 00:00:00 2001 From: Axmouth Date: Mon, 4 Jul 2022 00:11:56 +0300 Subject: [PATCH 2/5] Split parser code to more files --- engine/src/parser.rs | 333 +++++ engine/src/parser/column_definitions.rs | 90 ++ engine/src/parser/create_index.rs | 77 + engine/src/parser/create_table.rs | 60 + engine/src/parser/drop_table.rs | 44 + engine/src/parser/expression.rs | 245 ++++ engine/src/parser/expressions.rs | 49 + engine/src/parser/insert.rs | 73 + engine/src/parser/joins.rs | 139 ++ engine/src/parser/literal.rs | 60 + engine/src/parser/mod.rs | 1753 ----------------------- engine/src/parser/select.rs | 242 ++++ engine/src/parser/select_items.rs | 90 ++ engine/src/parser/statement.rs | 100 ++ engine/src/parser/table.rs | 100 ++ engine/src/parser/table_column.rs | 48 + engine/src/parser/tables.rs | 40 + 17 files changed, 1790 insertions(+), 1753 deletions(-) create mode 100644 engine/src/parser.rs create mode 100644 engine/src/parser/column_definitions.rs create mode 100644 engine/src/parser/create_index.rs create mode 100644 engine/src/parser/create_table.rs create mode 100644 engine/src/parser/drop_table.rs create mode 100644 engine/src/parser/expression.rs create mode 100644 engine/src/parser/expressions.rs create mode 100644 engine/src/parser/insert.rs create mode 100644 engine/src/parser/joins.rs create mode 100644 engine/src/parser/literal.rs delete mode 100644 engine/src/parser/mod.rs create mode 100644 engine/src/parser/select.rs create mode 100644 engine/src/parser/select_items.rs create mode 100644 engine/src/parser/statement.rs create mode 100644 engine/src/parser/table.rs create mode 100644 engine/src/parser/table_column.rs create mode 100644 
engine/src/parser/tables.rs diff --git a/engine/src/parser.rs b/engine/src/parser.rs new file mode 100644 index 0000000..5d485a0 --- /dev/null +++ b/engine/src/parser.rs @@ -0,0 +1,333 @@ +mod column_definitions; +mod create_index; +mod create_table; +mod drop_table; +mod expression; +mod expressions; +mod insert; +mod joins; +mod literal; +mod select; +mod select_items; +mod statement; +mod table; +mod table_column; +mod tables; + +use super::ast::*; +use super::lexer::*; +use crate::sql_types::SqlType; +use std::cmp::Ordering; +use std::convert::TryFrom; +use test_util::TestSubjectExt; +pub use { + column_definitions::*, create_index::*, create_table::*, drop_table::*, expression::*, + expressions::*, insert::*, joins::*, literal::*, select::*, select_items::*, statement::*, + table::*, table_column::*, tables::*, +}; + +static BINARY_OPERATORS: &[Token<'static>] = &[ + Token::And, + Token::Or, + Token::Equal, + Token::NotEqual, + Token::Concat, + Token::Plus, + Token::Minus, + Token::Asterisk, + Token::Slash, + Token::LessThan, + Token::LessThanOrEqual, + Token::GreaterThan, + Token::GreaterThanOrEqual, + Token::Modulo, + Token::Exponentiation, + Token::BitwiseAnd, + Token::BitwiseOr, + Token::BitwiseXor, + Token::BitwiseShiftLeft, + Token::BitwiseShiftRight, + Token::TypeCast, +]; +static UNARY_OPERATORS: &[Token<'static>] = &[ + Token::Minus, + Token::Not, + Token::FactorialPrefix, + Token::SquareRoot, + Token::CubeRoot, + Token::AbsoluteValue, + Token::CubeRoot, + Token::BitwiseNot, +]; +static UNARY_POSTFIX_OPERATORS: &[Token<'static>] = &[Token::Factorial]; + +macro_rules! parse_err { + ($tokens:expr, $cursor:expr, $msg:expr) => { + parse_err!($tokens, $cursor, General, $msg) + }; + ($tokens:expr, $cursor:expr, $err_type:ident, $msg:expr) => { + return Err(ParsingError::$err_type { + msg: help_message($tokens.get($cursor), $cursor, $msg), + cursor: $cursor, + }) + }; +} +pub(crate) use parse_err; + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] +pub struct Parser { + lexer: Lexer, +} + +impl TestSubjectExt for Parser { + fn init() -> Self { + Self::new() + } +} + +impl Parser { + pub fn new() -> Parser { + Parser { + lexer: Lexer::new(), + } + } + + pub fn parse<'a>(&'a self, source: &'a str) -> Result { + let tokens = self.lexer.lex(source)?; + + let mut ast = Ast { + statements: Vec::with_capacity(1), + }; + + let mut cursor: usize = 0; + let mut first_statement = true; + while cursor < tokens.len() { + if !first_statement { + let mut at_least_one_semicolon = false; + while expect_token(&tokens, cursor, Token::Semicolon) { + cursor += 1; + at_least_one_semicolon = true; + } + if !(first_statement || at_least_one_semicolon) { + parse_err!( + tokens, + cursor, + Delimiter, + "Expected Semicolon Delimiter between Statements" + ); + } + } + let (statement, new_cursor) = parse_statement(&tokens, cursor, Token::Semicolon)?; + cursor = new_cursor; + ast.statements.push(statement); + first_statement = false; + + if cursor == tokens.len() - 1 { + break; + } + } + + Ok(ast) + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum ParsingError { + General { msg: String, cursor: usize }, + Lexing { msg: String, loc: TokenLocation }, + Delimiter { msg: String, cursor: usize }, + Internal { msg: String, cursor: usize }, +} + +impl std::fmt::Display for ParsingError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + match self { + ParsingError::General { msg, cursor: _ } => msg.clone(), + ParsingError::Lexing { msg, loc: _ } => 
msg.clone(), + ParsingError::Delimiter { msg, cursor: _ } => msg.clone(), + ParsingError::Internal { msg, cursor: _ } => msg.clone(), + } + ) + } +} + +impl From for ParsingError { + fn from(err: LexingError) -> Self { + match err { + LexingError::General { msg, loc } => ParsingError::Lexing { msg, loc }, + } + } +} + +fn expect_token(tokens: &[TokenContainer], cursor: usize, token: Token) -> bool { + let current_token = match tokens.get(cursor) { + Some(value) => value, + None => { + return false; + } + }; + token == current_token.token +} + +fn help_message(token: Option<&TokenContainer>, cursor: usize, msg: &str) -> String { + if let Some(token) = token { + format!( + "[{}, {}]: {}, got {}", + token.loc.line, token.loc.col, msg, token.token, + ) + } else { + format!("Token {cursor} : {msg}") + } +} + +#[cfg(test)] +mod parser_tests { + use crate::parser::*; + + struct ParseTest { + ast: Ast, + input: &'static str, + } + + #[test] + fn test_parser() { + let parse_tests = vec![ + ParseTest { + input: "INSERT INTO users VALUES (105, 'George');", + ast: Ast { + statements: vec![Statement::InsertStatement(InsertStatement { + table: "users".to_string(), + values: vec![ + Expression::Literal(LiteralExpression::Numeric("105".to_owned())), + Expression::Literal(LiteralExpression::String("George".to_owned())), + ], + })], + }, + }, + ParseTest { + input: "CREATE TABLE users (id INT, name TEXT);", + ast: Ast { + statements: vec![Statement::CreateTableStatement(CreateTableStatement { + name: "users".to_owned(), + cols: vec![ + ColumnDefinition { + name: "id".to_string(), + data_type: SqlType::Int, + is_primary_key: false, + }, + ColumnDefinition { + name: "name".to_string(), + data_type: SqlType::Int, + is_primary_key: false, + }, + ], + })], + }, + }, + ParseTest { + input: "SELECT id, name AS fullname FROM users;", + ast: Ast { + statements: vec![Statement::SelectStatement(SelectStatement { + items: vec![ + SelectItem { + asterisk: false, + as_clause: None, + expression: Expression::TableColumn(TableColumn { + col_name: "id".to_string(), + table_name: None, + }), + }, + SelectItem { + asterisk: false, + as_clause: Some("fullname".to_string()), + expression: Expression::TableColumn(TableColumn { + col_name: "name".to_string(), + table_name: None, + }), + }, + ], + from: vec![RowDataSource::Table { + table_name: "users".to_string(), + as_clause: None, + joins: vec![], + }], + where_clause: Expression::Empty, + is_distinct: false, + order_by: None, + limit: None, + offset: None, + })], + }, + }, + ParseTest { + input: "SELECT distinct id, name AS fullname FROM users;", + ast: Ast { + statements: vec![Statement::SelectStatement(SelectStatement { + items: vec![ + SelectItem { + asterisk: false, + as_clause: None, + expression: Expression::TableColumn(TableColumn { + col_name: "id".to_string(), + table_name: None, + }), + }, + SelectItem { + asterisk: false, + as_clause: Some("fullname".to_owned()), + expression: Expression::TableColumn(TableColumn { + col_name: "name".to_string(), + table_name: None, + }), + }, + ], + from: vec![RowDataSource::Table { + table_name: "users".to_string(), + as_clause: None, + joins: vec![], + }], + where_clause: Expression::Empty, + is_distinct: true, + order_by: None, + limit: None, + offset: None, + })], + }, + }, + ]; + + let mut found_faults = false; + let mut err_msg = "\n".to_owned(); + let parser = Parser::new(); + + for test in parse_tests { + print!("(Parser) Testing: {}", test.input); + + parser.parse(test.input).unwrap(); + let ast = match 
parser.parse(test.input) { + Ok(value) => value, + Err(err) => { + found_faults = true; + err_msg.push_str(err.to_string().as_str()); + continue; + } + }; + + if ast != test.ast { + err_msg.push_str( + format!("\n\nExpected:\n{:#?}\n\nGot:\n{:#?}\n", test.ast, ast).as_str(), + ); + } + + // assert_eq!(ast, test.ast); + println!(" Passed!"); + } + + if found_faults { + panic!("{err_msg}"); + } + } +} diff --git a/engine/src/parser/column_definitions.rs b/engine/src/parser/column_definitions.rs new file mode 100644 index 0000000..11c87c9 --- /dev/null +++ b/engine/src/parser/column_definitions.rs @@ -0,0 +1,90 @@ +use super::*; + +pub fn parse_column_definitions<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiter: Token, +) -> Result<(Vec, usize), ParsingError> { + let mut cursor = initial_cursor; + + let mut column_definitions: Vec = Vec::with_capacity(5); + + loop { + if cursor >= tokens.len() { + parse_err!(tokens, cursor, "Unexpected end of input"); + } + + // Look for a delimiter + if let Some(TokenContainer { + loc: _, + token: current_token, + }) = tokens.get(cursor) + { + if current_token == &delimiter { + break; + } + } + + // Look for a comma + if !column_definitions.is_empty() { + if let Some(TokenContainer { loc: _, token }) = tokens.get(cursor) { + if token == &Token::Comma { + cursor += 1; + } else { + parse_err!(tokens, cursor, "Expected Comma"); + } + } + } + // Look for a column name + let col_name = match &tokens.get(cursor) { + Some(TokenContainer { + loc: _, + token: Token::IdentifierValue { value }, + }) => value, + _ => { + parse_err!(tokens, cursor, "Expected Column Name"); + } + }; + cursor += 1; + + // Look for a column type + if let Some(token_c) = tokens.get(cursor) { + if !token_c.token.is_datatype() { + parse_err!(tokens, cursor, "Expected Column Type"); + } + } + + let mut is_primary_key = false; + let col_type = match tokens.get(cursor) { + Some(v) => v, + None => { + parse_err!(tokens, cursor, "Expected Column Type"); + } + }; + cursor += 1; + + // Look for primary key + if let ( + Some(TokenContainer { + loc: _, + token: Token::Primary, + }), + Some(TokenContainer { + loc: _, + token: Token::Key, + }), + ) = (&tokens.get(cursor), &tokens.get(cursor + 1)) + { + is_primary_key = true; + cursor += 2; + } + + column_definitions.push(ColumnDefinition { + name: col_name.to_string(), + data_type: SqlType::from_token(col_type, cursor)?, + is_primary_key, + }); + } + + Ok((column_definitions, cursor)) +} diff --git a/engine/src/parser/create_index.rs b/engine/src/parser/create_index.rs new file mode 100644 index 0000000..bcaeded --- /dev/null +++ b/engine/src/parser/create_index.rs @@ -0,0 +1,77 @@ +use super::*; + +pub fn parse_create_index_statement<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiter: Token, +) -> Result<(CreateIndexStatement, usize), ParsingError> { + let mut cursor = initial_cursor; + if let Some(TokenContainer { + loc: _, + token: Token::Create, + }) = tokens.get(cursor) + { + cursor += 1; + } else { + parse_err!(tokens, cursor, "Not a Create Index Statement"); + } + let mut is_unique = false; + if let Some(TokenContainer { + loc: _, + token: Token::Unique, + }) = tokens.get(cursor) + { + is_unique = true; + cursor += 1; + } + if let Some(TokenContainer { + loc: _, + token: Token::Index, + }) = tokens.get(cursor) + { + cursor += 1; + } else { + parse_err!(tokens, cursor, "Not a Create Index Statement"); + } + let name = if let Some(TokenContainer { + loc: _, + token: Token::IdentifierValue { value }, + 
}) = tokens.get(cursor)
+    {
+        cursor += 1;
+        value
+    } else {
+        parse_err!(tokens, cursor, "Expected Index Name");
+    };
+    if let Some(TokenContainer {
+        loc: _,
+        token: Token::On,
+    }) = tokens.get(cursor)
+    {
+        cursor += 1;
+    } else {
+        parse_err!(tokens, cursor, "Expected ON Keyword");
+    }
+    let table = if let Some(TokenContainer {
+        loc: _,
+        token: Token::IdentifierValue { value },
+    }) = tokens.get(cursor)
+    {
+        cursor += 1;
+        value
+    } else {
+        parse_err!(tokens, cursor, "Expected Table Name");
+    };
+    let (expression, cursor) = parse_expression(tokens, cursor, &[delimiter], 0, true, false)?;
+
+    Ok((
+        CreateIndexStatement {
+            is_primary_key: false,
+            is_unique,
+            name: name.to_string(),
+            expression,
+            table: table.to_string(),
+        },
+        cursor,
+    ))
+}
diff --git a/engine/src/parser/create_table.rs b/engine/src/parser/create_table.rs
new file mode 100644
index 0000000..bc87e3e
--- /dev/null
+++ b/engine/src/parser/create_table.rs
@@ -0,0 +1,60 @@
+use super::*;
+
+pub fn parse_create_table_statement<'a>(
+    tokens: &'a [TokenContainer],
+    initial_cursor: usize,
+    _: Token,
+) -> Result<(CreateTableStatement, usize), ParsingError> {
+    let mut cursor = initial_cursor;
+
+    if !expect_token(tokens, cursor, Token::Create) {
+        parse_err!(tokens, cursor, "Not a Create Table Statement");
+    }
+    cursor += 1;
+
+    if !expect_token(tokens, cursor, Token::Table) {
+        parse_err!(tokens, cursor, "Expected TABLE Keyword");
+    }
+    cursor += 1;
+
+    let name = match tokens.get(cursor) {
+        Some(TokenContainer {
+            loc: _,
+            token: Token::IdentifierValue { value },
+        }) => value,
+        _ => {
+            parse_err!(tokens, cursor, "Expected Table Name");
+        }
+    };
+    cursor += 1;
+    if let Some(TokenContainer {
+        loc: _,
+        token: Token::LeftParenthesis,
+    }) = tokens.get(cursor)
+    {
+        cursor += 1;
+    } else {
+        parse_err!(tokens, cursor, "Expected Left Parenthesis");
+    }
+
+    let (cols, new_cursor) = parse_column_definitions(tokens, cursor, Token::RightParenthesis)?;
+    cursor = new_cursor;
+
+    if let Some(TokenContainer {
+        loc: _,
+        token: Token::RightParenthesis,
+    }) = tokens.get(cursor)
+    {
+        cursor += 1;
+    } else {
+        parse_err!(tokens, cursor, "Expected Right Parenthesis");
+    }
+
+    Ok((
+        CreateTableStatement {
+            name: name.to_string(),
+            cols,
+        },
+        cursor,
+    ))
+}
diff --git a/engine/src/parser/drop_table.rs b/engine/src/parser/drop_table.rs
new file mode 100644
index 0000000..498c921
--- /dev/null
+++ b/engine/src/parser/drop_table.rs
@@ -0,0 +1,44 @@
+use super::*;
+
+pub fn parse_drop_table_statement<'a>(
+    tokens: &'a [TokenContainer],
+    initial_cursor: usize,
+    _: Token,
+) -> Result<(DropTableStatement, usize), ParsingError> {
+    let mut cursor = initial_cursor;
+    if let Some(TokenContainer {
+        loc: _,
+        token: Token::Drop,
+    }) = tokens.get(cursor)
+    {
+        cursor += 1;
+    } else {
+        parse_err!(tokens, cursor, "Not a Drop Table Statement");
+    }
+    if let Some(TokenContainer {
+        loc: _,
+        token: Token::Table,
+    }) = tokens.get(cursor)
+    {
+        cursor += 1;
+    } else {
+        parse_err!(tokens, cursor, "Not a Drop Table Statement");
+    }
+    let name = if let Some(TokenContainer {
+        loc: _,
+        token: Token::IdentifierValue { value },
+    }) = tokens.get(cursor)
+    {
+        cursor += 1;
+        value
+    } else {
+        parse_err!(tokens, cursor, "Not a Drop Table Statement");
+    };
+
+    Ok((
+        DropTableStatement {
+            name: name.to_string(),
+        },
+        cursor,
+    ))
+}
diff --git a/engine/src/parser/expression.rs b/engine/src/parser/expression.rs
new file mode 100644
index 0000000..7a512f5
--- /dev/null
+++ b/engine/src/parser/expression.rs
@@ -0,0 +1,245 @@
+use 
super::*; + +pub fn parse_expression<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiters: &[Token], + min_binding_power: u32, + is_top_level: bool, + takes_as_clause: bool, +) -> Result<(Expression, usize), ParsingError> { + let mut cursor = initial_cursor; + + let mut expression; + + if let Some(TokenContainer { + loc: _, + token: Token::LeftParenthesis, + }) = tokens.get(cursor) + { + cursor += 1; + + if let Some(TokenContainer { + loc: _, + token: Token::Select, + }) = tokens.get(cursor) + { + let (select_statement, new_cursor) = + parse_select_statement(tokens, cursor, Token::RightParenthesis)?; + expression = Expression::SubSelect(Box::new(select_statement)); + cursor = new_cursor; + } else { + (expression, cursor) = parse_expression( + tokens, + cursor, + &[Token::RightParenthesis], + min_binding_power, + true, + false, + )?; + } + + if let Some(TokenContainer { + loc: _, + token: Token::RightParenthesis, + }) = tokens.get(cursor) + { + cursor += 1; + } else { + parse_err!(tokens, cursor, "Expected closing Parenthesis"); + } + } else if cursor < tokens.len() && UNARY_OPERATORS.contains(&tokens[cursor].token) { + let token = &tokens[cursor]; + let operand = token.token.clone(); + cursor += 1; + let mut nested_un_ops = vec![operand]; + let mut inner_exp; + loop { + if cursor < tokens.len() && UNARY_OPERATORS.contains(&tokens[cursor].token) { + nested_un_ops.push(tokens[cursor].token.clone()); + cursor += 1; + } else { + break; + } + } + match parse_literal_expression(tokens, cursor) { + Ok((expression_, cursor_)) => { + inner_exp = expression_; + cursor = cursor_; + } + Err(err) => { + if let Some(TokenContainer { + token: Token::LeftParenthesis, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + let (expression_, cursor_) = parse_expression( + tokens, + cursor, + &[Token::RightParenthesis], + min_binding_power, + true, + takes_as_clause, + )?; + inner_exp = expression_; + cursor = cursor_; + + if let Some(TokenContainer { + loc: _, + token: Token::RightParenthesis, + }) = tokens.get(cursor) + { + cursor += 1; + } else { + parse_err!(tokens, cursor, "Expected closing Parenthesis"); + } + } else { + return Err(err); + } + } + } + + if let Some(operand) = nested_un_ops.pop() { + inner_exp = Expression::Unary(UnaryExpression { + first: Box::from(inner_exp), + operand: Operand::from_token(&operand, cursor)?, + }); + } else { + parse_err!(tokens, cursor, "Expected Unary Operator"); + } + while let Some(operand) = nested_un_ops.pop() { + inner_exp = Expression::Unary(UnaryExpression { + first: Box::from(inner_exp), + operand: Operand::from_token(&operand, cursor)?, + }); + } + expression = inner_exp; + } else { + let (first_expression, new_cursor) = parse_literal_expression(tokens, cursor)?; + expression = first_expression; + cursor = new_cursor; + } + + if let ( + Some(TokenContainer { + token: token1, + loc: _, + }), + Some(TokenContainer { + token: token2, + loc: _, + }), + ) = (tokens.get(cursor), tokens.get(cursor + 1)) + { + if UNARY_POSTFIX_OPERATORS.contains(token1) && BINARY_OPERATORS.contains(token2) { + cursor += 1; + expression = Expression::Unary(UnaryExpression { + first: Box::from(expression), + operand: Operand::from_token(token1, cursor)?, + }); + } + } + + let mut last_cursor = cursor; + 'outer: while let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { + if delimiters.contains(token) { + break 'outer; + } + if UNARY_POSTFIX_OPERATORS.contains(token) { + break 'outer; + } + + if let Some(TokenContainer { + token: 
Token::IdentifierValue { value: _ }, + loc: _, + }) = tokens.get(cursor) + { + if takes_as_clause { + break; + } + } + let mut operand_tok = Token::Empty; + if BINARY_OPERATORS.contains(token) { + operand_tok = token.clone(); + cursor += 1; + } + if operand_tok == Token::TypeCast { + if let Some(TokenContainer { token: op, loc: _ }) = tokens.get(cursor) { + if op.is_datatype() { + // Make sure expression is cast before applying unary operator + if let Expression::Unary(UnaryExpression { first, operand }) = expression { + expression = Expression::Unary(UnaryExpression { + first: Box::from(Expression::Cast { + data: first, + typ: SqlType::try_from((op, cursor))?, + }), + operand, + }); + } else { + expression = Expression::Cast { + data: Box::new(expression), + typ: SqlType::try_from((op, cursor))?, + }; + } + + cursor += 1; + continue; + } else { + parse_err!(tokens, cursor, "Expected Type Name after Type Cast"); + } + } else { + parse_err!(tokens, cursor, "Expected Type Name after Type Cast"); + } + } + if operand_tok == Token::Empty { + parse_err!(tokens, cursor, "Expected Binary Operator"); + } + + let binding_power = operand_tok.binding_power(); + if binding_power < min_binding_power { + cursor = last_cursor; + break; + } + + let (mut second_expression, new_cursor) = parse_expression( + tokens, + cursor, + delimiters, + binding_power, + false, + takes_as_clause, + )?; + let operand = Operand::from_token(&operand_tok, cursor)?; + cursor = new_cursor; + + if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { + if UNARY_POSTFIX_OPERATORS.contains(token) { + cursor += 1; + second_expression = Expression::Unary(UnaryExpression { + first: Box::from(second_expression), + operand: Operand::from_token(token, cursor)?, + }); + } + } + + expression = Expression::Binary(BinaryExpression { + first: Box::from(expression), + second: Box::from(second_expression), + operand, + }); + last_cursor = cursor; + } + + if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { + if UNARY_POSTFIX_OPERATORS.contains(token) && is_top_level { + cursor += 1; + expression = Expression::Unary(UnaryExpression { + first: Box::from(expression), + operand: Operand::from_token(token, cursor)?, + }); + } + } + + Ok((expression, cursor)) +} diff --git a/engine/src/parser/expressions.rs b/engine/src/parser/expressions.rs new file mode 100644 index 0000000..16b94bb --- /dev/null +++ b/engine/src/parser/expressions.rs @@ -0,0 +1,49 @@ +use super::*; + +pub fn parse_expressions( + tokens: &[TokenContainer], + initial_cursor: usize, + delimiters: &[Token], +) -> Result<(Vec, usize), ParsingError> { + let mut cursor = initial_cursor; + + let mut expressions: Vec = Vec::with_capacity(5); + + loop { + if cursor >= tokens.len() { + parse_err!(tokens, cursor, "Expected Expression"); + } + + // Look for delimiter + if let Some(TokenContainer { + loc: _, + token: current_token, + }) = tokens.get(cursor) + { + if delimiters.contains(current_token) { + return Ok((expressions, cursor)); + } + } + + // Look for comma + if !expressions.is_empty() { + if !expect_token(tokens, cursor, Token::Comma) { + parse_err!(tokens, cursor, "Expected Comma"); + } + + cursor += 1; + } + + // Look for expression + let (expression, new_cursor) = parse_expression( + tokens, + cursor, + &[Token::Comma, Token::RightParenthesis], + tokens[cursor].binding_power(), + true, + false, + )?; + cursor = new_cursor; + expressions.push(expression); + } +} diff --git a/engine/src/parser/insert.rs b/engine/src/parser/insert.rs new file mode 100644 
index 0000000..4032f5c --- /dev/null +++ b/engine/src/parser/insert.rs @@ -0,0 +1,73 @@ +use super::*; + +pub fn parse_insert_statement( + tokens: &[TokenContainer], + initial_cursor: usize, + _: Token, +) -> Result<(InsertStatement, usize), ParsingError> { + let mut cursor = initial_cursor; + + // Look for INSERT + if !expect_token(tokens, cursor, Token::Insert) { + parse_err!(tokens, cursor, "Not an Insert Statement"); + } + cursor += 1; + + // Look for INTO + if !expect_token(tokens, cursor, Token::Into) { + parse_err!(tokens, cursor, "Expected INTO"); + } + cursor += 1; + + let table_name = match tokens.get(cursor) { + Some(TokenContainer { + loc: _, + token: Token::IdentifierValue { value }, + }) => value, + _ => { + parse_err!(tokens, cursor, "Expected Table Name"); + } + }; + + cursor += 1; + + // Look for VALUES + if let Some(token) = tokens.get(cursor) { + if token.token != Token::Values { + parse_err!(tokens, cursor, "Expected VALUES"); + } + cursor += 1; + } + + // Look for left parenthesis + if let Some(token) = tokens.get(cursor) { + if token.token != Token::LeftParenthesis { + parse_err!(tokens, cursor, "Expected Left parenthesis"); + } + cursor += 1; + } + + // Look for expression list + let (values, new_cursor) = parse_expressions(tokens, cursor, &[Token::RightParenthesis])?; + + cursor = new_cursor; + + // Look for right parenthesis + if let Some(TokenContainer { + token: Token::RightParenthesis, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + } else { + parse_err!(tokens, cursor, "Expected Right Parenthesis"); + } + + Ok(( + InsertStatement { + table: table_name.to_string(), + values, + }, + cursor, + )) +} diff --git a/engine/src/parser/joins.rs b/engine/src/parser/joins.rs new file mode 100644 index 0000000..5a8e7f6 --- /dev/null +++ b/engine/src/parser/joins.rs @@ -0,0 +1,139 @@ +use super::*; + +pub fn parse_joins<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiters: &[Token], +) -> Result<(Vec, usize), ParsingError> { + let mut cursor = initial_cursor; + + let mut joins = vec![]; + + loop { + let mut kind = JoinKind::Inner; + if tokens.get(cursor).is_none() { + break; + } + + if let Some(TokenContainer { + token: Token::On, + loc: _, + }) = tokens.get(cursor) + { + break; + } else if let Some(TokenContainer { + token: Token::Inner, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + kind = JoinKind::Inner; + } else if let Some(TokenContainer { + token: Token::Right, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + if let Some(TokenContainer { + token: Token::Outer, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + } + kind = JoinKind::RightOuter; + } else if let Some(TokenContainer { + token: Token::Left, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + if let Some(TokenContainer { + token: Token::Outer, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + } + kind = JoinKind::LeftOuter; + } else if let Some(TokenContainer { + token: Token::Full, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + if let Some(TokenContainer { + token: Token::Outer, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + kind = JoinKind::FullOuter; + } else { + parse_err!(tokens, cursor, "Expected OUTER Keyword after FULL"); + } + } else if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { + if delimiters.contains(token) { + break; + } + parse_err!(tokens, cursor, "Failed to parse Join Clause"); + } + if let Some(TokenContainer { + token: Token::Join, + loc: _, + }) = tokens.get(cursor) + { + cursor 
+= 1; + } else { + parse_err!(tokens, cursor, "No JOIN Keyword after INNER"); + } + let (table, new_cursor) = parse_table(tokens, cursor, delimiters)?; + cursor = new_cursor; + if let Some(TokenContainer { + token: Token::On, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + } else { + parse_err!(tokens, cursor, "No ON keyword in Join Expression"); + } + let (col1, new_cursor) = parse_table_column(tokens, cursor)?; + cursor = new_cursor; + let operand_token = if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { + cursor += 1; + if BINARY_OPERATORS.contains(token) { + token.clone() + } else { + parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); + } + } else { + parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); + }; + let (col2, new_cursor) = parse_table_column(tokens, cursor)?; + cursor = new_cursor; + + let operand = if let Ok(o) = Operand::from_token(&operand_token, cursor) { + o + } else { + parse_err!( + tokens, + cursor, + "Failed to parse Binary Operator in Join Expression" + ); + }; + + let join = JoinClause { + kind, + source: table, + on: Expression::Binary(BinaryExpression { + first: Box::new(Expression::TableColumn(col1)), + second: Box::new(Expression::TableColumn(col2)), + operand, + }), + }; + joins.push(join); + } + + Ok((joins, cursor)) +} diff --git a/engine/src/parser/literal.rs b/engine/src/parser/literal.rs new file mode 100644 index 0000000..0fb6454 --- /dev/null +++ b/engine/src/parser/literal.rs @@ -0,0 +1,60 @@ +use super::*; + +pub fn parse_literal_expression( + tokens: &[TokenContainer], + initial_cursor: usize, +) -> Result<(Expression, usize), ParsingError> { + let mut cursor = initial_cursor; + + if let Some(tok) = tokens.get(cursor) { + match tok.token { + Token::IdentifierValue { + value: ref first_identifier, + } => { + cursor += 1; + let mut col_name = first_identifier; + let mut table_name = None; + if let Some(TokenContainer { + token: Token::Dot, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + if let Some(TokenContainer { + token: Token::IdentifierValue { value }, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + table_name = Some(col_name); + col_name = value; + } else { + parse_err!(tokens, cursor, "Expected Identifier after dot"); + } + } + Ok(( + Expression::TableColumn(TableColumn { + col_name: col_name.to_string(), + table_name: table_name.map(|x| x.to_string()), + }), + cursor, + )) + } + Token::NumericValue { value: _ } + | Token::StringValue { value: _ } + | Token::BoolValue { value: _ } + | Token::Null => { + cursor += 1; + Ok(( + Expression::Literal(LiteralExpression::from_token(&tok.token, cursor)?), + cursor, + )) + } + _ => { + parse_err!(tokens, cursor, "Expected Literal") + } + } + } else { + parse_err!(tokens, cursor, "Expected Literal Expression"); + } +} diff --git a/engine/src/parser/mod.rs b/engine/src/parser/mod.rs deleted file mode 100644 index e7ede0b..0000000 --- a/engine/src/parser/mod.rs +++ /dev/null @@ -1,1753 +0,0 @@ -use super::ast::*; -use super::lexer::*; -use crate::sql_types::SqlType; -use std::cmp::Ordering; -use std::convert::TryFrom; -use test_util::TestSubjectExt; - -static BINARY_OPERATORS: &[Token<'static>] = &[ - Token::And, - Token::Or, - Token::Equal, - Token::NotEqual, - Token::Concat, - Token::Plus, - Token::Minus, - Token::Asterisk, - Token::Slash, - Token::LessThan, - Token::LessThanOrEqual, - Token::GreaterThan, - Token::GreaterThanOrEqual, - Token::Modulo, - Token::Exponentiation, - Token::BitwiseAnd, - Token::BitwiseOr, - 
Token::BitwiseXor, - Token::BitwiseShiftLeft, - Token::BitwiseShiftRight, - Token::TypeCast, -]; -static UNARY_OPERATORS: &[Token<'static>] = &[ - Token::Minus, - Token::Not, - Token::FactorialPrefix, - Token::SquareRoot, - Token::CubeRoot, - Token::AbsoluteValue, - Token::CubeRoot, - Token::BitwiseNot, -]; -static UNARY_POSTFIX_OPERATORS: &[Token<'static>] = &[Token::Factorial]; - -macro_rules! parse_err { - ($tokens:expr, $cursor:expr, $msg:expr) => { - parse_err!($tokens, $cursor, General, $msg) - }; - ($tokens:expr, $cursor:expr, $err_type:ident, $msg:expr) => { - return Err(ParsingError::$err_type { - msg: help_message($tokens.get($cursor), $cursor, $msg), - cursor: $cursor, - }) - }; -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] -pub struct Parser { - lexer: Lexer, -} - -impl TestSubjectExt for Parser { - fn init() -> Self { - Self::new() - } -} - -impl Parser { - pub fn new() -> Parser { - Parser { - lexer: Lexer::new(), - } - } - - pub fn parse<'a>(&'a self, source: &'a str) -> Result { - let tokens = self.lexer.lex(source)?; - - let mut ast = Ast { - statements: Vec::with_capacity(1), - }; - - let mut cursor: usize = 0; - let mut first_statement = true; - while cursor < tokens.len() { - if !first_statement { - let mut at_least_one_semicolon = false; - while expect_token(&tokens, cursor, Token::Semicolon) { - cursor += 1; - at_least_one_semicolon = true; - } - if !(first_statement || at_least_one_semicolon) { - parse_err!( - tokens, - cursor, - Delimiter, - "Expected Semicolon Delimiter between Statements" - ); - } - } - let (statement, new_cursor) = parse_statement(&tokens, cursor, Token::Semicolon)?; - cursor = new_cursor; - ast.statements.push(statement); - first_statement = false; - - if cursor == tokens.len() - 1 { - break; - } - } - - Ok(ast) - } -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum ParsingError { - General { msg: String, cursor: usize }, - Lexing { msg: String, loc: TokenLocation }, - Delimiter { msg: String, cursor: usize }, - Internal { msg: String, cursor: usize }, -} - -impl std::fmt::Display for ParsingError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}", - match self { - ParsingError::General { msg, cursor: _ } => msg.clone(), - ParsingError::Lexing { msg, loc: _ } => msg.clone(), - ParsingError::Delimiter { msg, cursor: _ } => msg.clone(), - ParsingError::Internal { msg, cursor: _ } => msg.clone(), - } - ) - } -} - -impl From for ParsingError { - fn from(err: LexingError) -> Self { - match err { - LexingError::General { msg, loc } => ParsingError::Lexing { msg, loc }, - } - } -} - -fn expect_token(tokens: &[TokenContainer], cursor: usize, token: Token) -> bool { - let current_token = match tokens.get(cursor) { - Some(value) => value, - None => { - return false; - } - }; - token == current_token.token -} - -fn help_message(token: Option<&TokenContainer>, cursor: usize, msg: &str) -> String { - if let Some(token) = token { - format!( - "[{}, {}]: {}, got {}", - token.loc.line, token.loc.col, msg, token.token, - ) - } else { - format!("Token {cursor} : {msg}") - } -} - -fn parse_statement<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiter: Token, -) -> Result<(Statement, usize), ParsingError> { - let cursor = initial_cursor; - - if let Some(first_token) = tokens.get(cursor) { - match first_token.token { - Token::Select => { - // Look for a SELECT statement - match parse_select_statement(tokens, cursor, delimiter) { - Ok((select, new_cursor)) => { - 
Ok((Statement::SelectStatement(select), new_cursor)) - } - Err(err) => Err(err), - } - } - Token::Insert => { - // Look for an INSERT statement - match parse_insert_statement(tokens, cursor, delimiter) { - Ok((insert, new_cursor)) => { - Ok((Statement::InsertStatement(insert), new_cursor)) - } - Err(err) => Err(err), - } - } - Token::Delete => parse_err!(tokens, cursor, Internal, "Delete not implemented"), - Token::Update => parse_err!(tokens, cursor, Internal, "Update not implemented"), - Token::Alter => parse_err!(tokens, cursor, Internal, "Alter not implemented"), - Token::IdentifierValue { value: _ } => { - parse_err!(tokens, cursor, Internal, "Assignment not implemented") - } - Token::Create => { - if let Some(first_token) = tokens.get(cursor + 1) { - match first_token.token { - Token::Index => { - // Look for a CREATE INDEX statement - match parse_create_index_statement(tokens, cursor, delimiter) { - Ok((create_index, new_cursor)) => { - Ok((Statement::CreateIndexStatement(create_index), new_cursor)) - } - Err(err) => (Err(err)), - } - } - Token::Unique => match tokens.get(cursor + 2) { - Some(TokenContainer { - token: Token::Index, - loc: _, - }) => { - // Look for a CREATE UNIQUE INDEX statement - match parse_create_index_statement(tokens, cursor, delimiter) { - Ok((create_index, new_cursor)) => Ok(( - Statement::CreateIndexStatement(create_index), - new_cursor, - )), - Err(err) => (Err(err)), - } - } - Some(TokenContainer { - token: Token::Constraint, - loc: _, - }) => { - parse_err!(tokens, cursor, "Create Constraint not implemented") - } - _ => parse_err!(tokens, cursor, "Invalid Create Statement"), - }, - Token::Constraint => { - parse_err!(tokens, cursor, "Create Constraint not implemented") - } - Token::Table => { - // Look for a CREATE TABLE statement - match parse_create_table_statement(tokens, cursor, delimiter) { - Ok((create_table, new_cursor)) => { - Ok((Statement::CreateTableStatement(create_table), new_cursor)) - } - Err(err) => (Err(err)), - } - } - _ => parse_err!(tokens, cursor, "Invalid Create Statement"), - } - } else { - parse_err!(tokens, cursor, "Invalid Create Statement"); - } - } - Token::Drop => { - // Look for an DROP statement - match parse_drop_table_statement(tokens, cursor, delimiter) { - Ok((drop, new_cursor)) => Ok((Statement::DropTableStatement(drop), new_cursor)), - Err(err) => (Err(err)), - } - } - _ => parse_err!(tokens, cursor, "Expected a valid Statement"), - } - } else { - parse_err!(tokens, cursor, "Expected a valid Statement"); - } -} - -fn parse_column_definitions<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiter: Token, -) -> Result<(Vec, usize), ParsingError> { - let mut cursor = initial_cursor; - - let mut column_definitions: Vec = Vec::with_capacity(5); - - loop { - if cursor >= tokens.len() { - parse_err!(tokens, cursor, "Unexpected end of input"); - } - - // Look for a delimiter - if let Some(TokenContainer { - loc: _, - token: current_token, - }) = tokens.get(cursor) - { - if current_token == &delimiter { - break; - } - } - - // Look for a comma - if !column_definitions.is_empty() { - if let Some(TokenContainer { loc: _, token }) = tokens.get(cursor) { - if token == &Token::Comma { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected Comma"); - } - } - } - // Look for a column name - let col_name = match &tokens.get(cursor) { - Some(TokenContainer { - loc: _, - token: Token::IdentifierValue { value }, - }) => value, - _ => { - parse_err!(tokens, cursor, "Expected Column Name"); - } - }; - cursor += 
1; - - // Look for a column type - if let Some(token_c) = tokens.get(cursor) { - if !token_c.token.is_datatype() { - parse_err!(tokens, cursor, "Expected Column Type"); - } - } - - let mut is_primary_key = false; - let col_type = match tokens.get(cursor) { - Some(v) => v, - None => { - parse_err!(tokens, cursor, "Expected Column Type"); - } - }; - cursor += 1; - - // Look for primary key - if let ( - Some(TokenContainer { - loc: _, - token: Token::Primary, - }), - Some(TokenContainer { - loc: _, - token: Token::Key, - }), - ) = (&tokens.get(cursor), &tokens.get(cursor + 1)) - { - is_primary_key = true; - cursor += 2; - } - - column_definitions.push(ColumnDefinition { - name: col_name.to_string(), - data_type: SqlType::from_token(col_type, cursor)?, - is_primary_key, - }); - } - - Ok((column_definitions, cursor)) -} - -fn parse_create_table_statement<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - _: Token, -) -> Result<(CreateTableStatement, usize), ParsingError> { - let mut cursor = initial_cursor; - - if !expect_token(tokens, cursor, Token::Create) { - parse_err!(tokens, cursor, "Not a Create Table Statement"); - } - cursor += 1; - - if !expect_token(tokens, cursor, Token::Table) { - parse_err!(tokens, cursor, "Expected TABLE Keyword"); - } - cursor += 1; - - let name = match tokens.get(cursor) { - Some(TokenContainer { - loc: _, - token: Token::IdentifierValue { value }, - }) => value, - _ => { - parse_err!(tokens, cursor, "Expected Table Name"); - } - }; - cursor += 1; - if let Some(TokenContainer { - loc: _, - token: Token::LeftParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected Left Parenthesis"); - } - - let (cols, new_cursor) = parse_column_definitions(tokens, cursor, Token::RightParenthesis)?; - cursor = new_cursor; - - if let Some(TokenContainer { - loc: _, - token: Token::RightParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected Left Parenthesis"); - } - - Ok(( - CreateTableStatement { - name: name.to_string(), - cols, - }, - cursor, - )) -} - -fn parse_create_index_statement<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiter: Token, -) -> Result<(CreateIndexStatement, usize), ParsingError> { - let mut cursor = initial_cursor; - if let Some(TokenContainer { - loc: _, - token: Token::Create, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Not a Create Index Statement"); - } - let mut is_unique = false; - if let Some(TokenContainer { - loc: _, - token: Token::Unique, - }) = tokens.get(cursor) - { - is_unique = true; - cursor += 1; - } - if let Some(TokenContainer { - loc: _, - token: Token::Index, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Not a Create Index Statement"); - } - let name = if let Some(TokenContainer { - loc: _, - token: Token::IdentifierValue { value }, - }) = tokens.get(cursor) - { - cursor += 1; - value - } else { - parse_err!(tokens, cursor, "Expected Index Name"); - }; - if let Some(TokenContainer { - loc: _, - token: Token::On, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected ON Keyword"); - } - let table = if let Some(TokenContainer { - loc: _, - token: Token::IdentifierValue { value }, - }) = tokens.get(cursor) - { - cursor += 1; - value - } else { - parse_err!(tokens, cursor, "Expected Table Name"); - }; - let (expression, cursor) = parse_expression(tokens, cursor, &[delimiter], 0, 
true, false)?; - - Ok(( - CreateIndexStatement { - is_primary_key: false, - is_unique, - name: name.to_string(), - expression, - table: table.to_string(), - }, - cursor, - )) -} - -fn parse_expressions( - tokens: &[TokenContainer], - initial_cursor: usize, - delimiters: &[Token], -) -> Result<(Vec, usize), ParsingError> { - let mut cursor = initial_cursor; - - let mut expressions: Vec = Vec::with_capacity(5); - - loop { - if cursor >= tokens.len() { - parse_err!(tokens, cursor, "Expected Expression"); - } - - // Look for delimiter - if let Some(TokenContainer { - loc: _, - token: current_token, - }) = tokens.get(cursor) - { - if delimiters.contains(current_token) { - return Ok((expressions, cursor)); - } - } - - // Look for comma - if !expressions.is_empty() { - if !expect_token(tokens, cursor, Token::Comma) { - parse_err!(tokens, cursor, "Expected Comma"); - } - - cursor += 1; - } - - // Look for expression - let (expression, new_cursor) = parse_expression( - tokens, - cursor, - &[Token::Comma, Token::RightParenthesis], - tokens[cursor].binding_power(), - true, - false, - )?; - cursor = new_cursor; - expressions.push(expression); - } -} - -fn parse_expression<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiters: &[Token], - min_binding_power: u32, - is_top_level: bool, - takes_as_clause: bool, -) -> Result<(Expression, usize), ParsingError> { - let mut cursor = initial_cursor; - - let mut expression; - - if let Some(TokenContainer { - loc: _, - token: Token::LeftParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - - if let Some(TokenContainer { - loc: _, - token: Token::Select, - }) = tokens.get(cursor) - { - let (select_statement, new_cursor) = - parse_select_statement(tokens, cursor, Token::RightParenthesis)?; - expression = Expression::SubSelect(Box::new(select_statement)); - cursor = new_cursor; - } else { - (expression, cursor) = parse_expression( - tokens, - cursor, - &[Token::RightParenthesis], - min_binding_power, - true, - false, - )?; - } - - if let Some(TokenContainer { - loc: _, - token: Token::RightParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected closing Parenthesis"); - } - } else if cursor < tokens.len() && UNARY_OPERATORS.contains(&tokens[cursor].token) { - let token = &tokens[cursor]; - let operand = token.token.clone(); - cursor += 1; - let mut nested_un_ops = vec![operand]; - let mut inner_exp; - loop { - if cursor < tokens.len() && UNARY_OPERATORS.contains(&tokens[cursor].token) { - nested_un_ops.push(tokens[cursor].token.clone()); - cursor += 1; - } else { - break; - } - } - match parse_literal_expression(tokens, cursor) { - Ok((expression_, cursor_)) => { - inner_exp = expression_; - cursor = cursor_; - } - Err(err) => { - if let Some(TokenContainer { - token: Token::LeftParenthesis, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - let (expression_, cursor_) = parse_expression( - tokens, - cursor, - &[Token::RightParenthesis], - min_binding_power, - true, - takes_as_clause, - )?; - inner_exp = expression_; - cursor = cursor_; - - if let Some(TokenContainer { - loc: _, - token: Token::RightParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected closing Parenthesis"); - } - } else { - return Err(err); - } - } - } - - if let Some(operand) = nested_un_ops.pop() { - inner_exp = Expression::Unary(UnaryExpression { - first: Box::from(inner_exp), - operand: Operand::from_token(&operand, cursor)?, - }); - } else { - 
parse_err!(tokens, cursor, "Expected Unary Operator"); - } - while let Some(operand) = nested_un_ops.pop() { - inner_exp = Expression::Unary(UnaryExpression { - first: Box::from(inner_exp), - operand: Operand::from_token(&operand, cursor)?, - }); - } - expression = inner_exp; - } else { - let (first_expression, new_cursor) = parse_literal_expression(tokens, cursor)?; - expression = first_expression; - cursor = new_cursor; - } - - if let ( - Some(TokenContainer { - token: token1, - loc: _, - }), - Some(TokenContainer { - token: token2, - loc: _, - }), - ) = (tokens.get(cursor), tokens.get(cursor + 1)) - { - if UNARY_POSTFIX_OPERATORS.contains(token1) && BINARY_OPERATORS.contains(token2) { - cursor += 1; - expression = Expression::Unary(UnaryExpression { - first: Box::from(expression), - operand: Operand::from_token(token1, cursor)?, - }); - } - } - - let mut last_cursor = cursor; - 'outer: while let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - if delimiters.contains(token) { - break 'outer; - } - if UNARY_POSTFIX_OPERATORS.contains(token) { - break 'outer; - } - - if let Some(TokenContainer { - token: Token::IdentifierValue { value: _ }, - loc: _, - }) = tokens.get(cursor) - { - if takes_as_clause { - break; - } - } - let mut operand_tok = Token::Empty; - if BINARY_OPERATORS.contains(token) { - operand_tok = token.clone(); - cursor += 1; - } - if operand_tok == Token::TypeCast { - if let Some(TokenContainer { token: op, loc: _ }) = tokens.get(cursor) { - if op.is_datatype() { - // Make sure expression is cast before applying unary operator - if let Expression::Unary(UnaryExpression { first, operand }) = expression { - expression = Expression::Unary(UnaryExpression { - first: Box::from(Expression::Cast { - data: first, - typ: SqlType::try_from((op, cursor))?, - }), - operand, - }); - } else { - expression = Expression::Cast { - data: Box::new(expression), - typ: SqlType::try_from((op, cursor))?, - }; - } - - cursor += 1; - continue; - } else { - parse_err!(tokens, cursor, "Expected Type Name after Type Cast"); - } - } else { - parse_err!(tokens, cursor, "Expected Type Name after Type Cast"); - } - } - if operand_tok == Token::Empty { - parse_err!(tokens, cursor, "Expected Binary Operator"); - } - - let binding_power = operand_tok.binding_power(); - if binding_power < min_binding_power { - cursor = last_cursor; - break; - } - - let (mut second_expression, new_cursor) = parse_expression( - tokens, - cursor, - delimiters, - binding_power, - false, - takes_as_clause, - )?; - let operand = Operand::from_token(&operand_tok, cursor)?; - cursor = new_cursor; - - if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - if UNARY_POSTFIX_OPERATORS.contains(token) { - cursor += 1; - second_expression = Expression::Unary(UnaryExpression { - first: Box::from(second_expression), - operand: Operand::from_token(token, cursor)?, - }); - } - } - - expression = Expression::Binary(BinaryExpression { - first: Box::from(expression), - second: Box::from(second_expression), - operand, - }); - last_cursor = cursor; - } - - if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - if UNARY_POSTFIX_OPERATORS.contains(token) && is_top_level { - cursor += 1; - expression = Expression::Unary(UnaryExpression { - first: Box::from(expression), - operand: Operand::from_token(token, cursor)?, - }); - } - } - - Ok((expression, cursor)) -} - -fn parse_literal_expression( - tokens: &[TokenContainer], - initial_cursor: usize, -) -> Result<(Expression, usize), ParsingError> { - let mut 
cursor = initial_cursor; - - if let Some(tok) = tokens.get(cursor) { - match tok.token { - Token::IdentifierValue { - value: ref first_identifier, - } => { - cursor += 1; - let mut col_name = first_identifier; - let mut table_name = None; - if let Some(TokenContainer { - token: Token::Dot, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - if let Some(TokenContainer { - token: Token::IdentifierValue { value }, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - table_name = Some(col_name); - col_name = value; - } else { - parse_err!(tokens, cursor, "Expected Identifier after dot"); - } - } - Ok(( - Expression::TableColumn(TableColumn { - col_name: col_name.to_string(), - table_name: table_name.map(|x| x.to_string()), - }), - cursor, - )) - } - Token::NumericValue { value: _ } - | Token::StringValue { value: _ } - | Token::BoolValue { value: _ } - | Token::Null => { - cursor += 1; - Ok(( - Expression::Literal(LiteralExpression::from_token(&tok.token, cursor)?), - cursor, - )) - } - _ => { - parse_err!(tokens, cursor, "Expected Literal") - } - } - } else { - parse_err!(tokens, cursor, "Expected Literal Expression"); - } -} - -fn parse_insert_statement( - tokens: &[TokenContainer], - initial_cursor: usize, - _: Token, -) -> Result<(InsertStatement, usize), ParsingError> { - let mut cursor = initial_cursor; - - // Look for INSERT - if !expect_token(tokens, cursor, Token::Insert) { - parse_err!(tokens, cursor, "Not an Insert Statement"); - } - cursor += 1; - - // Look for INTO - if !expect_token(tokens, cursor, Token::Into) { - parse_err!(tokens, cursor, "Expected INTO"); - } - cursor += 1; - - let table_name = match tokens.get(cursor) { - Some(TokenContainer { - loc: _, - token: Token::IdentifierValue { value }, - }) => value, - _ => { - parse_err!(tokens, cursor, "Expected Table Name"); - } - }; - - cursor += 1; - - // Look for VALUES - if let Some(token) = tokens.get(cursor) { - if token.token != Token::Values { - parse_err!(tokens, cursor, "Expected VALUES"); - } - cursor += 1; - } - - // Look for left parenthesis - if let Some(token) = tokens.get(cursor) { - if token.token != Token::LeftParenthesis { - parse_err!(tokens, cursor, "Expected Left parenthesis"); - } - cursor += 1; - } - - // Look for expression list - let (values, new_cursor) = parse_expressions(tokens, cursor, &[Token::RightParenthesis])?; - - cursor = new_cursor; - - // Look for right parenthesis - if let Some(TokenContainer { - token: Token::RightParenthesis, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected Right Parenthesis"); - } - - Ok(( - InsertStatement { - table: table_name.to_string(), - values, - }, - cursor, - )) -} - -fn parse_drop_table_statement<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - _: Token, -) -> Result<(DropTableStatement, usize), ParsingError> { - let mut cursor = initial_cursor; - if let Some(TokenContainer { - loc: _, - token: Token::Drop, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Not a Drop Table Statement"); - } - if let Some(TokenContainer { - loc: _, - token: Token::Table, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Not a Drop Table Statement"); - } - let name = if let Some(TokenContainer { - loc: _, - token: Token::IdentifierValue { value }, - }) = tokens.get(cursor) - { - cursor += 1; - value - } else { - parse_err!(tokens, cursor, "Not a Drop Table Statement"); - }; - - Ok(( - DropTableStatement { - name: name.to_string(), 
- }, - cursor, - )) -} - -fn parse_select_items<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiters: &[Token], -) -> Result<(Vec, usize), ParsingError> { - let mut cursor = initial_cursor; - - let mut select_items = Vec::with_capacity(5); - let mut item_delims = delimiters.to_vec(); - item_delims.push(Token::As); - let mut delimiters_plus = delimiters.to_vec(); - delimiters_plus.push(Token::Comma); - delimiters_plus.push(Token::As); - - 'outer: loop { - match cursor.cmp(&tokens.len()) { - Ordering::Equal => { - return Ok((select_items, cursor - 1)); - } - Ordering::Greater => { - parse_err!(tokens, cursor, "Unexpected end of tokens"); - } - _ => {} - } - let current_token = &tokens[cursor]; - for delimiter in delimiters { - if delimiter == ¤t_token.token { - break 'outer; - } - } - - if !select_items.is_empty() { - if let Some(TokenContainer { - loc: _, - token: Token::Comma, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected comma"); - } - } - - let mut select_item = SelectItem { - expression: Expression::new(), - as_clause: None, - asterisk: false, - }; - - if let Some(TokenContainer { - token: Token::Asterisk, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - select_item.asterisk = true; - } else { - let (expression, new_cursor) = - parse_expression(tokens, cursor, &delimiters_plus, 0, true, true)?; - cursor = new_cursor; - select_item.expression = expression; - - let mut found_as = false; - if let Some(TokenContainer { - loc: _, - token: Token::As, - }) = tokens.get(cursor) - { - found_as = true; - cursor += 1; - } - if let Some(TokenContainer { - token: Token::IdentifierValue { value }, - loc: _, - }) = tokens.get(cursor) - { - select_item.as_clause = Some(value.to_string()); - cursor += 1; - } else if found_as { - parse_err!(tokens, cursor, "Expected Identifier after AS"); - } - } - - select_items.push(select_item); - } - - Ok((select_items, cursor)) -} - -fn parse_select_statement<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiter: Token, -) -> Result<(SelectStatement, usize), ParsingError> { - let mut cursor = initial_cursor; - - // TODO: refactor - if let Some(TokenContainer { - token: Token::Select, - loc: _, - }) = tokens.get(cursor) - { - } else if let Some(TokenContainer { token: _, loc: _ }) = tokens.get(cursor) { - parse_err!(tokens, cursor, "Not a Select statement"); - } else { - parse_err!(tokens, cursor, "Reached end of input"); - } - cursor += 1; - - let mut distinct = false; - if let Some(TokenContainer { - token: Token::Distinct, - loc: _, - }) = tokens.get(cursor) - { - distinct = true; - cursor += 1; - } - - let mut select: SelectStatement = SelectStatement { - items: Vec::with_capacity(5), - from: vec![], - where_clause: Expression::new(), - is_distinct: distinct, - order_by: None, - limit: None, - offset: None, - }; - - let (select_items, new_cursor) = parse_select_items( - tokens, - cursor, - &[ - Token::From, - Token::OrderBy, - Token::Limit, - Token::Offset, - delimiter.clone(), - ], - )?; - - cursor = new_cursor; - select.items = select_items; - - // let delimiters_plus = vec![delimiter, &where_token]; - - if let Some(TokenContainer { - token: Token::From, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - let (tables, new_cursor) = parse_tables( - tokens, - cursor, - &[ - Token::Inner, - Token::Left, - Token::Right, - Token::Join, - Token::Where, - Token::OrderBy, - Token::Limit, - Token::Offset, - delimiter.clone(), - ], - )?; - cursor = new_cursor; - 
select.from = tables; - } - - // TODO Parse join - /* - let (joins, new_cursor) = parse_joins( - tokens, - cursor, - &[ - Token::Where, - Token::OrderBy, - Token::Limit, - Token::Offset, - delimiter.clone(), - ], - )?; - cursor = new_cursor;*/ - - if let Some(TokenContainer { - loc: _, - token: Token::Where, - }) = tokens.get(cursor) - { - cursor += 1; - let (where_clause, new_cursor) = parse_expression( - tokens, - cursor, - &[ - Token::OrderBy, - Token::Limit, - Token::Offset, - delimiter.clone(), - ], - 0, - true, - false, - )?; - - cursor = new_cursor; - select.where_clause = where_clause; - } - - if let Some(TokenContainer { - loc: _, - token: Token::OrderBy, - }) = tokens.get(cursor) - { - cursor += 1; - - let (exp, new_cursor) = parse_expression( - tokens, - cursor, - &[ - Token::Desc, - Token::Asc, - Token::Limit, - Token::Offset, - delimiter, - ], - 0, - true, - true, - )?; - cursor = new_cursor; - let mut order_by_clause = OrderByClause { asc: true, exp }; - - if let Some(TokenContainer { - loc: _, - token: Token::Asc, - }) = tokens.get(cursor) - { - cursor += 1; - order_by_clause.asc = true; - } else if let Some(TokenContainer { - loc: _, - token: Token::Desc, - }) = tokens.get(cursor) - { - cursor += 1; - order_by_clause.asc = false; - } - - select.order_by = Some(order_by_clause); - } - - if let Some(TokenContainer { - loc: _, - token: Token::Limit, - }) = tokens.get(cursor) - { - cursor += 1; - - if let Some(TokenContainer { - loc: _, - token: Token::NumericValue { value }, - }) = tokens.get(cursor) - { - cursor += 1; - let limit = match value.parse::() { - Ok(val) => val, - Err(err) => { - parse_err!( - tokens, - cursor, - &format!("Failed to parse Limit value: {err}") - ); - } - }; - if limit.is_sign_negative() { - parse_err!(tokens, cursor, "Limit must not be negative"); - } - if limit.is_nan() || limit.is_infinite() { - parse_err!( - tokens, - cursor, - "Limit cannot be interpreted as a whole number" - ); - } - select.limit = Some(limit as usize); - } - } - - if let Some(TokenContainer { - loc: _, - token: Token::Offset, - }) = tokens.get(cursor) - { - cursor += 1; - - if let Some(TokenContainer { - loc: _, - token: Token::NumericValue { value }, - }) = tokens.get(cursor) - { - cursor += 1; - let offset = match value.parse::() { - Ok(val) => val, - Err(err) => { - parse_err!( - tokens, - cursor, - &format!("Failed to parse Offset value: {err}") - ); - } - }; - if offset.is_sign_negative() { - parse_err!(tokens, cursor, "Offset must not be negative"); - } - if offset.is_nan() || offset.is_infinite() { - parse_err!( - tokens, - cursor, - "Limit cannot be interpreted as a whole number" - ); - } - select.offset = Some(offset as usize); - } - }; - - Ok((select, cursor)) -} - -fn parse_joins<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiters: &[Token], -) -> Result<(Vec, usize), ParsingError> { - let mut cursor = initial_cursor; - - let mut joins = vec![]; - - loop { - let mut kind = JoinKind::Inner; - if tokens.get(cursor).is_none() { - break; - } - - if let Some(TokenContainer { - token: Token::On, - loc: _, - }) = tokens.get(cursor) - { - break; - } else if let Some(TokenContainer { - token: Token::Inner, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - kind = JoinKind::Inner; - } else if let Some(TokenContainer { - token: Token::Right, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - if let Some(TokenContainer { - token: Token::Outer, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - } - kind = JoinKind::RightOuter; - } else if 
let Some(TokenContainer { - token: Token::Left, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - if let Some(TokenContainer { - token: Token::Outer, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - } - kind = JoinKind::LeftOuter; - } else if let Some(TokenContainer { - token: Token::Full, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - if let Some(TokenContainer { - token: Token::Outer, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - kind = JoinKind::FullOuter; - } else { - parse_err!(tokens, cursor, "Expected OUTER Keyword after FULL"); - } - } else if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - if delimiters.contains(token) { - break; - } - parse_err!(tokens, cursor, "Failed to parse Join Clause"); - } - if let Some(TokenContainer { - token: Token::Join, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "No JOIN Keyword after INNER"); - } - let (table, new_cursor) = parse_table(tokens, cursor, delimiters)?; - cursor = new_cursor; - if let Some(TokenContainer { - token: Token::On, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "No ON keyword in Join Expression"); - } - let (col1, new_cursor) = parse_table_column(tokens, cursor)?; - cursor = new_cursor; - let operand_token = if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - cursor += 1; - if BINARY_OPERATORS.contains(token) { - token.clone() - } else { - parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); - } - } else { - parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); - }; - let (col2, new_cursor) = parse_table_column(tokens, cursor)?; - cursor = new_cursor; - - let operand = if let Ok(o) = Operand::from_token(&operand_token, cursor) { - o - } else { - parse_err!( - tokens, - cursor, - "Failed to parse Binary Operator in Join Expression" - ); - }; - - let join = JoinClause { - kind, - source: table, - on: Expression::Binary(BinaryExpression { - first: Box::new(Expression::TableColumn(col1)), - second: Box::new(Expression::TableColumn(col2)), - operand, - }), - }; - joins.push(join); - } - - Ok((joins, cursor)) -} - -fn parse_table_column( - tokens: &[TokenContainer], - initial_cursor: usize, -) -> Result<(TableColumn, usize), ParsingError> { - let mut cursor = initial_cursor; - - if let Some(tok) = tokens.get(cursor) { - match tok.token { - Token::IdentifierValue { - value: ref first_identifier, - } => { - cursor += 1; - let mut col_name = first_identifier; - let mut table_name = None; - if let Some(TokenContainer { - token: Token::Dot, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - if let Some(TokenContainer { - token: Token::IdentifierValue { value }, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - table_name = Some(col_name); - col_name = value; - } else { - parse_err!(tokens, cursor, "Failed to parse Table name in Column"); - } - } - Ok(( - TableColumn { - col_name: col_name.to_string(), - table_name: table_name.map(|s| s.to_string()), - }, - cursor, - )) - } - _ => parse_err!(tokens, cursor, "Failed to parse Column"), - } - } else { - parse_err!(tokens, cursor, "Failed to parse Column"); - } -} - -fn parse_tables<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiters: &[Token], -) -> Result<(Vec, usize), ParsingError> { - let mut cursor = initial_cursor; - - let mut tables = vec![]; - - loop { - let (table, new_cursor) = parse_table(tokens, cursor, delimiters)?; - cursor = new_cursor; - 
tables.push(table); - if tokens.get(cursor).is_none() { - break; - } else if let Some(TokenContainer { - token: Token::Comma, - loc: _, - }) = tokens.get(cursor) - { - continue; - } else if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - if delimiters.contains(token) { - break; - } else { - parse_err!( - tokens, - cursor, - &format!("Failed to parse table, unexpected {:?}", token) - ); - } - } else { - parse_err!(tokens, cursor, "Failed to parse Table"); - } - } - - Ok((tables, cursor)) -} - -fn parse_table<'a>( - tokens: &'a [TokenContainer], - initial_cursor: usize, - delimiters: &[Token], -) -> Result<(RowDataSource, usize), ParsingError> { - let mut cursor = initial_cursor; - - if let Some(TokenContainer { - token: Token::IdentifierValue { value }, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - let mut as_clause = None; - let table_name = value; - let mut found_as = false; - if let Some(TokenContainer { - token: Token::As, - loc: _, - }) = tokens.get(cursor) - { - found_as = true; - cursor += 1; - } - if let Some(TokenContainer { - token: Token::IdentifierValue { value }, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - as_clause = Some(value); - } else if found_as { - parse_err!(tokens, cursor, "Failed to parse As clause after AS"); - } - let (joins, new_cursor) = parse_joins(tokens, cursor, delimiters)?; - cursor = new_cursor; - return Ok(( - RowDataSource::Table { - table_name: table_name.to_string(), - as_clause: as_clause.map(|s| s.to_string()), - joins, - }, - cursor, - )); - } else if let Some(TokenContainer { - token: Token::LeftParenthesis, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - if let Some(TokenContainer { - token: Token::Select, - loc: _, - }) = tokens.get(cursor) - { - let (select, new_cursor) = - parse_select_statement(tokens, cursor, Token::RightParenthesis)?; - cursor = new_cursor + 1; - let mut found_as = false; - if let Some(TokenContainer { - token: Token::As, - loc: _, - }) = tokens.get(cursor) - { - found_as = true; - cursor += 1; - } - if let Some(TokenContainer { - token: Token::IdentifierValue { value }, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - let as_clause = value; - let (joins, new_cursor) = parse_joins(tokens, cursor, delimiters)?; - cursor = new_cursor; - return Ok(( - RowDataSource::SubSelect { - select, - as_clause: as_clause.to_string(), - joins, - }, - cursor, - )); - } else if found_as { - parse_err!(tokens, cursor, "Failed to parse As clause after AS"); - } else if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - parse_err!( - tokens, - cursor, - &format!("Unexpected {:?}, subquery requires as clause", token) - ); - } - } - } - - parse_err!(tokens, cursor, "Failed to parse a source Table"); -} - -#[cfg(test)] -mod parser_tests { - use crate::parser::*; - - struct ParseTest { - ast: Ast, - input: &'static str, - } - - #[test] - fn test_parser() { - let parse_tests = vec![ - ParseTest { - input: "INSERT INTO users VALUES (105, 'George');", - ast: Ast { - statements: vec![Statement::InsertStatement(InsertStatement { - table: "users".to_string(), - values: vec![ - Expression::Literal(LiteralExpression::Numeric("105".to_owned())), - Expression::Literal(LiteralExpression::String("George".to_owned())), - ], - })], - }, - }, - ParseTest { - input: "CREATE TABLE users (id INT, name TEXT);", - ast: Ast { - statements: vec![Statement::CreateTableStatement(CreateTableStatement { - name: "users".to_owned(), - cols: vec![ - ColumnDefinition { - name: "id".to_string(), - 
data_type: SqlType::Int, - is_primary_key: false, - }, - ColumnDefinition { - name: "name".to_string(), - data_type: SqlType::Int, - is_primary_key: false, - }, - ], - })], - }, - }, - ParseTest { - input: "SELECT id, name AS fullname FROM users;", - ast: Ast { - statements: vec![Statement::SelectStatement(SelectStatement { - items: vec![ - SelectItem { - asterisk: false, - as_clause: None, - expression: Expression::TableColumn(TableColumn { - col_name: "id".to_string(), - table_name: None, - }), - }, - SelectItem { - asterisk: false, - as_clause: Some("fullname".to_string()), - expression: Expression::TableColumn(TableColumn { - col_name: "name".to_string(), - table_name: None, - }), - }, - ], - from: vec![RowDataSource::Table { - table_name: "users".to_string(), - as_clause: None, - joins: vec![], - }], - where_clause: Expression::Empty, - is_distinct: false, - order_by: None, - limit: None, - offset: None, - })], - }, - }, - ParseTest { - input: "SELECT distinct id, name AS fullname FROM users;", - ast: Ast { - statements: vec![Statement::SelectStatement(SelectStatement { - items: vec![ - SelectItem { - asterisk: false, - as_clause: None, - expression: Expression::TableColumn(TableColumn { - col_name: "id".to_string(), - table_name: None, - }), - }, - SelectItem { - asterisk: false, - as_clause: Some("fullname".to_owned()), - expression: Expression::TableColumn(TableColumn { - col_name: "name".to_string(), - table_name: None, - }), - }, - ], - from: vec![RowDataSource::Table { - table_name: "users".to_string(), - as_clause: None, - joins: vec![], - }], - where_clause: Expression::Empty, - is_distinct: true, - order_by: None, - limit: None, - offset: None, - })], - }, - }, - ]; - - let mut found_faults = false; - let mut err_msg = "\n".to_owned(); - let parser = Parser::new(); - - for test in parse_tests { - print!("(Parser) Testing: {}", test.input); - - parser.parse(test.input).unwrap(); - let ast = match parser.parse(test.input) { - Ok(value) => value, - Err(err) => { - found_faults = true; - err_msg.push_str(err.to_string().as_str()); - continue; - } - }; - - if ast != test.ast { - err_msg.push_str( - format!("\n\nExpected:\n{:#?}\n\nGot:\n{:#?}\n", test.ast, ast).as_str(), - ); - } - - // assert_eq!(ast, test.ast); - println!(" Passed!"); - } - - if found_faults { - panic!("{err_msg}"); - } - } -} diff --git a/engine/src/parser/select.rs b/engine/src/parser/select.rs new file mode 100644 index 0000000..e05a6ff --- /dev/null +++ b/engine/src/parser/select.rs @@ -0,0 +1,242 @@ +use super::*; + +pub fn parse_select_statement<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiter: Token, +) -> Result<(SelectStatement, usize), ParsingError> { + let mut cursor = initial_cursor; + + // TODO: refactor + if let Some(TokenContainer { + token: Token::Select, + loc: _, + }) = tokens.get(cursor) + { + } else if let Some(TokenContainer { token: _, loc: _ }) = tokens.get(cursor) { + parse_err!(tokens, cursor, "Not a Select statement"); + } else { + parse_err!(tokens, cursor, "Reached end of input"); + } + cursor += 1; + + let mut distinct = false; + if let Some(TokenContainer { + token: Token::Distinct, + loc: _, + }) = tokens.get(cursor) + { + distinct = true; + cursor += 1; + } + + let mut select: SelectStatement = SelectStatement { + items: Vec::with_capacity(5), + from: vec![], + where_clause: Expression::new(), + is_distinct: distinct, + order_by: None, + limit: None, + offset: None, + }; + + let (select_items, new_cursor) = parse_select_items( + tokens, + cursor, + &[ + 
Token::From, + Token::OrderBy, + Token::Limit, + Token::Offset, + delimiter.clone(), + ], + )?; + + cursor = new_cursor; + select.items = select_items; + + // let delimiters_plus = vec![delimiter, &where_token]; + + if let Some(TokenContainer { + token: Token::From, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + let (tables, new_cursor) = parse_tables( + tokens, + cursor, + &[ + Token::Inner, + Token::Left, + Token::Right, + Token::Join, + Token::Where, + Token::OrderBy, + Token::Limit, + Token::Offset, + delimiter.clone(), + ], + )?; + cursor = new_cursor; + select.from = tables; + } + + // TODO Parse join + /* + let (joins, new_cursor) = parse_joins( + tokens, + cursor, + &[ + Token::Where, + Token::OrderBy, + Token::Limit, + Token::Offset, + delimiter.clone(), + ], + )?; + cursor = new_cursor;*/ + + if let Some(TokenContainer { + loc: _, + token: Token::Where, + }) = tokens.get(cursor) + { + cursor += 1; + let (where_clause, new_cursor) = parse_expression( + tokens, + cursor, + &[ + Token::OrderBy, + Token::Limit, + Token::Offset, + delimiter.clone(), + ], + 0, + true, + false, + )?; + + cursor = new_cursor; + select.where_clause = where_clause; + } + + if let Some(TokenContainer { + loc: _, + token: Token::OrderBy, + }) = tokens.get(cursor) + { + cursor += 1; + + let (exp, new_cursor) = parse_expression( + tokens, + cursor, + &[ + Token::Desc, + Token::Asc, + Token::Limit, + Token::Offset, + delimiter, + ], + 0, + true, + true, + )?; + cursor = new_cursor; + let mut order_by_clause = OrderByClause { asc: true, exp }; + + if let Some(TokenContainer { + loc: _, + token: Token::Asc, + }) = tokens.get(cursor) + { + cursor += 1; + order_by_clause.asc = true; + } else if let Some(TokenContainer { + loc: _, + token: Token::Desc, + }) = tokens.get(cursor) + { + cursor += 1; + order_by_clause.asc = false; + } + + select.order_by = Some(order_by_clause); + } + + if let Some(TokenContainer { + loc: _, + token: Token::Limit, + }) = tokens.get(cursor) + { + cursor += 1; + + if let Some(TokenContainer { + loc: _, + token: Token::NumericValue { value }, + }) = tokens.get(cursor) + { + cursor += 1; + let limit = match value.parse::() { + Ok(val) => val, + Err(err) => { + parse_err!( + tokens, + cursor, + &format!("Failed to parse Limit value: {err}") + ); + } + }; + if limit.is_sign_negative() { + parse_err!(tokens, cursor, "Limit must not be negative"); + } + if limit.is_nan() || limit.is_infinite() { + parse_err!( + tokens, + cursor, + "Limit cannot be interpreted as a whole number" + ); + } + select.limit = Some(limit as usize); + } + } + + if let Some(TokenContainer { + loc: _, + token: Token::Offset, + }) = tokens.get(cursor) + { + cursor += 1; + + if let Some(TokenContainer { + loc: _, + token: Token::NumericValue { value }, + }) = tokens.get(cursor) + { + cursor += 1; + let offset = match value.parse::() { + Ok(val) => val, + Err(err) => { + parse_err!( + tokens, + cursor, + &format!("Failed to parse Offset value: {err}") + ); + } + }; + if offset.is_sign_negative() { + parse_err!(tokens, cursor, "Offset must not be negative"); + } + if offset.is_nan() || offset.is_infinite() { + parse_err!( + tokens, + cursor, + "Limit cannot be interpreted as a whole number" + ); + } + select.offset = Some(offset as usize); + } + }; + + Ok((select, cursor)) +} diff --git a/engine/src/parser/select_items.rs b/engine/src/parser/select_items.rs new file mode 100644 index 0000000..b3a264f --- /dev/null +++ b/engine/src/parser/select_items.rs @@ -0,0 +1,90 @@ +use super::*; + +pub fn 
parse_select_items<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiters: &[Token], +) -> Result<(Vec<SelectItem>, usize), ParsingError> { + let mut cursor = initial_cursor; + + let mut select_items = Vec::with_capacity(5); + let mut item_delims = delimiters.to_vec(); + item_delims.push(Token::As); + let mut delimiters_plus = delimiters.to_vec(); + delimiters_plus.push(Token::Comma); + delimiters_plus.push(Token::As); + + 'outer: loop { + match cursor.cmp(&tokens.len()) { + Ordering::Equal => { + return Ok((select_items, cursor - 1)); + } + Ordering::Greater => { + parse_err!(tokens, cursor, "Unexpected end of tokens"); + } + _ => {} + } + let current_token = &tokens[cursor]; + for delimiter in delimiters { + if delimiter == &current_token.token { + break 'outer; + } + } + + if !select_items.is_empty() { + if let Some(TokenContainer { + loc: _, + token: Token::Comma, + }) = tokens.get(cursor) + { + cursor += 1; + } else { + parse_err!(tokens, cursor, "Expected comma"); + } + } + + let mut select_item = SelectItem { + expression: Expression::new(), + as_clause: None, + asterisk: false, + }; + + if let Some(TokenContainer { + token: Token::Asterisk, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + select_item.asterisk = true; + } else { + let (expression, new_cursor) = + parse_expression(tokens, cursor, &delimiters_plus, 0, true, true)?; + cursor = new_cursor; + select_item.expression = expression; + + let mut found_as = false; + if let Some(TokenContainer { + loc: _, + token: Token::As, + }) = tokens.get(cursor) + { + found_as = true; + cursor += 1; + } + if let Some(TokenContainer { + token: Token::IdentifierValue { value }, + loc: _, + }) = tokens.get(cursor) + { + select_item.as_clause = Some(value.to_string()); + cursor += 1; + } else if found_as { + parse_err!(tokens, cursor, "Expected Identifier after AS"); + } + } + + select_items.push(select_item); + } + + Ok((select_items, cursor)) +} diff --git a/engine/src/parser/statement.rs b/engine/src/parser/statement.rs new file mode 100644 index 0000000..bef58a3 --- /dev/null +++ b/engine/src/parser/statement.rs @@ -0,0 +1,100 @@ +use super::*; + +pub fn parse_statement<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiter: Token, +) -> Result<(Statement, usize), ParsingError> { + let cursor = initial_cursor; + + if let Some(first_token) = tokens.get(cursor) { + match first_token.token { + Token::Select => { + // Look for a SELECT statement + match parse_select_statement(tokens, cursor, delimiter) { + Ok((select, new_cursor)) => { + Ok((Statement::SelectStatement(select), new_cursor)) + } + Err(err) => Err(err), + } + } + Token::Insert => { + // Look for an INSERT statement + match parse_insert_statement(tokens, cursor, delimiter) { + Ok((insert, new_cursor)) => { + Ok((Statement::InsertStatement(insert), new_cursor)) + } + Err(err) => Err(err), + } + } + Token::Delete => parse_err!(tokens, cursor, Internal, "Delete not implemented"), + Token::Update => parse_err!(tokens, cursor, Internal, "Update not implemented"), + Token::Alter => parse_err!(tokens, cursor, Internal, "Alter not implemented"), + Token::IdentifierValue { value: _ } => { + parse_err!(tokens, cursor, Internal, "Assignment not implemented") + } + Token::Create => { + if let Some(first_token) = tokens.get(cursor + 1) { + match first_token.token { + Token::Index => { + // Look for a CREATE INDEX statement + match parse_create_index_statement(tokens, cursor, delimiter) { + Ok((create_index, new_cursor)) => { +
Ok((Statement::CreateIndexStatement(create_index), new_cursor)) + } + Err(err) => (Err(err)), + } + } + Token::Unique => match tokens.get(cursor + 2) { + Some(TokenContainer { + token: Token::Index, + loc: _, + }) => { + // Look for a CREATE UNIQUE INDEX statement + match parse_create_index_statement(tokens, cursor, delimiter) { + Ok((create_index, new_cursor)) => Ok(( + Statement::CreateIndexStatement(create_index), + new_cursor, + )), + Err(err) => (Err(err)), + } + } + Some(TokenContainer { + token: Token::Constraint, + loc: _, + }) => { + parse_err!(tokens, cursor, "Create Constraint not implemented") + } + _ => parse_err!(tokens, cursor, "Invalid Create Statement"), + }, + Token::Constraint => { + parse_err!(tokens, cursor, "Create Constraint not implemented") + } + Token::Table => { + // Look for a CREATE TABLE statement + match parse_create_table_statement(tokens, cursor, delimiter) { + Ok((create_table, new_cursor)) => { + Ok((Statement::CreateTableStatement(create_table), new_cursor)) + } + Err(err) => (Err(err)), + } + } + _ => parse_err!(tokens, cursor, "Invalid Create Statement"), + } + } else { + parse_err!(tokens, cursor, "Invalid Create Statement"); + } + } + Token::Drop => { + // Look for an DROP statement + match parse_drop_table_statement(tokens, cursor, delimiter) { + Ok((drop, new_cursor)) => Ok((Statement::DropTableStatement(drop), new_cursor)), + Err(err) => (Err(err)), + } + } + _ => parse_err!(tokens, cursor, "Expected a valid Statement"), + } + } else { + parse_err!(tokens, cursor, "Expected a valid Statement"); + } +} diff --git a/engine/src/parser/table.rs b/engine/src/parser/table.rs new file mode 100644 index 0000000..6cfeaba --- /dev/null +++ b/engine/src/parser/table.rs @@ -0,0 +1,100 @@ +use super::*; + +pub fn parse_table<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiters: &[Token], +) -> Result<(RowDataSource, usize), ParsingError> { + let mut cursor = initial_cursor; + + if let Some(TokenContainer { + token: Token::IdentifierValue { value }, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + let mut as_clause = None; + let table_name = value; + let mut found_as = false; + if let Some(TokenContainer { + token: Token::As, + loc: _, + }) = tokens.get(cursor) + { + found_as = true; + cursor += 1; + } + if let Some(TokenContainer { + token: Token::IdentifierValue { value }, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + as_clause = Some(value); + } else if found_as { + parse_err!(tokens, cursor, "Failed to parse As clause after AS"); + } + let (joins, new_cursor) = parse_joins(tokens, cursor, delimiters)?; + cursor = new_cursor; + return Ok(( + RowDataSource::Table { + table_name: table_name.to_string(), + as_clause: as_clause.map(|s| s.to_string()), + joins, + }, + cursor, + )); + } else if let Some(TokenContainer { + token: Token::LeftParenthesis, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + if let Some(TokenContainer { + token: Token::Select, + loc: _, + }) = tokens.get(cursor) + { + let (select, new_cursor) = + parse_select_statement(tokens, cursor, Token::RightParenthesis)?; + cursor = new_cursor + 1; + let mut found_as = false; + if let Some(TokenContainer { + token: Token::As, + loc: _, + }) = tokens.get(cursor) + { + found_as = true; + cursor += 1; + } + if let Some(TokenContainer { + token: Token::IdentifierValue { value }, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + let as_clause = value; + let (joins, new_cursor) = parse_joins(tokens, cursor, delimiters)?; + cursor = new_cursor; + 
return Ok(( + RowDataSource::SubSelect { + select, + as_clause: as_clause.to_string(), + joins, + }, + cursor, + )); + } else if found_as { + parse_err!(tokens, cursor, "Failed to parse As clause after AS"); + } else if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { + parse_err!( + tokens, + cursor, + &format!("Unexpected {:?}, subquery requires as clause", token) + ); + } + } + } + + parse_err!(tokens, cursor, "Failed to parse a source Table"); +} diff --git a/engine/src/parser/table_column.rs b/engine/src/parser/table_column.rs new file mode 100644 index 0000000..aa39d34 --- /dev/null +++ b/engine/src/parser/table_column.rs @@ -0,0 +1,48 @@ +use super::*; + +pub fn parse_table_column( + tokens: &[TokenContainer], + initial_cursor: usize, +) -> Result<(TableColumn, usize), ParsingError> { + let mut cursor = initial_cursor; + + if let Some(tok) = tokens.get(cursor) { + match tok.token { + Token::IdentifierValue { + value: ref first_identifier, + } => { + cursor += 1; + let mut col_name = first_identifier; + let mut table_name = None; + if let Some(TokenContainer { + token: Token::Dot, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + if let Some(TokenContainer { + token: Token::IdentifierValue { value }, + loc: _, + }) = tokens.get(cursor) + { + cursor += 1; + table_name = Some(col_name); + col_name = value; + } else { + parse_err!(tokens, cursor, "Failed to parse Table name in Column"); + } + } + Ok(( + TableColumn { + col_name: col_name.to_string(), + table_name: table_name.map(|s| s.to_string()), + }, + cursor, + )) + } + _ => parse_err!(tokens, cursor, "Failed to parse Column"), + } + } else { + parse_err!(tokens, cursor, "Failed to parse Column"); + } +} diff --git a/engine/src/parser/tables.rs b/engine/src/parser/tables.rs new file mode 100644 index 0000000..4b77592 --- /dev/null +++ b/engine/src/parser/tables.rs @@ -0,0 +1,40 @@ +use super::*; + +pub fn parse_tables<'a>( + tokens: &'a [TokenContainer], + initial_cursor: usize, + delimiters: &[Token], +) -> Result<(Vec, usize), ParsingError> { + let mut cursor = initial_cursor; + + let mut tables = vec![]; + + loop { + let (table, new_cursor) = parse_table(tokens, cursor, delimiters)?; + cursor = new_cursor; + tables.push(table); + if tokens.get(cursor).is_none() { + break; + } else if let Some(TokenContainer { + token: Token::Comma, + loc: _, + }) = tokens.get(cursor) + { + continue; + } else if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { + if delimiters.contains(token) { + break; + } else { + parse_err!( + tokens, + cursor, + &format!("Failed to parse table, unexpected {:?}", token) + ); + } + } else { + parse_err!(tokens, cursor, "Failed to parse Table"); + } + } + + Ok((tables, cursor)) +} From bf44ff5f18ab4e7b1b04e014f39a35a4e43b058c Mon Sep 17 00:00:00 2001 From: Axmouth Date: Mon, 4 Jul 2022 00:19:41 +0300 Subject: [PATCH 3/5] Update deps --- Cargo.lock | 262 +++++++++++++++++++++++++++-------------------------- 1 file changed, 133 insertions(+), 129 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c14a286..710ef33 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -66,15 +66,15 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "base-x" -version = "0.2.8" +version = "0.2.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4521f3e3d031370679b3b140beb36dfe4801b09ac77e30c61941f97df3ef28b" +checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" [[package]] name = "base64" @@ -119,9 +119,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.9.1" +version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" +checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" [[package]] name = "byteorder" @@ -146,9 +146,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.72" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" +checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" [[package]] name = "cfg-if" @@ -264,9 +264,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.2" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa" +checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c" dependencies = [ "cfg-if", "crossbeam-utils", @@ -285,25 +285,26 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.6" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97242a70df9b89a65d0b6df3c4bf5b9ce03c5b7309019777fbde37e7537f8762" +checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d" dependencies = [ + "autocfg", "cfg-if", "crossbeam-utils", - "lazy_static", "memoffset", + "once_cell", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.6" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcae03edb34f947e64acdb1c33ec169824e20657e9ecb61cef6c8c74dcb8120" +checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83" dependencies = [ "cfg-if", - "lazy_static", + "once_cell", ] [[package]] @@ -330,9 +331,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.16" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484" +checksum = "f877be4f7c9f246b183111634f75baa039715e3f46ce860677d3b19a69fb229c" dependencies = [ "quote", "syn", @@ -340,9 +341,9 @@ dependencies = [ [[package]] name = "diff" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "dirs" @@ -372,7 +373,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", - "redox_users 0.4.0", + "redox_users 0.4.3", "winapi", ] @@ -390,9 +391,9 @@ checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" [[package]] name = "either" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" [[package]] name = "encode_unicode" @@ -452,9 
+453,9 @@ dependencies = [ [[package]] name = "fd-lock" -version = "3.0.3" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcef756dea9cf3db5ce73759cf0467330427a786b47711b8d6c97620d718ceb9" +checksum = "e11dcc7e4d79a8c89b9ab4c6f5c30b1fc4a83c420792da3542fd31179ed5f517" dependencies = [ "cfg-if", "rustix", @@ -485,18 +486,18 @@ checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394" [[package]] name = "futures-channel" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ "futures-core", ] [[package]] name = "futures-core" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "getrandom" @@ -511,13 +512,13 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.4" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" +checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6" dependencies = [ "cfg-if", "libc", - "wasi 0.10.0+wasi-snapshot-preview1", + "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] @@ -550,9 +551,9 @@ dependencies = [ [[package]] name = "gloo-dialogs" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ffb557a2ea2ed283f1334423d303a336fad55fb8572d51ae488f828b1464b40" +checksum = "67062364ac72d27f08445a46cab428188e2e224ec9e37efdba48ae8c289002e6" dependencies = [ "wasm-bindgen", "web-sys", @@ -560,9 +561,9 @@ dependencies = [ [[package]] name = "gloo-events" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "088514ec8ef284891c762c88a66b639b3a730134714692ee31829765c5bc814f" +checksum = "68b107f8abed8105e4182de63845afcc7b69c098b7852a813ea7462a320992fc" dependencies = [ "wasm-bindgen", "web-sys", @@ -570,9 +571,9 @@ dependencies = [ [[package]] name = "gloo-file" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa5d6084efa4a2b182ef3a8649cb6506cb4843f22cf907c6e0a799944248ae90" +checksum = "a8d5564e570a38b43d78bdc063374a0c3098c4f0d64005b12f9bbe87e869b6d7" dependencies = [ "futures-channel", "gloo-events", @@ -583,9 +584,9 @@ dependencies = [ [[package]] name = "gloo-render" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b4cda6e149df3bb4a3c6a343873903e5bcc2448a9877d61bb8274806ad67f6e" +checksum = "2fd9306aef67cfd4449823aadcd14e3958e0800aa2183955a309112a84ec7764" dependencies = [ "wasm-bindgen", "web-sys", @@ -593,9 +594,9 @@ dependencies = [ [[package]] name = "gloo-storage" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5057761927af1b1929d02b1f49cf83553dd347a473ee7c8bb08420f2673ffc" +checksum = "1caa4ba51c99de680dee3ad99c32ca45e9f13311be72079154d222c3f9a6b6f5" dependencies = [ "gloo-utils", "js-sys", @@ -608,9 +609,9 @@ dependencies = [ [[package]] name = "gloo-timers" -version = "0.2.3" +version 
= "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d12a7f4e95cfe710f1d624fb1210b7d961a5fb05c4fd942f4feab06e61f590e" +checksum = "5fb7d06c1c8cc2a29bee7ec961009a0b2caa0793ee4900c2ffb348734ba1c8f9" dependencies = [ "futures-channel", "futures-core", @@ -620,9 +621,9 @@ dependencies = [ [[package]] name = "gloo-utils" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05c77af6f96a4f9e27c8ac23a88407381a31f4a74c3fb985c85aa79b8d898136" +checksum = "929c53c913bb7a88d75d9dc3e9705f963d8c2b9001510b25ddaf671b9fb7049d" dependencies = [ "js-sys", "wasm-bindgen", @@ -637,9 +638,9 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "hashbrown" -version = "0.11.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3" [[package]] name = "heck" @@ -664,9 +665,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "indexmap" -version = "1.8.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", "hashbrown", @@ -687,12 +688,9 @@ dependencies = [ [[package]] name = "io-lifetimes" -version = "0.4.4" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ef6787e7f0faedc040f95716bdd0e62bcfcf4ba93da053b62dea2691c13864" -dependencies = [ - "winapi", -] +checksum = "24c3f4eff5495aee4c0399d7b6a0dc2b6e81be84242ffbfcf253ebacccc1d0cb" [[package]] name = "itertools" @@ -711,9 +709,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] name = "itoa" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" +checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" [[package]] name = "jemalloc-sys" @@ -753,9 +751,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.116" +version = "0.2.126" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "565dbd88872dbe4cc8a46e527f26483c1d1f7afa6b884a3bd6cd893d4f98da74" +checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" [[package]] name = "libmimalloc-sys" @@ -768,9 +766,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.0.37" +version = "0.0.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95f5690fef754d905294c56f7ac815836f2513af966aa47f2e07ac79be07827f" +checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d" [[package]] name = "log" @@ -789,9 +787,9 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "memchr" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memoffset" @@ -835,18 +833,18 @@ 
dependencies = [ [[package]] name = "ntapi" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" +checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f" dependencies = [ "winapi", ] [[package]] name = "num-traits" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", ] @@ -863,9 +861,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.9.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" +checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" [[package]] name = "oorandom" @@ -875,9 +873,9 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] name = "output_vt100" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" +checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66" dependencies = [ "winapi", ] @@ -974,11 +972,11 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.36" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" dependencies = [ - "unicode-xid", + "unicode-ident", ] [[package]] @@ -1002,9 +1000,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" +checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" dependencies = [ "autocfg", "crossbeam-deque", @@ -1014,14 +1012,13 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" +checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "lazy_static", "num_cpus", ] @@ -1033,9 +1030,9 @@ checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "redox_syscall" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" dependencies = [ "bitflags", ] @@ -1053,12 +1050,13 @@ dependencies = [ [[package]] name = "redox_users" -version = "0.4.0" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom 0.2.4", - "redox_syscall 0.2.10", + "getrandom 0.2.7", + "redox_syscall 0.2.13", + "thiserror", ] 
[[package]] @@ -1080,9 +1078,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = "regex-syntax" -version = "0.6.25" +version = "0.6.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" +checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" [[package]] name = "route-recognizer" @@ -1117,7 +1115,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.4", + "semver 1.0.12", ] [[package]] @@ -1132,16 +1130,16 @@ dependencies = [ [[package]] name = "rustix" -version = "0.32.1" +version = "0.35.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cee647393af53c750e15dcbf7781cdd2e550b246bde76e46c326e7ea3c73773" +checksum = "ef258c11e17f5c01979a10543a30a4e12faef6aab217a74266e747eefa3aed88" dependencies = [ "bitflags", "errno", "io-lifetimes", "libc", "linux-raw-sys", - "winapi", + "windows-sys", ] [[package]] @@ -1180,9 +1178,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" +checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" [[package]] name = "same-file" @@ -1216,9 +1214,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.4" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "568a8e6258aa33c13358f81fd834adb854c6f7c9468520910a9b1e8fac068012" +checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1" [[package]] name = "semver-parser" @@ -1270,11 +1268,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.78" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085" +checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7" dependencies = [ - "itoa 1.0.1", + "itoa 1.0.2", "ryu", "serde", ] @@ -1297,7 +1295,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.1", + "itoa 1.0.2", "ryu", "serde", ] @@ -1325,15 +1323,15 @@ checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3" [[package]] name = "slab" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" +checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" [[package]] name = "smallvec" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" +checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" [[package]] name = "sqlo2" @@ -1468,9 +1466,9 @@ checksum = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0" [[package]] name = "str-buf" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d44a3643b4ff9caf57abcee9c2c621d6c03d9135e0d8b589bd9afb5992cb176a" +checksum 
= "9e08d8363704e6c71fc928674353e6b7c23dcea9d82d7012c8faf2a3a025f8d0" [[package]] name = "syn" @@ -1576,18 +1574,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ "proc-macro2", "quote", @@ -1632,11 +1630,17 @@ dependencies = [ "syn", ] +[[package]] +name = "unicode-ident" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" + [[package]] name = "unicode-segmentation" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" [[package]] name = "unicode-width" @@ -1646,9 +1650,9 @@ checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" [[package]] name = "unicode-xid" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" [[package]] name = "utf8parse" @@ -1681,9 +1685,9 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" @@ -1714,9 +1718,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.29" +version = "0.4.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb6ec270a31b1d3c7e266b999739109abce8b6c87e4b31fcfcd788b65267395" +checksum = "6f741de44b75e14c35df886aff5f1eb73aa114fa5d4d00dcd37b5e01259bf3b2" dependencies = [ "cfg-if", "js-sys", @@ -1807,9 +1811,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-sys" -version = "0.30.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030b7ff91626e57a05ca64a07c481973cbb2db774e4852c9c7ca342408c6a99a" +checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" dependencies = [ "windows_aarch64_msvc", "windows_i686_gnu", @@ -1820,33 +1824,33 @@ dependencies = [ [[package]] name = "windows_aarch64_msvc" -version = "0.30.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29277a4435d642f775f63c7d1faeb927adba532886ce0287bd985bffb16b6bca" +checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" [[package]] name = "windows_i686_gnu" -version = "0.30.0" +version = "0.36.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145e1989da93956c68d1864f32fb97c8f561a8f89a5125f6a2b7ea75524e4b8" +checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" [[package]] name = "windows_i686_msvc" -version = "0.30.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a09e3a0d4753b73019db171c1339cd4362c8c44baf1bcea336235e955954a6" +checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" [[package]] name = "windows_x86_64_gnu" -version = "0.30.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ca64fcb0220d58db4c119e050e7af03c69e6f4f415ef69ec1773d9aab422d5a" +checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" [[package]] name = "windows_x86_64_msvc" -version = "0.30.0" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08cabc9f0066848fef4bc6a1c1668e6efce38b661d2aeec75d18d8617eebb5f1" +checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" [[package]] name = "yew" From 7f9003ff19075491ae2b9004aa33a2702c9a8b99 Mon Sep 17 00:00:00 2001 From: Axmouth Date: Mon, 11 Jul 2022 01:40:18 +0300 Subject: [PATCH 4/5] More refactoring and fixing another postfix bug --- engine/src/backend_memory/mod.rs | 6 -- engine/src/lexer/mod.rs | 2 +- engine/src/parser.rs | 71 ++++++++++++++++--- engine/src/parser/column_definitions.rs | 28 +++----- engine/src/parser/create_index.rs | 10 +-- engine/src/parser/create_table.rs | 48 ++++--------- engine/src/parser/drop_table.rs | 6 +- engine/src/parser/expression.rs | 47 ++++++------ engine/src/parser/expressions.rs | 10 +-- engine/src/parser/insert.rs | 50 ++++--------- engine/src/parser/joins.rs | 14 ++-- engine/src/parser/literal.rs | 6 +- engine/src/parser/select.rs | 16 ++--- engine/src/parser/select_items.rs | 44 +++--------- engine/src/parser/statement.rs | 22 +++--- engine/src/parser/table.rs | 8 +-- engine/src/parser/table_column.rs | 6 +- engine/src/parser/tables.rs | 4 +- repl/src/main.rs | 33 ++++++--- tests/acceptance/memory/operation_order_1 | 60 ++++++++++++++++ tests/acceptance/memory/operation_order_1.sql | 23 ++++++ tests/acceptance/{memory1 => memory}/query1 | 0 .../acceptance/{memory1 => memory}/query1.sql | 0 tests/acceptance/{memory1 => memory}/query2 | 0 .../acceptance/{memory1 => memory}/query2.sql | 0 tests/acceptance/{memory1 => memory}/query3 | 0 .../acceptance/{memory1 => memory}/query3.sql | 0 .../acceptance/{memory1 => memory}/test.toml | 6 +- tests/src/lib.rs | 2 +- 29 files changed, 296 insertions(+), 226 deletions(-) create mode 100644 tests/acceptance/memory/operation_order_1 create mode 100644 tests/acceptance/memory/operation_order_1.sql rename tests/acceptance/{memory1 => memory}/query1 (100%) rename tests/acceptance/{memory1 => memory}/query1.sql (100%) rename tests/acceptance/{memory1 => memory}/query2 (100%) rename tests/acceptance/{memory1 => memory}/query2.sql (100%) rename tests/acceptance/{memory1 => memory}/query3 (100%) rename tests/acceptance/{memory1 => memory}/query3.sql (100%) rename tests/acceptance/{memory1 => memory}/test.toml (60%) diff --git a/engine/src/backend_memory/mod.rs b/engine/src/backend_memory/mod.rs index 6b5889a..0ebdb66 100644 --- a/engine/src/backend_memory/mod.rs +++ b/engine/src/backend_memory/mod.rs @@ -17,7 +17,6 @@ use crate::{ use instant::Instant; use std::collections::HashMap; use test_util::TestSubjectExt; -use 
tree_display::TreeDisplay; const ERR_INVALID_CELL: &str = "Invalid Cell"; //TODO: @@ -1104,11 +1103,6 @@ impl MemoryBackend { Err(err) => return Err(err.to_string()), }; - println!( - "Ast\n{}", - ast.tree_print(Default::default(), Default::default()) - ); - let mut eval_results = Vec::new(); for statement in ast.statements { diff --git a/engine/src/lexer/mod.rs b/engine/src/lexer/mod.rs index 44561e7..fcfac7d 100644 --- a/engine/src/lexer/mod.rs +++ b/engine/src/lexer/mod.rs @@ -241,7 +241,7 @@ impl std::fmt::Display for Token<'_> { } } -impl<'a> Token<'_> { +impl Token<'_> { pub fn binding_power(&self) -> u32 { match self { Token::And => 1, diff --git a/engine/src/parser.rs b/engine/src/parser.rs index 5d485a0..010e98e 100644 --- a/engine/src/parser.rs +++ b/engine/src/parser.rs @@ -17,6 +17,7 @@ mod tables; use super::ast::*; use super::lexer::*; use crate::sql_types::SqlType; +use std::borrow::Cow; use std::cmp::Ordering; use std::convert::TryFrom; use test_util::TestSubjectExt; @@ -66,13 +67,24 @@ macro_rules! parse_err { parse_err!($tokens, $cursor, General, $msg) }; ($tokens:expr, $cursor:expr, $err_type:ident, $msg:expr) => { - return Err(ParsingError::$err_type { + ParsingError::$err_type { msg: help_message($tokens.get($cursor), $cursor, $msg), cursor: $cursor, - }) + } }; } + +macro_rules! ret_parse_err { + ($tokens:expr, $cursor:expr, $msg:expr) => { + ret_parse_err!($tokens, $cursor, General, $msg) + }; + ($tokens:expr, $cursor:expr, $err_type:ident, $msg:expr) => { + return Err(parse_err!($tokens, $cursor, $err_type, $msg)) + }; +} + pub(crate) use parse_err; +pub(crate) use ret_parse_err; #[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] pub struct Parser { @@ -104,12 +116,11 @@ impl Parser { while cursor < tokens.len() { if !first_statement { let mut at_least_one_semicolon = false; - while expect_token(&tokens, cursor, Token::Semicolon) { - cursor += 1; + while expect_token(&tokens, &mut cursor, &Token::Semicolon) { at_least_one_semicolon = true; } if !(first_statement || at_least_one_semicolon) { - parse_err!( + ret_parse_err!( tokens, cursor, Delimiter, @@ -162,14 +173,56 @@ impl From for ParsingError { } } -fn expect_token(tokens: &[TokenContainer], cursor: usize, token: Token) -> bool { - let current_token = match tokens.get(cursor) { - Some(value) => value, +fn expect_token(tokens: &[TokenContainer], cursor: &mut usize, token: &Token) -> bool { + let current_token = match tokens.get(*cursor) { + Some(TokenContainer { token, .. }) => token, + None => { + return false; + } + }; + if token == current_token { + *cursor += 1; + true + } else { + false + } +} + +#[allow(dead_code)] +fn expect_tokens(tokens: &[TokenContainer], cursor: &mut usize, expected_tokens: &[Token]) -> bool { + let tokens_found = match tokens.get(*cursor..(*cursor + expected_tokens.len())) { + Some(val) => val, None => { return false; } }; - token == current_token.token + if tokens_found + .iter() + .zip(expected_tokens.iter()) + .map(|(tok, exp)| &tok.token == exp) + .all(|x| x) + { + *cursor += expected_tokens.len(); + true + } else { + false + } +} + +fn expect_identifier<'a, 'b>( + tokens: &'a [TokenContainer], + cursor: &'b mut usize, +) -> Option<&'a Cow<'a, str>> { + match tokens.get(*cursor) { + Some(TokenContainer { + token: Token::IdentifierValue { value }, + .. 
+ }) => { + *cursor += 1; + Some(value) + } + _ => None, + } } fn help_message(token: Option<&TokenContainer>, cursor: usize, msg: &str) -> String { diff --git a/engine/src/parser/column_definitions.rs b/engine/src/parser/column_definitions.rs index 11c87c9..026e21e 100644 --- a/engine/src/parser/column_definitions.rs +++ b/engine/src/parser/column_definitions.rs @@ -11,30 +11,19 @@ pub fn parse_column_definitions<'a>( loop { if cursor >= tokens.len() { - parse_err!(tokens, cursor, "Unexpected end of input"); + ret_parse_err!(tokens, cursor, "Unexpected end of input"); } // Look for a delimiter - if let Some(TokenContainer { - loc: _, - token: current_token, - }) = tokens.get(cursor) - { - if current_token == &delimiter { + if expect_token(tokens, &mut cursor, &delimiter) { break; - } } // Look for a comma - if !column_definitions.is_empty() { - if let Some(TokenContainer { loc: _, token }) = tokens.get(cursor) { - if token == &Token::Comma { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected Comma"); - } - } + if !column_definitions.is_empty() && !expect_token(tokens, &mut cursor, &Token::Comma) { + ret_parse_err!(tokens, cursor, "Expected Comma"); } + // Look for a column name let col_name = match &tokens.get(cursor) { Some(TokenContainer { @@ -42,7 +31,7 @@ pub fn parse_column_definitions<'a>( token: Token::IdentifierValue { value }, }) => value, _ => { - parse_err!(tokens, cursor, "Expected Column Name"); + ret_parse_err!(tokens, cursor, "Expected Column Name"); } }; cursor += 1; @@ -50,7 +39,7 @@ pub fn parse_column_definitions<'a>( // Look for a column type if let Some(token_c) = tokens.get(cursor) { if !token_c.token.is_datatype() { - parse_err!(tokens, cursor, "Expected Column Type"); + ret_parse_err!(tokens, cursor, "Expected Column Type"); } } @@ -58,12 +47,13 @@ pub fn parse_column_definitions<'a>( let col_type = match tokens.get(cursor) { Some(v) => v, None => { - parse_err!(tokens, cursor, "Expected Column Type"); + ret_parse_err!(tokens, cursor, "Expected Column Type"); } }; cursor += 1; // Look for primary key + // TODO: expect_tokens(..) 
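+ // One possible shape for the TODO above, using the expect_tokens helper added in parser.rs.
+ // Sketch only: the PRIMARY KEY token variant names below are assumptions and are not shown in this diff.
+ //     let is_primary_key = expect_tokens(tokens, &mut cursor, &[Token::Primary, Token::Key]);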
if let ( Some(TokenContainer { loc: _, diff --git a/engine/src/parser/create_index.rs b/engine/src/parser/create_index.rs index bcaeded..c3dbc36 100644 --- a/engine/src/parser/create_index.rs +++ b/engine/src/parser/create_index.rs @@ -13,7 +13,7 @@ pub fn parse_create_index_statement<'a>( { cursor += 1; } else { - parse_err!(tokens, cursor, "Not a Create Index Statement"); + ret_parse_err!(tokens, cursor, "Not a Create Index Statement"); } let mut is_unique = false; if let Some(TokenContainer { @@ -31,7 +31,7 @@ pub fn parse_create_index_statement<'a>( { cursor += 1; } else { - parse_err!(tokens, cursor, "Not a Create Index Statement"); + ret_parse_err!(tokens, cursor, "Not a Create Index Statement"); } let name = if let Some(TokenContainer { loc: _, @@ -41,7 +41,7 @@ pub fn parse_create_index_statement<'a>( cursor += 1; value } else { - parse_err!(tokens, cursor, "Expected Index Name"); + ret_parse_err!(tokens, cursor, "Expected Index Name"); }; if let Some(TokenContainer { loc: _, @@ -50,7 +50,7 @@ pub fn parse_create_index_statement<'a>( { cursor += 1; } else { - parse_err!(tokens, cursor, "Expected ON Keyword"); + ret_parse_err!(tokens, cursor, "Expected ON Keyword"); } let table = if let Some(TokenContainer { loc: _, @@ -60,7 +60,7 @@ pub fn parse_create_index_statement<'a>( cursor += 1; value } else { - parse_err!(tokens, cursor, "Expected Table Name"); + ret_parse_err!(tokens, cursor, "Expected Table Name"); }; let (expression, cursor) = parse_expression(tokens, cursor, &[delimiter], 0, true, false)?; diff --git a/engine/src/parser/create_table.rs b/engine/src/parser/create_table.rs index bc87e3e..f51088e 100644 --- a/engine/src/parser/create_table.rs +++ b/engine/src/parser/create_table.rs @@ -7,52 +7,30 @@ pub fn parse_create_table_statement<'a>( ) -> Result<(CreateTableStatement, usize), ParsingError> { let mut cursor = initial_cursor; - if !expect_token(tokens, cursor, Token::Create) { - parse_err!(tokens, cursor, "Not a Create Table Statement"); + if !expect_token(tokens, &mut cursor, &Token::Create) { + ret_parse_err!(tokens, cursor, "Not a Create Table Statement"); } - cursor += 1; - if !expect_token(tokens, cursor, Token::Table) { - parse_err!(tokens, cursor, "Expected TABLE Keyword"); + if !expect_token(tokens, &mut cursor, &Token::Table) { + ret_parse_err!(tokens, cursor, "Expected TABLE Keyword"); } - cursor += 1; - let name = match tokens.get(cursor) { - Some(TokenContainer { - loc: _, - token: Token::IdentifierValue { value }, - }) => value, - _ => { - parse_err!(tokens, cursor, "Expected Table Name"); - } - }; - cursor += 1; - if let Some(TokenContainer { - loc: _, - token: Token::LeftParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected Left Parenthesis"); + let table_name = expect_identifier(tokens, &mut cursor).ok_or(parse_err!( + tokens, + cursor, + "Expected Table Name" + ))?; + + if !expect_token(tokens, &mut cursor, &Token::LeftParenthesis) { + ret_parse_err!(tokens, cursor, "Expected Left Parenthesis"); } let (cols, new_cursor) = parse_column_definitions(tokens, cursor, Token::RightParenthesis)?; cursor = new_cursor; - if let Some(TokenContainer { - loc: _, - token: Token::RightParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected Left Parenthesis"); - } - Ok(( CreateTableStatement { - name: name.to_string(), + name: table_name.to_string(), cols, }, cursor, diff --git a/engine/src/parser/drop_table.rs b/engine/src/parser/drop_table.rs index 
498c921..05e0e2b 100644 --- a/engine/src/parser/drop_table.rs +++ b/engine/src/parser/drop_table.rs @@ -13,7 +13,7 @@ pub fn parse_drop_table_statement<'a>( { cursor += 1; } else { - parse_err!(tokens, cursor, "Not a Drop Table Statement"); + ret_parse_err!(tokens, cursor, "Not a Drop Table Statement"); } if let Some(TokenContainer { loc: _, @@ -22,7 +22,7 @@ pub fn parse_drop_table_statement<'a>( { cursor += 1; } else { - parse_err!(tokens, cursor, "Not a Drop Table Statement"); + ret_parse_err!(tokens, cursor, "Not a Drop Table Statement"); } let name = if let Some(TokenContainer { loc: _, @@ -32,7 +32,7 @@ pub fn parse_drop_table_statement<'a>( cursor += 1; value } else { - parse_err!(tokens, cursor, "Not a Drop Table Statement"); + ret_parse_err!(tokens, cursor, "Not a Drop Table Statement"); }; Ok(( diff --git a/engine/src/parser/expression.rs b/engine/src/parser/expression.rs index 7a512f5..6a0695b 100644 --- a/engine/src/parser/expression.rs +++ b/engine/src/parser/expression.rs @@ -1,5 +1,7 @@ use super::*; +// TODO: Split different paths based on expression type? +// TODO: Document better pub fn parse_expression<'a>( tokens: &'a [TokenContainer], initial_cursor: usize, @@ -46,7 +48,7 @@ pub fn parse_expression<'a>( { cursor += 1; } else { - parse_err!(tokens, cursor, "Expected closing Parenthesis"); + ret_parse_err!(tokens, cursor, "Expected closing Parenthesis"); } } else if cursor < tokens.len() && UNARY_OPERATORS.contains(&tokens[cursor].token) { let token = &tokens[cursor]; @@ -68,12 +70,7 @@ pub fn parse_expression<'a>( cursor = cursor_; } Err(err) => { - if let Some(TokenContainer { - token: Token::LeftParenthesis, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; + if expect_token(tokens, &mut cursor, &Token::LeftParenthesis) { let (expression_, cursor_) = parse_expression( tokens, cursor, @@ -85,14 +82,8 @@ pub fn parse_expression<'a>( inner_exp = expression_; cursor = cursor_; - if let Some(TokenContainer { - loc: _, - token: Token::RightParenthesis, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected closing Parenthesis"); + if !expect_token(tokens, &mut cursor, &Token::RightParenthesis) { + ret_parse_err!(tokens, cursor, "Expected closing Parenthesis"); } } else { return Err(err); @@ -106,7 +97,7 @@ pub fn parse_expression<'a>( operand: Operand::from_token(&operand, cursor)?, }); } else { - parse_err!(tokens, cursor, "Expected Unary Operator"); + ret_parse_err!(tokens, cursor, "Expected Unary Operator"); } while let Some(operand) = nested_un_ops.pop() { inner_exp = Expression::Unary(UnaryExpression { @@ -133,7 +124,7 @@ pub fn parse_expression<'a>( ) = (tokens.get(cursor), tokens.get(cursor + 1)) { if UNARY_POSTFIX_OPERATORS.contains(token1) && BINARY_OPERATORS.contains(token2) { - cursor += 1; + cursor += 2; expression = Expression::Unary(UnaryExpression { first: Box::from(expression), operand: Operand::from_token(token1, cursor)?, @@ -146,8 +137,20 @@ pub fn parse_expression<'a>( if delimiters.contains(token) { break 'outer; } + + // Makes sure that if there are postfix unary ops, they are applied in the current expression before continuing. 
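+ // For example, with `10 + 7 * 3::int!` (from tests/acceptance/memory/operation_order_1.sql) the
+ // trailing `!` is applied here to the already-parsed `3::int` at the current recursion level, so the
+ // query evaluates to 10 + 7 * 3! = 52, whereas the old `break 'outer` stopped parsing the expression
+ // as soon as a postfix operator appeared.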
if UNARY_POSTFIX_OPERATORS.contains(token) { - break 'outer; + if !expression.is_empty() { + expression = Expression::Unary(UnaryExpression { + first: Box::from(expression), + operand: Operand::from_token(token, cursor)?, + }); + cursor += 1; + last_cursor = cursor; + continue; + } else { + ret_parse_err!(tokens, cursor, "Expected Expression"); + } } if let Some(TokenContainer { @@ -159,11 +162,13 @@ pub fn parse_expression<'a>( break; } } + let mut operand_tok = Token::Empty; if BINARY_OPERATORS.contains(token) { operand_tok = token.clone(); cursor += 1; } + if operand_tok == Token::TypeCast { if let Some(TokenContainer { token: op, loc: _ }) = tokens.get(cursor) { if op.is_datatype() { @@ -186,14 +191,14 @@ pub fn parse_expression<'a>( cursor += 1; continue; } else { - parse_err!(tokens, cursor, "Expected Type Name after Type Cast"); + ret_parse_err!(tokens, cursor, "Expected Type for Cast"); } } else { - parse_err!(tokens, cursor, "Expected Type Name after Type Cast"); + ret_parse_err!(tokens, cursor, "Unexpected end of input"); } } if operand_tok == Token::Empty { - parse_err!(tokens, cursor, "Expected Binary Operator"); + ret_parse_err!(tokens, cursor, "Expected Binary Operator"); } let binding_power = operand_tok.binding_power(); diff --git a/engine/src/parser/expressions.rs b/engine/src/parser/expressions.rs index 16b94bb..d91f86b 100644 --- a/engine/src/parser/expressions.rs +++ b/engine/src/parser/expressions.rs @@ -11,7 +11,7 @@ pub fn parse_expressions( loop { if cursor >= tokens.len() { - parse_err!(tokens, cursor, "Expected Expression"); + ret_parse_err!(tokens, cursor, "Expected Expression"); } // Look for delimiter @@ -26,12 +26,8 @@ pub fn parse_expressions( } // Look for comma - if !expressions.is_empty() { - if !expect_token(tokens, cursor, Token::Comma) { - parse_err!(tokens, cursor, "Expected Comma"); - } - - cursor += 1; + if !expressions.is_empty() && !expect_token(tokens, &mut cursor, &Token::Comma) { + ret_parse_err!(tokens, cursor, "Expected Comma"); } // Look for expression diff --git a/engine/src/parser/insert.rs b/engine/src/parser/insert.rs index 4032f5c..eba5c1c 100644 --- a/engine/src/parser/insert.rs +++ b/engine/src/parser/insert.rs @@ -8,43 +8,29 @@ pub fn parse_insert_statement( let mut cursor = initial_cursor; // Look for INSERT - if !expect_token(tokens, cursor, Token::Insert) { - parse_err!(tokens, cursor, "Not an Insert Statement"); + if !expect_token(tokens, &mut cursor, &Token::Insert) { + ret_parse_err!(tokens, cursor, "Not an Insert Statement"); } - cursor += 1; // Look for INTO - if !expect_token(tokens, cursor, Token::Into) { - parse_err!(tokens, cursor, "Expected INTO"); + if !expect_token(tokens, &mut cursor, &Token::Into) { + ret_parse_err!(tokens, cursor, "Expected INTO"); } - cursor += 1; - let table_name = match tokens.get(cursor) { - Some(TokenContainer { - loc: _, - token: Token::IdentifierValue { value }, - }) => value, - _ => { - parse_err!(tokens, cursor, "Expected Table Name"); - } - }; - - cursor += 1; + let table_name = expect_identifier(tokens, &mut cursor).ok_or(parse_err!( + tokens, + cursor, + "Expected Table Name" + ))?; // Look for VALUES - if let Some(token) = tokens.get(cursor) { - if token.token != Token::Values { - parse_err!(tokens, cursor, "Expected VALUES"); - } - cursor += 1; + if !expect_token(tokens, &mut cursor, &Token::Values) { + ret_parse_err!(tokens, cursor, "Expected VALUES"); } // Look for left parenthesis - if let Some(token) = tokens.get(cursor) { - if token.token != Token::LeftParenthesis { - 
parse_err!(tokens, cursor, "Expected Left parenthesis"); - } - cursor += 1; + if !expect_token(tokens, &mut cursor, &Token::LeftParenthesis) { + ret_parse_err!(tokens, cursor, "Expected Left parenthesis"); } // Look for expression list @@ -53,14 +39,8 @@ pub fn parse_insert_statement( cursor = new_cursor; // Look for right parenthesis - if let Some(TokenContainer { - token: Token::RightParenthesis, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected Right Parenthesis"); + if !expect_token(tokens, &mut cursor, &Token::RightParenthesis) { + ret_parse_err!(tokens, cursor, "Expected Right Parenthesis"); } Ok(( diff --git a/engine/src/parser/joins.rs b/engine/src/parser/joins.rs index 5a8e7f6..5754cea 100644 --- a/engine/src/parser/joins.rs +++ b/engine/src/parser/joins.rs @@ -70,13 +70,13 @@ pub fn parse_joins<'a>( cursor += 1; kind = JoinKind::FullOuter; } else { - parse_err!(tokens, cursor, "Expected OUTER Keyword after FULL"); + ret_parse_err!(tokens, cursor, "Expected OUTER Keyword after FULL"); } } else if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { if delimiters.contains(token) { break; } - parse_err!(tokens, cursor, "Failed to parse Join Clause"); + ret_parse_err!(tokens, cursor, "Failed to parse Join Clause"); } if let Some(TokenContainer { token: Token::Join, @@ -85,7 +85,7 @@ pub fn parse_joins<'a>( { cursor += 1; } else { - parse_err!(tokens, cursor, "No JOIN Keyword after INNER"); + ret_parse_err!(tokens, cursor, "No JOIN Keyword after INNER"); } let (table, new_cursor) = parse_table(tokens, cursor, delimiters)?; cursor = new_cursor; @@ -96,7 +96,7 @@ pub fn parse_joins<'a>( { cursor += 1; } else { - parse_err!(tokens, cursor, "No ON keyword in Join Expression"); + ret_parse_err!(tokens, cursor, "No ON keyword in Join Expression"); } let (col1, new_cursor) = parse_table_column(tokens, cursor)?; cursor = new_cursor; @@ -105,10 +105,10 @@ pub fn parse_joins<'a>( if BINARY_OPERATORS.contains(token) { token.clone() } else { - parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); + ret_parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); } } else { - parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); + ret_parse_err!(tokens, cursor, "No Binary Operator in Join Expression"); }; let (col2, new_cursor) = parse_table_column(tokens, cursor)?; cursor = new_cursor; @@ -116,7 +116,7 @@ pub fn parse_joins<'a>( let operand = if let Ok(o) = Operand::from_token(&operand_token, cursor) { o } else { - parse_err!( + ret_parse_err!( tokens, cursor, "Failed to parse Binary Operator in Join Expression" diff --git a/engine/src/parser/literal.rs b/engine/src/parser/literal.rs index 0fb6454..a808266 100644 --- a/engine/src/parser/literal.rs +++ b/engine/src/parser/literal.rs @@ -29,7 +29,7 @@ pub fn parse_literal_expression( table_name = Some(col_name); col_name = value; } else { - parse_err!(tokens, cursor, "Expected Identifier after dot"); + ret_parse_err!(tokens, cursor, "Expected Identifier after dot"); } } Ok(( @@ -51,10 +51,10 @@ pub fn parse_literal_expression( )) } _ => { - parse_err!(tokens, cursor, "Expected Literal") + ret_parse_err!(tokens, cursor, "Expected Literal") } } } else { - parse_err!(tokens, cursor, "Expected Literal Expression"); + ret_parse_err!(tokens, cursor, "Expected Literal Expression"); } } diff --git a/engine/src/parser/select.rs b/engine/src/parser/select.rs index e05a6ff..8624e2d 100644 --- a/engine/src/parser/select.rs +++ b/engine/src/parser/select.rs 
@@ -14,9 +14,9 @@ pub fn parse_select_statement<'a>( }) = tokens.get(cursor) { } else if let Some(TokenContainer { token: _, loc: _ }) = tokens.get(cursor) { - parse_err!(tokens, cursor, "Not a Select statement"); + ret_parse_err!(tokens, cursor, "Not a Select statement"); } else { - parse_err!(tokens, cursor, "Reached end of input"); + ret_parse_err!(tokens, cursor, "Reached end of input"); } cursor += 1; @@ -180,7 +180,7 @@ pub fn parse_select_statement<'a>( let limit = match value.parse::() { Ok(val) => val, Err(err) => { - parse_err!( + ret_parse_err!( tokens, cursor, &format!("Failed to parse Limit value: {err}") @@ -188,10 +188,10 @@ pub fn parse_select_statement<'a>( } }; if limit.is_sign_negative() { - parse_err!(tokens, cursor, "Limit must not be negative"); + ret_parse_err!(tokens, cursor, "Limit must not be negative"); } if limit.is_nan() || limit.is_infinite() { - parse_err!( + ret_parse_err!( tokens, cursor, "Limit cannot be interpreted as a whole number" @@ -217,7 +217,7 @@ pub fn parse_select_statement<'a>( let offset = match value.parse::() { Ok(val) => val, Err(err) => { - parse_err!( + ret_parse_err!( tokens, cursor, &format!("Failed to parse Offset value: {err}") @@ -225,10 +225,10 @@ pub fn parse_select_statement<'a>( } }; if offset.is_sign_negative() { - parse_err!(tokens, cursor, "Offset must not be negative"); + ret_parse_err!(tokens, cursor, "Offset must not be negative"); } if offset.is_nan() || offset.is_infinite() { - parse_err!( + ret_parse_err!( tokens, cursor, "Limit cannot be interpreted as a whole number" diff --git a/engine/src/parser/select_items.rs b/engine/src/parser/select_items.rs index b3a264f..f1f9585 100644 --- a/engine/src/parser/select_items.rs +++ b/engine/src/parser/select_items.rs @@ -20,27 +20,17 @@ pub fn parse_select_items<'a>( return Ok((select_items, cursor - 1)); } Ordering::Greater => { - parse_err!(tokens, cursor, "Unexpected end of tokens"); + ret_parse_err!(tokens, cursor, "Unexpected end of tokens"); } _ => {} } let current_token = &tokens[cursor]; - for delimiter in delimiters { - if delimiter == ¤t_token.token { - break 'outer; - } + if delimiters.contains(¤t_token.token) { + break 'outer; } - if !select_items.is_empty() { - if let Some(TokenContainer { - loc: _, - token: Token::Comma, - }) = tokens.get(cursor) - { - cursor += 1; - } else { - parse_err!(tokens, cursor, "Expected comma"); - } + if !select_items.is_empty() && !expect_token(tokens, &mut cursor, &Token::Comma) { + ret_parse_err!(tokens, cursor, "Expected comma1"); } let mut select_item = SelectItem { @@ -49,12 +39,7 @@ pub fn parse_select_items<'a>( asterisk: false, }; - if let Some(TokenContainer { - token: Token::Asterisk, - loc: _, - }) = tokens.get(cursor) - { - cursor += 1; + if expect_token(tokens, &mut cursor, &Token::Asterisk) { select_item.asterisk = true; } else { let (expression, new_cursor) = @@ -63,23 +48,14 @@ pub fn parse_select_items<'a>( select_item.expression = expression; let mut found_as = false; - if let Some(TokenContainer { - loc: _, - token: Token::As, - }) = tokens.get(cursor) - { + if expect_token(tokens, &mut cursor, &Token::As) { found_as = true; - cursor += 1; } - if let Some(TokenContainer { - token: Token::IdentifierValue { value }, - loc: _, - }) = tokens.get(cursor) - { + + if let Some(value) = expect_identifier(tokens, &mut cursor) { select_item.as_clause = Some(value.to_string()); - cursor += 1; } else if found_as { - parse_err!(tokens, cursor, "Expected Identifier after AS"); + ret_parse_err!(tokens, cursor, "Expected Identifier 
after AS"); } } diff --git a/engine/src/parser/statement.rs b/engine/src/parser/statement.rs index bef58a3..85975f7 100644 --- a/engine/src/parser/statement.rs +++ b/engine/src/parser/statement.rs @@ -27,11 +27,11 @@ pub fn parse_statement<'a>( Err(err) => Err(err), } } - Token::Delete => parse_err!(tokens, cursor, Internal, "Delete not implemented"), - Token::Update => parse_err!(tokens, cursor, Internal, "Update not implemented"), - Token::Alter => parse_err!(tokens, cursor, Internal, "Alter not implemented"), + Token::Delete => ret_parse_err!(tokens, cursor, Internal, "Delete not implemented"), + Token::Update => ret_parse_err!(tokens, cursor, Internal, "Update not implemented"), + Token::Alter => ret_parse_err!(tokens, cursor, Internal, "Alter not implemented"), Token::IdentifierValue { value: _ } => { - parse_err!(tokens, cursor, Internal, "Assignment not implemented") + ret_parse_err!(tokens, cursor, Internal, "Assignment not implemented") } Token::Create => { if let Some(first_token) = tokens.get(cursor + 1) { @@ -63,12 +63,12 @@ pub fn parse_statement<'a>( token: Token::Constraint, loc: _, }) => { - parse_err!(tokens, cursor, "Create Constraint not implemented") + ret_parse_err!(tokens, cursor, "Create Constraint not implemented") } - _ => parse_err!(tokens, cursor, "Invalid Create Statement"), + _ => ret_parse_err!(tokens, cursor, "Invalid Create Statement"), }, Token::Constraint => { - parse_err!(tokens, cursor, "Create Constraint not implemented") + ret_parse_err!(tokens, cursor, "Create Constraint not implemented") } Token::Table => { // Look for a CREATE TABLE statement @@ -79,10 +79,10 @@ pub fn parse_statement<'a>( Err(err) => (Err(err)), } } - _ => parse_err!(tokens, cursor, "Invalid Create Statement"), + _ => ret_parse_err!(tokens, cursor, "Invalid Create Statement"), } } else { - parse_err!(tokens, cursor, "Invalid Create Statement"); + ret_parse_err!(tokens, cursor, "Invalid Create Statement"); } } Token::Drop => { @@ -92,9 +92,9 @@ pub fn parse_statement<'a>( Err(err) => (Err(err)), } } - _ => parse_err!(tokens, cursor, "Expected a valid Statement"), + _ => ret_parse_err!(tokens, cursor, "Expected a valid Statement"), } } else { - parse_err!(tokens, cursor, "Expected a valid Statement"); + ret_parse_err!(tokens, cursor, "Expected a valid Statement"); } } diff --git a/engine/src/parser/table.rs b/engine/src/parser/table.rs index 6cfeaba..003523c 100644 --- a/engine/src/parser/table.rs +++ b/engine/src/parser/table.rs @@ -32,7 +32,7 @@ pub fn parse_table<'a>( cursor += 1; as_clause = Some(value); } else if found_as { - parse_err!(tokens, cursor, "Failed to parse As clause after AS"); + ret_parse_err!(tokens, cursor, "Failed to parse As clause after AS"); } let (joins, new_cursor) = parse_joins(tokens, cursor, delimiters)?; cursor = new_cursor; @@ -85,9 +85,9 @@ pub fn parse_table<'a>( cursor, )); } else if found_as { - parse_err!(tokens, cursor, "Failed to parse As clause after AS"); + ret_parse_err!(tokens, cursor, "Failed to parse As clause after AS"); } else if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - parse_err!( + ret_parse_err!( tokens, cursor, &format!("Unexpected {:?}, subquery requires as clause", token) @@ -96,5 +96,5 @@ pub fn parse_table<'a>( } } - parse_err!(tokens, cursor, "Failed to parse a source Table"); + ret_parse_err!(tokens, cursor, "Failed to parse a source Table"); } diff --git a/engine/src/parser/table_column.rs b/engine/src/parser/table_column.rs index aa39d34..dea5f78 100644 --- a/engine/src/parser/table_column.rs +++ 
b/engine/src/parser/table_column.rs @@ -29,7 +29,7 @@ pub fn parse_table_column( table_name = Some(col_name); col_name = value; } else { - parse_err!(tokens, cursor, "Failed to parse Table name in Column"); + ret_parse_err!(tokens, cursor, "Failed to parse Table name in Column"); } } Ok(( @@ -40,9 +40,9 @@ pub fn parse_table_column( cursor, )) } - _ => parse_err!(tokens, cursor, "Failed to parse Column"), + _ => ret_parse_err!(tokens, cursor, "Failed to parse Column"), } } else { - parse_err!(tokens, cursor, "Failed to parse Column"); + ret_parse_err!(tokens, cursor, "Failed to parse Column"); } } diff --git a/engine/src/parser/tables.rs b/engine/src/parser/tables.rs index 4b77592..c3e7f2c 100644 --- a/engine/src/parser/tables.rs +++ b/engine/src/parser/tables.rs @@ -25,14 +25,14 @@ pub fn parse_tables<'a>( if delimiters.contains(token) { break; } else { - parse_err!( + ret_parse_err!( tokens, cursor, &format!("Failed to parse table, unexpected {:?}", token) ); } } else { - parse_err!(tokens, cursor, "Failed to parse Table"); + ret_parse_err!(tokens, cursor, "Failed to parse Table"); } } diff --git a/repl/src/main.rs b/repl/src/main.rs index 0e57863..a33451b 100644 --- a/repl/src/main.rs +++ b/repl/src/main.rs @@ -7,6 +7,7 @@ use sqlo2::backend::EvalResult; use sqlo2::backend_memory::*; use sqlo2::{self}; use std::borrow::Cow; +use std::fmt::Write as FmtWrite; use std::io::{stdout, Write}; use std::time::Duration; @@ -176,51 +177,61 @@ pub fn repl_eval(mb: &mut MemoryBackend, cmd: String) -> String { if !results.rows.is_empty() { output_text.push_str(table.to_string().as_str()); } - output_text.push_str(&format!( + write!( + output_text, "{}", format!("({} Results)\n", results.rows.len()) .as_str() .dimmed() - )); + ) + .expect("Failed to write to output"); output_text.push_str(&"Ok!\n".green().to_string()); if multiple_results { total_time += time; } - output_text.push_str(&format!( + write!( + output_text, "{}", - format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() - )); + &format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() + ) + .expect("Failed to write to output"); } EvalResult::CreateTable { success: _, time } => { output_text.push_str(&"Ok!\n".green().to_string()); if multiple_results { total_time += time; } - output_text.push_str(&format!( + write!( + output_text, "{}", format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() - )); + ) + .expect("Failed to write to output"); } EvalResult::Insert { success: _, time } => { output_text.push_str(&"Ok!\n".green().to_string()); if multiple_results { total_time += time; } - output_text.push_str(&format!( + write!( + output_text, "{}", format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() - )); + ) + .expect("Failed to write to output"); } EvalResult::DropTable { success: _, time } => { output_text.push_str(&"Ok!\n".green().to_string()); if multiple_results { total_time += time; } - output_text.push_str(&format!( + write!( + output_text, "{}", format!("Elapsed time : {:.2?}\n", time).as_str().dimmed() - )); + ) + .expect("Failed to write to output"); } } } diff --git a/tests/acceptance/memory/operation_order_1 b/tests/acceptance/memory/operation_order_1 new file mode 100644 index 0000000..4665fac --- /dev/null +++ b/tests/acceptance/memory/operation_order_1 @@ -0,0 +1,60 @@ +Select: +| ?column?(Double Precision) | +|----------------------------| +| 31 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 51 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| -11 | + 
+Select: +| ?column?(Double Precision) | +|----------------------------| +| 9 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 12.333333333333334 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 5.666666666666667 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 15130 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 15130 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 52 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 52 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 3628821 | + +Select: +| ?column?(Double Precision) | +|----------------------------| +| 3628821 | + diff --git a/tests/acceptance/memory/operation_order_1.sql b/tests/acceptance/memory/operation_order_1.sql new file mode 100644 index 0000000..291e66b --- /dev/null +++ b/tests/acceptance/memory/operation_order_1.sql @@ -0,0 +1,23 @@ +select 10 + 7 * 3; + +select (10 + 7) * 3; + +select 10 - 7 * 3; + +select (10 - 7) * 3; + +select 10 + 7 / 3; + +select (10 + 7) / 3; + +select 10 + 7::int! * 3; + +select 10 + !!7::int * 3; + +select 10 + 7 * 3::int!; + +select 10 + 7 * !!3::int; + +select 10::int! + 7 * 3; + +select !!10::int + 7 * 3; diff --git a/tests/acceptance/memory1/query1 b/tests/acceptance/memory/query1 similarity index 100% rename from tests/acceptance/memory1/query1 rename to tests/acceptance/memory/query1 diff --git a/tests/acceptance/memory1/query1.sql b/tests/acceptance/memory/query1.sql similarity index 100% rename from tests/acceptance/memory1/query1.sql rename to tests/acceptance/memory/query1.sql diff --git a/tests/acceptance/memory1/query2 b/tests/acceptance/memory/query2 similarity index 100% rename from tests/acceptance/memory1/query2 rename to tests/acceptance/memory/query2 diff --git a/tests/acceptance/memory1/query2.sql b/tests/acceptance/memory/query2.sql similarity index 100% rename from tests/acceptance/memory1/query2.sql rename to tests/acceptance/memory/query2.sql diff --git a/tests/acceptance/memory1/query3 b/tests/acceptance/memory/query3 similarity index 100% rename from tests/acceptance/memory1/query3 rename to tests/acceptance/memory/query3 diff --git a/tests/acceptance/memory1/query3.sql b/tests/acceptance/memory/query3.sql similarity index 100% rename from tests/acceptance/memory1/query3.sql rename to tests/acceptance/memory/query3.sql diff --git a/tests/acceptance/memory1/test.toml b/tests/acceptance/memory/test.toml similarity index 60% rename from tests/acceptance/memory1/test.toml rename to tests/acceptance/memory/test.toml index 62aeace..652dc91 100644 --- a/tests/acceptance/memory1/test.toml +++ b/tests/acceptance/memory/test.toml @@ -8,4 +8,8 @@ filename = "query2" [[query]] result = "err" -filename = "query3" \ No newline at end of file +filename = "query3" + +[[query]] +result = "ok" +filename = "operation_order_1" \ No newline at end of file diff --git a/tests/src/lib.rs b/tests/src/lib.rs index b48c404..a140bce 100644 --- a/tests/src/lib.rs +++ b/tests/src/lib.rs @@ -28,7 +28,7 @@ mod parser_tests { backend.eval_query(sql).map(|r| r.into_vec_container()) } - #[test_case("../tests/acceptance/memory1")] + #[test_case("../tests/acceptance/memory")] fn memory_backend(test: &str) { run_test(test, memory_backend_test_fn, MemoryBackend::init()); } From 6de174db9901c72d52e09ba9761acf9afdf290df Mon Sep 17 00:00:00 2001 From: Axmouth Date: Mon, 30 Jan 2023 04:15:11 +0200 
Subject: [PATCH 5/5] MEGA WIP --- Cargo.lock | 439 ++++++++--- Cargo.toml | 5 +- benchmarks/Cargo.toml | 3 +- benchmarks/benches/bench_main.rs | 14 + engine/Cargo.toml | 11 +- engine/src/ast.rs | 266 ++++--- engine/src/backend.rs | 56 +- engine/src/backend_memory/mod.rs | 472 +++++++----- engine/src/lib.rs | 19 - engine/src/parser/expression.rs | 42 +- engine/src/parser/joins.rs | 2 +- engine/src/parser/statement.rs | 8 +- engine/src/sql_types/mod.rs | 242 +++++-- repl/Cargo.toml | 6 +- repl/src/main.rs | 2 +- test-util/Cargo.toml | 2 +- tests/acceptance/memory/operation_order_1 | 61 +- tests/acceptance/memory/query1 | 152 +--- tests/acceptance/memory/query2 | 10 +- tests/unit/parser1/query1 | 135 ++-- tests/unit/parser1/query1.target | 42 ++ tests/unit/parser2/query1 | 441 +++++++---- tests/unit/parser2/query1.target | 141 ++++ transactional-store/Cargo.toml | 22 + transactional-store/benches/bench.rs | 27 + transactional-store/src/lib.rs | 842 ++++++++++++++++++++++ transactional-store/src/main.rs | 3 + wasm-repl/Cargo.toml | 8 +- wire-protocol/src/messages/mod.rs | 10 +- 29 files changed, 2513 insertions(+), 970 deletions(-) create mode 100644 tests/unit/parser1/query1.target create mode 100644 tests/unit/parser2/query1.target create mode 100644 transactional-store/Cargo.toml create mode 100644 transactional-store/benches/bench.rs create mode 100644 transactional-store/src/lib.rs create mode 100644 transactional-store/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index 710ef33..07cfa0b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -32,6 +32,12 @@ dependencies = [ "syn", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "ansi_term" version = "0.12.1" @@ -131,9 +137,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" +checksum = "f0b3de4a0c5e67e16066a0715723abd91edc2f9001d09c46e1dca929351e130e" [[package]] name = "cast" @@ -144,6 +150,12 @@ dependencies = [ "rustc_version 0.4.0", ] +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cc" version = "1.0.73" @@ -156,6 +168,33 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "ciborium" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c137568cc60b904a7724001b35ce2630fd00d5d84805fbb608ab89509d788f" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346de753af073cc87b52b2083a506b38ac176a44cfb05497b622e27be899b369" + +[[package]] +name = "ciborium-ll" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "clap" version = "2.34.0" @@ -163,10 +202,31 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ "bitflags", - "textwrap", + "textwrap 0.11.0", "unicode-width", ] +[[package]] +name = "clap" +version = "3.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ed5341b2301a26ab80be5cbdced622e80ed808483c52e45e3310a877d3b37d7" +dependencies = [ + "bitflags", + "clap_lex", + "indexmap", + "textwrap 0.15.0", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + [[package]] name = "clipboard-win" version = "4.4.1" @@ -228,14 +288,14 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "criterion" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1604dafd25fba2fe2d5895a9da139f8dc9b319a5fe5354ca137cbbce4e178d10" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" dependencies = [ "atty", - "cast", - "clap", - "criterion-plot", + "cast 0.3.0", + "clap 2.34.0", + "criterion-plot 0.4.4", "csv", "itertools", "lazy_static", @@ -252,13 +312,49 @@ dependencies = [ "walkdir", ] +[[package]] +name = "criterion" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +dependencies = [ + "anes", + "atty", + "cast 0.3.0", + "ciborium", + "clap 3.2.21", + "criterion-plot 0.5.0", + "itertools", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + [[package]] name = "criterion-plot" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d00996de9f2f7559f7f4dc286073197f83e92256a59ed395f9aac01fe717da57" dependencies = [ - "cast", + "cast 0.2.7", + "itertools", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast 0.3.0", "itertools", ] @@ -339,6 +435,19 @@ dependencies = [ "syn", ] +[[package]] +name = "dashmap" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +dependencies = [ + "cfg-if", + "hashbrown", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "diff" version = "0.1.13" @@ -459,7 +568,7 @@ checksum = "e11dcc7e4d79a8c89b9ab4c6f5c30b1fc4a83c420792da3542fd31179ed5f517" dependencies = [ "cfg-if", "rustix", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -478,12 +587,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fs_extra" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394" - [[package]] name = "futures-channel" version = "0.3.21" @@ -638,9 +741,9 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "hashbrown" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "heck" @@ -713,32 +816,11 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" -[[package]] -name = "jemalloc-sys" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45" -dependencies = [ - "cc", - "fs_extra", - "libc", -] - -[[package]] -name = "jemallocator" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43ae63fcfc45e99ab3d1b29a46782ad679e98436c3169d15a167a1108a724b69" -dependencies = [ - "jemalloc-sys", - "libc", -] - [[package]] name = "js-sys" -version = "0.3.57" +version = "0.3.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "671a26f820db17c2a2750743f1dd03bafd15b98c9f30c7c2628c024c05d73397" +checksum = "c3fac17f7123a73ca62df411b1bf727ccc805daa070338fda671c86dac1bdc27" dependencies = [ "wasm-bindgen", ] @@ -770,6 +852,16 @@ version = "0.0.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d" +[[package]] +name = "lock_api" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +dependencies = [ + "autocfg", + "scopeguard", +] + [[package]] name = "log" version = "0.4.17" @@ -820,15 +912,13 @@ dependencies = [ [[package]] name = "nix" -version = "0.23.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f866317acbd3a240710c63f065ffb1e4fd466259045ccb504130b7f668f35c6" +checksum = "195cdbc1741b8134346d515b3a56a1c94b0912758009cfd53f99ea0f57b065fc" dependencies = [ "bitflags", - "cc", "cfg-if", "libc", - "memoffset", ] [[package]] @@ -861,9 +951,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.12.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "oorandom" @@ -871,6 +961,12 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +[[package]] +name = "os_str_bytes" +version = "6.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" + [[package]] name = "output_vt100" version = "0.1.3" @@ -880,12 +976,41 @@ dependencies = [ "winapi", ] +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.2.13", + "smallvec", + "windows-sys 0.42.0", +] + [[package]] name = "percent-encoding" version = "2.1.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + [[package]] name = "pin-utils" version = "0.1.0" @@ -920,6 +1045,12 @@ dependencies = [ "plotters-backend", ] +[[package]] +name = "ppv-lite86" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" + [[package]] name = "pretty_assertions" version = "1.2.1" @@ -998,6 +1129,36 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.7", +] + [[package]] name = "rayon" version = "1.5.3" @@ -1061,9 +1222,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.5" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", @@ -1078,9 +1239,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = "regex-syntax" -version = "0.6.26" +version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "route-recognizer" @@ -1139,14 +1300,14 @@ dependencies = [ "io-lifetimes", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] name = "rustyline" -version = "9.1.2" +version = "10.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db7826789c0e25614b03e5a54a0717a86f9ff6e6e5247f92b369472869320039" +checksum = "1d1cd5ae51d3f7bf65d7969d579d502168ef578f289452bd8ccc91de28fda20e" dependencies = [ "bitflags", "cfg-if", @@ -1159,7 +1320,6 @@ dependencies = [ "nix", "radix_trie", "scopeguard", - "smallvec", "unicode-segmentation", "unicode-width", "utf8parse", @@ -1168,10 +1328,11 @@ dependencies = [ [[package]] name = "rustyline-derive" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb35a55ab810b5c0fe31606fe9b47d1354e4dc519bec0a102655f78ea2b38057" +checksum = "107c3d5d7f370ac09efa62a78375f94d94b8a33c61d8c278b96683fb4dbf2d8d" dependencies = [ + "proc-macro2", "quote", "syn", ] @@ -1226,9 +1387,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.137" +version = "1.0.140" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" +checksum = "fc855a42c7967b7c369eb5860f7164ef1f6f81c20c7cc1141f2a604e18723b03" dependencies = [ "serde_derive", ] @@ -1257,9 +1418,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.137" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" +checksum = "6f2122636b9fe3b81f1cb25099fcf2d3f542cdb1d45940d56c713158884a05da" dependencies = [ "proc-macro2", "quote", @@ -1279,9 +1440,9 @@ dependencies = [ [[package]] name = "serde_qs" -version = "0.9.2" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6af4cee6cd4b23b45e6709150d1e9af5c748131de7e3316a7c2b3008051ed725" +checksum = "8cac3f1e2ca2fe333923a1ae72caca910b98ed0630bb35ef6f8c8517d6e81afa" dependencies = [ "percent-encoding", "serde", @@ -1341,7 +1502,6 @@ dependencies = [ "byteorder", "bytes", "instant", - "jemallocator", "lazy_static", "mimalloc", "pretty_assertions", @@ -1357,10 +1517,11 @@ name = "sqlo2_benchmarks" version = "0.1.0" dependencies = [ "alloc_counter", - "criterion", + "criterion 0.3.6", "instant", "sqlo2", "sysinfo", + "transactional-store", ] [[package]] @@ -1483,9 +1644,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.23.12" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56b1e20ee77901236c389ff74618a899ff5fd34719a7ff0fd1d64f0acca5179a" +checksum = "54cb4ebf3d49308b99e6e9dc95e989e2fdbdc210e4f67c39db0bb89ba927001c" dependencies = [ "cfg-if", "core-foundation-sys", @@ -1526,6 +1687,28 @@ dependencies = [ "winapi", ] +[[package]] +name = "test-case" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21d6cf5a7dffb3f9dceec8e6b8ca528d9bd71d36c9f074defb548ce161f598c0" +dependencies = [ + "test-case-macros", +] + +[[package]] +name = "test-case-macros" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e45b7bf6e19353ddd832745c8fcf77a17a93171df7151187f26623f2b75b5b26" +dependencies = [ + "cfg-if", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "test-macros" version = "0.1.0" @@ -1572,6 +1755,12 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "textwrap" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" + [[package]] name = "thiserror" version = "1.0.31" @@ -1602,6 +1791,16 @@ dependencies = [ "serde_json", ] +[[package]] +name = "tokio" +version = "1.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e03c497dc955702ba729190dc4aac6f2a0ce97f913e5b1b5912fc5039d9099" +dependencies = [ + "autocfg", + "pin-project-lite", +] + [[package]] name = "toml" version = "0.5.9" @@ -1611,10 +1810,22 @@ dependencies = [ "serde", ] +[[package]] +name = "transactional-store" +version = "0.1.0" +dependencies = [ + "criterion 0.4.0", + "dashmap", + "instant", + "parking_lot", + "rand", + "test-case", + "tokio", +] + [[package]] name = "tree-display" version = "0.1.0" -source = "git+https://github.com/Axmouth/tree-display-rs?branch=main#2219e96dd713f223816b891951a22e1f2f3ddaf3" dependencies = [ "tree-display-macros", ] @@ -1622,7 +1833,6 @@ dependencies = [ [[package]] name = 
"tree-display-macros" version = "0.1.0" -source = "git+https://github.com/Axmouth/tree-display-rs?branch=main#2219e96dd713f223816b891951a22e1f2f3ddaf3" dependencies = [ "heck", "proc-macro2", @@ -1691,9 +1901,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.80" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27370197c907c55e3f1a9fbe26f44e937fe6451368324e009cba39e139dc08ad" +checksum = "7c53b543413a17a202f4be280a7e5c62a1c69345f5de525ee64f8cfdbc954994" dependencies = [ "cfg-if", "serde", @@ -1703,9 +1913,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.80" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53e04185bfa3a779273da532f5025e33398409573f348985af9a1cbf3774d3f4" +checksum = "5491a68ab4500fa6b4d726bd67408630c3dbe9c4fe7bda16d5c82a1fd8c7340a" dependencies = [ "bumpalo", "lazy_static", @@ -1730,9 +1940,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.80" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17cae7ff784d7e83a2fe7611cfe766ecf034111b49deb850a3dc7699c08251f5" +checksum = "c441e177922bc58f1e12c022624b6216378e5febc2f0533e41ba443d505b80aa" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1740,9 +1950,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.80" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b" +checksum = "7d94ac45fcf608c1f45ef53e748d35660f168490c10b23704c7779ab8f5c3048" dependencies = [ "proc-macro2", "quote", @@ -1753,9 +1963,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.80" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d554b7f530dee5964d9a9468d95c1f8b8acae4f282807e7d27d4b03099a46744" +checksum = "6a89911bd99e5f3659ec4acf9c4d93b0a90fe4a2a11f15328472058edc5261be" [[package]] name = "wasm-logger" @@ -1770,9 +1980,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.57" +version = "0.3.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b17e741662c70c8bd24ac5c5b18de314a2c26c32bf8346ee1e6f53de919c283" +checksum = "2fed94beee57daf8dd7d51f2b15dc2bcde92d7a72304cdf662a4371008b71b90" dependencies = [ "js-sys", "wasm-bindgen", @@ -1815,43 +2025,100 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" dependencies = [ - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_msvc", + "windows_aarch64_msvc 0.36.1", + "windows_i686_gnu 0.36.1", + "windows_i686_msvc 0.36.1", + "windows_x86_64_gnu 0.36.1", + "windows_x86_64_msvc 0.36.1", +] + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc 0.42.0", + "windows_i686_gnu 0.42.0", + "windows_i686_msvc 0.42.0", + "windows_x86_64_gnu 0.42.0", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc 0.42.0", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" + [[package]] name = "windows_aarch64_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" + [[package]] name = "windows_i686_gnu" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" +[[package]] +name = "windows_i686_gnu" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" + [[package]] name = "windows_i686_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +[[package]] +name = "windows_i686_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" + [[package]] name = "windows_x86_64_gnu" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" + [[package]] name = "windows_x86_64_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" + [[package]] name = "yew" version = "0.19.3" diff --git a/Cargo.toml b/Cargo.toml index bd79915..cc82904 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,7 @@ members = [ "test-test", "test-util", "tests", + "transactional-store", "wasm-repl", "wire-protocol" ] @@ -24,5 +25,5 @@ debug = true debug = false lto = "thin" -# [patch."https://github.com/Axmouth/tree-display-rs"] -# tree-display = { path = "../tree-display/tree-display" } \ No newline at end of file +[patch."https://github.com/Axmouth/tree-display-rs"] +tree-display = { path = "../tree-display/tree-display" } \ No newline at end of file diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index 314b304..8003201 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -6,10 +6,11 @@ edition = "2021" [dependencies] sqlo2 = { path = "../engine" } +transactional-store = { path = "../transactional-store" } criterion = "=0.3" instant = { version = "=0.1", features = [ "now" ] } alloc_counter = "=0.0.4" -sysinfo = "=0.23.12" +sysinfo = "=0.24.7" [[bench]] diff --git a/benchmarks/benches/bench_main.rs b/benchmarks/benches/bench_main.rs index cd318ce..bd538cf 
100644 --- a/benchmarks/benches/bench_main.rs +++ b/benchmarks/benches/bench_main.rs @@ -209,6 +209,19 @@ pub fn million_row_benchmark(_c: &mut Criterion) { ); } +fn store_set_benchmark(c: &mut Criterion) { + use transactional_store::{TransactionalStore, TransactionalStoreExt}; + let store = TransactionalStore::::new(); + // Bench here + c.bench_function("create", |b| { + b.iter(|| { + for i in 0..100000 { + store.set(i % 999, i.to_string().as_str(), None); + } + }) + }); +} + criterion_group!( benches, lex_benchmark, @@ -220,5 +233,6 @@ criterion_group!( insert_benchmark, select_benchmark, million_row_benchmark, + store_set_benchmark, ); criterion_main!(benches); diff --git a/engine/Cargo.toml b/engine/Cargo.toml index 303899e..77abaa6 100644 --- a/engine/Cargo.toml +++ b/engine/Cargo.toml @@ -12,17 +12,16 @@ wasm = ["stdweb", "wasm-bindgen"] [dependencies] byteorder = "=1.4.3" -bytes = "=1.1.0" -regex = "=1.5.5" +bytes = "=1.2.0" +regex = "=1.6.0" lazy_static = "=1.4.0" instant = { version = "=0.1.12", features = [ "now" ] } -serde = { version = "=1.0.137", features = ["derive"] } -jemallocator = {version = "=0.3.2", optional = true } +serde = { version = "=1.0.140", features = ["derive"] } mimalloc = { version = "=0.1.29", default-features = false, optional = true } alloc_counter = {version = "=0.0.4", optional = true } test-util = { path = "../test-util" } -test-macros = { path = "../test-macros" } tree-display = { git = "https://github.com/Axmouth/tree-display-rs", branch = "main" } [dev-dependencies] -pretty_assertions = "=1.2.1" \ No newline at end of file +pretty_assertions = "=1.2.1" +test-macros = { path = "../test-macros" } \ No newline at end of file diff --git a/engine/src/ast.rs b/engine/src/ast.rs index 5dfc02e..c656028 100644 --- a/engine/src/ast.rs +++ b/engine/src/ast.rs @@ -310,7 +310,7 @@ impl LiteralExpression { } #[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] -pub enum Operand { +pub enum BinaryOperand { Add, Subtract, Multiply, @@ -324,128 +324,172 @@ pub enum Operand { GreaterThanOrEqual, LessThan, LessThanOrEqual, - Not, - SquareRoot, - CubeRoot, - Factorial, - FactorialPrefix, - AbsoluteValue, Exponentiation, Concat, - Is, - IsNot, In, NotIn, Like, NotLike, Between, NotBetween, - Exists, - NotExists, - Null, - NotNull, BitwiseAnd, BitwiseOr, BitwiseXor, - BitwiseNot, BitwiseShiftLeft, BitwiseShiftRight, BitwiseShiftRightZeroFill, } -impl Operand { +impl BinaryOperand { + pub fn from_token(token: &Token) -> Option { + match token { + Token::Plus => Some(BinaryOperand::Add), + Token::Minus => Some(BinaryOperand::Subtract), + Token::Asterisk => Some(BinaryOperand::Multiply), + Token::Slash => Some(BinaryOperand::Divide), + Token::Modulo => Some(BinaryOperand::Modulo), + Token::And => Some(BinaryOperand::And), + Token::Or => Some(BinaryOperand::Or), + Token::Equal => Some(BinaryOperand::Equal), + Token::NotEqual => Some(BinaryOperand::NotEqual), + Token::GreaterThan => Some(BinaryOperand::GreaterThan), + Token::GreaterThanOrEqual => Some(BinaryOperand::GreaterThanOrEqual), + Token::LessThan => Some(BinaryOperand::LessThan), + Token::LessThanOrEqual => Some(BinaryOperand::LessThanOrEqual), + Token::Exponentiation => Some(BinaryOperand::Exponentiation), + Token::Concat => Some(BinaryOperand::Concat), + // Token::In => Some(BinaryOperand::In), + // Token::NotIn => Some(BinaryOperand::NotIn), + Token::Like => Some(BinaryOperand::Like), + // Token::NotLike => Some(BinaryOperand::NotLike), + // Token::Between => Some(BinaryOperand::Between), + // Token::NotBetween => 
Some(BinaryOperand::NotBetween), + Token::BitwiseAnd => Some(BinaryOperand::BitwiseAnd), + Token::BitwiseOr => Some(BinaryOperand::BitwiseOr), + Token::BitwiseXor => Some(BinaryOperand::BitwiseXor), + Token::BitwiseShiftLeft => Some(BinaryOperand::BitwiseShiftLeft), + Token::BitwiseShiftRight => Some(BinaryOperand::BitwiseShiftRight), + // Token::BitwiseShiftRightZeroFill => Some(BinaryOperand::BitwiseShiftRightZeroFill), + _ => None, + } + } + pub fn generate_code(&self) -> String { match self { - Operand::Add => PLUS_SYMBOL, - Operand::Subtract => MINUS_SYMBOL, - Operand::Multiply => ASTERISK_SYMBOL, - Operand::Divide => SLASH_SYMBOL, - Operand::Modulo => MODULO_SYMBOL, - Operand::And => AND_KEYWORD, - Operand::Or => OR_KEYWORD, - Operand::Equal => EQUAL_SYMBOL, - Operand::NotEqual => NOT_EQUAL_SYMBOL, - Operand::GreaterThan => GREATER_THAN_SYMBOL, - Operand::GreaterThanOrEqual => GREATER_THAN_OR_EQUAL_SYMBOL, - Operand::LessThan => LESS_THAN_SYMBOL, - Operand::LessThanOrEqual => LESS_THAN_OR_EQUAL_SYMBOL, - Operand::Not => NOT_KEYWORD, - Operand::SquareRoot => SQUARE_ROOT_SYMBOL, - Operand::CubeRoot => CUBE_ROOT_SYMBOL, - Operand::Factorial => FACTORIAL_SYMBOL, - Operand::FactorialPrefix => FACTORIAL_PREFIX_SYMBOL, - Operand::AbsoluteValue => ABS_SYMBOL, - Operand::Exponentiation => EXPONENTIATION_SYMBOL, - Operand::Concat => CONCAT_SYMBOL, - Operand::Is => IS_KEYWORD, - Operand::IsNot => "IS NOT", - Operand::In => "IN", - Operand::NotIn => "NOT IN", - Operand::Like => LIKE_KEYWORD, - Operand::NotLike => "NOT LIKE", - Operand::Between => "BETWEEN", - Operand::NotBetween => "NOT BETWEEN", - Operand::Exists => "EXISTS", - Operand::NotExists => "NOT EXISTS", - Operand::Null => NULL_KEYWORD, - Operand::NotNull => "NOT NULL", - Operand::BitwiseAnd => BITWISE_AND_SYMBOL, - Operand::BitwiseOr => BITWISE_OR_SYMBOL, - Operand::BitwiseXor => BITWISE_XOR_SYMBOL, - Operand::BitwiseNot => BITWISE_NOT_SYMBOL, - Operand::BitwiseShiftLeft => BITWISE_SHIFT_LEFT_SYMBOL, - Operand::BitwiseShiftRight => BITWISE_SHIFT_RIGHT_SYMBOL, - Operand::BitwiseShiftRightZeroFill => ">>", + BinaryOperand::Add => "+", + BinaryOperand::Subtract => "-", + BinaryOperand::Multiply => "*", + BinaryOperand::Divide => "/", + BinaryOperand::Modulo => "%", + BinaryOperand::And => "AND", + BinaryOperand::Or => "OR", + BinaryOperand::Equal => "=", + BinaryOperand::NotEqual => "<>", + BinaryOperand::GreaterThan => ">", + BinaryOperand::GreaterThanOrEqual => ">=", + BinaryOperand::LessThan => "<", + BinaryOperand::LessThanOrEqual => "<=", + BinaryOperand::Exponentiation => "^", + BinaryOperand::Concat => "||", + BinaryOperand::In => "IN", + BinaryOperand::NotIn => "NOT IN", + BinaryOperand::Like => "LIKE", + BinaryOperand::NotLike => "NOT LIKE", + BinaryOperand::Between => "BETWEEN", + BinaryOperand::NotBetween => "NOT BETWEEN", + BinaryOperand::BitwiseAnd => "&", + BinaryOperand::BitwiseOr => "|", + BinaryOperand::BitwiseXor => "#", + BinaryOperand::BitwiseShiftLeft => "<<", + BinaryOperand::BitwiseShiftRight => ">>", + BinaryOperand::BitwiseShiftRightZeroFill => ">>>", } .to_string() } +} + +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +pub enum UnaryOperand { + Not, + SquareRoot, + CubeRoot, + Factorial, + FactorialPrefix, + AbsoluteValue, + Exists, + NotExists, + IsNull, + IsNotNull, + BitwiseNot, + Minus, +} + +impl UnaryOperand { + pub fn from_token(token: &Token) -> Option { + match token { + Token::Not => Some(UnaryOperand::Not), + Token::SquareRoot => Some(UnaryOperand::SquareRoot), + Token::CubeRoot => 
Some(UnaryOperand::CubeRoot), + Token::Factorial => Some(UnaryOperand::Factorial), + Token::FactorialPrefix => Some(UnaryOperand::FactorialPrefix), + Token::AbsoluteValue => Some(UnaryOperand::AbsoluteValue), + // Token::Exists => Some(UnaryOperand::Exists), + // Token::NotExists => Some(UnaryOperand::NotExists), + // Token::IsNull => Some(UnaryOperand::IsNull), + // Token::IsNotNull => Some(UnaryOperand::IsNotNull), + Token::BitwiseNot => Some(UnaryOperand::BitwiseNot), + Token::Minus => Some(UnaryOperand::Minus), + _ => None, + } + } - pub fn from_token(token: &Token, cursor: usize) -> Result { + pub fn postfix_from_token(token: &Token) -> Option { match token { - Token::Plus => Ok(Operand::Add), - Token::Minus => Ok(Operand::Subtract), - Token::Asterisk => Ok(Operand::Multiply), - Token::Slash => Ok(Operand::Divide), - Token::Modulo => Ok(Operand::Modulo), - Token::And => Ok(Operand::And), - Token::Or => Ok(Operand::Or), - Token::Equal => Ok(Operand::Equal), - Token::NotEqual => Ok(Operand::NotEqual), - Token::GreaterThan => Ok(Operand::GreaterThan), - Token::GreaterThanOrEqual => Ok(Operand::GreaterThanOrEqual), - Token::LessThan => Ok(Operand::LessThan), - Token::LessThanOrEqual => Ok(Operand::LessThanOrEqual), - Token::Not => Ok(Operand::Not), - Token::SquareRoot => Ok(Operand::SquareRoot), - Token::CubeRoot => Ok(Operand::CubeRoot), - Token::Factorial => Ok(Operand::Factorial), - Token::FactorialPrefix => Ok(Operand::FactorialPrefix), - Token::AbsoluteValue => Ok(Operand::AbsoluteValue), - Token::Exponentiation => Ok(Operand::Exponentiation), - Token::Concat => Ok(Operand::Concat), - Token::Is => Ok(Operand::Is), - // Token::IsNot => Ok(Operand::IsNot), - // Token::In => Ok(Operand::In), - // Token::NotIn => Ok(Operand::NotIn), - Token::Like => Ok(Operand::Like), - // Token::NotLike => Ok(Operand::NotLike), - // Token::Between => Ok(Operand::Between), - // Token::NotBetween => Ok(Operand::NotBetween), - // Token::Exists => Ok(Operand::Exists), - // Token::NotExists => Ok(Operand::NotExists), - // Token::Null => Ok(Operand::Null), - // Token::NotNull => Ok(Operand::NotNull), - Token::BitwiseAnd => Ok(Operand::BitwiseAnd), - Token::BitwiseOr => Ok(Operand::BitwiseOr), - Token::BitwiseXor => Ok(Operand::BitwiseXor), - Token::BitwiseNot => Ok(Operand::BitwiseNot), - Token::BitwiseShiftLeft => Ok(Operand::BitwiseShiftLeft), - Token::BitwiseShiftRight => Ok(Operand::BitwiseShiftRight), - // Token::BitwiseShiftRightZeroFill => Ok(Operand::BitwiseShiftRightZeroFill), - _ => Err(ParsingError::Internal { - msg: format!("Unrecognized token: {:?}", token), - cursor, - }), + Token::Factorial => Some(UnaryOperand::Factorial), + _ => None, + } + } + + pub fn generate_code(&self) -> String { + match self { + UnaryOperand::Not => "NOT", + UnaryOperand::SquareRoot => "SQRT", + UnaryOperand::CubeRoot => "CBRT", + UnaryOperand::Factorial => "!", + UnaryOperand::FactorialPrefix => "!", + UnaryOperand::AbsoluteValue => "ABS", + UnaryOperand::Exists => "EXISTS", + UnaryOperand::NotExists => "NOT EXISTS", + UnaryOperand::IsNull => "IS NULL", + UnaryOperand::IsNotNull => "IS NOT NULL", + UnaryOperand::BitwiseNot => "~", + UnaryOperand::Minus => "-", + } + .to_string() + } +} + +#[derive(Clone, Eq, PartialEq, Debug, TreeDisplay)] +pub enum Operand { + Binary(BinaryOperand), + Unary(UnaryOperand), +} + +impl Operand { + pub fn generate_code(&self) -> String { + match self { + Operand::Binary(value) => value.generate_code(), + Operand::Unary(value) => value.generate_code(), + } + } + + pub fn from_token(token: 
&Token) -> Option { + if let Some(value) = BinaryOperand::from_token(token) { + Some(Operand::Binary(value)) + } else if let Some(value) = UnaryOperand::from_token(token) { + Some(Operand::Unary(value)) + } else { + None } } } @@ -455,7 +499,7 @@ impl Operand { pub struct BinaryExpression { pub first: Box, pub second: Box, - pub operand: Operand, + pub operand: BinaryOperand, } impl BinaryExpression { @@ -473,7 +517,7 @@ impl BinaryExpression { #[tree_display(rename_all_pascal)] pub struct UnaryExpression { pub first: Box, - pub operand: Operand, + pub operand: UnaryOperand, } impl UnaryExpression { @@ -634,16 +678,16 @@ mod ast_tests { col_name: "name".to_owned(), table_name: None, })), - operand: Operand::NotEqual, + operand: BinaryOperand::NotEqual, second: Box::new(Expression::Literal(LiteralExpression::String("Rachel".to_owned()))), })), - operand: Operand::And, + operand: BinaryOperand::And, second: Box::new(Expression::Binary(BinaryExpression { first: Box::new(Expression::TableColumn(TableColumn { col_name: "id".to_owned(), table_name: None, })), - operand: Operand::LessThan, + operand: BinaryOperand::LessThan, second: Box::new(Expression::Literal(LiteralExpression::Numeric( "5".to_owned()))), })), }), @@ -698,7 +742,7 @@ mod ast_tests { col_name: "id".to_owned(), table_name: None, })), - operand: Operand::Divide, + operand: BinaryOperand::Divide, second: Box::new(Expression::Literal(LiteralExpression::Numeric( "2".to_owned()))), })), typ: SqlType::Int, @@ -732,10 +776,10 @@ mod ast_tests { })), typ: SqlType::Text, }), - operand: Operand::Concat, + operand: BinaryOperand::Concat, second: Box::new(Expression::Binary(BinaryExpression { first: Box::new(Expression::Literal(LiteralExpression::String( " ".to_owned()))), - operand: Operand::Concat, + operand: BinaryOperand::Concat, second: Box::new(Expression::TableColumn(TableColumn { col_name: "name".to_owned(), table_name: None, @@ -753,7 +797,7 @@ mod ast_tests { col_name: "id".to_owned(), table_name: None, })), - operand: Operand::GreaterThan, + operand: BinaryOperand::GreaterThan, second: Box::new(Expression::Literal(LiteralExpression::Numeric("1".to_owned()))), }), is_distinct: false, @@ -795,7 +839,7 @@ mod ast_tests { col_name: String::from("id"), table_name: Some(String::from("characters")), })), - operand: Operand::Equal, + operand: BinaryOperand::Equal, second: Box::new(Expression::TableColumn(TableColumn { col_name: String::from("character_id"), table_name: Some(String::from("character_roles")) , @@ -810,7 +854,7 @@ mod ast_tests { col_name: "id".to_owned(), table_name: None, })), - operand: Operand::NotEqual, + operand: BinaryOperand::NotEqual, second: Box::new(Expression::Literal(LiteralExpression::Numeric("2".to_owned()))), }), is_distinct: false, diff --git a/engine/src/backend.rs b/engine/src/backend.rs index 3e0c793..d802db3 100644 --- a/engine/src/backend.rs +++ b/engine/src/backend.rs @@ -13,51 +13,29 @@ pub trait Cell { fn as_bool(&self) -> Result; fn equals(&self, other: Self) -> bool; } -#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] pub struct ResultColumn { pub col_type: SqlType, pub name: String, } -impl From for String { - fn from(col_type: SqlType) -> Self { - match col_type { - SqlType::Char => "Char".to_string(), - SqlType::Text => "Text".to_string(), - SqlType::VarChar => "Varchar".to_string(), - SqlType::SmallInt => "Smallint".to_string(), - SqlType::Int => "Int".to_string(), - SqlType::BigInt => "Bigint".to_string(), - 
SqlType::Real => "Real".to_string(), - SqlType::DoublePrecision => "Double Precision".to_string(), - SqlType::Boolean => "Bool".to_string(), - SqlType::Null => "Null".to_string(), - SqlType::Type => "Type".to_string(), - } - } -} - -impl From<&SqlType> for String { - fn from(col_type: &SqlType) -> Self { - match col_type { - SqlType::Char => "Char".to_string(), - SqlType::Text => "Text".to_string(), - SqlType::VarChar => "Varchar".to_string(), - SqlType::SmallInt => "Smallint".to_string(), - SqlType::Int => "Int".to_string(), - SqlType::BigInt => "Bigint".to_string(), - SqlType::Real => "Real".to_string(), - SqlType::DoublePrecision => "Double Precision".to_string(), - SqlType::Boolean => "Bool".to_string(), - SqlType::Null => "Null".to_string(), - SqlType::Type => "Type".to_string(), - } - } -} - impl std::fmt::Display for SqlType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(String::from(self).as_str()) + match self { + SqlType::Char => write!(f, "Char"), + SqlType::Text => write!(f, "Text"), + SqlType::VarChar => write!(f, "Varchar"), + SqlType::SmallInt => write!(f, "Smallint"), + SqlType::Int => write!(f, "Int"), + SqlType::BigInt => write!(f, "Bigint"), + SqlType::Real => write!(f, "Real"), + SqlType::DoublePrecision => write!(f, "Double Precision"), + SqlType::Boolean => write!(f, "Bool"), + SqlType::Null => write!(f, "Null"), + SqlType::Type => write!(f, "Type"), + SqlType::Record => write!(f, "Record"), + SqlType::Numeric => todo!(), + } } } @@ -122,7 +100,7 @@ where pub type ResultColumns = Vec; -#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Default)] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Default)] pub struct QueryResults { pub columns: ResultColumns, pub rows: Vec>, diff --git a/engine/src/backend_memory/mod.rs b/engine/src/backend_memory/mod.rs index 0ebdb66..f957189 100644 --- a/engine/src/backend_memory/mod.rs +++ b/engine/src/backend_memory/mod.rs @@ -15,6 +15,7 @@ use crate::{ sql_types::{SqlType, SqlValue}, }; use instant::Instant; +use std::borrow::Cow; use std::collections::HashMap; use test_util::TestSubjectExt; @@ -23,7 +24,50 @@ const ERR_INVALID_CELL: &str = "Invalid Cell"; // const ERR_INVALID_OPERANDS: &str = "Invalid Operands"; const ANONYMOUS_COL_NAME: &str = "?column?"; -#[derive(Clone, PartialEq)] +type SqlRowIter = Box>>; + +pub trait Selectable { + fn select(&self) -> Result<(SqlRowIter, Vec), String>; +} + +pub struct InnerJoinIter { + +} + +pub struct OuterJoinIter { + +} + +pub struct LeftJoinIter { + +} + +pub struct RightJoinIter { + +} + +pub struct RightOuterJoinIter { + +} + +pub struct TableRowsIter { + +} + +pub struct IndexedTableRowsIter { + +} + +pub struct SubQueryIter { + +} + +#[derive(Clone, PartialEq, Eq)] +pub struct ExecutionContext {} + +type Thunk = Box Result<(SqlValue, Box), String>>; + +#[derive(Clone, PartialEq, Eq)] pub enum TableContainer<'a> { Temp(Box), Concrete(&'a Table), @@ -41,7 +85,8 @@ pub struct Index { impl Index { pub fn add_row(&mut self, table: &Table, row_index: usize) -> Result<(), String> { - let (index_value, _, _) = table.evaluate_cell(row_index, &self.expression)?; + let (thunk, _) = table.compile_cell(row_index, &self.expression)?; + let (index_value, _) = thunk(ExecutionContext {})?; if index_value.is_null() { return Err("Violates NOT NULL Constraint".to_string()); @@ -78,12 +123,12 @@ impl Index { } let supported_checks = vec![ - Operand::Equal, - Operand::NotEqual, - Operand::GreaterThan, - Operand::GreaterThanOrEqual, - 
Operand::LessThan, - Operand::LessThanOrEqual, + BinaryOperand::Equal, + BinaryOperand::NotEqual, + BinaryOperand::GreaterThan, + BinaryOperand::GreaterThanOrEqual, + BinaryOperand::LessThan, + BinaryOperand::LessThanOrEqual, ]; let is_supported = supported_checks.contains(&bin_exp.operand); @@ -121,7 +166,7 @@ impl Index { rows: Vec::with_capacity(100), }; - let (value, _, _) = match new_table.evaluate_cell(0, &value_exp) { + let (thunk, _) = match new_table.compile_cell(0, &value_exp) { Ok(value) => value, Err(err) => { eprintln!("{err}"); @@ -129,15 +174,17 @@ impl Index { } }; + let (value, _) = thunk(ExecutionContext {})?; + let mut row_indexes: Vec = Vec::with_capacity(100); match bin_exp.operand { - Operand::Equal => { + BinaryOperand::Equal => { if let Some(indexes) = self.tree.get(&value) { row_indexes.append(&mut indexes.clone()); } } - Operand::NotEqual => { + BinaryOperand::NotEqual => { for (key, indexes) in &self.tree { if *key == value { continue; @@ -145,7 +192,7 @@ impl Index { row_indexes.append(&mut indexes.clone()); } } - Operand::LessThan => { + BinaryOperand::LessThan => { for (key, indexes) in &self.tree { if key >= &value { break; @@ -153,7 +200,7 @@ impl Index { row_indexes.append(&mut indexes.clone()); } } - Operand::LessThanOrEqual => { + BinaryOperand::LessThanOrEqual => { for (key, indexes) in &self.tree { if key > &value { break; @@ -161,12 +208,12 @@ impl Index { row_indexes.append(&mut indexes.clone()); } } - Operand::GreaterThan => { + BinaryOperand::GreaterThan => { for (_, ref mut indexes) in self.tree.clone().split_off(&value) { row_indexes.append(indexes); } } - Operand::GreaterThanOrEqual => { + BinaryOperand::GreaterThanOrEqual => { if let Some(indexes) = self.tree.get(&value) { row_indexes.append(&mut indexes.clone()); } @@ -209,24 +256,29 @@ impl From> for Table { } impl Table { - pub fn evaluate_literal_cell( - &self, + pub fn evaluate_literal_cell<'a>( + &'a self, row_index: usize, - expression: &Expression, - ) -> Result<(SqlValue, &str, SqlType), String> { + expression: &'a Expression, + ) -> Result<(Thunk, SqlType), String> { match expression { Expression::Literal(literal_expression) => match &literal_expression { LiteralExpression::Identifier(value) => { for (i, table_col) in self.columns.iter().enumerate() { if table_col == value.as_str() { + let table_col = table_col.clone(); let typ = self.column_types.get(i).ok_or("Error accesing column")?; - let val = self - .rows - .get(row_index as usize) - .ok_or("Error accesing row")? - .get(i as usize) - .ok_or("Error accesing row's column")?; - return Ok((val.clone(), table_col, *typ)); + let thunk = Box::new(move |_| { + let val = self + .rows + .get(row_index as usize) + .ok_or("Error accesing row")? 
+ .get(i as usize) + .ok_or("Error accesing row's column")?; + + Ok((val.clone(), table_col.clone().into_boxed_str())) + }); + return Ok((thunk, *typ)); } } @@ -237,37 +289,44 @@ impl Table { let val = SqlValue::Numeric(SqlNumeric::DoublePrecision { value: value.parse::().map_err(|e| e.to_string())?, }); - Ok((val, "", typ)) + let thunk: Thunk = Box::new(move |_| Ok((val.clone(), "".into()))); + Ok((thunk, typ)) } LiteralExpression::String(value) => { let typ = SqlType::Text; - let val = SqlValue::Text(SqlText::Text { - value: value.clone(), - }); - Ok((val, "", typ)) + let val = SqlValue::Text(SqlText::Text { value: value.clone() }); + + let thunk: Thunk = Box::new(move |_| Ok((val.clone(), "".into()))); + Ok((thunk, typ)) } LiteralExpression::Bool(value) => { let typ = SqlType::Boolean; let val = SqlValue::Boolean(*value); - Ok((val, "", typ)) + let thunk: Thunk = Box::new(move |_| Ok((val.clone(), "".into()))); + Ok((thunk, typ)) } LiteralExpression::Null => { let typ = SqlType::Null; let val = SqlValue::Null; - Ok((val, "", typ)) + let thunk: Thunk = Box::new(move |_| Ok((val.clone(), "".into()))); + Ok((thunk, typ)) } }, Expression::TableColumn(table_column) => { - for (i, table_col) in self.columns.iter().enumerate() { - if table_col == &table_column.col_name { + for (i, table_col) in self.columns.iter().cloned().enumerate() { + if table_col == table_column.col_name { let typ = self.column_types.get(i).ok_or("Error accesing column")?; - let val = self - .rows - .get(row_index) - .ok_or("Error accesing row")? - .get(i) - .ok_or("Error accesing row's column")?; - return Ok((val.clone(), table_col, *typ)); + let thunk: Thunk = Box::new(move |_| { + let val = self + .rows + .get(row_index) + .ok_or("Error accesing row")? + .get(i) + .ok_or("Error accesing row's column")?; + + Ok((val.clone(), table_col.clone().into_boxed_str())) + }); + return Ok((thunk, *typ)); } } @@ -280,194 +339,182 @@ impl Table { let table_col = self .columns .get(table_column.col_idx) + .ok_or(ERR_COLUMN_DOES_NOT_EXIST)?.clone(); + let typ = self + .column_types + .get(table_column.col_idx) .ok_or(ERR_COLUMN_DOES_NOT_EXIST)?; + let thunk = Box::new(move |_| { let val = self .rows .get(row_index) .ok_or("Error accesing row")? .get(table_column.col_idx) .ok_or("Error accesing row's column")?; - let typ = self - .column_types - .get(table_column.col_idx) - .ok_or(ERR_COLUMN_DOES_NOT_EXIST)?; - Ok((val.clone(), table_col, *typ)) + + Ok((val.clone(), table_col.clone().into_boxed_str())) + }); + + Ok((thunk, *typ)) } _ => Err(ERR_INVALID_CELL.to_string()), } } - pub fn evaluate_binary_cell( - &self, + pub fn evaluate_binary_cell<'a>( + &'a self, row_index: usize, - expression: &Expression, - ) -> Result<(SqlValue, &str, SqlType), String> { + expression: &'a Expression, + ) -> Result<(Thunk, SqlType), String> { match expression { Expression::Binary(binary_expression) => { - let (first_val, _, _) = self.evaluate_cell(row_index, &binary_expression.first)?; - let (second_val, _, _) = - self.evaluate_cell(row_index, &binary_expression.second)?; + macro_rules! 
bin_thunk { + ($first_thunk:ident, $second_thunk:ident, $func:ident) => { + Box::new(move |ctx| { + let (first_val, _) = $first_thunk(ctx.clone())?; + let (second_val, _) = $second_thunk(ctx)?; + + Ok((SqlValue::$func(&first_val, &second_val)?, ANONYMOUS_COL_NAME.into())) + }) + }; + } + + let (first_thunk, typ1) = self.compile_cell(row_index, &binary_expression.first)?; - match binary_expression.operand { - Operand::Equal => { - let result = SqlValue::equals(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + let (second_thunk, typ2) = self.compile_cell(row_index, &binary_expression.second)?; + + let thunk: Thunk = match binary_expression.operand { + BinaryOperand::Equal => { + bin_thunk!(first_thunk, second_thunk, equals) } - Operand::NotEqual => { - let result = SqlValue::not_equal(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::NotEqual => { + bin_thunk!(first_thunk, second_thunk, not_equal) } - Operand::GreaterThan => { - let result = SqlValue::greater_than(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::GreaterThan => { + bin_thunk!(first_thunk, second_thunk, greater_than) } - Operand::GreaterThanOrEqual => { - let result = SqlValue::greater_than_or_equals(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::GreaterThanOrEqual => { + bin_thunk!(first_thunk, second_thunk, greater_than_or_equals) } - Operand::LessThan => { - let result = SqlValue::less_than(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::LessThan => { + bin_thunk!(first_thunk, second_thunk, less_than) } - Operand::LessThanOrEqual => { - let result = SqlValue::less_than_or_equals(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::LessThanOrEqual => { + bin_thunk!(first_thunk, second_thunk, less_than_or_equals) } - Operand::Concat => { - let result = SqlValue::concat(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::Concat => { + bin_thunk!(first_thunk, second_thunk, concat) } - Operand::Add => { - let result = SqlValue::add(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::Add => { + bin_thunk!(first_thunk, second_thunk, add) } - Operand::Subtract => { - let result = SqlValue::subtract(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::Subtract => { + bin_thunk!(first_thunk, second_thunk, subtract) } - Operand::Multiply => { - let result = SqlValue::multiply(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::Multiply => { + bin_thunk!(first_thunk, second_thunk, multiply) } - Operand::Divide => { - let result = SqlValue::divide(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::Divide => { + bin_thunk!(first_thunk, second_thunk, divide) } - Operand::Modulo => { - let result = SqlValue::modulo(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::Modulo => { + bin_thunk!(first_thunk, second_thunk, modulo) } - Operand::And => { - let result = SqlValue::and(&first_val, 
&second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::And => { + bin_thunk!(first_thunk, second_thunk, and) } - Operand::Or => { - let result = SqlValue::or(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::Or => { + bin_thunk!(first_thunk, second_thunk, or) } - Operand::Exponentiation => { - let result = SqlValue::exponentiation(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::Exponentiation => { + bin_thunk!(first_thunk, second_thunk, exponentiation) } - Operand::BitwiseAnd => { - let result = SqlValue::bitwise_and(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::BitwiseAnd => { + bin_thunk!(first_thunk, second_thunk, bitwise_and) } - Operand::BitwiseOr => { - let result = SqlValue::bitwise_or(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::BitwiseOr => { + bin_thunk!(first_thunk, second_thunk, bitwise_or) } - Operand::BitwiseXor => { - let result = SqlValue::bitwise_xor(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::BitwiseXor => { + bin_thunk!(first_thunk, second_thunk, bitwise_xor) } - Operand::BitwiseShiftLeft => { - let result = SqlValue::bitwise_shift_left(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::BitwiseShiftLeft => { + bin_thunk!(first_thunk, second_thunk, bitwise_shift_left) } - Operand::BitwiseShiftRight => { - let result = SqlValue::bitwise_shift_right(&first_val, &second_val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + BinaryOperand::BitwiseShiftRight => { + bin_thunk!(first_thunk, second_thunk, bitwise_shift_right) } - _ => Err(ERR_INVALID_CELL.to_string()), - } + BinaryOperand::In => todo!(), + BinaryOperand::NotIn => todo!(), + BinaryOperand::Like => todo!(), + BinaryOperand::NotLike => todo!(), + BinaryOperand::Between => todo!(), + BinaryOperand::NotBetween => todo!(), + BinaryOperand::BitwiseShiftRightZeroFill => todo!(), + }; + + let typ = typ1; + Ok((thunk, typ)) } Expression::Unary(unary_expression) => { - let (val, _, _) = self.evaluate_cell(row_index, &unary_expression.first)?; - match unary_expression.operand { - Operand::Subtract => { - let result = SqlValue::minus(&val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + let (thunk, typ) = self.compile_cell(row_index, &unary_expression.first)?; + + macro_rules! 
un_thunk { + ($thunk:ident, $func:ident) => { + Box::new(move |ctx| { + let (val, _) = $thunk(ctx)?; + Ok((SqlValue::$func(&val)?, ANONYMOUS_COL_NAME.into())) + }) + }; + } + + let thunk: Thunk = match unary_expression.operand { + UnaryOperand::Minus => { + un_thunk!(thunk, minus) } - Operand::SquareRoot => { - let result = SqlValue::square_root(&val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + UnaryOperand::SquareRoot => { + un_thunk!(thunk, square_root) } - Operand::CubeRoot => { - let result = SqlValue::cube_root(&val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + UnaryOperand::CubeRoot => { + un_thunk!(thunk, cube_root) } - Operand::Factorial | Operand::FactorialPrefix => { - let result = SqlValue::factorial(&val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + UnaryOperand::Factorial | UnaryOperand::FactorialPrefix => { + un_thunk!(thunk, factorial) } - Operand::Not => { - let result = SqlValue::not(&val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + UnaryOperand::Not => { + un_thunk!(thunk, not) } - Operand::AbsoluteValue => { - let result = SqlValue::abs(&val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + UnaryOperand::AbsoluteValue => { + un_thunk!(thunk, abs) } - Operand::BitwiseNot => { - let result = SqlValue::bitwise_not(&val)?; - let typ = result.get_type(); - Ok((result, ANONYMOUS_COL_NAME, typ)) + UnaryOperand::BitwiseNot => { + un_thunk!(thunk, bitwise_not) } - _ => Err(ERR_INVALID_CELL.to_string()), - } + UnaryOperand::Exists => todo!(), + UnaryOperand::NotExists => todo!(), + UnaryOperand::IsNull => todo!(), + UnaryOperand::IsNotNull => todo!(), + }; + Ok((thunk, typ)) } Expression::Cast { data, typ } => { - let (val, _, _) = self.evaluate_cell(row_index, data)?; - let result = val.explicit_cast_to_type(*typ)?; - Ok((result, ANONYMOUS_COL_NAME, *typ)) + let (thunk, _) = self.compile_cell(row_index, data)?; + let thunk = Box::new(move |ctx| { + let (val, _) = thunk(ctx)?; + Ok((val.explicit_cast_to_type(*typ)?, ANONYMOUS_COL_NAME.into())) + }); + Ok((thunk, *typ)) } _ => Err(ERR_INVALID_CELL.to_string()), } } - pub fn evaluate_cell( + pub fn compile_cell( &self, row_index: usize, expression: &Expression, - ) -> Result<(SqlValue, &str, SqlType), String> { + ) -> Result<(Thunk, SqlType), String> { match expression { Expression::Literal(_) | Expression::TableColumn(_) @@ -481,21 +528,42 @@ impl Table { if select_statement.items.len() != 1 { return Err("Subquery must return only one column".to_string()); } - if Expression::Empty != select_statement.where_clause { - if let (SqlValue::Boolean(false), _, SqlType::Boolean) = - self.evaluate_cell(row_index, &select_statement.where_clause)? - { - return Ok((SqlValue::Null, ANONYMOUS_COL_NAME, SqlType::Null)); - } - } + if let Some(item) = select_statement.items.get(0) { - let (result, _, typ) = self.evaluate_cell(row_index, &item.expression)?; + let (result, typ) = self.compile_cell(row_index, &item.expression)?; Ok((result, ANONYMOUS_COL_NAME, typ)) } else { Err("Subquery must return only one column".to_string()) + }; + let (where_thunk, where_typ): (Thunk, SqlType) = if Expression::Empty != select_statement.where_clause { + self.compile_cell(row_index, &select_statement.where_clause)? 
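// Editor's sketch: the surrounding hunk replaces eager `evaluate_cell` calls with a
// `compile_cell` step that returns a boxed closure (the `Thunk` alias) plus the value's
// SqlType; the closure is only run later with an `ExecutionContext`. A minimal,
// self-contained illustration of that compile-then-execute shape follows. All names here
// (`Ctx`, `Expr`, `CompiledThunk`, `compile`) are stand-ins for illustration, not the
// engine's actual types.
struct Ctx;

enum Expr {
    Lit(i64),
    Add(Box<Expr>, Box<Expr>),
}

type CompiledThunk = Box<dyn Fn(&Ctx) -> Result<i64, String>>;

fn compile(expr: &Expr) -> CompiledThunk {
    match expr {
        Expr::Lit(v) => {
            // Capture the literal by value so the closure owns everything it needs.
            let v = *v;
            Box::new(move |_ctx| Ok(v))
        }
        Expr::Add(a, b) => {
            // Compile the operands once; the returned closure only combines their results.
            let (a, b) = (compile(a), compile(b));
            Box::new(move |ctx| Ok(a(ctx)? + b(ctx)?))
        }
    }
}

fn compile_then_execute_demo() -> Result<(), String> {
    // Compile once, then evaluate as many times as needed with a context.
    let thunk = compile(&Expr::Add(Box::new(Expr::Lit(1)), Box::new(Expr::Lit(2))));
    assert_eq!(thunk(&Ctx)?, 3);
    Ok(())
}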
+ } else { + (Box::new(move |ctx| { + Ok((SqlValue::Boolean(true), ANONYMOUS_COL_NAME.into())) + }), SqlType::Boolean) + }; + if where_typ != SqlType::Boolean { + return Err("Subquery WHERE clause must return a boolean".to_string()); + } + + if let Some(item) = select_statement.items.get(0) { + let (thunk, typ) = self.compile_cell(row_index, &item.expression)?; + let thunk = Box::new(move |ctx: ExecutionContext| { + let (where_val, _) = where_thunk(ctx.clone())?; + if let SqlValue::Boolean(true) = where_val { + thunk(ctx) + } else if let SqlValue::Boolean(true) = where_val { + Ok((SqlValue::Null, ANONYMOUS_COL_NAME.into())) + } else { + Err(ERR_INVALID_CELL.to_string()) + } + }); + Ok((thunk, typ)) + } else { + Err("Subquery must return only one column".to_string()) } } - _ => Err(ERR_INVALID_CELL.to_string()), + Expression::Empty => Err(ERR_INVALID_CELL.to_string()), } } @@ -519,7 +587,7 @@ impl Table { } } -#[derive(PartialEq, Default)] +#[derive(PartialEq, Eq, Default)] pub struct MemoryBackend { tables: HashMap, parser: Parser, @@ -667,7 +735,9 @@ impl MemoryBackend { } Some(value) => value, }; - let (index_value, _, _) = table.evaluate_cell(row_index, &index.expression)?; + let (index_thunk, _) = table.compile_cell(row_index, &index.expression)?; + + let (index_value, _) = index_thunk(ExecutionContext { })?; if index_value.is_null() { table.rows.remove(row_index); @@ -866,7 +936,8 @@ impl MemoryBackend { let mut new_row = row.clone(); new_row.append(&mut source_row.clone()); temp_table.rows = vec![new_row.clone()]; - let (result, _, _) = temp_table.evaluate_cell(0, on)?; + let (thunk, _) = temp_table.compile_cell(0, on)?; + let (result, _) = thunk(ExecutionContext { })?; if let SqlValue::Boolean(true) = result { used_source_indices.push(source_index); @@ -1012,8 +1083,10 @@ impl MemoryBackend { match &select_statement.where_clause { Expression::Empty => {} _ => { - let (cell_val, _, _) = - table.evaluate_cell(row_index, &select_statement.where_clause)?; + let (cell_thunk, _) = + table.compile_cell(row_index, &select_statement.where_clause)?; + + let (cell_val, _) = cell_thunk(ExecutionContext { })?; if let SqlValue::Boolean(true) = cell_val { } else { @@ -1029,8 +1102,10 @@ impl MemoryBackend { } for select_item in &final_select_items { - let (cell_val, col_name, col_type) = - table.evaluate_cell(row_index, &select_item.expression)?; + let (cell_thunk, col_type) = + table.compile_cell(row_index, &select_item.expression)?; + + let (cell_val, col_name) = cell_thunk(ExecutionContext { })?; if is_first_row { match &select_item.as_clause { @@ -1057,7 +1132,8 @@ impl MemoryBackend { } if let Some(ref order_by) = select_statement.order_by { - let (new_ord_val, _, _) = table.evaluate_cell(row_index, &order_by.exp)?; + let (new_ord_thunk, _) = table.compile_cell(row_index, &order_by.exp)?; + let (new_ord_val, _) = new_ord_thunk(ExecutionContext { })?; let mut index = results_order.len(); for (i, val) in results_order.iter().enumerate() { @@ -1083,12 +1159,10 @@ impl MemoryBackend { pub fn drop_table(&mut self, drop_table_statement: DropTableStatement) -> Result { match self.tables.get(&drop_table_statement.name) { - None => { - return Err(format!( - "Table \"{}\" doesn't exist.", - drop_table_statement.name.clone() - )); - } + None => Err(format!( + "Table \"{}\" doesn't exist.", + drop_table_statement.name.clone() + )), Some(_) => { self.tables.remove(&drop_table_statement.name); Ok(true) @@ -1191,10 +1265,10 @@ pub fn linearize_expressions( return expressions; } if let 
Some(Expression::Binary(ref bin_exp)) = where_clause { - if bin_exp.operand == Operand::Or { + if bin_exp.operand == BinaryOperand::Or { return expressions; } - if (bin_exp.operand) == Operand::And { + if (bin_exp.operand) == BinaryOperand::And { let exps = linearize_expressions(Some(*bin_exp.first.clone()), expressions); return linearize_expressions(Some(*bin_exp.second.clone()), exps); } diff --git a/engine/src/lib.rs b/engine/src/lib.rs index 52a80ff..25e5aea 100644 --- a/engine/src/lib.rs +++ b/engine/src/lib.rs @@ -1,12 +1,3 @@ -#[cfg(not(target_env = "msvc"))] -#[cfg(feature = "jemallocator")] -extern crate jemallocator; - -#[cfg(not(target_env = "msvc"))] -#[cfg(feature = "jemallocator")] -#[global_allocator] -static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; - #[cfg(feature = "mimalloc")] use mimalloc::MiMalloc; @@ -14,16 +5,6 @@ use mimalloc::MiMalloc; #[global_allocator] static GLOBAL: MiMalloc = MiMalloc; -#[cfg(feature = "tcmalloc")] -extern crate tcmalloc; - -#[cfg(feature = "tcmalloc")] -use tcmalloc::TCMalloc; - -#[cfg(feature = "tcmalloc")] -#[global_allocator] -static GLOBAL: TCMalloc = TCMalloc; - pub mod ast; pub mod backend; pub mod backend_memory; diff --git a/engine/src/parser/expression.rs b/engine/src/parser/expression.rs index 6a0695b..f7fa433 100644 --- a/engine/src/parser/expression.rs +++ b/engine/src/parser/expression.rs @@ -50,15 +50,19 @@ pub fn parse_expression<'a>( } else { ret_parse_err!(tokens, cursor, "Expected closing Parenthesis"); } - } else if cursor < tokens.len() && UNARY_OPERATORS.contains(&tokens[cursor].token) { - let token = &tokens[cursor]; - let operand = token.token.clone(); + } else if let (true, Some(operand)) = ( + cursor < tokens.len(), + UnaryOperand::from_token(&tokens[cursor].token), + ) { cursor += 1; let mut nested_un_ops = vec![operand]; let mut inner_exp; loop { - if cursor < tokens.len() && UNARY_OPERATORS.contains(&tokens[cursor].token) { - nested_un_ops.push(tokens[cursor].token.clone()); + if let (true, Some(operand)) = ( + cursor < tokens.len(), + UnaryOperand::from_token(&tokens[cursor].token), + ) { + nested_un_ops.push(operand); cursor += 1; } else { break; @@ -94,7 +98,7 @@ pub fn parse_expression<'a>( if let Some(operand) = nested_un_ops.pop() { inner_exp = Expression::Unary(UnaryExpression { first: Box::from(inner_exp), - operand: Operand::from_token(&operand, cursor)?, + operand, }); } else { ret_parse_err!(tokens, cursor, "Expected Unary Operator"); @@ -102,7 +106,7 @@ pub fn parse_expression<'a>( while let Some(operand) = nested_un_ops.pop() { inner_exp = Expression::Unary(UnaryExpression { first: Box::from(inner_exp), - operand: Operand::from_token(&operand, cursor)?, + operand, }); } expression = inner_exp; @@ -123,11 +127,14 @@ pub fn parse_expression<'a>( }), ) = (tokens.get(cursor), tokens.get(cursor + 1)) { - if UNARY_POSTFIX_OPERATORS.contains(token1) && BINARY_OPERATORS.contains(token2) { + if let (Some(operand), true) = ( + UnaryOperand::postfix_from_token(token1), + BINARY_OPERATORS.contains(token2), + ) { cursor += 2; expression = Expression::Unary(UnaryExpression { first: Box::from(expression), - operand: Operand::from_token(token1, cursor)?, + operand, }); } } @@ -139,11 +146,11 @@ pub fn parse_expression<'a>( } // Makes sure that if there are postfix unary ops, they are applied in the current expression before continuing. 
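// Editor's sketch: the hunk below swaps the `UNARY_POSTFIX_OPERATORS.contains(token)`
// membership check followed by a fallible `Operand::from_token(token, cursor)?` for a
// single Option-returning classifier (`UnaryOperand::postfix_from_token`), so testing and
// converting the token happen in one step. A minimal stand-alone version of that pattern;
// `Tok` and `PostfixOp` are illustrative stand-ins, not the parser's real types.
enum Tok {
    Bang,
    Plus,
}

#[derive(Debug, PartialEq)]
enum PostfixOp {
    Factorial,
}

impl PostfixOp {
    // `None` simply means "not a postfix operator", so callers no longer need a
    // separate error path (or a cursor) just to reject unrecognized tokens.
    fn from_token(tok: &Tok) -> Option<PostfixOp> {
        match tok {
            Tok::Bang => Some(PostfixOp::Factorial),
            _ => None,
        }
    }
}

fn postfix_classify_demo() {
    assert_eq!(PostfixOp::from_token(&Tok::Bang), Some(PostfixOp::Factorial));
    assert_eq!(PostfixOp::from_token(&Tok::Plus), None);
}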
- if UNARY_POSTFIX_OPERATORS.contains(token) { + if let Some(operand) = UnaryOperand::postfix_from_token(token) { if !expression.is_empty() { expression = Expression::Unary(UnaryExpression { first: Box::from(expression), - operand: Operand::from_token(token, cursor)?, + operand, }); cursor += 1; last_cursor = cursor; @@ -215,15 +222,18 @@ pub fn parse_expression<'a>( false, takes_as_clause, )?; - let operand = Operand::from_token(&operand_tok, cursor)?; + let operand = BinaryOperand::from_token(&operand_tok).ok_or(ParsingError::Internal { + msg: format!("Unrecognized token: {:?}", operand_tok), + cursor, + })?; cursor = new_cursor; if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - if UNARY_POSTFIX_OPERATORS.contains(token) { + if let Some(operand) = UnaryOperand::from_token(token) { cursor += 1; second_expression = Expression::Unary(UnaryExpression { first: Box::from(second_expression), - operand: Operand::from_token(token, cursor)?, + operand, }); } } @@ -237,11 +247,11 @@ pub fn parse_expression<'a>( } if let Some(TokenContainer { token, loc: _ }) = tokens.get(cursor) { - if UNARY_POSTFIX_OPERATORS.contains(token) && is_top_level { + if let (Some(operand), true) = (UnaryOperand::postfix_from_token(token), is_top_level) { cursor += 1; expression = Expression::Unary(UnaryExpression { first: Box::from(expression), - operand: Operand::from_token(token, cursor)?, + operand, }); } } diff --git a/engine/src/parser/joins.rs b/engine/src/parser/joins.rs index 5754cea..2a3ef10 100644 --- a/engine/src/parser/joins.rs +++ b/engine/src/parser/joins.rs @@ -113,7 +113,7 @@ pub fn parse_joins<'a>( let (col2, new_cursor) = parse_table_column(tokens, cursor)?; cursor = new_cursor; - let operand = if let Ok(o) = Operand::from_token(&operand_token, cursor) { + let operand = if let Some(o) = BinaryOperand::from_token(&operand_token) { o } else { ret_parse_err!( diff --git a/engine/src/parser/statement.rs b/engine/src/parser/statement.rs index 85975f7..297c80a 100644 --- a/engine/src/parser/statement.rs +++ b/engine/src/parser/statement.rs @@ -42,7 +42,7 @@ pub fn parse_statement<'a>( Ok((create_index, new_cursor)) => { Ok((Statement::CreateIndexStatement(create_index), new_cursor)) } - Err(err) => (Err(err)), + Err(err) => Err(err), } } Token::Unique => match tokens.get(cursor + 2) { @@ -56,7 +56,7 @@ pub fn parse_statement<'a>( Statement::CreateIndexStatement(create_index), new_cursor, )), - Err(err) => (Err(err)), + Err(err) => Err(err), } } Some(TokenContainer { @@ -76,7 +76,7 @@ pub fn parse_statement<'a>( Ok((create_table, new_cursor)) => { Ok((Statement::CreateTableStatement(create_table), new_cursor)) } - Err(err) => (Err(err)), + Err(err) => Err(err), } } _ => ret_parse_err!(tokens, cursor, "Invalid Create Statement"), @@ -89,7 +89,7 @@ pub fn parse_statement<'a>( // Look for an DROP statement match parse_drop_table_statement(tokens, cursor, delimiter) { Ok((drop, new_cursor)) => Ok((Statement::DropTableStatement(drop), new_cursor)), - Err(err) => (Err(err)), + Err(err) => Err(err), } } _ => ret_parse_err!(tokens, cursor, "Expected a valid Statement"), diff --git a/engine/src/sql_types/mod.rs b/engine/src/sql_types/mod.rs index 28bed0a..0af1382 100644 --- a/engine/src/sql_types/mod.rs +++ b/engine/src/sql_types/mod.rs @@ -1,6 +1,7 @@ use byteorder::{BigEndian, ReadBytesExt}; use std::cmp::Ordering; use std::convert::{TryFrom, TryInto}; +use std::fmt; use std::io::Read; use crate::lexer::TokenContainer; @@ -21,12 +22,14 @@ pub enum SqlType { BigInt, Real, DoublePrecision, + 
Numeric, Text, Char, VarChar, Boolean, Null, Type, + Record, } impl TryFrom<(&Token<'_>, usize)> for SqlType { @@ -102,17 +105,42 @@ impl SqlType { SqlType::BigInt => 3, SqlType::Real => 20, SqlType::DoublePrecision => 21, + SqlType::Numeric => 22, SqlType::Char => 101, SqlType::VarChar => 102, SqlType::Text => 103, SqlType::Boolean => 0, SqlType::Null => -1000, SqlType::Type => -2000, + SqlType::Record => todo!(), } } } -#[derive(Debug, Clone, PartialEq, PartialOrd, TreeDisplay)] +#[derive( + Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, TreeDisplay, +)] +pub enum InferredSqlType { + Numeric, + Text, + Comparable, + Concrete(SqlType), + Unspecified, +} + +impl InferredSqlType { + pub fn default_type(&self) -> Option { + match self { + InferredSqlType::Numeric => Some(SqlType::Numeric), + InferredSqlType::Text => Some(SqlType::Text), + InferredSqlType::Comparable => None, + InferredSqlType::Concrete(sql_type) => Some(*sql_type), + InferredSqlType::Unspecified => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, TreeDisplay)] pub enum SqlTypeError { ConversionError(String), ParseError(String), @@ -123,16 +151,16 @@ pub enum SqlTypeError { Infallible, } -impl ToString for SqlTypeError { - fn to_string(&self) -> String { +impl fmt::Display for SqlTypeError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - SqlTypeError::ConversionError(msg) => msg.clone(), - SqlTypeError::ParseError(msg) => msg.clone(), - SqlTypeError::DecodeError(msg) => msg.clone(), - SqlTypeError::TypeMismatchError(msg) => msg.clone(), - SqlTypeError::OverflowError(msg) => msg.clone(), - SqlTypeError::OperationError(msg) => msg.clone(), - SqlTypeError::Infallible => "wut".to_string(), + SqlTypeError::ConversionError(msg) => write!(f, "ConversionError: {}", msg), + SqlTypeError::ParseError(msg) => write!(f, "ParseError: {}", msg), + SqlTypeError::DecodeError(msg) => write!(f, "DecodeError: {}", msg), + SqlTypeError::TypeMismatchError(msg) => write!(f, "TypeMismatchError: {}", msg), + SqlTypeError::OverflowError(msg) => write!(f, "OverflowError: {}", msg), + SqlTypeError::OperationError(msg) => write!(f, "OperationError: {}", msg), + SqlTypeError::Infallible => write!(f, "This shouldn't happen"), } } } @@ -144,6 +172,7 @@ pub enum SqlValue { Numeric(SqlNumeric), Boolean(bool), Type(SqlType), + Record(Vec), } impl Default for SqlValue { @@ -174,9 +203,11 @@ impl Serialize for SqlValue { SqlNumeric::BigInt { value } => serializer.serialize_i64(*value), SqlNumeric::Real { value } => serializer.serialize_f32(*value), SqlNumeric::DoublePrecision { value } => serializer.serialize_f64(*value), + SqlNumeric::Numeric { value } => serializer.serialize_f64(*value), }, SqlValue::Boolean(val) => serializer.serialize_bool(*val), SqlValue::Type(typ) => serializer.serialize_str(&typ.to_string()), + SqlValue::Record(_) => todo!(), } } } @@ -188,6 +219,7 @@ pub enum SqlNumeric { BigInt { value: i64 }, Real { value: f32 }, DoublePrecision { value: f64 }, + Numeric { value: f64 }, } impl Eq for SqlNumeric {} @@ -380,6 +412,17 @@ impl SqlValue { SqlValue::Numeric(*num1), SqlValue::Numeric(SqlNumeric::Int { value: *v2 as i32 }), )), + (SqlNumeric::SmallInt { value: v1 }, SqlNumeric::Numeric { value: v2 }) => todo!(), + (SqlNumeric::Int { value: v1 }, SqlNumeric::Numeric { value: v2 }) => todo!(), + (SqlNumeric::BigInt { value: v1 }, SqlNumeric::Numeric { value: v2 }) => todo!(), + (SqlNumeric::Real { value: v1 }, SqlNumeric::Numeric { value: v2 }) => todo!(), + 
(SqlNumeric::DoublePrecision { value: v1 }, SqlNumeric::Numeric { value: v2 }) => todo!(), + (SqlNumeric::Numeric { value: v1 }, SqlNumeric::SmallInt { value: v2 }) => todo!(), + (SqlNumeric::Numeric { value: v1 }, SqlNumeric::Int { value: v2 }) => todo!(), + (SqlNumeric::Numeric { value: v1 }, SqlNumeric::BigInt { value: v2 }) => todo!(), + (SqlNumeric::Numeric { value: v1 }, SqlNumeric::Real { value: v2 }) => todo!(), + (SqlNumeric::Numeric { value: v1 }, SqlNumeric::DoublePrecision { value: v2 }) => todo!(), + (SqlNumeric::Numeric { value: v1 }, SqlNumeric::Numeric { value: v2 }) => todo!(), }, (SqlValue::Text(text1), SqlValue::Text(text2)) => match (&text1, &text2) { (&SqlText::Text { value: _ }, &SqlText::Text { value: _ }) @@ -444,6 +487,8 @@ impl SqlValue { SqlType::Boolean => Ok(SqlValue::Boolean(data.as_bool()?)), SqlType::Null => Ok(SqlValue::Null), SqlType::Type => Ok(SqlValue::Text(SqlText::decode_text(data)?)), + SqlType::Record => todo!(), + SqlType::Numeric => todo!(), } } @@ -495,6 +540,7 @@ impl SqlValue { SqlNumeric::DoublePrecision { value } => MemoryCell { bytes: value.to_be_bytes().into(), }, + SqlNumeric::Numeric { value } => todo!(), }, SqlValue::Boolean(val) => match val { true => MemoryCell { bytes: vec![1] }, @@ -503,6 +549,7 @@ impl SqlValue { SqlValue::Type(typ) => MemoryCell { bytes: format!("{:?}", typ).as_bytes().into(), }, + SqlValue::Record(_) => todo!(), } } @@ -1094,6 +1141,19 @@ impl SqlValue { Ok(SqlValue::Numeric(SqlNumeric::DoublePrecision { value })) } } + SqlNumeric::Numeric { value } => { + if value < &0. { + return Err(SqlTypeError::OperationError( + "Can't find square root of negative number".to_string(), + )); + } + let value = (*value as f64).sqrt(); + if value.is_nan() { + Err(SqlTypeError::OperationError("NaN".to_string())) + } else { + Ok(SqlValue::Numeric(SqlNumeric::Numeric { value })) + } + } }, _ => Err(SqlTypeError::TypeMismatchError( "Type mismatch for square root".to_string(), @@ -1144,6 +1204,14 @@ impl SqlValue { Ok(SqlValue::Numeric(SqlNumeric::DoublePrecision { value })) } } + SqlNumeric::Numeric { value } => { + let value = (*value as f64).cbrt(); + if value.is_nan() { + Err(SqlTypeError::OperationError("NaN".to_string())) + } else { + Ok(SqlValue::Numeric(SqlNumeric::Numeric { value })) + } + } }, _ => Err(SqlTypeError::TypeMismatchError( "Type mismatch for cube root".to_string(), @@ -1171,6 +1239,9 @@ impl SqlValue { value: value.abs(), })) } + SqlNumeric::Numeric { value } => Ok(SqlValue::Numeric(SqlNumeric::Numeric { + value: value.abs(), + })), }, _ => Err(SqlTypeError::TypeMismatchError( "Type mismatch for absolute value".to_string(), @@ -1198,6 +1269,9 @@ impl SqlValue { value: -value, })) } + SqlNumeric::Numeric { value } => { + Ok(SqlValue::Numeric(SqlNumeric::Numeric { value: -value })) + } }, _ => Err(SqlTypeError::TypeMismatchError( "Type mismatch for minus".to_string(), @@ -1337,6 +1411,7 @@ impl SqlValue { SqlNumeric::BigInt { value: _ } => SqlType::BigInt, SqlNumeric::Real { value: _ } => SqlType::Real, SqlNumeric::DoublePrecision { value: _ } => SqlType::DoublePrecision, + SqlNumeric::Numeric { value } => todo!(), }, SqlValue::Text(text) => match &text { SqlText::Text { value: _ } => SqlType::Text, @@ -1350,6 +1425,7 @@ impl SqlValue { SqlValue::Boolean(_) => SqlType::Boolean, SqlValue::Type(_) => SqlType::Type, SqlValue::Null => SqlType::Null, + SqlValue::Record(_) => todo!(), } } @@ -1377,21 +1453,22 @@ impl SqlValue { value: *value as i64, })) } + SqlNumeric::Numeric { value } => 
Ok(SqlValue::Numeric(SqlNumeric::BigInt { + value: *value as i64, + })), }, SqlValue::Text(text) => match text { - SqlText::Text { value } => Ok(SqlValue::Numeric(SqlNumeric::BigInt { - value: value.parse()?, - })), - SqlText::VarChar { + SqlText::Text { value } + | SqlText::VarChar { value, len: _, maxlen: _, - } => Ok(SqlValue::Numeric(SqlNumeric::BigInt { - value: value.parse()?, - })), - SqlText::Char { value, len: _ } => Ok(SqlValue::Numeric(SqlNumeric::BigInt { - value: value.parse()?, - })), + } + | SqlText::Char { value, len: _ } => { + Ok(SqlValue::Numeric(SqlNumeric::BigInt { + value: value.parse()?, + })) + } }, SqlValue::Boolean(value) => { if *value { @@ -1421,19 +1498,18 @@ impl SqlValue { value: *value as i32, })) } + SqlNumeric::Numeric { value } => Ok(SqlValue::Numeric(SqlNumeric::Int { + value: *value as i32, + })), }, SqlValue::Text(text) => match text { - SqlText::Text { value } => Ok(SqlValue::Numeric(SqlNumeric::Int { - value: value.parse()?, - })), - SqlText::VarChar { + SqlText::Text { value } + | SqlText::VarChar { value, len: _, maxlen: _, - } => Ok(SqlValue::Numeric(SqlNumeric::Int { - value: value.parse()?, - })), - SqlText::Char { value, len: _ } => Ok(SqlValue::Numeric(SqlNumeric::Int { + } + | SqlText::Char { value, len: _ } => Ok(SqlValue::Numeric(SqlNumeric::Int { value: value.parse()?, })), }, @@ -1465,19 +1541,18 @@ impl SqlValue { value: *value as i16, })) } + SqlNumeric::Numeric { value } => Ok(SqlValue::Numeric(SqlNumeric::SmallInt { + value: *value as i16, + })), }, SqlValue::Text(text) => match text { - SqlText::Text { value } => Ok(SqlValue::Numeric(SqlNumeric::SmallInt { - value: value.parse()?, - })), - SqlText::VarChar { + SqlText::Text { value } + | SqlText::VarChar { value, len: _, maxlen: _, - } => Ok(SqlValue::Numeric(SqlNumeric::SmallInt { - value: value.parse()?, - })), - SqlText::Char { value, len: _ } => { + } + | SqlText::Char { value, len: _ } => { Ok(SqlValue::Numeric(SqlNumeric::SmallInt { value: value.parse()?, })) @@ -1511,6 +1586,9 @@ impl SqlValue { value: *value as f32, })) } + SqlNumeric::Numeric { value } => Ok(SqlValue::Numeric(SqlNumeric::Real { + value: *value as f32, + })), }, SqlValue::Text(text) => match text { SqlText::Text { value } => Ok(SqlValue::Numeric(SqlNumeric::Real { @@ -1556,19 +1634,16 @@ impl SqlValue { value: *value, })) } + SqlNumeric::Numeric { value } => { + Ok(SqlValue::Numeric(SqlNumeric::DoublePrecision { + value: *value, + })) + } }, SqlValue::Text(text) => match text { - SqlText::Text { value } => Ok(SqlValue::Numeric(SqlNumeric::DoublePrecision { - value: value.parse()?, - })), - SqlText::VarChar { - value, - len: _, - maxlen: _, - } => Ok(SqlValue::Numeric(SqlNumeric::DoublePrecision { - value: value.parse()?, - })), - SqlText::Char { value, len: _ } => { + SqlText::Text { value } + | SqlText::VarChar { value, .. } + | SqlText::Char { value, .. } => { Ok(SqlValue::Numeric(SqlNumeric::DoublePrecision { value: value.parse()?, })) @@ -1578,17 +1653,9 @@ impl SqlValue { }, SqlType::Text => match self { SqlValue::Text(text) => match text { - SqlText::Text { value } => Ok(SqlValue::Text(SqlText::Text { - value: value.clone(), - })), - SqlText::VarChar { - value, - len: _, - maxlen: _, - } => Ok(SqlValue::Text(SqlText::Text { - value: value.clone(), - })), - SqlText::Char { value, len: _ } => Ok(SqlValue::Text(SqlText::Text { + SqlText::Text { value } + | SqlText::VarChar { value, .. } + | SqlText::Char { value, .. 
} => Ok(SqlValue::Text(SqlText::Text { value: value.clone(), })), }, @@ -1608,6 +1675,9 @@ impl SqlValue { SqlNumeric::DoublePrecision { value } => Ok(SqlValue::Text(SqlText::Text { value: value.to_string(), })), + SqlNumeric::Numeric { value } => Ok(SqlValue::Text(SqlText::Text { + value: value.to_string(), + })), }, SqlValue::Boolean(value) => match value { true => Ok(SqlValue::Text(SqlText::Text { @@ -1680,6 +1750,14 @@ impl SqlValue { value, })) } + SqlNumeric::Numeric { value } => { + let value = value.to_string(); + Ok(SqlValue::Text(SqlText::VarChar { + len: value.len(), + maxlen: value.len(), + value, + })) + } }, SqlValue::Boolean(value) => match value { true => Ok(SqlValue::Text(SqlText::VarChar { @@ -1750,6 +1828,13 @@ impl SqlValue { value, })) } + SqlNumeric::Numeric { value } => { + let value = value.to_string(); + Ok(SqlValue::Text(SqlText::Char { + len: value.len(), + value, + })) + } }, SqlValue::Boolean(value) => match value { true => Ok(SqlValue::Text(SqlText::Char { @@ -1805,6 +1890,8 @@ impl SqlValue { }, SqlType::Null => Ok(SqlValue::Null), SqlType::Type => Err(SqlTypeError::TypeMismatchError("Type mismatch".to_string())), + SqlType::Record => todo!(), + SqlType::Numeric => todo!(), } } @@ -1832,6 +1919,9 @@ impl SqlValue { value: *value as i64, })) } + SqlNumeric::Numeric { value } => Ok(SqlValue::Numeric(SqlNumeric::BigInt { + value: *value as i64, + })), }, _ => Err(SqlTypeError::TypeMismatchError("Type mismatch".to_string())), }, @@ -1854,6 +1944,9 @@ impl SqlValue { value: *value as i32, })) } + SqlNumeric::Numeric { value } => Ok(SqlValue::Numeric(SqlNumeric::Int { + value: *value as i32, + })), }, _ => Err(SqlTypeError::TypeMismatchError("Type mismatch".to_string())), }, @@ -1876,6 +1969,9 @@ impl SqlValue { value: *value as i16, })) } + SqlNumeric::Numeric { value } => Ok(SqlValue::Numeric(SqlNumeric::SmallInt { + value: *value as i16, + })), }, _ => Err(SqlTypeError::TypeMismatchError("Type mismatch".to_string())), }, @@ -1898,6 +1994,9 @@ impl SqlValue { value: *value as f32, })) } + SqlNumeric::Numeric { value } => Ok(SqlValue::Numeric(SqlNumeric::Real { + value: *value as f32, + })), }, _ => Err(SqlTypeError::TypeMismatchError("Type mismatch".to_string())), }, @@ -1928,22 +2027,19 @@ impl SqlValue { value: *value, })) } + SqlNumeric::Numeric { value } => { + Ok(SqlValue::Numeric(SqlNumeric::DoublePrecision { + value: *value, + })) + } }, _ => Err(SqlTypeError::TypeMismatchError("Type mismatch".to_string())), }, SqlType::Text => match self { SqlValue::Text(text) => match text { - SqlText::Text { value } => Ok(SqlValue::Text(SqlText::Text { - value: value.clone(), - })), - SqlText::VarChar { - value, - len: _, - maxlen: _, - } => Ok(SqlValue::Text(SqlText::Text { - value: value.clone(), - })), - SqlText::Char { value, len: _ } => Ok(SqlValue::Text(SqlText::Text { + SqlText::Text { value } + | SqlText::VarChar { value, .. } + | SqlText::Char { value, .. 
} => Ok(SqlValue::Text(SqlText::Text { value: value.clone(), })), }, @@ -2001,6 +2097,8 @@ impl SqlValue { SqlValue::Type(typ) => Ok(SqlValue::Type(*typ)), _ => Err(SqlTypeError::TypeMismatchError("Type mismatch".to_string())), }, + SqlType::Record => todo!(), + SqlType::Numeric => todo!(), } } } @@ -2206,6 +2304,11 @@ impl std::fmt::Display for SqlValue { SqlValue::Boolean(val) => val.to_string(), SqlValue::Null => "NULL".to_string(), SqlValue::Type(typ) => typ.to_string(), + SqlValue::Record(val) => val + .iter() + .map(|v| v.to_string()) + .collect::>() + .join(", "), } ) } @@ -2240,6 +2343,7 @@ impl std::fmt::Display for SqlNumeric { SqlNumeric::BigInt { value } => value.to_string(), SqlNumeric::Real { value } => value.to_string(), SqlNumeric::DoublePrecision { value } => value.to_string(), + SqlNumeric::Numeric { value } => value.to_string(), } ) } diff --git a/repl/Cargo.toml b/repl/Cargo.toml index d8941d4..7b11742 100644 --- a/repl/Cargo.toml +++ b/repl/Cargo.toml @@ -8,8 +8,8 @@ edition = "2021" sqlo2 = { path = "../engine" } prettytable-rs = "=0.8.0" rustc_version_runtime = "=0.2" -rustyline = "=9.1.2" -rustyline-derive = "0.6.0" +rustyline = "=10.0.0" +rustyline-derive = "0.7.0" colored = "=2.0.0" -sysinfo = "=0.23.12" +sysinfo = "=0.24.7" termcolor = "=1.1.3" \ No newline at end of file diff --git a/repl/src/main.rs b/repl/src/main.rs index a33451b..c8bfac2 100644 --- a/repl/src/main.rs +++ b/repl/src/main.rs @@ -38,7 +38,7 @@ fn main() { const VERSION: &str = env!("CARGO_PKG_VERSION"); let mut mb = MemoryBackend::new(); - let mut rl = Editor::new(); + let mut rl = Editor::new().expect("Failed to create editor"); if rl.load_history("history.txt").is_ok() {} diff --git a/test-util/Cargo.toml b/test-util/Cargo.toml index 12c7278..cc93222 100644 --- a/test-util/Cargo.toml +++ b/test-util/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -serde = { version = "=1.0.137", features = ["derive"] } +serde = { version = "=1.0.140", features = ["derive"] } toml = "=0.5.9" pretty_assertions = "=1.2.1" similar = { version = "=2.1.0", features = ["inline"] } diff --git a/tests/acceptance/memory/operation_order_1 b/tests/acceptance/memory/operation_order_1 index 4665fac..859fcf3 100644 --- a/tests/acceptance/memory/operation_order_1 +++ b/tests/acceptance/memory/operation_order_1 @@ -1,60 +1 @@ -Select: -| ?column?(Double Precision) | -|----------------------------| -| 31 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 51 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| -11 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 9 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 12.333333333333334 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 5.666666666666667 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 15130 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 15130 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 52 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 52 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 3628821 | - -Select: -| ?column?(Double Precision) | -|----------------------------| -| 3628821 | - +"[20, 17]: Expected comma1, got +" \ No newline at end of file diff 
--git a/tests/acceptance/memory/query1 b/tests/acceptance/memory/query1 index 267a119..de2b566 100644 --- a/tests/acceptance/memory/query1 +++ b/tests/acceptance/memory/query1 @@ -1,151 +1 @@ -CreateTable: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Select: -| id(Int) | name(Text) | -|---------|-------------------| -| 0 | Phantaminum | -| 1 | The 25th Bam | -| 2 | Rachel | -| 3 | Rak WraithKaiser | -| 4 | Khun Aguero Agnes | -| 5 | King Zahard | -| 6 | Ha Yuri Zahard | -| 7 | Androssi Zahard | -| 8 | Evankhell | -| 9 | Anak Zahard | -| 10 | Yeon Yihwa | -| 11 | Yeo Miseng | -| 12 | Yeo Goseng | -| 13 | Xia Xia | -| 14 | Sachi Faker | -| 15 | Hwa Ryun | -| 16 | Khun Ran | -| 17 | Ha Yura | - -Select: -| id(Int) | name(Text) | -|---------|-------------------| -| 0 | Phantaminum | -| 1 | The 25th Bam | -| 3 | Rak WraithKaiser | -| 4 | Khun Aguero Agnes | -| 5 | King Zahard | -| 6 | Ha Yuri Zahard | -| 7 | Androssi Zahard | -| 8 | Evankhell | -| 9 | Anak Zahard | -| 10 | Yeon Yihwa | -| 11 | Yeo Miseng | -| 12 | Yeo Goseng | -| 13 | Xia Xia | -| 14 | Sachi Faker | -| 15 | Hwa Ryun | -| 16 | Khun Ran | -| 17 | Ha Yura | - -Select: -| id(Int) | name(Text) | -|---------|------------| -| 2 | Rachel | - -Select: -| id(Int) | charName(Text) | -|---------|-------------------| -| 0 | Phantaminum | -| 1 | The 25th Bam | -| 3 | Rak WraithKaiser | -| 4 | Khun Aguero Agnes | - -Select: -| name(Text) | -|-------------------| -| Anak Zahard | -| Androssi Zahard | -| Evankhell | -| Ha Yura | -| Ha Yuri Zahard | -| Hwa Ryun | -| Khun Aguero Agnes | -| Khun Ran | -| King Zahard | -| Phantaminum | -| Rachel | -| Rak WraithKaiser | -| Sachi Faker | -| The 25th Bam | -| Xia Xia | -| Yeo Goseng | -| Yeo Miseng | -| Yeon Yihwa | - -Select: -| ?column?(Int) | -|---------------| -| 0 | -| 1 | -| 2 | -| 3 | -| 4 | -| 5 | -| 6 | -| 7 | -| 8 | - -Select: -| name_with_id(Text) | -|--------------------| -| 10 Yeon Yihwa | -| 9 Anak Zahard | -| 8 Evankhell | -| 7 Androssi Zahard | - -CreateTable: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - -Insert: success - +"Type mismatch for concat" \ No newline at end of file diff --git a/tests/acceptance/memory/query2 b/tests/acceptance/memory/query2 index d4887ae..917b3f8 100644 --- a/tests/acceptance/memory/query2 +++ b/tests/acceptance/memory/query2 @@ -1,9 +1 @@ -Select: -| id(Int) | name(Text) | character_id(Int) | role_name(Text) | -|---------|-------------------|-------------------|-----------------| -| 1 | The 25th Bam | 1 | Wave Controller | -| 1 | The 25th Bam | 1 | Fisherman | -| 3 | Rak WraithKaiser | 3 | Spear Bearer | -| 4 | Khun Aguero Agnes | 4 | Light Bearer | -| 4 | Khun Aguero Agnes | 4 | Spear Bearer | - +"Table does not exist." 
\ No newline at end of file diff --git a/tests/unit/parser1/query1 b/tests/unit/parser1/query1 index 975ca8c..e42dc9e 100644 --- a/tests/unit/parser1/query1 +++ b/tests/unit/parser1/query1 @@ -1,42 +1,93 @@ -Create Table - ├──Name: character_roles - └──Columns - ├──Int - └──Text - -Insert - ├──Name: character_roles - └──Values - ├──1 - └──"Wave Controller" - -Insert - ├──Name: character_roles - └──Values - ├──2 - └──"Light Bearer" - -Insert - ├──Name: character_roles - └──Values - ├──3 - └──"Spear Bearer" - -Insert - ├──Name: character_roles - └──Values - ├──4 - └──"Light Bearer" - -Insert - ├──Name: character_roles - └──Values - ├──1 - └──"Fisherman" - -Insert - ├──Name: character_roles - └──Values - ├──4 - └──"Spear Bearer" - +Create Table + ├──Name + | └─"character_roles" + └──Cols + ├─[0] + | ├──Name + | | └─"character_id" + | └──DataType + | └──Int + └─[1] + ├──Name + | └─"role_name" + └──DataType + └──Text + +Insert + ├──Table + | └─"character_roles" + └──Values + ├─[0] + | └──Literal + | └──Numeric + | └─"1" + └─[1] + └──Literal + └──String + └─"Wave Controller" + +Insert + ├──Table + | └─"character_roles" + └──Values + ├─[0] + | └──Literal + | └──Numeric + | └─"2" + └─[1] + └──Literal + └──String + └─"Light Bearer" + +Insert + ├──Table + | └─"character_roles" + └──Values + ├─[0] + | └──Literal + | └──Numeric + | └─"3" + └─[1] + └──Literal + └──String + └─"Spear Bearer" + +Insert + ├──Table + | └─"character_roles" + └──Values + ├─[0] + | └──Literal + | └──Numeric + | └─"4" + └─[1] + └──Literal + └──String + └─"Light Bearer" + +Insert + ├──Table + | └─"character_roles" + └──Values + ├─[0] + | └──Literal + | └──Numeric + | └─"1" + └─[1] + └──Literal + └──String + └─"Fisherman" + +Insert + ├──Table + | └─"character_roles" + └──Values + ├─[0] + | └──Literal + | └──Numeric + | └─"4" + └─[1] + └──Literal + └──String + └─"Spear Bearer" + diff --git a/tests/unit/parser1/query1.target b/tests/unit/parser1/query1.target new file mode 100644 index 0000000..975ca8c --- /dev/null +++ b/tests/unit/parser1/query1.target @@ -0,0 +1,42 @@ +Create Table + ├──Name: character_roles + └──Columns + ├──Int + └──Text + +Insert + ├──Name: character_roles + └──Values + ├──1 + └──"Wave Controller" + +Insert + ├──Name: character_roles + └──Values + ├──2 + └──"Light Bearer" + +Insert + ├──Name: character_roles + └──Values + ├──3 + └──"Spear Bearer" + +Insert + ├──Name: character_roles + └──Values + ├──4 + └──"Light Bearer" + +Insert + ├──Name: character_roles + └──Values + ├──1 + └──"Fisherman" + +Insert + ├──Name: character_roles + └──Values + ├──4 + └──"Spear Bearer" + diff --git a/tests/unit/parser2/query1 b/tests/unit/parser2/query1 index 2e5674d..1e141dc 100644 --- a/tests/unit/parser2/query1 +++ b/tests/unit/parser2/query1 @@ -1,141 +1,300 @@ -Select - ├──From - └──Items - ├──Item - | └──Expression - | └──1 - └──Item - └──Expression - └──2 - -Select - ├──From - | └──characters - └──Items - ├──Item - | └──Expression - | └──id - └──Item - └──Expression - └──name - -Select - ├──From - | └──characters - ├──Where - | └──NotEqual - | |└─id - | └──2 - └──Items - ├──Item - | └──Expression - | └──id - └──Item - └──Expression - └──name - -Select - ├──From - | └──characters - ├──Where - | └──Equal - | |└─name - | └──"Rachel" - └──Items - ├──Item - | └──Expression - | └──id - └──Item - └──Expression - └──name - -Select - ├──From - | └──characters - ├──Where - | └──And - | |└─NotEqual - | | |└─name - | | └──"Rachel" - | └──LessThan - | |└─id - | └──5 - └──Items - ├──Item - | └──Expression - | └──id - └──Item - ├──As - 
| └──charName - └──Expression - └──name - -Select - ├──From - | └──characters - ├──Order By - | ├──Asc - | └──name - └──Items - └──Item - └──Expression - └──name - -Select - ├──From - | └──characters - ├──Distinct - └──Items - └──Item - └──Expression - └──Cast - ├──Expression - | └──Divide - | |└─id - | └──2 - └──Type - └──Int - -Select - ├──From - | └──characters - ├──Where - | └──GreaterThan - | |└─id - | └──1 - ├──Order By - | ├──Desc - | └──id - ├──Limit - | └──4 - ├──Offset - | └──5 - └──Items - └──Item - ├──As - | └──name_with_id - └──Expression - └──Concat - |└─Cast - | ├──Expression - | | └──id - | └──Type - | └──Text - └──Concat - |└─" " - └──name - -Select - ├──From - | └──characters - ├──Where - | └──NotEqual - | |└─id - | └──2 - ├──Order By - | ├──Asc - | └──id - └──Items - └──Item - └──Asterisk - +Select + └──Items + ├─[0] + | └──Expression + | └──Literal + | └──Numeric + | └─"1" + └─[1] + └──Expression + └──Literal + └──Numeric + └─"2" + +Select + ├──From + | └─[0] + | └──Table + | └──TableName + | └─"characters" + └──Items + ├─[0] + | └──Expression + | └──TableColumn + | └──ColName + | └─"id" + └─[1] + └──Expression + └──TableColumn + └──ColName + └─"name" + +Select + ├──From + | └─[0] + | └──Table + | └──TableName + | └─"characters" + ├──WhereClause + | └──Binary + | ├──First + | | └──TableColumn + | | └──ColName + | | └─"id" + | ├──Second + | | └──Literal + | | └──Numeric + | | └─"2" + | └──Operand + | └──NotEqual + └──Items + ├─[0] + | └──Expression + | └──TableColumn + | └──ColName + | └─"id" + └─[1] + └──Expression + └──TableColumn + └──ColName + └─"name" + +Select + ├──From + | └─[0] + | └──Table + | └──TableName + | └─"characters" + ├──WhereClause + | └──Binary + | ├──First + | | └──TableColumn + | | └──ColName + | | └─"name" + | ├──Second + | | └──Literal + | | └──String + | | └─"Rachel" + | └──Operand + | └──Equal + └──Items + ├─[0] + | └──Expression + | └──TableColumn + | └──ColName + | └─"id" + └─[1] + └──Expression + └──TableColumn + └──ColName + └─"name" + +Select + ├──From + | └─[0] + | └──Table + | └──TableName + | └─"characters" + ├──WhereClause + | └──Binary + | ├──First + | | └──Binary + | | ├──First + | | | └──TableColumn + | | | └──ColName + | | | └─"name" + | | ├──Second + | | | └──Literal + | | | └──String + | | | └─"Rachel" + | | └──Operand + | | └──NotEqual + | ├──Second + | | └──Binary + | | ├──First + | | | └──TableColumn + | | | └──ColName + | | | └─"id" + | | ├──Second + | | | └──Literal + | | | └──Numeric + | | | └─"5" + | | └──Operand + | | └──LessThan + | └──Operand + | └──And + └──Items + ├─[0] + | └──Expression + | └──TableColumn + | └──ColName + | └─"id" + └─[1] + ├──Expression + | └──TableColumn + | └──ColName + | └─"name" + └──AsClause + └─"charName" + +Select + ├──From + | └─[0] + | └──Table + | └──TableName + | └─"characters" + ├──OrderBy + | ├──Asc + | | └─true + | └──Exp + | └──TableColumn + | └──ColName + | └─"name" + └──Items + └─[0] + └──Expression + └──TableColumn + └──ColName + └─"name" + +Select + ├──From + | └─[0] + | └──Table + | └──TableName + | └─"characters" + ├──Distinct + | └─true + └──Items + └─[0] + └──Expression + └──Cast + ├──Data + | └──Binary + | ├──First + | | └──TableColumn + | | └──ColName + | | └─"id" + | ├──Second + | | └──Literal + | | └──Numeric + | | └─"2" + | └──Operand + | └──Divide + └──Typ + └──Int + +Select + ├──From + | └─[0] + | └──Table + | └──TableName + | └─"characters" + ├──WhereClause + | └──Binary + | ├──First + | | └──TableColumn + | | └──ColName + | | └─"id" + | ├──Second + | | └──Literal + | | 
└──Numeric + | | └─"1" + | └──Operand + | └──GreaterThan + ├──OrderBy + | ├──Asc + | | └─false + | └──Exp + | └──TableColumn + | └──ColName + | └─"id" + ├──Limit + | └─4 + ├──Offset + | └─5 + └──Items + └─[0] + ├──Expression + | └──Binary + | ├──First + | | └──Cast + | | ├──Data + | | | └──TableColumn + | | | └──ColName + | | | └─"id" + | | └──Typ + | | └──Text + | ├──Second + | | └──Binary + | | ├──First + | | | └──Literal + | | | └──String + | | | └─" " + | | ├──Second + | | | └──TableColumn + | | | └──ColName + | | | └─"name" + | | └──Operand + | | └──Concat + | └──Operand + | └──Concat + └──AsClause + └─"name_with_id" + +Select + ├──From + | └─[0] + | └──Table + | ├──TableName + | | └─"characters" + | └──Joins + | └─[0] + | ├──Kind + | | └──Inner + | ├──Source + | | └──Table + | | └──TableName + | | └─"character_roles" + | └──On + | └──Binary + | ├──First + | | └──TableColumn + | | ├──ColName + | | | └─"id" + | | └──TableName + | | └─"characters" + | ├──Second + | | └──TableColumn + | | ├──ColName + | | | └─"character_id" + | | └──TableName + | | └─"character_roles" + | └──Operand + | └──Equal + ├──WhereClause + | └──Binary + | ├──First + | | └──TableColumn + | | └──ColName + | | └─"id" + | ├──Second + | | └──Literal + | | └──Numeric + | | └─"2" + | └──Operand + | └──NotEqual + ├──OrderBy + | ├──Asc + | | └─true + | └──Exp + | └──TableColumn + | └──ColName + | └─"id" + └──Items + └─[0] + ├──Expression + | └──Empty + └──Asterisk + └─true + diff --git a/tests/unit/parser2/query1.target b/tests/unit/parser2/query1.target new file mode 100644 index 0000000..2e5674d --- /dev/null +++ b/tests/unit/parser2/query1.target @@ -0,0 +1,141 @@ +Select + ├──From + └──Items + ├──Item + | └──Expression + | └──1 + └──Item + └──Expression + └──2 + +Select + ├──From + | └──characters + └──Items + ├──Item + | └──Expression + | └──id + └──Item + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Where + | └──NotEqual + | |└─id + | └──2 + └──Items + ├──Item + | └──Expression + | └──id + └──Item + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Where + | └──Equal + | |└─name + | └──"Rachel" + └──Items + ├──Item + | └──Expression + | └──id + └──Item + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Where + | └──And + | |└─NotEqual + | | |└─name + | | └──"Rachel" + | └──LessThan + | |└─id + | └──5 + └──Items + ├──Item + | └──Expression + | └──id + └──Item + ├──As + | └──charName + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Order By + | ├──Asc + | └──name + └──Items + └──Item + └──Expression + └──name + +Select + ├──From + | └──characters + ├──Distinct + └──Items + └──Item + └──Expression + └──Cast + ├──Expression + | └──Divide + | |└─id + | └──2 + └──Type + └──Int + +Select + ├──From + | └──characters + ├──Where + | └──GreaterThan + | |└─id + | └──1 + ├──Order By + | ├──Desc + | └──id + ├──Limit + | └──4 + ├──Offset + | └──5 + └──Items + └──Item + ├──As + | └──name_with_id + └──Expression + └──Concat + |└─Cast + | ├──Expression + | | └──id + | └──Type + | └──Text + └──Concat + |└─" " + └──name + +Select + ├──From + | └──characters + ├──Where + | └──NotEqual + | |└─id + | └──2 + ├──Order By + | ├──Asc + | └──id + └──Items + └──Item + └──Asterisk + diff --git a/transactional-store/Cargo.toml b/transactional-store/Cargo.toml new file mode 100644 index 0000000..2c8bc2f --- /dev/null +++ b/transactional-store/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "transactional-store" +version = "0.1.0" +edition = "2021" + +# See more keys and 
their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +dashmap = "5.4.0" +parking_lot = "0.12.1" +tokio = { version = "1.21.2", features = ["sync"] } + +criterion = "0.4.0" +instant = "0.1.12" +rand = "0.8.4" + +[dev-dependencies] +test-case = "2.2.2" + +[[bench]] +name = "bench" +harness = false \ No newline at end of file diff --git a/transactional-store/benches/bench.rs b/transactional-store/benches/bench.rs new file mode 100644 index 0000000..18f7118 --- /dev/null +++ b/transactional-store/benches/bench.rs @@ -0,0 +1,27 @@ +use std::sync::atomic::AtomicI64; + +use criterion::{black_box, criterion_group, criterion_main, Criterion}; + +use dashmap::DashMap; +use instant::Instant; + +fn store_set_benchmark(c: &mut Criterion) { + use transactional_store::{TransactionalStore, TransactionalStoreExt}; + let store = TransactionalStore::::new(); + // let map = DashMap::new(); + let at = AtomicI64::new(0); + // Bench here + c.bench_function("store_set", |b| { + b.iter(|| { + let k = rand::random(); + store.set(black_box(k), k, None); + // map.entry(i).and_modify(|v| *v = i).or_insert(i); + }) + }); +} + +criterion_group!( + benches, + store_set_benchmark, +); +criterion_main!(benches); \ No newline at end of file diff --git a/transactional-store/src/lib.rs b/transactional-store/src/lib.rs new file mode 100644 index 0000000..de0e40d --- /dev/null +++ b/transactional-store/src/lib.rs @@ -0,0 +1,842 @@ +use std::{ + fmt::{Debug, Display}, + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, + }, +}; + +use dashmap::DashMap; +use parking_lot::RwLock; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TransactionalStoreError +where + K: Eq + std::hash::Hash + Clone + Send + Sync + Debug + Display, +{ + CollectionNotFound { name: String }, + ConcurrentDeleteConflict, + SerializationError, + RecordNotFound { id: K }, + TransactionNotFound { id: u64 }, + TransactionAlreadyFinished { id: u64 }, +} + +pub type StoreResult = std::result::Result>; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum IsolationLevel { + ReadUncommitted, + Serializable, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum TransactionStatus { + Aborted, + Committed { commit_id: u64 }, + Pending, + RolledBack, +} + +impl TransactionStatus { + pub fn is_active(&self) -> bool { + matches!(self, TransactionStatus::Pending,) + } + + pub fn is_committed(&self) -> bool { + matches!(self, TransactionStatus::Committed { .. },) + } + + pub fn is_finished(&self) -> bool { + matches!( + self, + TransactionStatus::Aborted + | TransactionStatus::Committed { .. 
} + | TransactionStatus::RolledBack, + ) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TransactionState { + pub status: TransactionStatus, +} + +impl TransactionState { + pub fn is_active(&self) -> bool { + self.status.is_active() + } + + pub fn is_committed(&self) -> bool { + self.status.is_committed() + } + + pub fn is_finished(&self) -> bool { + self.status.is_finished() + } + + pub fn new(status: TransactionStatus) -> Self { + Self { status } + } +} + +pub struct TransactionalStore +where + K: Eq + std::hash::Hash + Clone + Send + Sync + Debug + Display + 'static, + V: Clone + Send + Sync + Debug + Display + 'static, +{ + last_commit_id: Arc, + last_transaction_id: Arc, + store: DashMap>>>, + transaction_catalog: Arc>, +} + +pub struct TransactionalRow +where + V: Clone + Send + Sync + Debug + Display, +{ + row_versions: Arc>>, + last_locked_by: u64, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct VersionedRows +where + V: Clone + Send + Sync + Debug + Display, +{ + rows: Vec>, + was_deleted: bool, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct VersionedRow +where + V: Clone + Send + Sync + Debug + Display, +{ + row: Option, + expired: bool, + xmin: u64, + xmax: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TransactionContext { + // Only generated when transaction has to lock a row + pub id: Option, + pub previous_commit_id: u64, + pub isolation_level: IsolationLevel, +} + +impl TransactionContext { + pub fn is_serializable(&self) -> bool { + self.isolation_level == IsolationLevel::Serializable + } + + pub fn is_read_uncommitted(&self) -> bool { + self.isolation_level == IsolationLevel::ReadUncommitted + } + + pub fn is_read_committed(&self) -> bool { + !self.is_serializable() + } + + pub fn set_isolation_level(&mut self, isolation_level: IsolationLevel) { + self.isolation_level = isolation_level; + } +} + +// Transaction id for finding transaction in catalog and given externally +// Also another transaction id generated on first write operation to use for row versions + +pub trait TransactionalStoreExt +where + K: Eq + std::hash::Hash + Clone + Send + Sync + Debug + Display + 'static, + V: Clone + Send + Sync + Debug + Display + 'static, +{ + fn new() -> Self; + fn collection(&self, collection: &str) -> StoreResult>>>; + fn create_collection(&self, collection: &str) -> StoreResult; + fn drop_collection(&self, collection: &str) -> StoreResult; + fn get( + &self, + collection: &str, + key: &K, + transaction: Option<&TransactionContext>, + ) -> StoreResult>; + fn set( + &self, + collection: &str, + key: K, + value: V, + transaction: Option<&mut TransactionContext>, + ) -> StoreResult; + fn delete( + &self, + collection: &str, + key: K, + transaction: Option<&mut TransactionContext>, + ) -> StoreResult; + fn begin_transaction(&self) -> StoreResult; + fn commit_transaction(&self, transaction: &TransactionContext) -> StoreResult; + fn rollback_transaction(&self, transaction: &TransactionContext) -> StoreResult; + fn rows_iter(&self, collection: &str) -> StoreResult>; + fn rows_iter_with_transaction_id( + &self, + collection: &str, + transaction_id: u64, + ) -> StoreResult>; +} + +impl TransactionalStore +where + K: Eq + std::hash::Hash + Clone + Send + Debug + Display + Sync + 'static, + V: Clone + Send + Sync + Debug + Display + 'static, +{ + fn lock_row( + &self, + row: &mut TransactionalRow, + transaction_id: u64, + isolation_level: IsolationLevel, + ) -> StoreResult { + if transaction_id != 
row.last_locked_by { + if let Some(transaction) = self.transaction_catalog.get(&row.last_locked_by) { + if transaction.is_active() && isolation_level == IsolationLevel::Serializable { + // TODO: Costumize error based on action + return Err(TransactionalStoreError::SerializationError); + } + } + } + + row.last_locked_by = transaction_id; + + Ok(transaction_id) + } + + fn get_next_transaction_id(&self) -> u64 { + self.last_transaction_id.fetch_add(1, Ordering::Relaxed) + 1 + } + + fn get_latest_commit_id(&self) -> u64 { + self.last_commit_id.load(Ordering::Relaxed) + } + + fn get_next_commit_id(&self) -> u64 { + self.last_commit_id.fetch_add(1, Ordering::Relaxed) + 1 + } + + fn get_tx_id_from_opt( + &self, + transaction: Option<&mut TransactionContext>, + ) -> StoreResult { + if let Some(transaction_context @ TransactionContext { id: None, .. }) = transaction { + let next_id = self.create_transaction_entry(); + transaction_context.id = Some(next_id); + Ok(next_id) + } else if let Some(TransactionContext { id: Some(id), .. }) = transaction { + Ok(*id) + } else { + let mut transaction = self.begin_transaction()?; + self.get_tx_id_from_opt(Some(&mut transaction)) + } + } + + fn create_transaction_entry(&self) -> u64 { + let key = self.get_next_transaction_id(); + self.transaction_catalog + .insert(key, TransactionState::new(TransactionStatus::Pending)); + key + } +} + +impl TransactionalStoreExt for TransactionalStore +where + K: Eq + std::hash::Hash + Clone + Send + Debug + Display + Sync + 'static, + V: Clone + Send + Sync + Debug + Display + 'static, +{ + fn new() -> Self { + Self { + last_commit_id: Default::default(), + last_transaction_id: Default::default(), + store: DashMap::new(), + transaction_catalog: DashMap::new().into(), + } + } + + fn collection( + &self, + collection_name: &str, + ) -> StoreResult>>> { + self.store + .get(collection_name) + .map(|collection| collection.value().clone()) + .ok_or(TransactionalStoreError::CollectionNotFound { + name: collection_name.to_string(), + }) + } + + fn create_collection(&self, collection_name: &str) -> StoreResult { + self.store + .entry(collection_name.to_string()) + .or_insert_with(|| DashMap::new().into()); + Ok(()) + } + + fn drop_collection(&self, collection_name: &str) -> StoreResult { + self.store.remove(collection_name); + Ok(()) + } + + fn get( + &self, + collection: &str, + key: &K, + transaction: Option<&TransactionContext>, + ) -> StoreResult> { + if let Some(&TransactionContext { + isolation_level: IsolationLevel::Serializable, + previous_commit_id: previous_write_transaction_id, + .. 
+ }) = transaction + { + Ok(self.collection(collection)?.get(key).and_then(|row| { + row.row_versions + .read() + .rows + .iter() + .rev() + .find(|row| row.xmin <= previous_write_transaction_id) + .and_then(|row| row.row.clone()) + })) + } else { + Ok(self.collection(collection)?.get(key).and_then(|row| { + row.row_versions + .read() + .rows + .last() + .and_then(|row| row.row.clone()) + })) + } + } + + fn set( + &self, + collection_name: &str, + key: K, + value: V, + transaction: Option<&mut TransactionContext>, + ) -> StoreResult { + let isolation_level = transaction + .as_ref() + .map(|transaction| transaction.isolation_level) + .unwrap_or(IsolationLevel::Serializable); + let tx_id = self.get_tx_id_from_opt(transaction)?; + let mut inserted = false; + let collection = self.collection(collection_name)?; + let mut row = collection.entry(key).or_insert_with(|| { + inserted = true; + let commit_id = self.get_next_commit_id(); + self.transaction_catalog.insert( + tx_id, + TransactionState { + status: TransactionStatus::Committed { commit_id }, + }, + ); + TransactionalRow { + row_versions: Arc::new(RwLock::new(VersionedRows { + rows: vec![VersionedRow { + expired: false, + row: Some(value.clone()), + xmin: tx_id, + xmax: None, + }], + was_deleted: false, + })), + last_locked_by: tx_id, + } + }); + + if inserted { + return Ok(()); + } + + self.lock_row(&mut *row, tx_id, isolation_level)?; + let mut row = row.row_versions.write(); + row.rows.iter_mut().for_each(|row| row.expired = true); + if let Some(last_version) = row.rows.last_mut() { + last_version.xmax = Some(tx_id); + last_version.expired = true; + } + row.rows.push(VersionedRow { + row: Some(value), + xmin: tx_id, + xmax: None, + expired: false, + }); + + Ok(()) + } + + fn delete( + &self, + collection_name: &str, + key: K, + transaction: Option<&mut TransactionContext>, + ) -> StoreResult { + let isolation_level = transaction + .as_ref() + .map(|transaction| transaction.isolation_level) + .unwrap_or(IsolationLevel::Serializable); + let collection = self.collection(collection_name)?; + let mut row = if let Some(row) = collection.get_mut(&key) { + row + } else { + return Err(TransactionalStoreError::RecordNotFound { id: key }); + }; + + let transaction_id = self.get_tx_id_from_opt(transaction)?; + + self.lock_row(&mut row, transaction_id, isolation_level)?; + + drop(row); + + collection.remove(&key); + + let mut transaction = if let Some(tx) = self.transaction_catalog.get_mut(&transaction_id) { + tx + } else { + return Err(TransactionalStoreError::TransactionNotFound { id: transaction_id }); + }; + transaction.status = TransactionStatus::Committed { + commit_id: self.get_next_commit_id(), + }; + + Ok(()) + } + + fn begin_transaction(&self) -> StoreResult { + Ok(TransactionContext { + id: None, + previous_commit_id: self.get_latest_commit_id(), + isolation_level: IsolationLevel::Serializable, + }) + } + + fn commit_transaction(&self, transaction: &TransactionContext) -> StoreResult { + if let Some(id) = transaction.id { + let mut transaction = if let Some(tx) = self.transaction_catalog.get_mut(&id) { + tx + } else { + return Err(TransactionalStoreError::TransactionNotFound { id }); + }; + let commit_id = self.get_next_commit_id(); + if transaction.is_finished() { + return Err(TransactionalStoreError::TransactionAlreadyFinished { id }); + } + transaction.status = TransactionStatus::Committed { commit_id }; + } + + Ok(()) + } + + fn rollback_transaction(&self, transaction: &TransactionContext) -> StoreResult { + if let Some(id) = 
transaction.id { + let mut transaction = if let Some(tx) = self.transaction_catalog.get_mut(&id) { + tx + } else { + return Err(TransactionalStoreError::TransactionNotFound { id }); + }; + if transaction.is_finished() { + return Err(TransactionalStoreError::TransactionAlreadyFinished { id }); + } + transaction.status = TransactionStatus::RolledBack; + } + + Ok(()) + } + + fn rows_iter(&self, collection_name: &str) -> StoreResult> { + let current_commit_id = self.get_latest_commit_id(); + let collection = self.collection(collection_name)?; + let transaction_catalog = self.transaction_catalog.clone(); + let iter = RecordIteratorContainer::new(collection, transaction_catalog, current_commit_id); + Ok(iter) + } + + fn rows_iter_with_transaction_id( + &self, + collection_name: &str, + transaction_id: u64, + ) -> StoreResult> { + let collection = self.collection(collection_name)?; + let transaction_catalog = self.transaction_catalog.clone(); + let iter = RecordTransactionIteratorContainer::new( + collection, + transaction_catalog, + transaction_id, + ); + Ok(iter) + } +} + +pub struct RecordIteratorContainer +where + K: Eq + std::hash::Hash + Clone + Send + Debug + Display + Sync + 'static, + V: Clone + Send + Sync + Debug + Display + 'static, +{ + collection: Arc>>, + transaction_catalog: Arc>, + current_commit_id: u64, +} + +impl RecordIteratorContainer +where + K: Eq + std::hash::Hash + Clone + Send + Debug + Display + Sync + 'static, + V: Clone + Send + Sync + Debug + Display + 'static, +{ + pub fn new( + collection: Arc>>, + transaction_catalog: Arc>, + current_commit_id: u64, + ) -> Self { + Self { + collection, + transaction_catalog, + current_commit_id, + } + } + + pub fn iter<'a>(&'a self) -> Box + 'a> { + let iter = self.collection.iter().flat_map(move |record| { + let key = record.key().clone(); + let mut found = None; + let versions = record.row_versions.read(); + for row in versions.rows.iter() { + let tx = self.transaction_catalog.get(&row.xmin); + if let Some(TransactionState { + status: TransactionStatus::Committed { commit_id }, + }) = tx.as_deref() + { + if commit_id > &self.current_commit_id { + continue; + } + found = Some(row); + } + } + + found.and_then(move |row| row.clone().row.map(|r| (key.clone(), r))) + }); + Box::new(iter) + } +} + +pub struct RecordTransactionIteratorContainer +where + K: Eq + std::hash::Hash + Clone + Send + Debug + Display + Sync + 'static, + V: Clone + Send + Sync + Debug + Display + 'static, +{ + collection: Arc>>, + transaction_catalog: Arc>, + transaction_id: u64, +} + +impl RecordTransactionIteratorContainer +where + K: Eq + std::hash::Hash + Clone + Send + Debug + Display + Sync + 'static, + V: Clone + Send + Sync + Debug + Display + 'static, +{ + pub fn new( + collection: Arc>>, + transaction_catalog: Arc>, + transaction_id: u64, + ) -> Self { + Self { + collection, + transaction_catalog, + transaction_id, + } + } + + pub fn iter<'a>(&'a self) -> Box + 'a> { + let iter = self.collection.iter().flat_map(move |record| { + let key = record.key().clone(); + let mut found = None; + let versions = record.row_versions.read(); + for row in versions.rows.iter() { + if row.xmin == self.transaction_id { + found = Some(row); + break; + } else if self + .transaction_catalog + .get(&row.xmin) + .map(|tx| tx.is_committed()) + .unwrap_or(true) + { + found = Some(row); + } + } + + found.and_then(move |row| row.clone().row.map(|r| (key.clone(), r))) + }); + Box::new(iter) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::{error::Error, 
str::FromStr}; + use test_case::test_case; + + pub enum StoreCommand + where + V: FromStr, + { + Set { + key: K, + val: V, + transaction_id: Option, + }, + Delete { + key: K, + transaction_id: Option, + }, + Get { + key: K, + transaction_id: Option, + }, + BeginTransaction, + CommitTransaction(u64), + RollbackTransaction(u64), + RowsIter, + RowsIterWithTransactionId(u64), + } + + pub fn store_command_handler( + tx_ctxs: &DashMap, + store: &TransactionalStore, + command: StoreCommand, + isolation_level: IsolationLevel, + ) -> StoreResult + where + K: Eq + std::hash::Hash + Clone + Send + Sync + Debug + Display + Ord + 'static, + V: Clone + Send + Sync + Debug + Display + FromStr + 'static, + { + match command { + StoreCommand::Set { + key, + val, + transaction_id, + } => { + let mut tx_ctx = if let Some(transaction_id) = transaction_id { + tx_ctxs.get_mut(&transaction_id) + } else { + None + }; + store.set("test", key, val, tx_ctx.as_deref_mut())?; + Ok(String::new()) + } + StoreCommand::Delete { + key, + transaction_id, + } => { + let mut tx_ctx = if let Some(transaction_id) = transaction_id { + tx_ctxs.get_mut(&transaction_id) + } else { + None + }; + store.delete("test", key, tx_ctx.as_deref_mut())?; + Ok(String::new()) + } + StoreCommand::Get { + key, + transaction_id, + } => { + let tx_ctx = if let Some(transaction_id) = transaction_id { + tx_ctxs.get(&transaction_id) + } else { + None + }; + let value = store.get("test", &key, tx_ctx.as_deref()); + Ok(format!("{value:?}")) + } + StoreCommand::BeginTransaction => { + let mut transaction = store.begin_transaction()?; + transaction.set_isolation_level(isolation_level); + let tx_id = tx_ctxs.iter().map(|tx| *tx.key()).max().unwrap_or(0) + 1; + tx_ctxs.insert(tx_id, transaction); + Ok(format!("{tx_id:?}")) + } + StoreCommand::CommitTransaction(transaction_id) => { + let tx_ctx = tx_ctxs.get(&transaction_id).unwrap(); + store.commit_transaction(&tx_ctx)?; + Ok(String::new()) + } + StoreCommand::RollbackTransaction(transaction_id) => { + let tx_ctx = tx_ctxs.get(&transaction_id).unwrap(); + store.rollback_transaction(&tx_ctx)?; + Ok(String::new()) + } + StoreCommand::RowsIter => { + let mut rows = store + .rows_iter("test") + .unwrap() + .iter() + .collect::>(); + rows.sort_by_key(|(key, _)| key.clone()); + Ok(format!("{rows:?}")) + } + StoreCommand::RowsIterWithTransactionId(transaction_id) => { + let mut rows = store + .rows_iter_with_transaction_id("test", transaction_id) + .unwrap() + .iter() + .collect::>(); + rows.sort_by_key(|(key, _)| key.clone()); + Ok(format!("{rows:?}")) + } + } + } + + impl FromStr for StoreCommand { + type Err = Box; + + fn from_str(s: &str) -> Result { + let mut parts = s.split_whitespace(); + let command = parts.next().ok_or("No command")?; + match command.to_ascii_lowercase().as_str() { + "set" => { + let key = parts.next().ok_or("No key")?.trim().to_string(); + let val = parts.next().ok_or("No value")?.parse()?; + let transaction_id = parts.next().map(|s| s.parse()).transpose()?; + Ok(StoreCommand::Set { + key, + val, + transaction_id, + }) + } + "delete" => { + let key = parts.next().ok_or("No key")?.trim().to_string(); + let transaction_id = parts.next().map(|s| s.parse()).transpose()?; + Ok(StoreCommand::Delete { + key, + transaction_id, + }) + } + "get" => { + let key = parts.next().ok_or("No key")?.trim().to_string(); + let transaction_id = parts.next().map(|s| s.parse()).transpose()?; + Ok(StoreCommand::Get { + key, + transaction_id, + }) + } + "begin" => Ok(StoreCommand::BeginTransaction), + "commit" 
=> { + let transaction_id = parts.next().ok_or("No transaction id")?.parse()?; + Ok(StoreCommand::CommitTransaction(transaction_id)) + } + "rollback" => { + let transaction_id = parts.next().ok_or("No transaction id")?.parse()?; + Ok(StoreCommand::RollbackTransaction(transaction_id)) + } + "rows" => { + if let Some(transaction_id) = parts.next() { + let transaction_id = transaction_id.parse()?; + Ok(StoreCommand::RowsIterWithTransactionId(transaction_id)) + } else { + Ok(StoreCommand::RowsIter) + } + } + _ => Err(format!("Unknown command: {command}"))?, + } + } + } + + #[test_case("set a 1" => ""; "set")] + #[test_case("get a" => "Ok(None)"; "get")] + #[test_case("begin" => "1"; "begin_transaction")] + #[test_case("rows" => "[]"; "rows_iter")] + #[test_case("rows 0" => "[]"; "rows_iter_with_transaction_id")] + fn test_store_command_handler(s: &str) -> &str { + let store = TransactionalStore::new(); + store.create_collection("test").unwrap(); + let command = s.parse::>().unwrap(); + let tx_ctxs = DashMap::new(); + let res = store_command_handler(&tx_ctxs, &store, command, IsolationLevel::ReadUncommitted) + .unwrap(); + + Box::leak(res.into_boxed_str()) + } + + #[test_case("SET a 1; SET b 2; SET c 3; ROWS" => r#"[("a", 1), ("b", 2), ("c", 3)]"#; "set")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; ROWS" => r#"[("a", 1), ("b", 2), ("c", 3)]"#; "begin_transaction")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; COMMIT 1; ROWS" => r#"[("a", 4), ("b", 5), ("c", 6)]"#; "commit_transaction")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; ROLLBACK 1; ROWS" => r#"[("a", 1), ("b", 2), ("c", 3)]"#; "rollback_transaction")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; BEGIN; SET a 7 2; SET b 8 2; SET c 9 2; COMMIT 2; ROWS" => r#"[("a", 7), ("b", 8), ("c", 9)]"#; "commit_transaction_2")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; BEGIN; SET a 7 2; SET b 8 2; SET c 9 2; ROLLBACK 2; ROWS" => r#"[("a", 1), ("b", 2), ("c", 3)]"#; "rollback_transaction_2")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; BEGIN; SET a 7 2; SET b 8 2; SET c 9 2; ROLLBACK 1; ROWS" => r#"[("a", 1), ("b", 2), ("c", 3)]"#; "rollback_transaction_1")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; BEGIN; SET a 7 2; SET b 8 2; SET c 9 2; COMMIT 1; ROWS 2" => r#"[("a", 4), ("b", 5), ("c", 6)]"#; "commit_transaction_1")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; BEGIN; SET a 7 2; SET b 8 2; SET c 9 2; COMMIT 2; COMMIT 1; ROWS" => r#"[("a", 7), ("b", 8), ("c", 9)]"#; "commit_transaction_2_and_1")] + #[test_case("SET a 1; SET b 2; SET c 3; BEGIN; SET a 4 1; SET b 5 1; SET c 6 1; BEGIN; SET a 7 2; SET b 8 2; SET c 9 2; ROLLBACK 2; COMMIT 1; ROWS" => r#"[("a", 4), ("b", 5), ("c", 6)]"#; "rollback_transaction_2_and_commit_transaction_1")] + fn test_store_command_handler_with_transaction(s: &str) -> &str { + let store = TransactionalStore::new(); + store.create_collection("test").unwrap(); + let commands = s.split(';').map(|s| { + s.trim() + .parse::>() + .unwrap_or_else(|e| panic!("{s} : {e}")) + }); + let tx_ctxs = DashMap::new(); + for command in commands { + store_command_handler(&tx_ctxs, &store, command, IsolationLevel::ReadUncommitted) + .unwrap(); + } + + let res = store_command_handler( + &tx_ctxs, + &store, + StoreCommand::RowsIter, + 
IsolationLevel::Serializable, + ) + .unwrap(); + Box::leak(res.into_boxed_str()) + } + + #[test_case("BEGIN; SET a 4 1; COMMIT 1; COMMIT 1;" => Err(TransactionalStoreError::TransactionAlreadyFinished { id: 1}))] + #[test_case("BEGIN; SET a 4 1; COMMIT 1; BEGIN; SET a 4 2; COMMIT 2; COMMIT 2;" => Err(TransactionalStoreError::TransactionAlreadyFinished { id: 1}))] + #[test_case("BEGIN; SET a 4 1; ROLLBACK 1;" => Err(TransactionalStoreError::TransactionAlreadyFinished { id: 1}))] + #[test_case("BEGIN; SET a 4 1; BEGIN; SET a 4 2; ROLLBACK 2; ROLLBACK 2;" => Err(TransactionalStoreError::TransactionAlreadyFinished { id: 2}))] + #[test_case("BEGIN; SET a 4 1; BEGIN; SET a 4 2; COMMIT 2; ROLLBACK 2;" => Err(TransactionalStoreError::TransactionAlreadyFinished { id: 2}))] + #[test_case("BEGIN; SET a 4 1; BEGIN; SET a 4 2; ROLLBACK 2; COMMIT 2;" => Err(TransactionalStoreError::TransactionAlreadyFinished { id: 2}))] + fn test_store_command_handler_with_transaction_fail(s: &str) -> StoreResult { + let store = TransactionalStore::new(); + store.create_collection("test").unwrap(); + let commands = s.split(';').map(|s| { + s.trim() + .parse::>() + .unwrap_or_else(|e| panic!("{s} : {e}")) + }); + let tx_ctxs = DashMap::new(); + + for command in commands { + store_command_handler(&tx_ctxs, &store, command, IsolationLevel::ReadUncommitted)?; + } + + panic!("Should fail") + } + + #[test] + fn entry_transaction() { + let map = DashMap::new(); + map.insert(1, "a"); + map.insert(2, "b"); + + assert_eq!("a", *map.get(&1).unwrap()); + assert_eq!("b", *map.get(&2).unwrap()); + map.entry(2).and_modify(|v| *v = "b1"); + let entry2_2 = map.entry(2).and_modify(|v| *v = "b2"); + drop(entry2_2); + + assert_eq!("b2", *map.get(&2).unwrap()); + } +} diff --git a/transactional-store/src/main.rs b/transactional-store/src/main.rs new file mode 100644 index 0000000..fbedd92 --- /dev/null +++ b/transactional-store/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello, world!"); +} \ No newline at end of file diff --git a/wasm-repl/Cargo.toml b/wasm-repl/Cargo.toml index aeabcd5..5c65f30 100644 --- a/wasm-repl/Cargo.toml +++ b/wasm-repl/Cargo.toml @@ -9,9 +9,9 @@ sqlo2 = { path = "../engine", features = ["stdweb", "wasm-bindgen"] } yew = "=0.19.3" yew-router = "=0.16.0" lazy_static = "=1.4.0" -serde = "=1.0.137" -serde_qs = "=0.9.2" -wasm-bindgen = { version = "=0.2.80", features = ["serde-serialize"] } +serde = "=1.0.140" +serde_qs = "=0.10.1" +wasm-bindgen = { version = "=0.2.81", features = ["serde-serialize"] } wasm-logger = "=0.2.0" -web-sys = { version = "=0.3.57", features = ["HtmlTextAreaElement"] } +web-sys = { version = "=0.3.58", features = ["HtmlTextAreaElement"] } log = "=0.4.17" diff --git a/wire-protocol/src/messages/mod.rs b/wire-protocol/src/messages/mod.rs index b2de6c8..bffb41b 100644 --- a/wire-protocol/src/messages/mod.rs +++ b/wire-protocol/src/messages/mod.rs @@ -127,7 +127,7 @@ impl<'a> DeserializableMessage<'a> for Bind<'a> { let mut read_count: i16 = 0; while read_count < numparams { let param_len = data_bytes.read_i32::()?; - let mut param = Vec::with_capacity(param_len as usize); + let mut param = vec![0; param_len as usize]; data_bytes.read_exact(&mut param)?; let param_str = Cow::Owned(String::from_utf8_lossy(¶m).to_string()); params.push(param_str); @@ -174,7 +174,7 @@ impl<'a> DeserializableMessage<'a> for Bind<'a> { } } -impl<'a> SerializableMessage for Bind<'_> { +impl SerializableMessage for Bind<'_> { fn serialize(&self) -> Vec { todo!() } @@ -263,7 +263,7 @@ impl<'a> 
DeserializableMessage<'a> for Describe<'a> { } } -impl<'a> SerializableMessage for Describe<'_> { +impl SerializableMessage for Describe<'_> { fn serialize(&self) -> Vec<u8> { todo!() } } @@ -296,7 +296,7 @@ impl<'a> DeserializableMessage<'a> for ErrorResponse<'a> { } } -impl<'a> SerializableMessage for ErrorResponse<'_> { +impl SerializableMessage for ErrorResponse<'_> { fn serialize(&self) -> Vec<u8> { let mut content = vec![b'E', 0, 0, 0, 0]; for (c, s) in &self.errors { @@ -339,7 +339,7 @@ impl<'a> DeserializableMessage<'a> for Execute<'a> { } } -impl<'a> SerializableMessage for Execute<'_> { +impl SerializableMessage for Execute<'_> { fn serialize(&self) -> Vec<u8> { todo!() }
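// --- Editorial sketch (not part of the patch) -------------------------------
// Why the Bind deserializer above now builds the parameter buffer with
// vec![0; param_len] instead of Vec::with_capacity(param_len): read_exact fills
// exactly buf.len() bytes, and a Vec created with with_capacity has length 0,
// so the old code read nothing and left the stream position unchanged. A
// zero-initialized Vec of the declared length is actually filled. Self-contained
// illustration using only std:
use std::io::{Cursor, Read};

fn main() -> std::io::Result<()> {
    let mut data = Cursor::new(b"hello".to_vec());

    // Buggy pattern: length is 0, so read_exact "succeeds" without reading anything.
    let mut empty: Vec<u8> = Vec::with_capacity(5);
    data.read_exact(&mut empty)?;
    assert!(empty.is_empty());

    // Fixed pattern: length matches the wire-declared size, so all bytes are read.
    data.set_position(0);
    let mut param = vec![0u8; 5];
    data.read_exact(&mut param)?;
    assert_eq!(param, b"hello".to_vec());
    Ok(())
}
// ---------------------------------------------------------------------------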