Compare commits

...

3 commits

17 changed files with 1182 additions and 1456 deletions

606
Cargo.lock generated
View file

@ -29,12 +29,34 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "anes"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "anstyle"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
[[package]]
name = "anyhow"
version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
[[package]]
name = "ariadne"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36f5e3dca4e09a6f340a61a0e9c7b61e030c69fc27bf29d73218f7e5e3b7638f"
dependencies = [
"unicode-width",
"yansi",
]
[[package]]
name = "ascii"
version = "1.1.0"
@ -69,6 +91,12 @@ dependencies = [
"vsimd",
]
[[package]]
name = "beef"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1"
[[package]]
name = "better_scoped_tls"
version = "1.0.1"
@ -96,15 +124,6 @@ dependencies = [
"wyz",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "bumpalo"
version = "3.19.0"
@ -130,6 +149,12 @@ dependencies = [
"serde",
]
[[package]]
name = "cast"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "castaway"
version = "0.2.3"
@ -139,12 +164,87 @@ dependencies = [
"rustversion",
]
[[package]]
name = "cc"
version = "1.2.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c1599538de2394445747c8cf7935946e3cc27e9625f889d979bfb2aaf569362"
dependencies = [
"shlex",
]
[[package]]
name = "cfg-if"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
[[package]]
name = "chumsky"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14377e276b2c8300513dff55ba4cc4142b44e5d6de6d00eb5b2307d650bb4ec1"
dependencies = [
"hashbrown 0.15.4",
"regex-automata 0.3.9",
"serde",
"stacker",
"unicode-ident",
"unicode-segmentation",
]
[[package]]
name = "ciborium"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e"
dependencies = [
"ciborium-io",
"ciborium-ll",
"serde",
]
[[package]]
name = "ciborium-io"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757"
[[package]]
name = "ciborium-ll"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9"
dependencies = [
"ciborium-io",
"half",
]
[[package]]
name = "clap"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
version = "4.5.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
dependencies = [
"anstyle",
"clap_lex",
]
[[package]]
name = "clap_lex"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
[[package]]
name = "compact_str"
version = "0.7.1"
@ -159,24 +259,69 @@ dependencies = [
]
[[package]]
name = "cpufeatures"
version = "0.2.17"
name = "criterion"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679"
dependencies = [
"libc",
"anes",
"cast",
"ciborium",
"clap",
"criterion-plot",
"itertools 0.13.0",
"num-traits",
"oorandom",
"plotters",
"rayon",
"regex",
"serde",
"serde_json",
"tinytemplate",
"walkdir",
]
[[package]]
name = "crypto-common"
version = "0.1.6"
name = "criterion-plot"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
dependencies = [
"generic-array",
"typenum",
"cast",
"itertools 0.10.5",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "crunchy"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "data-encoding"
version = "2.9.0"
@ -193,16 +338,6 @@ dependencies = [
"uuid",
]
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
]
[[package]]
name = "displaydoc"
version = "0.2.5"
@ -220,6 +355,24 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "equivalent"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
@ -247,13 +400,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "generic-array"
version = "0.14.7"
name = "half"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9"
dependencies = [
"typenum",
"version_check",
"cfg-if",
"crunchy",
]
[[package]]
@ -266,6 +419,17 @@ dependencies = [
"allocator-api2",
]
[[package]]
name = "hashbrown"
version = "0.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
dependencies = [
"allocator-api2",
"equivalent",
"foldhash",
]
[[package]]
name = "heck"
version = "0.5.0"
@ -278,7 +442,7 @@ version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d1638d2018a21b9ff65d7fc28c2271c76a5af6ff4f621b204d032bc649763a4"
dependencies = [
"hashbrown",
"hashbrown 0.14.5",
"new_debug_unreachable",
"once_cell",
"phf",
@ -411,6 +575,24 @@ dependencies = [
"syn",
]
[[package]]
name = "itertools"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.15"
@ -451,6 +633,40 @@ version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "logos"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab6f536c1af4c7cc81edf73da1f8029896e7e1e16a219ef09b184e76a296f3db"
dependencies = [
"logos-derive",
]
[[package]]
name = "logos-codegen"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "189bbfd0b61330abea797e5e9276408f2edbe4f822d7ad08685d67419aafb34e"
dependencies = [
"beef",
"fnv",
"lazy_static",
"proc-macro2",
"quote",
"regex-syntax 0.8.5",
"rustc_version",
"syn",
]
[[package]]
name = "logos-derive"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebfe8e1a19049ddbfccbd14ac834b215e11b85b90bab0c2dba7c7b92fb5d5cba"
dependencies = [
"logos-codegen",
]
[[package]]
name = "memchr"
version = "2.7.5"
@ -498,6 +714,12 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "oorandom"
version = "11.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"
[[package]]
name = "outref"
version = "0.5.2"
@ -510,50 +732,6 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323"
dependencies = [
"memchr",
"thiserror",
"ucd-trie",
]
[[package]]
name = "pest_derive"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb056d9e8ea77922845ec74a1c4e8fb17e7c218cc4fc11a15c5d25e189aa40bc"
dependencies = [
"pest",
"pest_generator",
]
[[package]]
name = "pest_generator"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87e404e638f781eb3202dc82db6760c8ae8a1eeef7fb3fa8264b2ef280504966"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pest_meta"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edd1101f170f5903fde0914f899bb503d9ff5271d7ba76bbb70bea63690cc0d5"
dependencies = [
"pest",
"sha2",
]
[[package]]
name = "phf"
version = "0.11.3"
@ -602,6 +780,34 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
[[package]]
name = "plotters"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747"
dependencies = [
"num-traits",
"plotters-backend",
"plotters-svg",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "plotters-backend"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a"
[[package]]
name = "plotters-svg"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670"
dependencies = [
"plotters-backend",
]
[[package]]
name = "potential_utf"
version = "0.1.2"
@ -620,6 +826,15 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "psm"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e944464ec8536cd1beb0bbfd96987eb5e3b72f2ecdafdc5c769a37f1fa2ae1f"
dependencies = [
"cc",
]
[[package]]
name = "ptr_meta"
version = "0.3.0"
@ -670,6 +885,26 @@ version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
[[package]]
name = "rayon"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "regex"
version = "1.11.1"
@ -678,8 +913,19 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-automata"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.7.5",
]
[[package]]
@ -690,9 +936,15 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-syntax"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
[[package]]
name = "regex-syntax"
version = "0.8.5"
@ -705,6 +957,15 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustc_version"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
dependencies = [
"semver",
]
[[package]]
name = "rustversion"
version = "1.0.21"
@ -723,12 +984,27 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd29631678d6fb0903b69223673e122c32e9ae559d0960a38d574695ebc0ea15"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "scoped-tls"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
[[package]]
name = "semver"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
[[package]]
name = "serde"
version = "1.0.219"
@ -762,15 +1038,10 @@ dependencies = [
]
[[package]]
name = "sha2"
version = "0.10.9"
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "siphasher"
@ -795,9 +1066,11 @@ name = "solace"
version = "0.1.0"
dependencies = [
"anyhow",
"ariadne",
"chumsky",
"criterion",
"lazy_static",
"pest",
"pest_derive",
"logos",
"swc_common",
"swc_ecma_ast",
"swc_ecma_codegen",
@ -810,6 +1083,19 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "stacker"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cddb07e32ddb770749da91081d8d0ac3a16f1a569a18b20348cd371f5dead06b"
dependencies = [
"cc",
"cfg-if",
"libc",
"psm",
"windows-sys",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
@ -836,7 +1122,7 @@ checksum = "cc6b926f0d94bbb34031fe5449428cfa1268cdc0b31158d6ad9c97e0fc1e79dd"
dependencies = [
"allocator-api2",
"bumpalo",
"hashbrown",
"hashbrown 0.14.5",
"ptr_meta",
"rustc-hash",
"triomphe",
@ -1047,6 +1333,16 @@ dependencies = [
"zerovec",
]
[[package]]
name = "tinytemplate"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
dependencies = [
"serde",
"serde_json",
]
[[package]]
name = "tracing"
version = "0.1.41"
@ -1088,18 +1384,6 @@ dependencies = [
"stable_deref_trait",
]
[[package]]
name = "typenum"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
[[package]]
name = "ucd-trie"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
[[package]]
name = "unicode-id-start"
version = "1.3.1"
@ -1112,6 +1396,12 @@ version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-width"
version = "0.1.14"
@ -1157,6 +1447,16 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64"
[[package]]
name = "walkdir"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
"same-file",
"winapi-util",
]
[[package]]
name = "wasm-bindgen"
version = "0.2.100"
@ -1215,6 +1515,98 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "web-sys"
version = "0.3.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "winapi-util"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "writeable"
version = "0.6.1"
@ -1230,6 +1622,12 @@ dependencies = [
"tap",
]
[[package]]
name = "yansi"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]]
name = "yoke"
version = "0.8.0"

View file

@ -5,10 +5,26 @@ edition = "2024"
[dependencies]
anyhow = "1.0.98"
ariadne = "0.5.1"
chumsky = "0.10.1"
lazy_static = "1.5.0"
pest = "2.8.1"
pest_derive = "2.8.1"
logos = "0.15.0"
swc_common = "13.0.2"
swc_ecma_ast = "13.0.0"
swc_ecma_codegen = "15.0.1"
thiserror = "2.0.12"
[dev-dependencies]
criterion = "0.6.0"
[lib]
name = "solace"
path = "src/lib.rs"
[[bin]]
name = "solace"
path = "src/main.rs"
[[bench]]
name = "lexer"
harness = false
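Not part of the diff: with harness = false, Cargo skips the built-in libtest harness for this bench target, so the criterion_main! macro in benches/lexer.rs (shown below) supplies fn main. The benchmark should then be runnable with something like:

cargo bench --bench lexer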

46
benches/lexer.rs Normal file
View file

@ -0,0 +1,46 @@
use criterion::{Criterion, criterion_group, criterion_main};
use logos::Logos;
use solace::lexer::Token;
use std::hint::black_box;
pub fn criterion_benchmark(c: &mut Criterion) {
let input = "
const NUMBER = 1_000_000;
const HEX_NUMBER = 0x7C1;
const BIN_NUMBER = 0b10111;
const OCT_NUMBER = 0o27;
const INVALID_NUMBER = 0o28;
const MORE_TOKENS = \"More tokens to fill the 100 Tokens!\";
fn easterEgg() -> (output: string) {
/*
* Someone found the easter egg!
* Lets celebrate that with a comment!
*/
output = \"Yeah, you found the easter egg!\";
}
fn main(args: string[]) -> ArgumentError!string {
if args.length <= 2 {
return Err(\"Not enough Arguments\", ArgumentError);
}
return match args.length {
3 => \"This is actually just one argument\",
4 => \"Two arguments. Good!\",
NUMBER => easterEgg(),
_ => \"You're overdoing it... maybe?\"
}
}
";
c.bench_function("Lexer", |b| {
b.iter(|| {
let mut lexer = Token::lexer(black_box(input));
while let Some(_) = lexer.next() {}
})
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

37
sample.nrs Normal file
View file

@ -0,0 +1,37 @@
// Run this example with `cargo run --example nano_rust -- examples/sample.nrs`
// Feel free to play around with this sample to see what errors you can generate!
// Spans are propagated to the interpreted AST so you can even invoke runtime
// errors and still have an error message that points to source code emitted!
fn mul(x, y) {
x * y
}
// Calculate the factorial of a number
fn factorial(x) {
// Conditionals are supported!
if x == 0 {
1
} else {
mul(x, factorial(x - 1))
}
}
// The main function
fn main() {
let three = 3;
let meaning_of_life = three * 14 + 1;
print("Hello, world!");
print("The meaning of life is...");
if meaning_of_life == 42 {
print(meaning_of_life);
} else {
print("...something we cannot know");
print("However, I can tell you that the factorial of 10 is...");
// Function calling
print(factorial(10));
}
}

4
sample2.nrs Normal file
View file

@ -0,0 +1,4 @@
fn mul(x, y) {
let a = x*2;
a * y
}

View file

@ -1,35 +1,24 @@
use swc_common::{SourceMap, sync::Lrc};
use swc_ecma_ast::Module;
use swc_ecma_codegen::Config;
// use swc_ecma_codegen::{text_writer::JsWriter, Config, Emitter};
// use swc_common::{sync::Lrc, SourceMap};
use thiserror::Error;
use swc_ecma_codegen::{Config, Emitter, text_writer::JsWriter};
#[derive(Error, Debug)]
pub enum EmitError {
#[error("Failed to emit JavaScript: {0}")]
EmitFailed(String),
}
pub struct JsEmitter {
config: Config,
}
pub struct JsEmitter;
impl JsEmitter {
pub fn new() -> Self {
Self {
config: Config::default(),
}
Self
}
pub fn emit(&self, ast: Module) -> (String, Lrc<SourceMap>) {
let sm = Lrc::new(SourceMap::default());
let mut buf = vec![];
let mut emitter = Emitter {
cfg: Config::default(),
cm: sm.clone(),
comments: None,
wr: JsWriter::new(sm.clone(), "\n", &mut buf, None),
};
pub fn with_minify(mut self) -> Self {
self.config.minify = true;
self
}
pub fn emit(&self, _module: Module) -> Result<String, EmitError> {
// SWC codegen here
return Err(EmitError::EmitFailed(
"Emitter not yet implemented!".to_owned(),
));
emitter.emit_module(&ast).unwrap();
(String::from_utf8(buf).unwrap(), sm)
}
}
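Not part of the diff: a minimal usage sketch for the reworked emitter, assuming a JsEmitter value is in scope and that an empty swc module is acceptable input (Module and DUMMY_SP come from swc_ecma_ast and swc_common).

use swc_common::DUMMY_SP;
use swc_ecma_ast::Module;

fn emit_empty(emitter: &JsEmitter) -> String {
    // With no module items, the emitter should produce an empty (or whitespace-only) string.
    let module = Module { span: DUMMY_SP, body: vec![], shebang: None };
    let (js_code, _source_map) = emitter.emit(module);
    js_code
}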

331
src/lexer.rs Normal file
View file

@ -0,0 +1,331 @@
use logos::Logos;
fn parse_radix(s: &str, radix: u32) -> f64 {
let s = s.replace('_', "");
let (sign, num) = if s.starts_with('-') {
(-1.0, &s[3..]) // skip "-0x", "-0b" or "-0o"
} else {
(1.0, &s[2..])
};
let value = u64::from_str_radix(num, radix).unwrap();
sign * value as f64
}
fn parse_number(s: &str) -> f64 {
let s = s.replace('_', "");
s.parse::<f64>().unwrap()
}
#[derive(Logos, Clone, Debug, PartialEq)]
// #[logos(extras = (u32, u32))]
#[logos(skip r"\s+")]
pub enum Token<'src> {
#[regex(r"-?0[xX][0-9a-fA-F_]+", |lex| parse_radix(lex.slice(), 16))]
#[regex(r"-?0[bB][01_]+", |lex| parse_radix(lex.slice(), 2))]
#[regex(r"-?0[oO][0-7_]+", |lex| parse_radix(lex.slice(), 8))]
#[regex(r"-?(?:0|[1-9][0-9_]*)(?:\.\d+)?(?:[eE][+-]?\d+)?", |lex| parse_number(lex.slice()))]
Number(f64),
#[token("NaN")]
NaN,
#[regex(r#"("[^"\\\x00-\x1F]*(?:\\.[^"\\\x00-\x1F]*)*")|('[^'\\\x00-\x1F]*(?:\\.[^'\\\x00-\x1F]*)*')"#,
|lex| { let slice = lex.slice(); slice[1..slice.len()-1].to_owned() })]
String(String), // "string" or 'string'
#[token("undefined")]
Undefined, // undefined (value not initialized or not existing)
#[token("None")]
None, // none - optional with no value
#[token("Some")]
Some, // Some(value) - optional with value
#[token("Err")]
Err, // Err(Error) - result with error
#[token("Ok")]
Ok, // Ok(Value) - result with value
#[token("false", |_| false)]
#[token("true", |_| true)]
Bool(bool),
#[token("fn")]
Fn, // keyword for functions
#[token("var")]
Var, // variable
#[token("let")]
Let, // synonymous to var
#[token("const")]
Const, // constants
#[token("live")]
Live, // live variables / signals
#[token("if")]
If,
#[token("else")]
Else,
#[token("match")]
Match,
#[token("for")]
For,
#[token("while")]
While,
#[token("return")]
Return,
// Range and other multi char operators
#[token("..=")]
RangeIncl,
#[token("..<")]
RangeExcl,
#[token("==")]
Eq,
#[token("!=")]
Ne,
#[token("<=")]
Le,
#[token(">=")]
Ge,
#[token("++")]
Inc,
#[token("--")]
Dec,
#[token("**")]
Pow,
#[token("+=")]
AddEq,
#[token("-=")]
SubEq,
#[token("*=")]
MulEq,
#[token("/=")]
DivEq,
#[token("&&")]
And,
#[token("||")]
Or,
#[token("=>")]
FatArrow,
#[token("->")]
Arrow,
// Single character operators
#[token(".")]
Dot,
#[token("!")]
ExclamationMark,
#[token("?")]
QuestionMark,
#[token("&")]
BAnd,
#[token("|")]
BOr,
#[token("<")]
Lt,
#[token(">")]
Gt,
#[token("=")]
Assign,
#[token(":")]
Colon,
#[token(",")]
Comma,
#[token("+")]
Add,
#[token("-")]
Sub,
#[token("*")]
Mul,
#[token("/")]
Div,
#[token("%")]
Mod,
// Parentheses
#[token("(")]
ParenOpen,
#[token(")")]
ParenClose,
#[token("{")]
BraceOpen,
#[token("}")]
BraceClose,
#[token("[")]
BracketOpen,
#[token("]")]
BracketClose,
#[token("_")]
Default,
#[token(";")]
Semicolon,
#[regex(r"([a-zA-Z$][a-zA-Z0-9_$]*)|(_[a-zA-Z0-9_$]+)")]
Identifier(&'src str), // Identifiers start with letters, _ or $ and can contain numbers
// Comments
#[regex(r"//[^\n]*")]
LineComment(&'src str),
#[regex(r"/\*([^*]|\*[^/])*\*/")]
BlockComment(&'src str),
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_keywords() {
let mut lex = Token::lexer("let var const fn match");
assert_eq!(lex.next(), Some(Ok(Token::Let)));
assert_eq!(lex.next(), Some(Ok(Token::Var)));
assert_eq!(lex.next(), Some(Ok(Token::Const)));
assert_eq!(lex.next(), Some(Ok(Token::Fn)));
assert_eq!(lex.next(), Some(Ok(Token::Match)));
}
#[test]
fn test_operators() {
let mut lex = Token::lexer("** * == += + =");
assert_eq!(lex.next(), Some(Ok(Token::Pow)));
assert_eq!(lex.next(), Some(Ok(Token::Mul)));
assert_eq!(lex.next(), Some(Ok(Token::Eq)));
assert_eq!(lex.next(), Some(Ok(Token::AddEq)));
assert_eq!(lex.next(), Some(Ok(Token::Add)));
assert_eq!(lex.next(), Some(Ok(Token::Assign)));
}
#[test]
fn test_declaration() {
let mut lex = Token::lexer("const foo = 42;");
assert_eq!(lex.next(), Some(Ok(Token::Const)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("foo"))));
assert_eq!(lex.next(), Some(Ok(Token::Assign)));
assert_eq!(lex.next(), Some(Ok(Token::Number(42.0))));
assert_eq!(lex.next(), Some(Ok(Token::Semicolon)));
}
#[test]
fn test_numbers() {
let mut lex = Token::lexer("42 * -0.2 + 4e3 - 0xFF / 0b1010 + 1_000_000;");
assert_eq!(lex.next(), Some(Ok(Token::Number(42.0))));
assert_eq!(lex.next(), Some(Ok(Token::Mul)));
assert_eq!(lex.next(), Some(Ok(Token::Number(-0.2))));
assert_eq!(lex.next(), Some(Ok(Token::Add)));
assert_eq!(lex.next(), Some(Ok(Token::Number(4000.0))));
assert_eq!(lex.next(), Some(Ok(Token::Sub)));
assert_eq!(lex.next(), Some(Ok(Token::Number(255.0))));
assert_eq!(lex.next(), Some(Ok(Token::Div)));
assert_eq!(lex.next(), Some(Ok(Token::Number(10.0))));
assert_eq!(lex.next(), Some(Ok(Token::Add)));
assert_eq!(lex.next(), Some(Ok(Token::Number(1000000.0))));
assert_eq!(lex.next(), Some(Ok(Token::Semicolon)));
}
#[test]
fn test_strings() {
// let mut lex = Token::lexer("\"Foo\" 'Single' 'Sin\\'Esq\\'gle'");
let mut lex = Token::lexer("\"Double\" 'Single' \"With Spaces?\"");
assert_eq!(lex.next(), Some(Ok(Token::String("Double".to_owned()))));
assert_eq!(lex.next(), Some(Ok(Token::String("Single".to_owned()))));
assert_eq!(
lex.next(),
Some(Ok(Token::String("With Spaces?".to_owned())))
);
// TODO: test strings with escaped quotes
}
#[test]
fn test_full_syntax_example() {
let mut lex = Token::lexer(
"
fn main(args: string[]) -> ArgumentError!string {
if args.length <= 2 {
return Err(\"Not enough Arguments\", ArgumentError);
}
return match args.length {
3 => \"This is actually just one argument\",
4 => \"Two arguments. Good!\",
_ => \"You're overdoing it!\"
}
}
",
);
// FIRST LINE
assert_eq!(lex.next(), Some(Ok(Token::Fn)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("main"))));
assert_eq!(lex.next(), Some(Ok(Token::ParenOpen)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("args"))));
assert_eq!(lex.next(), Some(Ok(Token::Colon)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("string"))));
assert_eq!(lex.next(), Some(Ok(Token::BracketOpen)));
assert_eq!(lex.next(), Some(Ok(Token::BracketClose)));
assert_eq!(lex.next(), Some(Ok(Token::ParenClose)));
assert_eq!(lex.next(), Some(Ok(Token::Arrow)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("ArgumentError"))));
assert_eq!(lex.next(), Some(Ok(Token::ExclamationMark)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("string"))));
assert_eq!(lex.next(), Some(Ok(Token::BraceOpen)));
// SECOND LINE
assert_eq!(lex.next(), Some(Ok(Token::If)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("args"))));
assert_eq!(lex.next(), Some(Ok(Token::Dot)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("length"))));
assert_eq!(lex.next(), Some(Ok(Token::Le)));
assert_eq!(lex.next(), Some(Ok(Token::Number(2.0))));
assert_eq!(lex.next(), Some(Ok(Token::BraceOpen)));
// THIRD LINE
assert_eq!(lex.next(), Some(Ok(Token::Return)));
assert_eq!(lex.next(), Some(Ok(Token::Err)));
assert_eq!(lex.next(), Some(Ok(Token::ParenOpen)));
assert_eq!(
lex.next(),
Some(Ok(Token::String("Not enough Arguments".to_owned())))
);
assert_eq!(lex.next(), Some(Ok(Token::Comma)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("ArgumentError"))));
assert_eq!(lex.next(), Some(Ok(Token::ParenClose)));
assert_eq!(lex.next(), Some(Ok(Token::Semicolon)));
// FOURTH LINE
assert_eq!(lex.next(), Some(Ok(Token::BraceClose)));
// FIFTH LINE
assert_eq!(lex.next(), Some(Ok(Token::Return)));
assert_eq!(lex.next(), Some(Ok(Token::Match)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("args"))));
assert_eq!(lex.next(), Some(Ok(Token::Dot)));
assert_eq!(lex.next(), Some(Ok(Token::Identifier("length"))));
assert_eq!(lex.next(), Some(Ok(Token::BraceOpen)));
// SIXTH LINE
assert_eq!(lex.next(), Some(Ok(Token::Number(3.0))));
assert_eq!(lex.next(), Some(Ok(Token::FatArrow)));
assert_eq!(
lex.next(),
Some(Ok(Token::String(
"This is actually just one argument".to_owned()
)))
);
assert_eq!(lex.next(), Some(Ok(Token::Comma)));
// SEVENTH LINE
assert_eq!(lex.next(), Some(Ok(Token::Number(4.0))));
assert_eq!(lex.next(), Some(Ok(Token::FatArrow)));
assert_eq!(
lex.next(),
Some(Ok(Token::String("Two arguments. Good!".to_owned())))
);
assert_eq!(lex.next(), Some(Ok(Token::Comma)));
// EIGHTH LINE
assert_eq!(lex.next(), Some(Ok(Token::Default)));
assert_eq!(lex.next(), Some(Ok(Token::FatArrow)));
assert_eq!(
lex.next(),
Some(Ok(Token::String("You're overdoing it!".to_owned())))
);
// NINTH AND TENTH LINE
assert_eq!(lex.next(), Some(Ok(Token::BraceClose)));
assert_eq!(lex.next(), Some(Ok(Token::BraceClose)));
}
}
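Not part of the diff: a small sketch of span-aware lexing with this Token type, assuming logos' spanned() iterator and the derive's default () error type; byte spans like these are what ariadne-style diagnostics would consume.

use logos::Logos;
use solace::lexer::Token;

fn dump_tokens(src: &str) {
    // spanned() yields (Result<Token, ()>, byte-range) pairs.
    for (tok, span) in Token::lexer(src).spanned() {
        match tok {
            Ok(t) => println!("{:?} @ {:?}", t, span),
            Err(()) => eprintln!("unrecognized input at bytes {:?}", span),
        }
    }
}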

1
src/lib.rs Normal file
View file

@ -0,0 +1 @@
pub mod lexer;

View file

@ -1,6 +1,7 @@
mod emitter;
mod lexer;
mod parser;
mod transformer;
// mod emitter;
// mod transformer;
use anyhow::Result;
use std::{env, fs};
@ -22,19 +23,21 @@ fn main() -> Result<()> {
let file_path = &args[1];
let input = fs::read_to_string(file_path).expect(&format!("Cannot read file '{}'!", file_path));
/*
// Parse Solace Code
let ast = parser::parse(&input, /* debug */ true)?;
if let Some((ast, span)) = parser::parse(file_path.to_string(), &input, false) {
// Transform from Solace AST to SWC AST
let js_transformer = transformer::js::JsTransformer::new();
let js_ast = js_transformer.transform(ast, span);
// Transform from Solace AST to SWC AST
let js_transformer = transformer::js::JsTransformer::new();
let js_ast = js_transformer.transform(ast);
// Emit JavaScript
let js_emitter = emitter::js::JsEmitter::new();
let (js_code, _source_map) = js_emitter.emit(js_ast);
// Emit JavaScript
let js_emitter = emitter::js::JsEmitter::new();
let js_code = js_emitter.emit(js_ast)?;
// Write Output to stdout
println!("{}", js_code);
// Write Output to stdout
println!("{}", js_code);
}
*/
Ok(())
}

View file

@ -1,182 +0,0 @@
use std::fs;
use pest::Parser;
use pest::pratt_parser::{Op, Assoc, PrattParser};
use lazy_static::lazy_static;
use pest_derive::Parser;
#[derive(Parser)]
#[grammar = "solace.pest"]
pub struct SolaceParser;
lazy_static! {
static ref PRATT_PARSER: PrattParser<Rule> = {
use Rule::*;
use Assoc::Left;
PrattParser::new()
// Logical OR (lowest precedence)
.op(Op::infix(or, Left))
// Logical AND
.op(Op::infix(and, Left))
// Equality
.op(Op::infix(eq, Left) | Op::infix(neq, Left))
// Comparison
.op(Op::infix(lt, Left) | Op::infix(gt, Left) |
Op::infix(lte, Left) | Op::infix(gte, Left))
// Range operators
.op(Op::infix(range_inclusive, Left) | Op::infix(range_exclusive, Left))
// Additive
.op(Op::infix(add, Left) | Op::infix(subtract, Left))
// Multiplicative
.op(Op::infix(multiply, Left) | Op::infix(divide, Left) | Op::infix(modulo, Left))
// Unary operators
.op(Op::prefix(not) | Op::prefix(negate))
// Member access (highest precedence)
.op(Op::postfix(member_access))
// Function call (same precedence as member access)
.op(Op::postfix(call))
};
}
pub fn parse_expr(pair: pest::iterators::Pair<Rule>) -> Expr {
PRATT_PARSER
.map_primary(|primary| match primary.as_rule() {
Rule::number => Expr::Number(primary.as_str().parse().unwrap()),
Rule::string => Expr::String(primary.as_str().to_string()),
Rule::boolean => Expr::Boolean(primary.as_str() == "true"),
Rule::identifier => Expr::Ident(primary.as_str().to_string()),
Rule::array_literal => Expr::Array(parse_array(primary)),
Rule::struct_literal => Expr::Struct(parse_struct(primary)),
Rule::expr => parse_expr(primary),
_ => unreachable!("Unexpected primary expression: {:?}", primary),
})
.map_infix(|lhs, op, rhs| {
let op = match op.as_rule() {
Rule::add => BinOp::Add,
Rule::subtract => BinOp::Sub,
Rule::multiply => BinOp::Mul,
Rule::divide => BinOp::Div,
Rule::modulo => BinOp::Mod,
Rule::eq => BinOp::Eq,
Rule::neq => BinOp::Neq,
Rule::lt => BinOp::Lt,
Rule::gt => BinOp::Gt,
Rule::lte => BinOp::Lte,
Rule::gte => BinOp::Gte,
Rule::and => BinOp::And,
Rule::or => BinOp::Or,
Rule::range_inclusive => BinOp::RangeInclusive,
Rule::range_exclusive => BinOp::RangeExclusive,
_ => unreachable!(),
};
Expr::Binary(Box::new(lhs), op, Box::new(rhs))
})
.map_prefix(|op, rhs| {
let op = match op.as_rule() {
Rule::negate => UnOp::Neg,
Rule::not => UnOp::Not,
_ => unreachable!(),
};
Expr::Unary(op, Box::new(rhs))
})
.map_postfix(|lhs, op| {
match op.as_rule() {
Rule::field_access => {
let field = op.into_inner().next().unwrap();
Expr::FieldAccess(Box::new(lhs), field.as_str().to_string())
}
Rule::call => {
let args = op.into_inner()
.map(parse_expr)
.collect();
Expr::Call(Box::new(lhs), args)
}
_ => unreachable!(),
}
})
.parse(pair.into_inner())
}
// Example AST types (simplified)
#[derive(Debug)]
pub enum Expr {
Number(f64),
String(String),
Boolean(bool),
Ident(String),
Array(Vec<Expr>),
Struct(Vec<(String, Expr)>),
Binary(Box<Expr>, BinOp, Box<Expr>),
Unary(UnOp, Box<Expr>),
FieldAccess(Box<Expr>, String),
Call(Box<Expr>, Vec<Expr>),
}
#[derive(Debug)]
pub enum BinOp {
Add, Sub, Mul, Div, Mod,
Eq, Neq, Lt, Gt, Lte, Gte,
And, Or,
RangeInclusive, RangeExclusive,
}
#[derive(Debug)]
pub enum UnOp {
Neg, Not,
}
// Helper functions
fn parse_array(pair: pest::iterators::Pair<Rule>) -> Vec<Expr> {
pair.into_inner()
.filter(|p| p.as_rule() == Rule::expr)
.map(parse_expr)
.collect()
}
fn parse_struct(pair: pest::iterators::Pair<Rule>) -> Vec<(String, Expr)> {
pair.into_inner()
.filter(|p| p.as_rule() == Rule::struct_field)
.map(|f| {
let mut inner = f.into_inner();
let name = inner.next().unwrap().as_str().to_string();
let value = parse_expr(inner.next().unwrap());
(name, value)
})
.collect()
}
fn main() {
let unparsed_file = fs::read_to_string("test.solace").expect("Cannot read test file");
match SolaceParser::parse(Rule::program, &unparsed_file) {
Ok(mut pairs) => {
let program = pairs.next().unwrap();
println!("Parsing was successful.");
print_parse_tree(program, 0);
}
Err(err) => {
println!("Parse error: {}", err);
}
}
}
/* Example usage
pub fn parse(input: &str) -> Result<Vec<Expr>, pest::error::Error<Rule>> {
let pairs = SolaceParser::parse(Rule::program, input)?;
let exprs = pairs
.filter(|p| p.as_rule() == Rule::expr)
.map(parse_expr)
.collect();
Ok(exprs)
}
*/
fn print_parse_tree(pair: Pair<Rule>, indent: usize) {
let indent_str = " ".repeat(indent);
println!("{}{:?}: \"{}\"", indent_str, pair.as_rule(), pair.as_str());
for inner_pair in pair.into_inner() {
print_parse_tree(inner_pair, indent + 1)
}
}

112
src/parser.rs Normal file
View file

@ -0,0 +1,112 @@
use crate::lexer::Token;
use chumsky::{
input::{Stream, ValueInput},
prelude::*,
};
type Span = SimpleSpan;
type Spanned<T> = (T, Span);
#[derive(Debug, PartialEq)]
pub enum Expr {
Number(f64),
String(String),
Bool(bool),
Undefined,
Some(Box<Expr>),
None,
Ok(Box<Expr>),
Err(Box<Expr>),
}
/// parse primary tokens, like numbers, strings or booleans
pub fn primary<'tokens, 'src: 'tokens, I>()
-> impl Parser<'tokens, I, Expr, extra::Err<Rich<'tokens, Token<'src>>>>
where
I: ValueInput<'tokens, Token = Token<'src>, Span = SimpleSpan>,
{
select! {
Token::Number(n) => Expr::Number(n),
Token::String(s) => Expr::String(s),
Token::Undefined => Expr::Undefined,
Token::None => Expr::None,
Token::Bool(b) => Expr::Bool(b),
}
}
fn parenthesized<'tokens, 'src: 'tokens, I>(
inner: impl Parser<'tokens, I, Expr, extra::Err<Rich<'tokens, Token<'src>>>>,
) -> impl Parser<'tokens, I, Expr, extra::Err<Rich<'tokens, Token<'src>>>>
where
I: ValueInput<'tokens, Token = Token<'src>, Span = SimpleSpan>,
{
just(Token::ParenOpen)
.ignore_then(inner)
.then_ignore(just(Token::ParenClose))
}
fn constructor<'tokens, 'src: 'tokens, I>(
keyword: Token<'src>,
constructor_fn: impl Fn(Expr) -> Expr,
) -> impl Parser<'tokens, I, Expr, extra::Err<Rich<'tokens, Token<'src>>>>
where
I: ValueInput<'tokens, Token = Token<'src>, Span = SimpleSpan>,
{
just(keyword)
.ignore_then(parenthesized(primary()))
.map(constructor_fn)
}
pub fn constructors<'tokens, 'src: 'tokens, I>()
-> impl Parser<'tokens, I, Expr, extra::Err<Rich<'tokens, Token<'src>>>>
where
I: ValueInput<'tokens, Token = Token<'src>, Span = SimpleSpan>,
{
constructor(Token::Some, |inner| Expr::Some(Box::new(inner)))
.or(constructor(Token::Ok, |inner| Expr::Ok(Box::new(inner))))
.or(constructor(Token::Err, |inner| Expr::Err(Box::new(inner))))
}
#[cfg(test)]
mod tests {
use super::*;
use logos::Logos;
#[test]
fn test_parse_single_number() {
let tokens: Vec<_> = Token::lexer("42").map(|tok| tok.unwrap()).collect();
let result = primary().parse(Stream::from_iter(tokens)).into_result();
assert_eq!(result, Ok(Expr::Number(42.0)));
}
#[test]
fn test_parse_single_string() {
let tokens: Vec<_> = Token::lexer("\"Foo\"").map(|tok| tok.unwrap()).collect();
let result = primary().parse(Stream::from_iter(tokens)).into_result();
assert_eq!(result, Ok(Expr::String("Foo".to_owned())));
}
#[test]
fn test_constructors() {
let tokens_some: Vec<_> = Token::lexer("Some(23)").map(|tok| tok.unwrap()).collect();
let result_some = constructors()
.parse(Stream::from_iter(tokens_some))
.into_result();
assert_eq!(result_some, Ok(Expr::Some(Box::new(Expr::Number(23.0)))));
let tokens_ok: Vec<_> = Token::lexer("Ok(\"foo\")")
.map(|tok| tok.unwrap())
.collect();
let result_ok = constructors()
.parse(Stream::from_iter(tokens_ok))
.into_result();
assert_eq!(
result_ok,
Ok(Expr::Ok(Box::new(Expr::String("foo".to_owned()))))
);
let tokens_err: Vec<_> = Token::lexer("Err(None)").map(|tok| tok.unwrap()).collect();
let result_err = constructors()
.parse(Stream::from_iter(tokens_err))
.into_result();
assert_eq!(result_err, Ok(Expr::Err(Box::new(Expr::None))));
}
}
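Not part of the diff: a sketch of how the logos lexer and the chumsky parser compose end to end, written as if it sat next to the tests above in src/parser.rs (the parser module is not re-exported from lib.rs) and following the same pattern the tests use; lex errors are simply unwrapped for brevity.

use crate::lexer::Token;
use chumsky::{input::Stream, prelude::*};
use logos::Logos;

fn lex_and_parse(src: &str) -> Expr {
    // Lex with logos, then feed the token stream to the chumsky parsers defined above.
    let tokens: Vec<_> = Token::lexer(src).map(|tok| tok.unwrap()).collect();
    constructors()
        .or(primary())
        .parse(Stream::from_iter(tokens))
        .into_result()
        .unwrap()
}

// lex_and_parse("Some(23)") should produce Expr::Some(Box::new(Expr::Number(23.0))).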

View file

@ -1,612 +0,0 @@
use super::{ParseError, Rule};
use pest::iterators::{Pair, Pairs};
use pest::pratt_parser::{Assoc, Op, PrattParser};
#[derive(Debug, Clone)]
pub enum Expr {
//Empty
Empty,
// Literals
Number(f64),
String(String),
Boolean(bool),
None,
Undefined,
Underscore,
// Variables and calls
Identifier(String),
MemberAccess(Box<Expr>, String),
Index(Box<Expr>, Box<Expr>),
Call(Box<Expr>, Vec<Expr>),
// Operators
Binary(BinaryOp, Box<Expr>, Box<Expr>),
Unary(UnaryOp, Box<Expr>),
Ternary(Box<Expr>, Box<Expr>, Box<Expr>),
Assignment(String, Box<Expr>),
// Control flow
If(Box<Expr>, Box<Block>, Option<Box<Block>>),
Match(Option<Box<Expr>>, Vec<MatchArm>),
// Collections
Array(Vec<Expr>),
// Postfix
PostIncrement(Box<Expr>),
PostDecrement(Box<Expr>),
}
#[derive(Debug, Clone)]
pub enum BinaryOp {
Add,
Sub,
Mul,
Div,
Mod,
Eq,
Ne,
Lt,
Gt,
Le,
Ge,
And,
Or,
RangeInclusive,
RangeExclusive,
Is,
}
#[derive(Debug, Clone)]
pub enum UnaryOp {
Not,
Neg,
PreIncrement,
PreDecrement,
}
#[derive(Debug, Clone)]
pub struct MatchArm {
pattern: MatchPattern,
body: Expr,
}
#[derive(Debug, Clone)]
pub enum MatchPattern {
Wildcard,
Expression(Expr),
Condition(Vec<(BinaryOp, Expr)>),
}
#[derive(Debug, Clone)]
pub struct Block {
statements: Vec<Statement>,
}
#[derive(Debug, Clone)]
pub enum ReturnType {
Simple(Type),
Named {
name: String,
type_annotation: Type,
default_value: Option<Expr>,
},
}
#[derive(Debug, Clone)]
pub enum Statement {
Import {
name: String,
from: String,
},
Function {
name: String,
params: Vec<Param>,
return_type: Option<ReturnType>,
extends: Option<String>,
body: Block,
},
Variable {
kind: VarKind,
name: String,
type_annotation: Option<Type>,
value: Expr,
},
Defer {
condition: Option<Expr>,
binding: Option<String>,
body: Block,
},
Watch {
target: String,
body: Block,
},
Return(Option<Expr>),
If(Expr, Box<Statement>, Option<Box<Statement>>),
For {
var: String,
index: Option<String>,
iterable: Expr,
body: Box<Statement>,
},
While(Expr, Box<Statement>),
Expression(Expr),
Block(Block),
}
#[derive(Debug, Clone)]
pub enum VarKind {
Const,
Var,
Live,
}
#[derive(Debug, Clone)]
pub struct Param {
name: String,
type_annotation: Option<Type>,
}
#[derive(Debug, Clone)]
pub enum Type {
Primitive(String),
Array(Box<Type>),
Optional(Box<Type>),
ErrorUnion(Option<String>, Box<Type>),
Named(String),
}
pub struct SolacePrattParser {
pratt: PrattParser<Rule>,
}
impl SolacePrattParser {
pub fn new() -> Self {
let pratt = PrattParser::new()
.op(Op::infix(Rule::assign, Assoc::Right))
.op(Op::infix(Rule::question, Assoc::Right) | Op::infix(Rule::colon, Assoc::Right))
.op(Op::infix(Rule::or, Assoc::Left))
.op(Op::infix(Rule::and, Assoc::Left))
.op(Op::infix(Rule::eq, Assoc::Left)
| Op::infix(Rule::ne, Assoc::Left)
| Op::infix(Rule::is_kw, Assoc::Left))
.op(Op::infix(Rule::lt, Assoc::Left)
| Op::infix(Rule::gt, Assoc::Left)
| Op::infix(Rule::le, Assoc::Left)
| Op::infix(Rule::ge, Assoc::Left))
.op(Op::infix(Rule::range_inclusive, Assoc::Left)
| Op::infix(Rule::range_exclusive, Assoc::Left))
.op(Op::infix(Rule::plus, Assoc::Left) | Op::infix(Rule::minus, Assoc::Left))
.op(Op::infix(Rule::multiply, Assoc::Left)
| Op::infix(Rule::divide, Assoc::Left)
| Op::infix(Rule::modulo, Assoc::Left))
.op(Op::prefix(Rule::not)
| Op::prefix(Rule::minus)
| Op::prefix(Rule::increment)
| Op::prefix(Rule::decrement))
.op(Op::postfix(Rule::increment) | Op::postfix(Rule::decrement));
SolacePrattParser { pratt }
}
pub fn parse_expr(&self, pairs: Pairs<Rule>) -> Result<Expr, ParseError> {
if pairs.clone().count() == 0 {
return Ok(Expr::Empty);
}
self.pratt
.map_primary(|primary| self.parse_primary(primary))
.map_infix(|lhs, op, rhs| self.parse_infix(lhs, op, rhs))
.map_prefix(|op, rhs| self.parse_prefix(op, rhs))
.map_postfix(|lhs, op| self.parse_postfix(lhs, op))
.parse(pairs)
}
fn parse_primary(&self, pair: Pair<Rule>) -> Result<Expr, ParseError> {
match pair.as_rule() {
Rule::number_literal => {
let num = pair
.as_str()
.parse::<f64>()
.map_err(|_| ParseError::InvalidNumber(pair.as_rule()))?;
Ok(Expr::Number(num))
}
Rule::string_literal => {
let s = pair.as_str();
Ok(Expr::String(s[1..s.len() - 1].to_string()))
}
Rule::boolean_literal => Ok(Expr::Boolean(pair.as_str() == "true")),
Rule::none_kw => Ok(Expr::None),
Rule::undefined_kw => Ok(Expr::Undefined),
Rule::underscore => Ok(Expr::Underscore),
Rule::identifier => Ok(Expr::Identifier(pair.as_str().to_string())),
Rule::array_literal => {
let mut elements = vec![];
for inner in pair.into_inner() {
if inner.as_rule() == Rule::expression {
elements.push(self.parse_expr(inner.into_inner())?);
}
}
Ok(Expr::Array(elements))
}
Rule::if_expr => self.parse_if_expr(pair),
Rule::match_expr => self.parse_match_expr(pair),
_ => Err(ParseError::UnknownPrimary(pair.as_rule())),
}
}
fn parse_infix(
&self,
lhs: Result<Expr, ParseError>,
op: Pair<Rule>,
rhs: Result<Expr, ParseError>,
) -> Result<Expr, ParseError> {
let lhs = lhs?;
let rhs = rhs?;
match op.as_rule() {
Rule::plus => Ok(Expr::Binary(BinaryOp::Add, Box::new(lhs), Box::new(rhs))),
Rule::minus => Ok(Expr::Binary(BinaryOp::Sub, Box::new(lhs), Box::new(rhs))),
Rule::multiply => Ok(Expr::Binary(BinaryOp::Mul, Box::new(lhs), Box::new(rhs))),
Rule::divide => Ok(Expr::Binary(BinaryOp::Div, Box::new(lhs), Box::new(rhs))),
Rule::modulo => Ok(Expr::Binary(BinaryOp::Mod, Box::new(lhs), Box::new(rhs))),
Rule::eq => Ok(Expr::Binary(BinaryOp::Eq, Box::new(lhs), Box::new(rhs))),
Rule::ne => Ok(Expr::Binary(BinaryOp::Ne, Box::new(lhs), Box::new(rhs))),
Rule::lt => Ok(Expr::Binary(BinaryOp::Lt, Box::new(lhs), Box::new(rhs))),
Rule::gt => Ok(Expr::Binary(BinaryOp::Gt, Box::new(lhs), Box::new(rhs))),
Rule::le => Ok(Expr::Binary(BinaryOp::Le, Box::new(lhs), Box::new(rhs))),
Rule::ge => Ok(Expr::Binary(BinaryOp::Ge, Box::new(lhs), Box::new(rhs))),
Rule::and => Ok(Expr::Binary(BinaryOp::And, Box::new(lhs), Box::new(rhs))),
Rule::or => Ok(Expr::Binary(BinaryOp::Or, Box::new(lhs), Box::new(rhs))),
Rule::is_kw => Ok(Expr::Binary(BinaryOp::Is, Box::new(lhs), Box::new(rhs))),
Rule::range_inclusive => Ok(Expr::Binary(
BinaryOp::RangeInclusive,
Box::new(lhs),
Box::new(rhs),
)),
Rule::range_exclusive => Ok(Expr::Binary(
BinaryOp::RangeExclusive,
Box::new(lhs),
Box::new(rhs),
)),
Rule::assign => {
if let Expr::Identifier(name) = lhs {
Ok(Expr::Assignment(name, Box::new(rhs)))
} else {
Err(ParseError::InvalidLeftHand(lhs))
}
}
Rule::question => {
// Handle ternary - need to parse the rest
// This is simplified - in practice you'd need more complex handling
Ok(Expr::Ternary(
Box::new(lhs),
Box::new(rhs),
Box::new(Expr::None),
))
}
_ => Err(ParseError::UnknownInfixOperator(op.as_rule())),
}
}
fn parse_prefix(
&self,
op: Pair<Rule>,
rhs: Result<Expr, ParseError>,
) -> Result<Expr, ParseError> {
let rhs = rhs?;
match op.as_rule() {
Rule::not => Ok(Expr::Unary(UnaryOp::Not, Box::new(rhs))),
Rule::minus => Ok(Expr::Unary(UnaryOp::Neg, Box::new(rhs))),
Rule::increment => Ok(Expr::Unary(UnaryOp::PreIncrement, Box::new(rhs))),
Rule::decrement => Ok(Expr::Unary(UnaryOp::PreDecrement, Box::new(rhs))),
_ => Err(ParseError::UnknownPrefixOperator(op.as_rule())),
}
}
fn parse_postfix(
&self,
lhs: Result<Expr, ParseError>,
op: Pair<Rule>,
) -> Result<Expr, ParseError> {
let lhs = lhs?;
match op.as_rule() {
Rule::increment => Ok(Expr::PostIncrement(Box::new(lhs))),
Rule::decrement => Ok(Expr::PostDecrement(Box::new(lhs))),
_ => Err(ParseError::UnknownPostfixOperator(op.as_rule())),
}
}
fn parse_if_expr(&self, pair: Pair<Rule>) -> Result<Expr, ParseError> {
let mut inner = pair.into_inner();
let condition = self.parse_expr(inner.next().unwrap().into_inner())?;
let then_block = self.parse_block(inner.next().unwrap())?;
let else_block = inner.next().map(|p| self.parse_block(p)).transpose()?;
Ok(Expr::If(
Box::new(condition),
Box::new(then_block),
else_block.map(Box::new),
))
}
fn parse_match_expr(&self, pair: Pair<Rule>) -> Result<Expr, ParseError> {
let mut inner = pair.into_inner();
let target = if let Some(p) = inner.peek() {
if p.as_rule() == Rule::expression {
Some(Box::new(
self.parse_expr(inner.next().unwrap().into_inner())?,
))
} else {
None
}
} else {
None
};
let mut arms = vec![];
for arm_pair in inner {
if arm_pair.as_rule() == Rule::match_arm {
arms.push(self.parse_match_arm(arm_pair)?);
}
}
Ok(Expr::Match(target, arms))
}
fn parse_match_arm(&self, pair: Pair<Rule>) -> Result<MatchArm, ParseError> {
let mut inner = pair.into_inner();
let pattern = self.parse_match_pattern(inner.next().unwrap())?;
let body = self.parse_expr(inner.next().unwrap().into_inner())?;
Ok(MatchArm { pattern, body })
}
fn parse_match_pattern(&self, pair: Pair<Rule>) -> Result<MatchPattern, ParseError> {
// Simplified pattern parsing
match pair.as_rule() {
Rule::underscore => Ok(MatchPattern::Wildcard),
_ => Ok(MatchPattern::Expression(
self.parse_expr(pair.into_inner())?,
)),
}
}
fn parse_block(&self, pair: Pair<Rule>) -> Result<Block, ParseError> {
let mut statements = vec![];
for stmt in pair.into_inner() {
statements.push(self.parse_statement(stmt)?);
}
Ok(Block { statements })
}
fn parse_statement(&self, pair: Pair<Rule>) -> Result<Statement, ParseError> {
match pair.as_rule() {
Rule::expression_stmt => {
let expr = self.parse_expr(pair.into_inner())?;
Ok(Statement::Expression(expr))
}
Rule::return_stmt => {
let mut inner = pair.into_inner();
let expr = inner
.next()
.map(|p| self.parse_expr(p.into_inner()))
.transpose()?;
Ok(Statement::Return(expr))
}
Rule::function_decl => {
let decl = self.parse_function_decl(pair.into_inner())?;
Ok(decl)
}
// Add other statement parsing here
_ => Err(ParseError::UnknownStatement(pair.as_rule())),
}
}
fn parse_type(&self, pair: Pair<Rule>) -> Result<Type, ParseError> {
match pair.as_rule() {
Rule::type_annotation => {
// Type annotation starts with colon, skip it
let mut inner = pair.into_inner();
inner.next(); // skip colon
self.parse_type_expr(inner.next().unwrap())
}
Rule::type_expr => self.parse_type_expr(pair),
_ => Err(ParseError::ShouldBeType(pair.as_rule())),
}
}
fn parse_type_expr(&self, pair: Pair<Rule>) -> Result<Type, ParseError> {
let inner = pair.into_inner();
let mut current_type = None;
let mut is_optional = false;
let mut error_type = None;
let mut array_depth = 0;
for part in inner {
match part.as_rule() {
Rule::optional_prefix => {
is_optional = true;
}
Rule::error_union_prefix => {
// Could be just "!", which defaults to Error!, or "ErrorType!"
let prefix_inner = part.into_inner();
if let Some(error_name) = prefix_inner.peek() {
error_type = Some(error_name.as_str().to_string());
} else {
error_type = Some("Error".to_string());
}
}
Rule::base_type => {
current_type = Some(self.parse_base_type(part)?);
}
Rule::array_suffix => {
array_depth += 1;
}
_ => {}
}
}
let mut result_type = current_type.ok_or(ParseError::MissingBaseType())?;
// Apply array suffixes
for _ in 0..array_depth {
result_type = Type::Array(Box::new(result_type));
}
// Apply error union
if let Some(err_type) = error_type {
result_type = Type::ErrorUnion(Some(err_type), Box::new(result_type));
}
// Apply optional
if is_optional {
result_type = Type::Optional(Box::new(result_type));
}
Ok(result_type)
}
fn parse_base_type(&self, pair: Pair<Rule>) -> Result<Type, ParseError> {
let mut inner = pair.into_inner();
let type_part = inner.next().unwrap();
match type_part.as_rule() {
Rule::primitive_type => Ok(Type::Primitive(type_part.as_str().to_string())),
Rule::identifier => Ok(Type::Named(type_part.as_str().to_string())),
_ => Err(ParseError::UnknownTypePart(type_part.as_rule())),
}
}
fn parse_param_list(&self, pair: Pair<Rule>) -> Result<Vec<Param>, ParseError> {
let mut params = vec![];
for inner in pair.into_inner() {
if inner.as_rule() == Rule::param {
params.push(self.parse_param(inner)?);
}
}
Ok(params)
}
fn parse_param(&self, pair: Pair<Rule>) -> Result<Param, ParseError> {
let mut inner = pair.into_inner();
let name = inner.next().unwrap().as_str().to_string();
let type_annotation = if let Some(type_pair) = inner.next() {
Some(self.parse_type(type_pair)?)
} else {
None
};
Ok(Param {
name,
type_annotation,
})
}
fn parse_return_type(&self, pair: Pair<Rule>) -> Result<ReturnType, ParseError> {
let mut inner = pair.into_inner();
let wrapped = inner.next().unwrap();
// Skip intermediate rules
let first = match wrapped.as_rule() {
Rule::return_type_simple => wrapped.into_inner().next().unwrap(),
Rule::return_type_named => wrapped.into_inner().next().unwrap(),
_ => wrapped,
};
match first.as_rule() {
// Named return variable: fn foo() => (name: Type = default?) {/*...*/}
Rule::arrow_fat => {
let name = inner.next().unwrap().as_str().to_string();
let type_annotation = self.parse_type(inner.next().unwrap())?;
let default_value = if let Some(expr_pair) = inner.next() {
Some(self.parse_expr(expr_pair.into_inner())?)
} else {
None
};
Ok(ReturnType::Named {
name,
type_annotation,
default_value,
})
}
// Simple return type: fn foo(): Type {/*...*/}
Rule::type_annotation => {
let type_annotation = self.parse_type(first)?;
Ok(ReturnType::Simple(type_annotation))
}
// Brother ewww... whats that?
_ => Err(ParseError::UnknownReturnType(
first.as_str().to_owned(),
first.as_rule(),
)),
}
}
fn parse_function_decl(&self, mut pairs: Pairs<Rule>) -> Result<Statement, ParseError> {
// Skip 'fn' keyword
pairs.next();
let name = pairs.next().unwrap().as_str();
let params = self.parse_param_list(pairs.next().unwrap())?;
let mut extends = None;
let mut return_type = None;
let mut body = None;
while let Some(remaining) = pairs.next() {
match remaining.as_rule() {
Rule::extends_kw => {
extends = Some(pairs.next().unwrap().as_str().to_string());
}
Rule::return_type => {
return_type = Some(self.parse_return_type(remaining)?);
}
Rule::block => {
body = Some(self.parse_block(remaining)?);
}
_ => {}
}
}
Ok(Statement::Function {
name: name.to_owned(),
params,
return_type,
extends,
body: body.ok_or(ParseError::MissingFunctionBody(name.to_owned()))?,
})
}
}
#[derive(Debug, Clone)]
pub struct Program {
pub statements: Vec<Statement>,
}
impl Program {
pub fn from_pairs(pairs: Pairs<Rule>) -> Result<Self, ParseError> {
let parser = SolacePrattParser::new();
let mut statements = Vec::new();
for pair in pairs {
if pair.as_rule() == Rule::program {
for stmt_pair in pair.into_inner() {
if stmt_pair.as_rule() != Rule::EOI {
let stmt = parser.parse_statement(stmt_pair)?;
statements.push(stmt);
}
}
}
}
Ok(Program { statements })
}
}
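
For reference, a minimal sketch (not part of this diff) of the nesting `parse_type` above produces for a composite Solace type, using only the `Type` constructors the builder already relies on:

// Hypothetical helper: expected AST shape for the Solace type `?Error!number[]`
// (array suffix applied first, then the error union, then the optional wrapper).
fn sample_optional_error_array() -> Type {
Type::Optional(Box::new(Type::ErrorUnion(
Some("Error".to_string()),
Box::new(Type::Array(Box::new(Type::Primitive("number".to_string())))),
)))
}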


@ -1,76 +0,0 @@
pub mod ast;
use pest::Parser;
use pest::iterators::Pair;
use pest_derive::Parser;
use thiserror::Error;
#[derive(Parser)]
#[grammar = "solace.pest"]
pub struct SolaceParser;
#[derive(Error, Debug)]
pub enum ParseError {
#[error("Parse error: {0}")]
PestError(#[from] pest::error::Error<Rule>),
#[error("Unexpected rule: {0:?}")]
UnexpectedRule(Rule),
#[error("Statement not implemented: {0:?}")]
UnknownStatement(Rule),
#[error("Unknown keyword, literal or expression: {0:?}")]
UnknownPrimary(Rule),
#[error("Left side of assignment must be an identifier: {0:?}")]
InvalidLeftHand(ast::Expr),
#[error("Unexpected infix operator: {0:?}")]
UnknownInfixOperator(Rule),
#[error("Unexpected prefix operator: {0:?}")]
UnknownPrefixOperator(Rule),
#[error("Unexpected postfix operator: {0:?}")]
UnknownPostfixOperator(Rule),
#[error("Expected type annotation or type expression, got: {0:?}")]
ShouldBeType(Rule),
#[error("Unexpected return type: \"{0}\" ({1:?})")]
UnknownReturnType(String, Rule),
#[error("Unexpected base type: {0:?}")]
UnknownTypePart(Rule),
#[error("Invalid Number: {0:?}")]
InvalidNumber(Rule),
#[error("No base type found")]
MissingBaseType(),
#[error("Function body required: {0}")]
MissingFunctionBody(String),
}
pub fn parse(input: &str, debug: bool) -> Result<ast::Program, ParseError> {
if debug {
let mut debug_pairs = SolaceParser::parse(Rule::program, input)?;
print_parse_tree(debug_pairs.next().unwrap(), 0);
}
let pairs = SolaceParser::parse(Rule::program, input)?;
let program = ast::Program::from_pairs(pairs)?;
Ok(program)
}
fn print_parse_tree(pair: Pair<Rule>, indent: usize) {
let indent_str = " ".repeat(indent);
println!("{}{:?}: \"{}\"", indent_str, pair.as_rule(), pair.as_str());
for inner_pair in pair.into_inner() {
print_parse_tree(inner_pair, indent + 1)
}
}
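
A hedged call-site sketch for the `parse` entry point above; the `demo` wrapper and the Solace snippet are hypothetical:

// Hypothetical usage: parse a small Solace program and list its top-level statements.
fn demo() -> Result<(), ParseError> {
let source = "fn add(a: number, b: number): number { return a + b }";
let program = parse(source, false)?; // pass `true` to also dump the pest parse tree
for stmt in &program.statements {
println!("{stmt:?}");
}
Ok(())
}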


@ -1,236 +0,0 @@
// solace.pest - Pest Grammar for Solace Language (Fixed)
WHITESPACE = _{ " " | "\t" | "\r" | "\n" }
COMMENT = _{ "/*" ~ (!"*/" ~ ANY)* ~ "*/" | "//" ~ (!NEWLINE ~ ANY)* }
NEWLINE = _{ "\n" | "\r\n" }
// Keywords
fn_kw = @{ "fn" ~ !identifier_char }
if_kw = @{ "if" ~ !identifier_char }
else_kw = @{ "else" ~ !identifier_char }
match_kw = @{ "match" ~ !identifier_char }
for_kw = @{ "for" ~ !identifier_char }
while_kw = @{ "while" ~ !identifier_char }
defer_kw = @{ "defer" ~ !identifier_char }
when_kw = @{ "when" ~ !identifier_char }
live_kw = @{ "live" ~ !identifier_char }
watch_kw = @{ "watch" ~ !identifier_char }
return_kw = @{ "return" ~ !identifier_char }
const_kw = @{ "const" ~ !identifier_char }
var_kw = @{ "var" ~ !identifier_char }
import_kw = @{ "import" ~ !identifier_char }
from_kw = @{ "from" ~ !identifier_char }
extends_kw = @{ "extends" ~ !identifier_char }
in_kw = @{ "in" ~ !identifier_char }
is_kw = @{ "is" ~ !identifier_char }
none_kw = @{ "none" ~ !identifier_char }
undefined_kw = @{ "undefined" ~ !identifier_char }
failure_kw = @{ "failure" ~ !identifier_char }
success_kw = @{ "success" ~ !identifier_char }
continue_kw = @{ "continue" ~ !identifier_char }
ok_kw = @{ "ok" ~ !identifier_char }
err_kw = @{ "err" ~ !identifier_char }
keyword = @{
(fn_kw | if_kw | else_kw | match_kw | for_kw | while_kw | defer_kw |
when_kw | live_kw | watch_kw | return_kw | const_kw | var_kw |
import_kw | from_kw | extends_kw | in_kw | is_kw | none_kw |
undefined_kw | failure_kw | success_kw | continue_kw | ok_kw | err_kw)
}
// Literals
string_literal = @{ "\"" ~ (!"\"" ~ ("\\\\" | "\\\"" | ANY))* ~ "\"" }
template_literal = @{ "`" ~ (!"`" ~ ("\\`" | ANY))* ~ "`" }
number_literal = @{ ASCII_DIGIT+ ~ ("." ~ ASCII_DIGIT+)? }
boolean_literal = @{ ("true" | "false") ~ !identifier_char }
// Identifiers
identifier_char = _{ ASCII_ALPHANUMERIC | "_" }
identifier = @{ !(keyword | ASCII_DIGIT) ~ identifier_char+ }
underscore = @{ "_" }
// Operators
eq = @{ "==" }
ne = @{ "!=" }
le = @{ "<=" }
ge = @{ ">=" }
and = @{ "&&" }
or = @{ "||" }
arrow = @{ "->" }
arrow_fat = @{ "=>" }
plus = { "+" }
minus = { "-" }
multiply = { "*" }
divide = { "/" }
modulo = { "%" }
assign = { "=" }
lt = { "<" }
gt = { ">" }
not = { "!" }
question = { "?" }
colon = { ":" }
dot = { "." }
comma = { "," }
semicolon = { ";" }
pipe = { "|" }
// Range operators
range_inclusive = @{ "..=" }
range_exclusive = @{ "..<" }
range_op = { range_inclusive | range_exclusive }
comparison_op = { le | ge | eq | ne | lt | gt }
// Increment/Decrement
increment = @{ "++" }
decrement = @{ "--" }
// Types - Fixed to avoid left recursion
primitive_type = { "string" | "number" | "boolean" | "undefined" }
base_type = { primitive_type | identifier }
// Type suffixes
array_suffix = { "[" ~ "]" }
optional_prefix = { question }
error_union_prefix = { identifier? ~ not }
// Type expression - now handles prefixes and suffixes properly
type_expr = {
optional_prefix? ~ error_union_prefix? ~ base_type ~ array_suffix* |
optional_prefix? ~ base_type ~ array_suffix*
}
// Type annotation
type_annotation = { colon ~ type_expr }
// Parameters
optional_suffix = { question }
param = { identifier ~ optional_suffix? ~ type_annotation? }
param_list = { "(" ~ (param ~ (comma ~ param)*)? ~ ")" }
// Return type with named return variable
return_type_simple = { type_annotation }
return_type_named = { arrow_fat ~ "(" ~ identifier ~ type_annotation ~ (assign ~ expression)? ~ ")" }
return_type = { return_type_simple | return_type_named }
// Expressions
primary_expr = {
underscore |
boolean_literal |
number_literal |
string_literal |
template_literal |
none_kw |
undefined_kw |
function_call_expr |
identifier |
grouped_expression |
array_literal |
if_expr |
match_expr |
continue_expr
}
grouped_expression = { "(" ~ expression ~ ")" }
// Function calls including ok() and err()
function_call_expr = { (ok_kw | err_kw | failure_kw | success_kw) ~ "(" ~ expression_list? ~ ")" }
array_literal = { "[" ~ expression_list? ~ "]" }
expression_list = { expression ~ (comma ~ expression)* }
// Member access and indexing
member_access = { dot ~ identifier }
index_access = { "[" ~ expression ~ "]" }
call_suffix = { "(" ~ expression_list? ~ ")" }
postfix_expr = { primary_expr ~ (member_access | index_access | call_suffix | increment | decrement)* }
unary_expr = { (not | minus | increment | decrement)* ~ postfix_expr }
multiplicative_expr = { unary_expr ~ ((multiply | divide | modulo) ~ unary_expr)* }
additive_expr = { multiplicative_expr ~ ((plus | minus) ~ multiplicative_expr)* }
range_expr = { additive_expr ~ (range_op ~ additive_expr)? }
relational_expr = { range_expr ~ (comparison_op ~ range_expr)* }
equality_expr = { relational_expr ~ (is_kw ~ relational_expr)* }
logical_and_expr = { equality_expr ~ (and ~ equality_expr)* }
logical_or_expr = { logical_and_expr ~ (or ~ logical_and_expr)* }
ternary_expr = { logical_or_expr ~ (question ~ expression ~ colon ~ expression)? }
assignment_expr = { lvalue ~ assign ~ expression }
lvalue = { identifier ~ (dot ~ identifier | "[" ~ expression ~ "]")* }
expression = { assignment_expr | ternary_expr }
// If expression and statement
if_expr = { if_kw ~ expression ~ "{" ~ statement* ~ "}" ~ (else_kw ~ "{" ~ statement* ~ "}")? }
if_expr_short = { if_kw ~ expression ~ colon ~ expression ~ (else_kw ~ colon ~ expression)? }
// Match expression
match_expr = { match_kw ~ expression? ~ "{" ~ match_arm* ~ "}" }
match_arm = { match_pattern ~ arrow ~ (expression | block) }
match_pattern = { expression }
// Continue can be used as an expression in matches
continue_expr = { continue_kw }
// Statements
statement = _{
import_stmt |
function_decl |
variable_decl |
defer_stmt |
watch_stmt |
return_stmt |
if_stmt |
for_stmt |
while_stmt |
continue_stmt |
match_stmt |
expression_stmt
}
import_stmt = { import_kw ~ identifier ~ from_kw ~ string_literal }
function_decl = {
fn_kw ~ identifier ~ param_list ~ (extends_kw ~ identifier)? ~ return_type? ~ block
}
variable_decl = {
(const_kw | var_kw | live_kw) ~ identifier ~ type_annotation? ~ assign ~ expression
}
defer_stmt = {
defer_kw ~ (when_kw ~ expression ~ (pipe ~ identifier ~ pipe)?)? ~ block
}
watch_stmt = {
watch_kw ~ identifier ~ block
}
return_stmt = { return_kw ~ expression? }
if_stmt = {
if_kw ~ expression ~ ((colon ~ statement) | block) ~ (else_kw ~ ((colon ~ statement) | block))?
}
for_stmt = {
for_kw ~ identifier ~ (comma ~ identifier)? ~ in_kw ~ expression ~ ((colon ~ statement) | block)
}
while_stmt = {
while_kw ~ expression? ~ ((colon ~ statement) | block)
}
continue_stmt = { continue_kw }
match_stmt = { match_kw ~ expression? ~ "{" ~ match_arm* ~ "}" }
expression_stmt = { expression }
// Blocks
block = { "{" ~ statement* ~ "}" }
// Program
program = { SOI ~ statement* ~ EOI }
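
A hedged sketch of a unit test for the prefix/suffix type rules above, assuming it sits next to the `SolaceParser` defined earlier in this PR:

#[cfg(test)]
mod type_expr_tests {
use super::*;
use pest::Parser;

#[test]
fn parses_prefixed_and_suffixed_type() {
// optional prefix, named error-union prefix ("NetworkError!"), base type, one array suffix
let parsed = SolaceParser::parse(Rule::type_expr, "?NetworkError!string[]");
assert!(parsed.is_ok());
}
}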


@ -1,196 +0,0 @@
//! Solace Grammar
WHITESPACE = _{ " " | "\t" }
COMMENT = _{ "/*" ~ (!"*/" ~ ANY)* ~ "*/" | "//" ~ (!NEWLINE ~ ANY)* }
// Newline handling for automatic semicolon insertion
NEWLINE = _{ "\n" | "\r\n" }
terminator = _{ ";" | NEWLINE }
// Identifiers
identifier = @{ ASCII_ALPHANUMERIC+ }
// ========== TYPE SYSTEM ==========
type = {
primitive_type |
array_type |
map_type |
set_type |
error_union_type |
optional_type |
identifier
}
primitive_type = { "undefined" | "string" | "number" | "boolean" }
array_type = { type ~ "[]" }
map_type = { "Map<" ~ type ~ "," ~ type ~ ">" }
set_type = { "Set<" ~ type ~ ">" }
error_union_type = { ("Error" ~ "!")? ~ type }
optional_type = { type ~ "?" | "?" ~ type }
// ========== EXPRESSIONS (Pratt-ready) ==========
// Base expression atoms
atom = {
literal |
identifier |
"(" ~ expr ~ ")" |
match_expression |
lambda_expression
}
// Literals
literal = {
number_literal |
string_literal |
boolean_literal |
"undefined" |
"none" |
array_literal |
struct_literal
}
number_literal = @{ ("-")? ~ ASCII_DIGIT+ ~ ("." ~ ASCII_DIGIT+)? }
string_literal = @{ "\"" ~ (!"\"" ~ ANY)* ~ "\"" | "'" ~ (!"'" ~ ANY)* ~ "'" }
boolean_literal = { "true" | "false" }
array_literal = { "[" ~ (expr ~ ("," ~ expr)*)? ~ "]" }
struct_literal = { "{" ~ (struct_field ~ ("," ~ struct_field)*)? ~ "}" }
struct_field = { identifier ~ ":" ~ expr }
// Postfix operators (highest precedence)
postfix_expr = {
atom ~
( call |
member_access |
array_access |
range_operator )*
}
call = { "(" ~ (expr ~ ("," ~ expr)*)? ~ ")" }
member_access = { "." ~ identifier }
array_access = { "[" ~ expr ~ "]" }
range_operator = { (range_inclusive | range_exclusive) ~ expr }
// Unary operators
unary_expr = { unary_operator* ~ postfix_expr }
unary_operator = { not | negate }
// Binary operators (will be handled by Pratt parser)
// This is just for grammar completeness - actual precedence handled in Pratt parser
binary_expr = { unary_expr ~ (binary_operator ~ unary_expr)* }
binary_operator = {
add |
substract |
multiply |
divide |
modulo |
eq |
neq |
lt |
gt |
lte |
gte |
and |
or
}
or = { "||" }
and = { "&&" }
eq = { "==" }
neq = { "!=" }
lt = { "<" }
gt = { ">" }
lte = { "<=" }
gte = { ">=" }
add = { "+" }
substract = { "-" }
multiply = { "*" }
divide = { "/" }
modulo = { "%" }
not = { "!" }
negate = { "-" }
range_inclusive = { "..=" }
range_exclusive = { "..<" }
// The main expression rule
expr = { binary_expr }
// Special expressions
match_expression = {
"match" ~ (expr)? ~ "{" ~
(match_case ~ ("," ~ match_case)*)? ~
"}"
}
match_case = { match_pattern ~ "->" ~ expr }
match_pattern = { "_" | identifier | expr }
lambda_expression = { "|" ~ (identifier ~ ("," ~ identifier)*)? ~ "|" ~ "->" ~ expr }
// ========== STATEMENTS ==========
statement = {
variable_declaration ~ terminator |
function_declaration ~ terminator? |
expr_statement ~ terminator |
return_statement ~ terminator |
if_statement ~ terminator? |
for_statement ~ terminator? |
while_statement ~ terminator? |
defer_statement ~ terminator? |
watch_statement ~ terminator? |
block_statement ~ terminator? |
import_statement ~ terminator
}
variable_declaration = {
("const" | "let" | "live") ~ identifier ~ (":" ~ type)? ~ ("=" ~ expr)?
}
function_declaration = {
"fn" ~ identifier ~ "(" ~ (parameter ~ ("," ~ parameter)*)? ~ ")" ~
("=>" ~ "(" ~ parameter ~ ")")? ~
(":" ~ type)? ~
(block | expr)
}
parameter = { identifier ~ ":" ~ type ~ ("?" | "=" ~ expr)? }
return_statement = { "return" ~ expr? }
expr_statement = { expr }
if_statement = {
"if" ~ expr ~ block ~
("else" ~ (if_statement | block))?
}
block = { "{" ~ statement* ~ "}" }
block_statement = { block }
// Loops
for_statement = {
"for" ~
(identifier ~ ",")? ~ identifier ~ "in" ~
(expr | range_operator) ~
(block | expr_statement)
}
while_statement = {
"while" ~ expr? ~ block
}
// Special statements
defer_statement = {
"defer" ~ ("when" ~ expr)? ~
(lambda_expression | block)
}
watch_statement = {
"watch" ~ expr ~ block
}
// Import statement
import_statement = {
"import" ~ identifier ~ "from" ~ string_literal
}
// ========== PROGRAM ==========
program = { SOI ~ (statement | COMMENT)* ~ EOI }


@ -1,14 +1,108 @@
use crate::parser::ast;
use swc_ecma_ast as swc_ast;
use swc_common::DUMMY_SP;
use crate::parser;
use chumsky;
use std::collections::HashMap;
use swc_common::{BytePos, DUMMY_SP, Span, SyntaxContext};
use swc_ecma_ast::{
AssignExpr, BinaryOp, BindingIdent, BlockStmt, Decl, Expr, FnDecl, Function, Ident, Module,
ModuleItem, Param, Pat, Stmt,
};
pub trait ToSWC<T> {
fn to_swc(&self) -> T;
}
impl ToSWC<Span> for chumsky::span::SimpleSpan {
fn to_swc(&self) -> Span {
Span::new(BytePos(self.start as u32), BytePos(self.end as u32))
}
}
impl ToSWC<BinaryOp> for parser::BinaryOp {
fn to_swc(&self) -> BinaryOp {
match self {
parser::BinaryOp::Add => BinaryOp::Add,
parser::BinaryOp::Sub => BinaryOp::Sub,
parser::BinaryOp::Mul => BinaryOp::Mul,
parser::BinaryOp::Div => BinaryOp::Div,
parser::BinaryOp::Eq => BinaryOp::EqEqEq,
parser::BinaryOp::NotEq => BinaryOp::NotEqEq,
// TODO: implement all members of BinaryOp
}
}
}
impl ToSWC<Function> for parser::Func<'_> {
fn to_swc(&self) -> Function {
Function {
params: self
.args
.iter()
.map(|name| Param {
span: DUMMY_SP,
decorators: vec![],
pat: Pat::Ident(BindingIdent {
id: name.to_string().into(),
type_ann: None,
}),
})
.collect(),
decorators: vec![],
span: self.span.to_swc(),
body: Some(BlockStmt {
span: DUMMY_SP,
ctxt: SyntaxContext::empty(),
stmts: vec![], //TODO!
}),
is_generator: false,
is_async: false,
type_params: None,
return_type: None,
ctxt: SyntaxContext::empty(),
}
}
}
impl ToSWC<Expr> for parser::Expr<'_> {
fn to_swc(&self) -> Expr {
// TODO: lower each expression variant to its swc counterpart. A variable
// binding should become an `AssignExpr` (which also needs `op`, `left` and
// `right`), with a span running from the bound value's start to the body's end.
todo!("lower parser::Expr to swc_ecma_ast::Expr")
}
}
pub struct JsTransformer;
impl JsTransformer {
impl<'src> JsTransformer {
pub fn new() -> Self {
Self
Self {}
}
pub fn transform(&self, program: ast::Program) -> swc_ast::Module {
todo!("Implement Solace AST to SWC AST transformer")
pub fn transform(
&self,
source_ast: HashMap<&'src str, parser::Func<'src>>,
span: chumsky::span::SimpleSpan,
) -> Module {
Module {
span: span.to_swc(),
body: source_ast
.into_iter()
.map(|(name, func)| self.transform_func_stmt(name, func))
.collect(),
shebang: None,
}
}
pub fn transform_func_stmt(&self, ident: &str, func: parser::Func<'_>) -> ModuleItem {
ModuleItem::Stmt(Stmt::Decl(Decl::Fn(FnDecl {
ident: Ident::new(ident.into(), DUMMY_SP, SyntaxContext::empty()),
declare: false,
function: Box::new(func.to_swc()),
})))
}
// pub fn transform_expr(&self, expr: parser::Expr) -> Stmt {
// match expr {
// Expr::Var => Stmt::Decl(Decl::Var(())),
// }
// }
}
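
A hedged sketch of driving the transformer above, assuming a map of parsed functions produced by the chumsky-based parser elsewhere in this PR:

// Hypothetical wrapper: lower every parsed top-level function into one swc Module.
fn lower_to_swc<'src>(
funcs: HashMap<&'src str, parser::Func<'src>>,
module_span: chumsky::span::SimpleSpan,
) -> Module {
JsTransformer::new().transform(funcs, module_span)
}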


@ -1,4 +1 @@
pub mod js;
// maybe one day:
// pub mod wasm;