first steps toward chumsky

This commit is contained in:
koehr 2025-07-07 15:23:00 +02:00
parent 12c0431e69
commit ad48980f5c
17 changed files with 749 additions and 1472 deletions

309
Cargo.lock generated
View file

@ -35,6 +35,16 @@ version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
[[package]]
name = "ariadne"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36f5e3dca4e09a6f340a61a0e9c7b61e030c69fc27bf29d73218f7e5e3b7638f"
dependencies = [
"unicode-width",
"yansi",
]
[[package]]
name = "ascii"
version = "1.1.0"
@ -96,15 +106,6 @@ dependencies = [
"wyz",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "bumpalo"
version = "3.19.0"
@ -139,12 +140,35 @@ dependencies = [
"rustversion",
]
[[package]]
name = "cc"
version = "1.2.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c1599538de2394445747c8cf7935946e3cc27e9625f889d979bfb2aaf569362"
dependencies = [
"shlex",
]
[[package]]
name = "cfg-if"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
[[package]]
name = "chumsky"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14377e276b2c8300513dff55ba4cc4142b44e5d6de6d00eb5b2307d650bb4ec1"
dependencies = [
"hashbrown 0.15.4",
"regex-automata 0.3.9",
"serde",
"stacker",
"unicode-ident",
"unicode-segmentation",
]
[[package]]
name = "compact_str"
version = "0.7.1"
@ -158,25 +182,6 @@ dependencies = [
"static_assertions",
]
[[package]]
name = "cpufeatures"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
dependencies = [
"libc",
]
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "data-encoding"
version = "2.9.0"
@ -193,16 +198,6 @@ dependencies = [
"uuid",
]
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
]
[[package]]
name = "displaydoc"
version = "0.2.5"
@ -220,6 +215,18 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "equivalent"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
@ -246,16 +253,6 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "hashbrown"
version = "0.14.5"
@ -266,6 +263,17 @@ dependencies = [
"allocator-api2",
]
[[package]]
name = "hashbrown"
version = "0.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
dependencies = [
"allocator-api2",
"equivalent",
"foldhash",
]
[[package]]
name = "heck"
version = "0.5.0"
@ -278,7 +286,7 @@ version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d1638d2018a21b9ff65d7fc28c2271c76a5af6ff4f621b204d032bc649763a4"
dependencies = [
"hashbrown",
"hashbrown 0.14.5",
"new_debug_unreachable",
"once_cell",
"phf",
@ -510,50 +518,6 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323"
dependencies = [
"memchr",
"thiserror",
"ucd-trie",
]
[[package]]
name = "pest_derive"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb056d9e8ea77922845ec74a1c4e8fb17e7c218cc4fc11a15c5d25e189aa40bc"
dependencies = [
"pest",
"pest_generator",
]
[[package]]
name = "pest_generator"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87e404e638f781eb3202dc82db6760c8ae8a1eeef7fb3fa8264b2ef280504966"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pest_meta"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edd1101f170f5903fde0914f899bb503d9ff5271d7ba76bbb70bea63690cc0d5"
dependencies = [
"pest",
"sha2",
]
[[package]]
name = "phf"
version = "0.11.3"
@ -620,6 +584,15 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "psm"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e944464ec8536cd1beb0bbfd96987eb5e3b72f2ecdafdc5c769a37f1fa2ae1f"
dependencies = [
"cc",
]
[[package]]
name = "ptr_meta"
version = "0.3.0"
@ -678,8 +651,19 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-automata"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.7.5",
]
[[package]]
@ -690,9 +674,15 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-syntax"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
[[package]]
name = "regex-syntax"
version = "0.8.5"
@ -762,15 +752,10 @@ dependencies = [
]
[[package]]
name = "sha2"
version = "0.10.9"
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "siphasher"
@ -795,9 +780,9 @@ name = "solace"
version = "0.1.0"
dependencies = [
"anyhow",
"ariadne",
"chumsky",
"lazy_static",
"pest",
"pest_derive",
"swc_common",
"swc_ecma_ast",
"swc_ecma_codegen",
@ -810,6 +795,19 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "stacker"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cddb07e32ddb770749da91081d8d0ac3a16f1a569a18b20348cd371f5dead06b"
dependencies = [
"cc",
"cfg-if",
"libc",
"psm",
"windows-sys",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
@ -836,7 +834,7 @@ checksum = "cc6b926f0d94bbb34031fe5449428cfa1268cdc0b31158d6ad9c97e0fc1e79dd"
dependencies = [
"allocator-api2",
"bumpalo",
"hashbrown",
"hashbrown 0.14.5",
"ptr_meta",
"rustc-hash",
"triomphe",
@ -1088,18 +1086,6 @@ dependencies = [
"stable_deref_trait",
]
[[package]]
name = "typenum"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
[[package]]
name = "ucd-trie"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
[[package]]
name = "unicode-id-start"
version = "1.3.1"
@ -1112,6 +1098,12 @@ version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-width"
version = "0.1.14"
@ -1215,6 +1207,79 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "writeable"
version = "0.6.1"
@ -1230,6 +1295,12 @@ dependencies = [
"tap",
]
[[package]]
name = "yansi"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]]
name = "yoke"
version = "0.8.0"

View file

@ -5,9 +5,9 @@ edition = "2024"
[dependencies]
anyhow = "1.0.98"
ariadne = "0.5.1"
chumsky = "0.10.1"
lazy_static = "1.5.0"
pest = "2.8.1"
pest_derive = "2.8.1"
swc_common = "13.0.2"
swc_ecma_ast = "13.0.0"
swc_ecma_codegen = "15.0.1"

37
sample.nrs Normal file
View file

@ -0,0 +1,37 @@
// Run this example with `cargo run --example nano_rust -- examples/sample.nrs`
// Feel free to play around with this sample to see what errors you can generate!
// Spans are propagated to the interpreted AST so you can even invoke runtime
// errors and still have an error message that points to source code emitted!
fn mul(x, y) {
x * y
}
// Calculate the factorial of a number
fn factorial(x) {
// Conditionals are supported!
if x == 0 {
1
} else {
mul(x, factorial(x - 1))
}
}
// The main function
fn main() {
let three = 3;
let meaning_of_life = three * 14 + 1;
print("Hello, world!");
print("The meaning of life is...");
if meaning_of_life == 42 {
print(meaning_of_life);
} else {
print("...something we cannot know");
print("However, I can tell you that the factorial of 10 is...");
// Function calling
print(factorial(10));
}
}

7
sample2.nrs Normal file
View file

@ -0,0 +1,7 @@
let x = 2;
let y = 3;
fn mul(x, y) {
let a = x*2;
a * y
}

0
src/emitter.rs Normal file
View file

View file

@ -1,35 +0,0 @@
use swc_ecma_ast::Module;
use swc_ecma_codegen::Config;
// use swc_ecma_codegen::{text_writer::JsWriter, Config, Emitter};
// use swc_common::{sync::Lrc, SourceMap};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum EmitError {
#[error("Failed to emit JavaScript: {0}")]
EmitFailed(String),
}
pub struct JsEmitter {
config: Config,
}
impl JsEmitter {
pub fn new() -> Self {
Self {
config: Config::default(),
}
}
pub fn with_minify(mut self) -> Self {
self.config.minify = true;
self
}
pub fn emit(&self, _module: Module) -> Result<String, EmitError> {
// SWC codegen here
return Err(EmitError::EmitFailed(
"Emitter not yet implemented!".to_owned(),
));
}
}

View file

@ -1 +0,0 @@
pub mod js;

95
src/lexer.rs Normal file
View file

@ -0,0 +1,95 @@
use chumsky::prelude::*;
use std::fmt::{Display, Formatter, Result};
// A few type definitions to be used by our parsers below

/// Source span type shared by lexer and parser (chumsky's `SimpleSpan`,
/// a byte-offset range into the input).
pub type Span = SimpleSpan;

/// A value paired with the span of source text it came from.
pub type Spanned<T> = (T, Span);

/// One lexical token. String-carrying variants borrow from the source
/// text (`'src`) rather than allocating.
#[derive(Clone, Debug, PartialEq)]
pub enum Token<'src> {
    /// The `none` literal.
    None,
    /// `true` / `false`.
    Bool(bool),
    /// Numeric literal; integers and decimals are both lexed as `f64`.
    Num(f64),
    /// String literal contents (quotes stripped).
    Str(&'src str),
    /// Operator text: one or more characters from `+*-/!=`.
    Op(&'src str),
    /// Single control character: one of `()[]{};,`.
    Ctrl(char),
    /// Identifier that is not one of the keywords below.
    Ident(&'src str),
    /// `fn` keyword.
    Fn,
    /// `var` keyword (the lexer also maps `let` here — they are synonyms).
    Var,
    /// `if` keyword.
    If,
    /// `else` keyword.
    Else,
}
/// Renders a token exactly as it appears in source text; the parser's
/// error messages rely on this spelling.
impl Display for Token<'_> {
    fn fmt(&self, f: &mut Formatter) -> Result {
        // Fixed-spelling tokens emit a literal string; payload-carrying
        // variants defer to the inner value's own formatting.
        match self {
            Token::None => f.write_str("none"),
            Token::Fn => f.write_str("fn"),
            Token::Var => f.write_str("var"),
            Token::If => f.write_str("if"),
            Token::Else => f.write_str("else"),
            Token::Bool(b) => write!(f, "{b}"),
            Token::Num(n) => write!(f, "{n}"),
            Token::Ctrl(c) => write!(f, "{c}"),
            Token::Str(s) | Token::Op(s) | Token::Ident(s) => f.write_str(s),
        }
    }
}
/// Builds the lexer: a chumsky parser that turns a source string into a
/// flat list of spanned [`Token`]s, producing `Rich` errors and recovering
/// so that lexing continues past bad input.
pub fn lexer<'src>()
-> impl Parser<'src, &'src str, Vec<Spanned<Token<'src>>>, extra::Err<Rich<'src, char, Span>>> {
    // A parser for numbers
    // (an integer part, optionally followed by `.` and digits; the whole
    // slice is parsed as f64 — `unwrapped` panics only if `from_str` fails,
    // which the grammar prevents)
    let num = text::int(10)
        .then(just('.').then(text::digits(10)).or_not())
        .to_slice()
        .from_str()
        .unwrapped()
        .map(Token::Num);

    // A parser for strings
    // NOTE(review): no escape sequences — the literal ends at the first `"`.
    let str_ = just('"')
        .ignore_then(none_of('"').repeated().to_slice())
        .then_ignore(just('"'))
        .map(Token::Str);

    // A parser for operators
    // (a maximal run of operator characters, so `==` lexes as one token)
    let op = one_of("+*-/!=")
        .repeated()
        .at_least(1)
        .to_slice()
        .map(Token::Op);

    // A parser for control characters (delimiters, semicolons, etc.)
    let ctrl = one_of("()[]{};,").map(Token::Ctrl);

    // A parser for identifiers and keywords
    let ident = text::ascii::ident().map(|ident: &str| match ident {
        "fn" => Token::Fn,
        "var" => Token::Var,
        "let" => Token::Var, // var and let are synonyms
        "if" => Token::If,
        "else" => Token::Else,
        "true" => Token::Bool(true),
        "false" => Token::Bool(false),
        "none" => Token::None,
        _ => Token::Ident(ident),
    });

    // A single token can be one of the above
    let token = num.or(str_).or(op).or(ctrl).or(ident);

    // Line comment: `//` followed by anything up to (not including) a newline.
    let comment = just("//")
        .then(any().and_is(just('\n').not()).repeated())
        .padded();

    token
        .map_with(|tok, e| (tok, e.span()))
        .padded_by(comment.repeated())
        .padded()
        // If we encounter an error, skip and attempt to lex the next character as a token instead
        .recover_with(skip_then_retry_until(any().ignored(), end()))
        .repeated()
        .collect()
}

View file

@ -1,4 +1,5 @@
mod emitter;
mod lexer;
mod parser;
mod transformer;
@ -23,18 +24,20 @@ fn main() -> Result<()> {
let input = fs::read_to_string(file_path).expect(&format!("Cannot read file '{}'!", file_path));
// Parse Solace Code
let ast = parser::parse(&input, /* debug */ true)?;
// Transform from Solace AST to SWC AST
let js_transformer = transformer::js::JsTransformer::new();
let js_ast = js_transformer.transform(ast);
if let Some((ast, span)) = parser::parse(file_path.to_string(), &input) {
// Transform from Solace AST to SWC AST
let js_transformer = transformer::js::JsTransformer::new();
let js_ast = js_transformer.transform(ast);
}
/*
// Emit JavaScript
let js_emitter = emitter::js::JsEmitter::new();
let js_code = js_emitter.emit(js_ast)?;
// Write Output to stdout
println!("{}", js_code);
*/
Ok(())
}

View file

@ -1,182 +0,0 @@
use std::fs;
use pest::Parser;
use pest::pratt_parser::{Op, Assoc, PrattParser};
use lazy_static::lazy_static;
use pest_derive::Parser;
#[derive(Parser)]
#[grammar = "solace.pest"]
pub struct SolaceParser;
lazy_static! {
static ref PRATT_PARSER: PrattParser<Rule> = {
use Rule::*;
use Assoc::Left;
PrattParser::new()
// Logical OR (lowest precedence)
.op(Op::infix(or, Left))
// Logical AND
.op(Op::infix(and, Left))
// Equality
.op(Op::infix(eq, Left) | Op::infix(neq, Left))
// Comparison
.op(Op::infix(lt, Left) | Op::infix(gt, Left) |
Op::infix(lte, Left) | Op::infix(gte, Left))
// Range operators
.op(Op::infix(range_inclusive, Left) | Op::infix(range_exclusive, Left))
// Additive
.op(Op::infix(add, Left) | Op::infix(subtract, Left))
// Multiplicative
.op(Op::infix(multiply, Left) | Op::infix(divide, Left) | Op::infix(modulo, Left))
// Unary operators
.op(Op::prefix(not) | Op::prefix(negate))
// Member access (highest precedence)
.op(Op::postfix(member_access))
// Function call (same precedence as member access)
.op(Op::postfix(call))
};
}
pub fn parse_expr(pair: pest::iterators::Pair<Rule>) -> Expr {
PRATT_PARSER
.map_primary(|primary| match primary.as_rule() {
Rule::number => Expr::Number(primary.as_str().parse().unwrap()),
Rule::string => Expr::String(primary.as_str().to_string()),
Rule::boolean => Expr::Boolean(primary.as_str() == "true"),
Rule::identifier => Expr::Ident(primary.as_str().to_string()),
Rule::array_literal => Expr::Array(parse_array(primary)),
Rule::struct_literal => Expr::Struct(parse_struct(primary)),
Rule::expr => parse_expr(primary),
_ => unreachable!("Unexpected primary expression: {:?}", primary),
})
.map_infix(|lhs, op, rhs| {
let op = match op.as_rule() {
Rule::add => BinOp::Add,
Rule::subtract => BinOp::Sub,
Rule::multiply => BinOp::Mul,
Rule::divide => BinOp::Div,
Rule::modulo => BinOp::Mod,
Rule::eq => BinOp::Eq,
Rule::neq => BinOp::Neq,
Rule::lt => BinOp::Lt,
Rule::gt => BinOp::Gt,
Rule::lte => BinOp::Lte,
Rule::gte => BinOp::Gte,
Rule::and => BinOp::And,
Rule::or => BinOp::Or,
Rule::range_inclusive => BinOp::RangeInclusive,
Rule::range_exclusive => BinOp::RangeExclusive,
_ => unreachable!(),
};
Expr::Binary(Box::new(lhs), op, Box::new(rhs))
})
.map_prefix(|op, rhs| {
let op = match op.as_rule() {
Rule::negate => UnOp::Neg,
Rule::not => UnOp::Not,
_ => unreachable!(),
};
Expr::Unary(op, Box::new(rhs))
})
.map_postfix(|lhs, op| {
match op.as_rule() {
Rule::field_access => {
let field = op.into_inner().next().unwrap();
Expr::FieldAccess(Box::new(lhs), field.as_str().to_string())
}
Rule::call => {
let args = op.into_inner()
.map(parse_expr)
.collect();
Expr::Call(Box::new(lhs), args)
}
_ => unreachable!(),
}
})
.parse(pair.into_inner())
}
// Example AST types (simplified)
#[derive(Debug)]
pub enum Expr {
Number(f64),
String(String),
Boolean(bool),
Ident(String),
Array(Vec<Expr>),
Struct(Vec<(String, Expr)>),
Binary(Box<Expr>, BinOp, Box<Expr>),
Unary(UnOp, Box<Expr>),
FieldAccess(Box<Expr>, String),
Call(Box<Expr>, Vec<Expr>),
}
#[derive(Debug)]
pub enum BinOp {
Add, Sub, Mul, Div, Mod,
Eq, Neq, Lt, Gt, Lte, Gte,
And, Or,
RangeInclusive, RangeExclusive,
}
#[derive(Debug)]
pub enum UnOp {
Neg, Not,
}
// Helper functions
fn parse_array(pair: pest::iterators::Pair<Rule>) -> Vec<Expr> {
pair.into_inner()
.filter(|p| p.as_rule() == Rule::expr)
.map(parse_expr)
.collect()
}
fn parse_struct(pair: pest::iterators::Pair<Rule>) -> Vec<(String, Expr)> {
pair.into_inner()
.filter(|p| p.as_rule() == Rule::struct_field)
.map(|f| {
let mut inner = f.into_inner();
let name = inner.next().unwrap().as_str().to_string();
let value = parse_expr(inner.next().unwrap());
(name, value)
})
.collect()
}
fn main() {
let unparsed_file = fs::read_to_string("test.solace").expect("Cannot read test file");
match SolaceParser::parse(Rule::program, &unparsed_file) {
Ok(mut pairs) => {
let program = pairs.next().unwrap();
println!("Parsing was successful.");
print_parse_tree(program, 0);
}
Err(err) => {
println!("Parse error: {}", err);
}
}
}
/* Example usage
pub fn parse(input: &str) -> Result<Vec<Expr>, pest::error::Error<Rule>> {
let pairs = SolaceParser::parse(Rule::program, input)?;
let exprs = pairs
.filter(|p| p.as_rule() == Rule::expr)
.map(parse_expr)
.collect();
Ok(exprs)
}
*/
fn print_parse_tree(pair: Pair<Rule>, indent: usize) {
let indent_str = " ".repeat(indent);
println!("{}{:?}: \"{}\"", indent_str, pair.as_rule(), pair.as_str());
for inner_pair in pair.into_inner() {
print_parse_tree(inner_pair, indent + 1)
}
}

394
src/parser.rs Normal file
View file

@ -0,0 +1,394 @@
use crate::lexer::{Span, Spanned, Token, lexer};
use chumsky::{input::ValueInput, prelude::*};
use ariadne::{Color, Label, Report, ReportKind, sources};
use std::collections::HashMap;
/// A literal/runtime value in the AST. String data borrows from the
/// source text (`'src`) rather than allocating.
#[derive(Clone, Debug, PartialEq)]
pub enum Value<'src> {
    /// The `none` value.
    None,
    Bool(bool),
    Num(f64),
    Str(&'src str),
    /// Heterogeneous list of values.
    List(Vec<Self>),
    /// Reference to a function by name.
    Func(&'src str),
}
/// Human-readable rendering of a value; lists print as `[a, b, c]`.
impl std::fmt::Display for Value<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Self::None => f.write_str("none"),
            Self::Bool(b) => write!(f, "{b}"),
            Self::Num(n) => write!(f, "{n}"),
            Self::Str(s) => f.write_str(s),
            Self::Func(name) => write!(f, "<function: {name}>"),
            Self::List(items) => {
                // Join the elements' own Display output with ", ".
                let rendered: Vec<String> = items.iter().map(ToString::to_string).collect();
                write!(f, "[{}]", rendered.join(", "))
            }
        }
    }
}
/// Binary operators currently supported by the expression parser
/// (arithmetic plus equality; comparison/logical ops are not yet wired up).
#[derive(Clone, Debug)]
pub enum BinaryOp {
    Add,
    Sub,
    Mul,
    Div,
    Eq,
    NotEq,
}
// An expression node in the AST. Children are spanned so we can generate useful runtime errors.
#[derive(Debug)]
pub enum Expr<'src> {
    /// Placeholder produced by error recovery; lets parsing continue.
    Error,
    /// A literal value.
    Value(Value<'src>),
    /// List literal `[a, b, c]`.
    List(Vec<Spanned<Self>>),
    /// Reference to a local binding.
    Local(&'src str),
    /// `let`/`var` binding: name, bound value, and the rest of the block
    /// in which the binding is visible.
    Var(&'src str, Box<Spanned<Self>>, Box<Spanned<Self>>),
    /// Two expressions in sequence (`a; b`).
    Then(Box<Spanned<Self>>, Box<Spanned<Self>>),
    /// Binary operation: lhs, operator, rhs.
    Binary(Box<Spanned<Self>>, BinaryOp, Box<Spanned<Self>>),
    /// Function call: callee and spanned argument list.
    Call(Box<Spanned<Self>>, Spanned<Vec<Spanned<Self>>>),
    /// `if cond { a } else { b }`; a missing `else` is filled with `none`.
    If(Box<Spanned<Self>>, Box<Spanned<Self>>, Box<Spanned<Self>>),
    /// `print(expr)` built-in.
    Print(Box<Spanned<Self>>),
}
// A function node in the AST.
#[derive(Debug)]
pub struct Func<'src> {
    // Parameter names, in declaration order.
    args: Vec<&'src str>,
    // Span of the `name(args)` header, used for duplicate-definition errors.
    span: Span,
    // The function body expression.
    body: Spanned<Expr<'src>>,
}
/// Builds the expression parser over a token stream. Handles literals,
/// identifiers, lists, `let` bindings, calls, arithmetic/equality operators
/// (with precedence: call > product > sum > compare), blocks, `if`/`else`,
/// and statement sequencing via `;` — with delimiter-aware error recovery.
fn expr_parser<'tokens, 'src: 'tokens, I>()
-> impl Parser<'tokens, I, Spanned<Expr<'src>>, extra::Err<Rich<'tokens, Token<'src>, Span>>> + Clone
where
    I: ValueInput<'tokens, Token = Token<'src>, Span = Span>,
{
    recursive(|expr| {
        let inline_expr = recursive(|inline_expr| {
            // Literal values become `Expr::Value` leaves.
            let val = select! {
                Token::None => Expr::Value(Value::None),
                Token::Bool(x) => Expr::Value(Value::Bool(x)),
                Token::Num(n) => Expr::Value(Value::Num(n)),
                Token::Str(s) => Expr::Value(Value::Str(s)),
            }
            .labelled("value");

            let ident = select! { Token::Ident(ident) => ident }.labelled("identifier");

            // A list of expressions
            let items = expr
                .clone()
                .separated_by(just(Token::Ctrl(',')))
                .allow_trailing()
                .collect::<Vec<_>>();

            // A let expression
            // (`var name = value; rest` — the rest of the block is the body
            // in which the binding is in scope)
            let let_ = just(Token::Var)
                .ignore_then(ident)
                .then_ignore(just(Token::Op("=")))
                .then(inline_expr)
                .then_ignore(just(Token::Ctrl(';')))
                .then(expr.clone())
                .map(|((name, val), body)| Expr::Var(name, Box::new(val), Box::new(body)));

            let list = items
                .clone()
                .map(Expr::List)
                .delimited_by(just(Token::Ctrl('[')), just(Token::Ctrl(']')));

            // 'Atoms' are expressions that contain no ambiguity
            let atom = val
                .or(ident.map(Expr::Local))
                .or(let_)
                .or(list)
                // In Nano Rust, `print` is just a keyword, just like Python 2, for simplicity
                .map_with(|expr, e| (expr, e.span()))
                // Atoms can also just be normal expressions, but surrounded with parentheses
                .or(expr
                    .clone()
                    .delimited_by(just(Token::Ctrl('(')), just(Token::Ctrl(')'))))
                // Attempt to recover anything that looks like a parenthesised expression but contains errors
                .recover_with(via_parser(nested_delimiters(
                    Token::Ctrl('('),
                    Token::Ctrl(')'),
                    [
                        (Token::Ctrl('['), Token::Ctrl(']')),
                        (Token::Ctrl('{'), Token::Ctrl('}')),
                    ],
                    |span| (Expr::Error, span),
                )))
                // Attempt to recover anything that looks like a list but contains errors
                .recover_with(via_parser(nested_delimiters(
                    Token::Ctrl('['),
                    Token::Ctrl(']'),
                    [
                        (Token::Ctrl('('), Token::Ctrl(')')),
                        (Token::Ctrl('{'), Token::Ctrl('}')),
                    ],
                    |span| (Expr::Error, span),
                )))
                .boxed();

            // Function calls have very high precedence so we prioritise them
            let call = atom.foldl_with(
                items
                    .delimited_by(just(Token::Ctrl('(')), just(Token::Ctrl(')')))
                    .map_with(|args, e| (args, e.span()))
                    .repeated(),
                |f, args, e| (Expr::Call(Box::new(f), args), e.span()),
            );

            // Product ops (multiply and divide) have equal precedence
            let op = just(Token::Op("*"))
                .to(BinaryOp::Mul)
                .or(just(Token::Op("/")).to(BinaryOp::Div));
            let product = call
                .clone()
                .foldl_with(op.then(call).repeated(), |a, (op, b), e| {
                    (Expr::Binary(Box::new(a), op, Box::new(b)), e.span())
                });

            // Sum ops (add and subtract) have equal precedence
            let op = just(Token::Op("+"))
                .to(BinaryOp::Add)
                .or(just(Token::Op("-")).to(BinaryOp::Sub));
            let sum = product
                .clone()
                .foldl_with(op.then(product).repeated(), |a, (op, b), e| {
                    (Expr::Binary(Box::new(a), op, Box::new(b)), e.span())
                });

            // Comparison ops (equal, not-equal) have equal precedence
            let op = just(Token::Op("=="))
                .to(BinaryOp::Eq)
                .or(just(Token::Op("!=")).to(BinaryOp::NotEq));
            let compare = sum
                .clone()
                .foldl_with(op.then(sum).repeated(), |a, (op, b), e| {
                    (Expr::Binary(Box::new(a), op, Box::new(b)), e.span())
                });

            compare.labelled("expression").as_context()
        });

        // Blocks are expressions but delimited with braces
        let block = expr
            .clone()
            .delimited_by(just(Token::Ctrl('{')), just(Token::Ctrl('}')))
            // Attempt to recover anything that looks like a block but contains errors
            .recover_with(via_parser(nested_delimiters(
                Token::Ctrl('{'),
                Token::Ctrl('}'),
                [
                    (Token::Ctrl('('), Token::Ctrl(')')),
                    (Token::Ctrl('['), Token::Ctrl(']')),
                ],
                |span| (Expr::Error, span),
            )));

        // `if` is recursive so `else if` chains parse naturally.
        let if_ = recursive(|if_| {
            just(Token::If)
                .ignore_then(expr.clone())
                .then(block.clone())
                .then(
                    just(Token::Else)
                        .ignore_then(block.clone().or(if_))
                        .or_not(),
                )
                .map_with(|((cond, a), b), e| {
                    (
                        Expr::If(
                            Box::new(cond),
                            Box::new(a),
                            // If an `if` expression has no trailing `else` block, we magic up one that just produces none
                            Box::new(b.unwrap_or_else(|| (Expr::Value(Value::None), e.span()))),
                        ),
                        e.span(),
                    )
                })
        });

        // Both blocks and `if` are 'block expressions' and can appear in the place of statements
        let block_expr = block.or(if_);

        let block_chain = block_expr
            .clone()
            .foldl_with(block_expr.clone().repeated(), |a, b, e| {
                (Expr::Then(Box::new(a), Box::new(b)), e.span())
            });

        let block_recovery = nested_delimiters(
            Token::Ctrl('{'),
            Token::Ctrl('}'),
            [
                (Token::Ctrl('('), Token::Ctrl(')')),
                (Token::Ctrl('['), Token::Ctrl(']')),
            ],
            |span| (Expr::Error, span),
        );

        block_chain
            .labelled("block")
            // Expressions, chained by semicolons, are statements
            .or(inline_expr.clone())
            .recover_with(skip_then_retry_until(
                block_recovery.ignored().or(any().ignored()),
                one_of([
                    Token::Ctrl(';'),
                    Token::Ctrl('}'),
                    Token::Ctrl(')'),
                    Token::Ctrl(']'),
                ])
                .ignored(),
            ))
            .foldl_with(
                just(Token::Ctrl(';')).ignore_then(expr.or_not()).repeated(),
                |a, b, e| {
                    let span: Span = e.span();
                    (
                        Expr::Then(
                            Box::new(a),
                            // If there is no b expression then its span is the end of the statement/block.
                            Box::new(
                                b.unwrap_or_else(|| (Expr::Value(Value::None), span.to_end())),
                            ),
                        ),
                        span,
                    )
                },
            )
    })
}
/// Parses a whole program: a sequence of `fn name(args) { body }`
/// definitions, collected into a map keyed by function name. Duplicate
/// names are reported as errors (the later definition wins in the map).
fn funcs_parser<'tokens, 'src: 'tokens, I>()
-> impl Parser<'tokens, I, HashMap<&'src str, Func<'src>>, extra::Err<Rich<'tokens, Token<'src>, Span>>>
       + Clone
where
    I: ValueInput<'tokens, Token = Token<'src>, Span = Span>,
{
    let ident = select! { Token::Ident(ident) => ident };

    // Argument lists are just identifiers separated by commas, surrounded by parentheses
    let args = ident
        .separated_by(just(Token::Ctrl(',')))
        .allow_trailing()
        .collect()
        .delimited_by(just(Token::Ctrl('(')), just(Token::Ctrl(')')))
        .labelled("function args");

    let func = just(Token::Fn)
        .ignore_then(
            ident
                .map_with(|name, e| (name, e.span()))
                .labelled("function name"),
        )
        .then(args)
        // Capture the span of the whole `name(args)` header for diagnostics.
        .map_with(|start, e| (start, e.span()))
        .then(
            expr_parser()
                .delimited_by(just(Token::Ctrl('{')), just(Token::Ctrl('}')))
                // Attempt to recover anything that looks like a function body but contains errors
                .recover_with(via_parser(nested_delimiters(
                    Token::Ctrl('{'),
                    Token::Ctrl('}'),
                    [
                        (Token::Ctrl('('), Token::Ctrl(')')),
                        (Token::Ctrl('['), Token::Ctrl(']')),
                    ],
                    |span| (Expr::Error, span),
                ))),
        )
        .map(|(((name, args), span), body)| (name, Func { args, span, body }))
        .labelled("function");

    func.repeated()
        .collect::<Vec<_>>()
        // Build the name->Func map, emitting (not aborting on) duplicates.
        .validate(|fs, _, emitter| {
            let mut funcs = HashMap::new();
            for ((name, name_span), f) in fs {
                if funcs.insert(name, f).is_some() {
                    emitter.emit(Rich::custom(
                        name_span,
                        format!("Function '{name}' already exists"),
                    ));
                }
            }
            funcs
        })
}
/// Lexes and parses `src`, printing any diagnostics (via ariadne) to the
/// terminal. Returns the parsed function map with the file span, or `None`
/// if lexing failed outright. NOTE(review): an AST may still be returned
/// alongside reported parse errors (it can contain `Expr::Error` nodes),
/// and a successful parse is debug-printed to stdout — confirm both are
/// intended before shipping.
pub fn parse<'src>(
    filename: String,
    src: &'src str,
) -> Option<(HashMap<&'src str, Func<'src>>, SimpleSpan)> {
    let (tokens, lex_errs) = lexer().parse(src).into_output_errors();

    let (ast, parse_errs) = if let Some(tokens) = &tokens {
        let (ast, parse_errs) = funcs_parser()
            .map_with(|ast, e| (ast, e.span()))
            .parse(
                // Feed the token slice as a spanned input; the end-of-input
                // span is the empty range at the end of the source.
                tokens
                    .as_slice()
                    .map((src.len()..src.len()).into(), |(t, s)| (t, s)),
            )
            .into_output_errors();

        // Debug-dump the functions only when there were no errors at all.
        if let Some((funcs, _file_span)) = ast
            .as_ref()
            .filter(|_| lex_errs.len() + parse_errs.len() == 0)
        {
            println!("{funcs:#?}")
        }

        (ast, parse_errs)
    } else {
        (None, Vec::new())
    };

    diagnostics(filename, lex_errs, parse_errs, src);

    ast
}
/// Pretty-prints all lexer and parser errors for one file using ariadne:
/// a red primary label at the error span plus yellow context labels for
/// each "while parsing this ..." frame.
fn diagnostics<'src>(
    filename: String,
    lex_errs: Vec<Rich<'_, char>>,
    parse_errs: Vec<Rich<'_, Token<'_>>>,
    src: &'src str,
) {
    lex_errs
        .into_iter()
        // Unify both error streams by stringifying their token types.
        .map(|e| e.map_token(|c| c.to_string()))
        .chain(
            parse_errs
                .into_iter()
                .map(|e| e.map_token(|tok| tok.to_string())),
        )
        .for_each(|e| {
            Report::build(ReportKind::Error, (filename.clone(), e.span().into_range()))
                // Spans are byte offsets, so tell ariadne to index by byte.
                .with_config(ariadne::Config::new().with_index_type(ariadne::IndexType::Byte))
                .with_message(e.to_string())
                .with_label(
                    Label::new((filename.clone(), e.span().into_range()))
                        .with_message(e.reason().to_string())
                        .with_color(Color::Red),
                )
                .with_labels(e.contexts().map(|(label, span)| {
                    Label::new((filename.clone(), span.into_range()))
                        .with_message(format!("while parsing this {label}"))
                        .with_color(Color::Yellow)
                }))
                .finish()
                .print(sources([(filename.clone(), src)]))
                .unwrap()
        });
}

View file

@ -1,612 +0,0 @@
use super::{ParseError, Rule};
use pest::iterators::{Pair, Pairs};
use pest::pratt_parser::{Assoc, Op, PrattParser};
/// Every expression form the Solace language supports.
#[derive(Debug, Clone)]
pub enum Expr {
    /// Placeholder produced when an expression slot is empty.
    Empty,
    // Literals
    Number(f64),
    String(String),
    Boolean(bool),
    None,
    Undefined,
    /// The `_` wildcard token.
    Underscore,
    // Variables and calls
    Identifier(String),
    /// `receiver.field`.
    MemberAccess(Box<Expr>, String),
    /// `target[index]`.
    Index(Box<Expr>, Box<Expr>),
    /// `callee(args...)`.
    Call(Box<Expr>, Vec<Expr>),
    // Operators
    Binary(BinaryOp, Box<Expr>, Box<Expr>),
    Unary(UnaryOp, Box<Expr>),
    /// `cond ? then : else`.
    Ternary(Box<Expr>, Box<Expr>, Box<Expr>),
    /// `name = value`; the parser only accepts identifiers on the left.
    Assignment(String, Box<Expr>),
    // Control flow
    /// `if` used in expression position; else-block is optional.
    If(Box<Expr>, Box<Block>, Option<Box<Block>>),
    /// `match [target] { arms }`; the scrutinee is optional.
    Match(Option<Box<Expr>>, Vec<MatchArm>),
    // Collections
    Array(Vec<Expr>),
    // Postfix
    PostIncrement(Box<Expr>),
    PostDecrement(Box<Expr>),
}
/// Binary operators, one variant per infix token the grammar accepts.
#[derive(Debug, Clone)]
pub enum BinaryOp {
    Add,
    Sub,
    Mul,
    Div,
    Mod,
    Eq,
    Ne,
    Lt,
    Gt,
    Le,
    Ge,
    And,
    Or,
    /// `..=` — range including the upper bound.
    RangeInclusive,
    /// `..<` — range excluding the upper bound.
    RangeExclusive,
    /// The `is` keyword comparison.
    Is,
}
/// Prefix operators.
#[derive(Debug, Clone)]
pub enum UnaryOp {
    /// `!expr`.
    Not,
    /// `-expr`.
    Neg,
    /// `++expr`.
    PreIncrement,
    /// `--expr`.
    PreDecrement,
}
/// One `pattern -> body` arm of a `match` expression.
#[derive(Debug, Clone)]
pub struct MatchArm {
    pattern: MatchPattern,
    body: Expr,
}
/// Pattern of a match arm.
#[derive(Debug, Clone)]
pub enum MatchPattern {
    /// The `_` catch-all.
    Wildcard,
    /// Match against the value of an arbitrary expression.
    Expression(Expr),
    /// Comparison chain, e.g. operator/operand pairs applied to the scrutinee.
    Condition(Vec<(BinaryOp, Expr)>),
}
/// A `{ ... }` sequence of statements.
#[derive(Debug, Clone)]
pub struct Block {
    statements: Vec<Statement>,
}
/// A function's declared return: either a plain annotated type, or a named
/// return variable with an optional default — `fn f() => (name: T = expr)`.
#[derive(Debug, Clone)]
pub enum ReturnType {
    Simple(Type),
    Named {
        name: String,
        type_annotation: Type,
        default_value: Option<Expr>,
    },
}
/// All statement forms of a Solace program.
#[derive(Debug, Clone)]
pub enum Statement {
    /// `import name from "path"`.
    Import {
        name: String,
        from: String,
    },
    /// `fn name(params) [extends Base] [return type] { body }`.
    Function {
        name: String,
        params: Vec<Param>,
        return_type: Option<ReturnType>,
        extends: Option<String>,
        body: Block,
    },
    /// `const` / `var` / `live` declaration; the initializer is mandatory.
    Variable {
        kind: VarKind,
        name: String,
        type_annotation: Option<Type>,
        value: Expr,
    },
    /// `defer [when cond [|binding|]] { body }`.
    Defer {
        condition: Option<Expr>,
        binding: Option<String>,
        body: Block,
    },
    /// `watch target { body }`.
    Watch {
        target: String,
        body: Block,
    },
    /// `return [expr]`.
    Return(Option<Expr>),
    /// `if cond stmt [else stmt]`.
    If(Expr, Box<Statement>, Option<Box<Statement>>),
    /// `for var[, index] in iterable body`.
    For {
        var: String,
        index: Option<String>,
        iterable: Expr,
        body: Box<Statement>,
    },
    /// `while cond body`.
    While(Expr, Box<Statement>),
    /// A bare expression used as a statement.
    Expression(Expr),
    /// A nested `{ ... }` block.
    Block(Block),
}
/// Which keyword introduced a variable declaration.
#[derive(Debug, Clone)]
pub enum VarKind {
    Const,
    Var,
    /// Reactive binding declared with `live`.
    Live,
}
/// A single function parameter with an optional type annotation.
#[derive(Debug, Clone)]
pub struct Param {
    name: String,
    type_annotation: Option<Type>,
}
/// Parsed type expressions.
#[derive(Debug, Clone)]
pub enum Type {
    /// Built-in type name, e.g. `string`, `number`.
    Primitive(String),
    /// `T[]`.
    Array(Box<Type>),
    /// `?T`.
    Optional(Box<Type>),
    /// Error union `E!T`; the error name defaults to "Error" when only `!`
    /// is written (see `parse_type_expr`).
    ErrorUnion(Option<String>, Box<Type>),
    /// User-defined type referenced by identifier.
    Named(String),
}
/// Expression parser wrapping pest's Pratt machinery with Solace's
/// operator-precedence table (built in [`SolacePrattParser::new`]).
pub struct SolacePrattParser {
    pratt: PrattParser<Rule>,
}
impl SolacePrattParser {
/// Build the Pratt parser with Solace's operator precedence.
///
/// pest's `PrattParser` gives *later* `.op` calls *higher* precedence, so
/// this table reads from loosest binding (assignment) down to tightest
/// (postfix `++`/`--`).
pub fn new() -> Self {
    let pratt = PrattParser::new()
        // assignment — right associative, loosest
        .op(Op::infix(Rule::assign, Assoc::Right))
        // ternary pieces `?` and `:`
        .op(Op::infix(Rule::question, Assoc::Right) | Op::infix(Rule::colon, Assoc::Right))
        // logical or / and
        .op(Op::infix(Rule::or, Assoc::Left))
        .op(Op::infix(Rule::and, Assoc::Left))
        // equality and the `is` keyword
        .op(Op::infix(Rule::eq, Assoc::Left)
            | Op::infix(Rule::ne, Assoc::Left)
            | Op::infix(Rule::is_kw, Assoc::Left))
        // relational
        .op(Op::infix(Rule::lt, Assoc::Left)
            | Op::infix(Rule::gt, Assoc::Left)
            | Op::infix(Rule::le, Assoc::Left)
            | Op::infix(Rule::ge, Assoc::Left))
        // ranges `..=` / `..<`
        .op(Op::infix(Rule::range_inclusive, Assoc::Left)
            | Op::infix(Rule::range_exclusive, Assoc::Left))
        // additive / multiplicative arithmetic
        .op(Op::infix(Rule::plus, Assoc::Left) | Op::infix(Rule::minus, Assoc::Left))
        .op(Op::infix(Rule::multiply, Assoc::Left)
            | Op::infix(Rule::divide, Assoc::Left)
            | Op::infix(Rule::modulo, Assoc::Left))
        // prefix operators
        .op(Op::prefix(Rule::not)
            | Op::prefix(Rule::minus)
            | Op::prefix(Rule::increment)
            | Op::prefix(Rule::decrement))
        // postfix ++/-- bind tightest
        .op(Op::postfix(Rule::increment) | Op::postfix(Rule::decrement));
    SolacePrattParser { pratt }
}
/// Parse a full expression from `pairs` using the Pratt table.
///
/// An empty pair stream yields `Expr::Empty` rather than an error, so
/// optional expression slots in the grammar lower cleanly.
pub fn parse_expr(&self, pairs: Pairs<Rule>) -> Result<Expr, ParseError> {
    // `count` consumes, hence the cheap clone of the pair cursor.
    if pairs.clone().count() == 0 {
        return Ok(Expr::Empty);
    }
    self.pratt
        .map_primary(|primary| self.parse_primary(primary))
        .map_infix(|lhs, op, rhs| self.parse_infix(lhs, op, rhs))
        .map_prefix(|op, rhs| self.parse_prefix(op, rhs))
        .map_postfix(|lhs, op| self.parse_postfix(lhs, op))
        .parse(pairs)
}
/// Lower a primary (atom) pair: literals, identifiers, array literals,
/// and `if`/`match` used in expression position.
fn parse_primary(&self, pair: Pair<Rule>) -> Result<Expr, ParseError> {
    match pair.as_rule() {
        Rule::number_literal => {
            let num = pair
                .as_str()
                .parse::<f64>()
                .map_err(|_| ParseError::InvalidNumber(pair.as_rule()))?;
            Ok(Expr::Number(num))
        }
        Rule::string_literal => {
            // Strip the surrounding quotes only; escape sequences are kept
            // verbatim — NOTE(review): no unescaping happens here.
            let s = pair.as_str();
            Ok(Expr::String(s[1..s.len() - 1].to_string()))
        }
        Rule::boolean_literal => Ok(Expr::Boolean(pair.as_str() == "true")),
        Rule::none_kw => Ok(Expr::None),
        Rule::undefined_kw => Ok(Expr::Undefined),
        Rule::underscore => Ok(Expr::Underscore),
        Rule::identifier => Ok(Expr::Identifier(pair.as_str().to_string())),
        Rule::array_literal => {
            // Only `expression` children are elements; commas etc. are skipped.
            let mut elements = vec![];
            for inner in pair.into_inner() {
                if inner.as_rule() == Rule::expression {
                    elements.push(self.parse_expr(inner.into_inner())?);
                }
            }
            Ok(Expr::Array(elements))
        }
        Rule::if_expr => self.parse_if_expr(pair),
        Rule::match_expr => self.parse_match_expr(pair),
        _ => Err(ParseError::UnknownPrimary(pair.as_rule())),
    }
}
/// Combine two sub-expressions with an infix operator pair.
///
/// Operand errors propagate before the operator is inspected. Assignment is
/// validated here: only a plain identifier is a legal left-hand side.
fn parse_infix(
    &self,
    lhs: Result<Expr, ParseError>,
    op: Pair<Rule>,
    rhs: Result<Expr, ParseError>,
) -> Result<Expr, ParseError> {
    let lhs = lhs?;
    let rhs = rhs?;
    match op.as_rule() {
        Rule::plus => Ok(Expr::Binary(BinaryOp::Add, Box::new(lhs), Box::new(rhs))),
        Rule::minus => Ok(Expr::Binary(BinaryOp::Sub, Box::new(lhs), Box::new(rhs))),
        Rule::multiply => Ok(Expr::Binary(BinaryOp::Mul, Box::new(lhs), Box::new(rhs))),
        Rule::divide => Ok(Expr::Binary(BinaryOp::Div, Box::new(lhs), Box::new(rhs))),
        Rule::modulo => Ok(Expr::Binary(BinaryOp::Mod, Box::new(lhs), Box::new(rhs))),
        Rule::eq => Ok(Expr::Binary(BinaryOp::Eq, Box::new(lhs), Box::new(rhs))),
        Rule::ne => Ok(Expr::Binary(BinaryOp::Ne, Box::new(lhs), Box::new(rhs))),
        Rule::lt => Ok(Expr::Binary(BinaryOp::Lt, Box::new(lhs), Box::new(rhs))),
        Rule::gt => Ok(Expr::Binary(BinaryOp::Gt, Box::new(lhs), Box::new(rhs))),
        Rule::le => Ok(Expr::Binary(BinaryOp::Le, Box::new(lhs), Box::new(rhs))),
        Rule::ge => Ok(Expr::Binary(BinaryOp::Ge, Box::new(lhs), Box::new(rhs))),
        Rule::and => Ok(Expr::Binary(BinaryOp::And, Box::new(lhs), Box::new(rhs))),
        Rule::or => Ok(Expr::Binary(BinaryOp::Or, Box::new(lhs), Box::new(rhs))),
        Rule::is_kw => Ok(Expr::Binary(BinaryOp::Is, Box::new(lhs), Box::new(rhs))),
        Rule::range_inclusive => Ok(Expr::Binary(
            BinaryOp::RangeInclusive,
            Box::new(lhs),
            Box::new(rhs),
        )),
        Rule::range_exclusive => Ok(Expr::Binary(
            BinaryOp::RangeExclusive,
            Box::new(lhs),
            Box::new(rhs),
        )),
        Rule::assign => {
            if let Expr::Identifier(name) = lhs {
                Ok(Expr::Assignment(name, Box::new(rhs)))
            } else {
                Err(ParseError::InvalidLeftHand(lhs))
            }
        }
        Rule::question => {
            // Handle ternary - need to parse the rest
            // This is simplified - in practice you'd need more complex handling
            // NOTE(review): the else-branch is hard-wired to Expr::None, so
            // `a ? b : c` loses `c` — incomplete by the author's own comment.
            Ok(Expr::Ternary(
                Box::new(lhs),
                Box::new(rhs),
                Box::new(Expr::None),
            ))
        }
        _ => Err(ParseError::UnknownInfixOperator(op.as_rule())),
    }
}
/// Apply a prefix operator (`!`, unary `-`, `++`, `--`) to its operand.
fn parse_prefix(
    &self,
    op: Pair<Rule>,
    rhs: Result<Expr, ParseError>,
) -> Result<Expr, ParseError> {
    let rhs = rhs?;
    match op.as_rule() {
        Rule::not => Ok(Expr::Unary(UnaryOp::Not, Box::new(rhs))),
        Rule::minus => Ok(Expr::Unary(UnaryOp::Neg, Box::new(rhs))),
        Rule::increment => Ok(Expr::Unary(UnaryOp::PreIncrement, Box::new(rhs))),
        Rule::decrement => Ok(Expr::Unary(UnaryOp::PreDecrement, Box::new(rhs))),
        _ => Err(ParseError::UnknownPrefixOperator(op.as_rule())),
    }
}
/// Apply a postfix operator (`++` / `--`) to its operand.
fn parse_postfix(
    &self,
    lhs: Result<Expr, ParseError>,
    op: Pair<Rule>,
) -> Result<Expr, ParseError> {
    let lhs = lhs?;
    match op.as_rule() {
        Rule::increment => Ok(Expr::PostIncrement(Box::new(lhs))),
        Rule::decrement => Ok(Expr::PostDecrement(Box::new(lhs))),
        _ => Err(ParseError::UnknownPostfixOperator(op.as_rule())),
    }
}
/// Lower `if cond { .. } [else { .. }]` in expression position.
///
/// NOTE(review): assumes the first inner pair is the condition — i.e. that
/// the `if` keyword itself does not appear among the children; confirm
/// against the grammar's pair output.
fn parse_if_expr(&self, pair: Pair<Rule>) -> Result<Expr, ParseError> {
    let mut inner = pair.into_inner();
    let condition = self.parse_expr(inner.next().unwrap().into_inner())?;
    let then_block = self.parse_block(inner.next().unwrap())?;
    // `transpose` flips Option<Result<_>> so a failing else-block propagates.
    let else_block = inner.next().map(|p| self.parse_block(p)).transpose()?;
    Ok(Expr::If(
        Box::new(condition),
        Box::new(then_block),
        else_block.map(Box::new),
    ))
}
/// Lower `match [target] { arms }`; the scrutinee is optional.
fn parse_match_expr(&self, pair: Pair<Rule>) -> Result<Expr, ParseError> {
    let mut inner = pair.into_inner();
    // Peek to distinguish `match expr { .. }` from a bare `match { .. }`.
    let target = if let Some(p) = inner.peek() {
        if p.as_rule() == Rule::expression {
            Some(Box::new(
                self.parse_expr(inner.next().unwrap().into_inner())?,
            ))
        } else {
            None
        }
    } else {
        None
    };
    // Remaining children: every `match_arm` pair becomes one arm.
    let mut arms = vec![];
    for arm_pair in inner {
        if arm_pair.as_rule() == Rule::match_arm {
            arms.push(self.parse_match_arm(arm_pair)?);
        }
    }
    Ok(Expr::Match(target, arms))
}
/// Lower one `pattern -> body` arm; the grammar guarantees exactly a
/// pattern pair followed by a body pair.
fn parse_match_arm(&self, pair: Pair<Rule>) -> Result<MatchArm, ParseError> {
    let mut inner = pair.into_inner();
    let pattern = self.parse_match_pattern(inner.next().unwrap())?;
    let body = self.parse_expr(inner.next().unwrap().into_inner())?;
    Ok(MatchArm { pattern, body })
}
/// Lower a match pattern. Only `_` and plain expressions are handled;
/// `MatchPattern::Condition` is never produced here yet.
fn parse_match_pattern(&self, pair: Pair<Rule>) -> Result<MatchPattern, ParseError> {
    // Simplified pattern parsing
    match pair.as_rule() {
        Rule::underscore => Ok(MatchPattern::Wildcard),
        _ => Ok(MatchPattern::Expression(
            self.parse_expr(pair.into_inner())?,
        )),
    }
}
/// Lower a `{ ... }` block by parsing each child statement in order.
///
/// # Errors
/// Returns the first statement's `ParseError`; collecting into
/// `Result<Vec<_>, _>` short-circuits the rest of the block.
fn parse_block(&self, pair: Pair<Rule>) -> Result<Block, ParseError> {
    let statements = pair
        .into_inner()
        .map(|stmt| self.parse_statement(stmt))
        .collect::<Result<Vec<_>, _>>()?;
    Ok(Block { statements })
}
/// Lower a single statement pair.
///
/// Only expression, return and function-declaration statements are
/// implemented so far; every other rule yields `UnknownStatement`.
fn parse_statement(&self, pair: Pair<Rule>) -> Result<Statement, ParseError> {
    match pair.as_rule() {
        Rule::expression_stmt => {
            let expr = self.parse_expr(pair.into_inner())?;
            Ok(Statement::Expression(expr))
        }
        Rule::return_stmt => {
            // A bare `return` has no inner expression → Statement::Return(None).
            let mut inner = pair.into_inner();
            let expr = inner
                .next()
                .map(|p| self.parse_expr(p.into_inner()))
                .transpose()?;
            Ok(Statement::Return(expr))
        }
        Rule::function_decl => {
            let decl = self.parse_function_decl(pair.into_inner())?;
            Ok(decl)
        }
        // Add other statement parsing here
        _ => Err(ParseError::UnknownStatement(pair.as_rule())),
    }
}
/// Entry point for type parsing: accepts either a `type_annotation`
/// (`: T` — the leading colon is skipped) or a bare `type_expr`.
fn parse_type(&self, pair: Pair<Rule>) -> Result<Type, ParseError> {
    match pair.as_rule() {
        Rule::type_annotation => {
            // Type annotation starts with colon, skip it
            let mut inner = pair.into_inner();
            inner.next(); // skip colon
            self.parse_type_expr(inner.next().unwrap())
        }
        Rule::type_expr => self.parse_type_expr(pair),
        _ => Err(ParseError::ShouldBeType(pair.as_rule())),
    }
}
/// Assemble a `Type` from the flat parts of a `type_expr` pair.
///
/// Wrapping order (innermost → outermost): base type, `[]` suffixes,
/// error union, optional — so `?E!T[]` becomes
/// `Optional(ErrorUnion(E, Array(T)))`.
fn parse_type_expr(&self, pair: Pair<Rule>) -> Result<Type, ParseError> {
    let inner = pair.into_inner();
    let mut current_type = None;
    let mut is_optional = false;
    let mut error_type = None;
    let mut array_depth = 0;
    // First pass: collect every modifier and the base type.
    for part in inner {
        match part.as_rule() {
            Rule::optional_prefix => {
                is_optional = true;
            }
            Rule::error_union_prefix => {
                // Could be just "!", which defaults to Error!, or "ErrorType!"
                let prefix_inner = part.into_inner();
                if let Some(error_name) = prefix_inner.peek() {
                    error_type = Some(error_name.as_str().to_string());
                } else {
                    error_type = Some("Error".to_string());
                }
            }
            Rule::base_type => {
                current_type = Some(self.parse_base_type(part)?);
            }
            Rule::array_suffix => {
                array_depth += 1;
            }
            _ => {}
        }
    }
    // A type expression without a base type is malformed.
    let mut result_type = current_type.ok_or(ParseError::MissingBaseType())?;
    // Apply array suffixes
    for _ in 0..array_depth {
        result_type = Type::Array(Box::new(result_type));
    }
    // Apply error union
    if let Some(err_type) = error_type {
        result_type = Type::ErrorUnion(Some(err_type), Box::new(result_type));
    }
    // Apply optional
    if is_optional {
        result_type = Type::Optional(Box::new(result_type));
    }
    Ok(result_type)
}
/// Lower a `base_type` pair: either a built-in primitive name or a
/// user-defined type identifier.
fn parse_base_type(&self, pair: Pair<Rule>) -> Result<Type, ParseError> {
    let mut inner = pair.into_inner();
    let type_part = inner.next().unwrap();
    match type_part.as_rule() {
        Rule::primitive_type => Ok(Type::Primitive(type_part.as_str().to_string())),
        Rule::identifier => Ok(Type::Named(type_part.as_str().to_string())),
        _ => Err(ParseError::UnknownTypePart(type_part.as_rule())),
    }
}
/// Lower a `param_list` pair into the function's parameters.
///
/// Only `param` children matter — parentheses and commas surface as other
/// rules and are filtered out. Collecting into `Result<Vec<_>, _>`
/// short-circuits on the first bad parameter.
fn parse_param_list(&self, pair: Pair<Rule>) -> Result<Vec<Param>, ParseError> {
    pair.into_inner()
        .filter(|inner| inner.as_rule() == Rule::param)
        .map(|inner| self.parse_param(inner))
        .collect()
}
/// Lower one `param` pair: `name [?] [: Type]`.
///
/// The first child is always the parameter name; an optional second child
/// is passed to `parse_type`. NOTE(review): if the grammar emits an
/// `optional_suffix` pair it is handed to `parse_type` here (as in the
/// original code) and will error as `ShouldBeType` — confirm intended.
fn parse_param(&self, pair: Pair<Rule>) -> Result<Param, ParseError> {
    let mut inner = pair.into_inner();
    let name = inner.next().unwrap().as_str().to_string();
    // `transpose` turns Option<Result<_>> into Result<Option<_>> so `?`
    // can propagate a failing annotation.
    let type_annotation = inner.next().map(|p| self.parse_type(p)).transpose()?;
    Ok(Param {
        name,
        type_annotation,
    })
}
/// Lower a `return_type` pair into either a simple annotation or a named
/// return variable with optional default.
fn parse_return_type(&self, pair: Pair<Rule>) -> Result<ReturnType, ParseError> {
    let mut inner = pair.into_inner();
    let wrapped = inner.next().unwrap();
    // Skip intermediate rules
    let first = match wrapped.as_rule() {
        Rule::return_type_simple => wrapped.into_inner().next().unwrap(),
        Rule::return_type_named => wrapped.into_inner().next().unwrap(),
        _ => wrapped,
    };
    match first.as_rule() {
        // Named return variable: fn foo() => (name: Type = default?) {/*...*/}
        Rule::arrow_fat => {
            // NOTE(review): reads name/type/default from the *outer* pair
            // stream after unwrapping `first` from the inner one — confirm
            // this matches the pair nesting produced by the grammar.
            let name = inner.next().unwrap().as_str().to_string();
            let type_annotation = self.parse_type(inner.next().unwrap())?;
            let default_value = if let Some(expr_pair) = inner.next() {
                Some(self.parse_expr(expr_pair.into_inner())?)
            } else {
                None
            };
            Ok(ReturnType::Named {
                name,
                type_annotation,
                default_value,
            })
        }
        // Simple return type: fn foo(): Type {/*...*/}
        Rule::type_annotation => {
            let type_annotation = self.parse_type(first)?;
            Ok(ReturnType::Simple(type_annotation))
        }
        // Brother ewww... whats that?
        _ => Err(ParseError::UnknownReturnType(
            first.as_str().to_owned(),
            first.as_rule(),
        )),
    }
}
/// Lower a `function_decl` pair stream into `Statement::Function`.
///
/// Expects, in order: the `fn` keyword, the name, the parameter list, then
/// any mix of `extends_kw` (which consumes the *next* pair as the base
/// name), a `return_type`, and the mandatory body block.
fn parse_function_decl(&self, mut pairs: Pairs<Rule>) -> Result<Statement, ParseError> {
    // Skip 'fn' keyword
    pairs.next();
    let name = pairs.next().unwrap().as_str();
    let params = self.parse_param_list(pairs.next().unwrap())?;
    let mut extends = None;
    let mut return_type = None;
    let mut body = None;
    // Optional trailing parts; unknown pairs are silently ignored.
    while let Some(remaining) = pairs.next() {
        match remaining.as_rule() {
            Rule::extends_kw => {
                extends = Some(pairs.next().unwrap().as_str().to_string());
            }
            Rule::return_type => {
                return_type = Some(self.parse_return_type(remaining)?);
            }
            Rule::block => {
                body = Some(self.parse_block(remaining)?);
            }
            _ => {}
        }
    }
    Ok(Statement::Function {
        name: name.to_owned(),
        params,
        return_type,
        extends,
        // A declaration without a body is rejected here, not by the grammar.
        body: body.ok_or(ParseError::MissingFunctionBody(name.to_owned()))?,
    })
}
}
/// A fully lowered Solace source file: its top-level statements in order.
#[derive(Debug, Clone)]
pub struct Program {
    pub statements: Vec<Statement>,
}
impl Program {
    /// Walk pest's output and lower every top-level statement.
    ///
    /// Pairs that are not `Rule::program` are ignored; the trailing `EOI`
    /// marker is skipped. Stops at the first statement that fails to lower.
    pub fn from_pairs(pairs: Pairs<Rule>) -> Result<Self, ParseError> {
        let parser = SolacePrattParser::new();
        let mut statements = Vec::new();
        for pair in pairs {
            if pair.as_rule() == Rule::program {
                for stmt_pair in pair.into_inner() {
                    if stmt_pair.as_rule() != Rule::EOI {
                        let stmt = parser.parse_statement(stmt_pair)?;
                        statements.push(stmt);
                    }
                }
            }
        }
        Ok(Program { statements })
    }
}

View file

@ -1,76 +0,0 @@
pub mod ast;
use pest::Parser;
use pest::iterators::Pair;
use pest_derive::Parser;
use thiserror::Error;
/// pest-generated parser; the `Rule` enum is derived from `solace.pest`.
#[derive(Parser)]
#[grammar = "solace.pest"]
pub struct SolaceParser;
/// All failure modes of parsing: pest grammar errors plus every way the
/// pest → AST lowering can reject a pair. Display strings come from the
/// `#[error]` attributes (thiserror).
#[derive(Error, Debug)]
pub enum ParseError {
    /// Raw grammar failure reported by pest itself.
    #[error("Parse error: {0}")]
    PestError(#[from] pest::error::Error<Rule>),
    #[error("Unexpected rule: {0:?}")]
    UnexpectedRule(Rule),
    #[error("Statement not implemented: {0:?}")]
    UnknownStatement(Rule),
    #[error("Unknown keyword, literal or expression: {0:?}")]
    UnknownPrimary(Rule),
    #[error("Left side of assignment must be an identifier: {0:?}")]
    InvalidLeftHand(ast::Expr),
    #[error("Unexpected infix operator: {0:?}")]
    UnknownInfixOperator(Rule),
    #[error("Unexpected prefix operator: {0:?}")]
    UnknownPrefixOperator(Rule),
    #[error("Unexpected postfix operator: {0:?}")]
    UnknownPostfixOperator(Rule),
    #[error("Expected type annotation or type expression, got: {0:?}")]
    ShouldBeType(Rule),
    #[error("Unexpected return type: \"{0}\" ({1:?})")]
    UnknownReturnType(String, Rule),
    #[error("Unexpected base type: {0:?}")]
    UnknownTypePart(Rule),
    #[error("Invalid Number: {0:?}")]
    InvalidNumber(Rule),
    #[error("No base type found")]
    MissingBaseType(),
    #[error("Function body required: {0}")]
    MissingFunctionBody(String),
}
/// Parse `input` into a lowered [`ast::Program`].
///
/// When `debug` is set, the pest parse tree is dumped to stdout first.
///
/// # Errors
/// Returns `ParseError` when the grammar rejects the input or lowering fails.
pub fn parse(input: &str, debug: bool) -> Result<ast::Program, ParseError> {
    // Parse once; `Pairs` is cheaply cloneable, so the debug dump reuses the
    // same parse instead of running the grammar a second time (the original
    // parsed the whole input twice and `unwrap()`ed on the debug path).
    let pairs = SolaceParser::parse(Rule::program, input)?;
    if debug {
        if let Some(root) = pairs.clone().next() {
            print_parse_tree(root, 0);
        }
    }
    let program = ast::Program::from_pairs(pairs)?;
    Ok(program)
}
/// Recursively dump a pest parse tree for debugging, indenting each
/// nesting level by one step.
fn print_parse_tree(pair: Pair<Rule>, indent: usize) {
    let indent_str = "  ".repeat(indent);
    println!("{}{:?}: \"{}\"", indent_str, pair.as_rule(), pair.as_str());
    pair.into_inner()
        .for_each(|child| print_parse_tree(child, indent + 1));
}

View file

@ -1,236 +0,0 @@
// solace.pest - Pest Grammar for Solace Language (Fixed)

// Implicit skip rules: whitespace/comments are silent (`_`) and consumed
// automatically between tokens of non-atomic rules.
WHITESPACE = _{ " " | "\t" | "\r" | "\n" }
COMMENT = _{ "/*" ~ (!"*/" ~ ANY)* ~ "*/" | "//" ~ (!NEWLINE ~ ANY)* }
NEWLINE = _{ "\n" | "\r\n" }

// Keywords
// Each keyword is atomic (`@`) with a negative lookahead so a prefix such
// as "form" is lexed as an identifier, not as the keyword "for".
fn_kw = @{ "fn" ~ !identifier_char }
if_kw = @{ "if" ~ !identifier_char }
else_kw = @{ "else" ~ !identifier_char }
match_kw = @{ "match" ~ !identifier_char }
for_kw = @{ "for" ~ !identifier_char }
while_kw = @{ "while" ~ !identifier_char }
defer_kw = @{ "defer" ~ !identifier_char }
when_kw = @{ "when" ~ !identifier_char }
live_kw = @{ "live" ~ !identifier_char }
watch_kw = @{ "watch" ~ !identifier_char }
return_kw = @{ "return" ~ !identifier_char }
const_kw = @{ "const" ~ !identifier_char }
var_kw = @{ "var" ~ !identifier_char }
import_kw = @{ "import" ~ !identifier_char }
from_kw = @{ "from" ~ !identifier_char }
extends_kw = @{ "extends" ~ !identifier_char }
in_kw = @{ "in" ~ !identifier_char }
is_kw = @{ "is" ~ !identifier_char }
none_kw = @{ "none" ~ !identifier_char }
undefined_kw = @{ "undefined" ~ !identifier_char }
failure_kw = @{ "failure" ~ !identifier_char }
success_kw = @{ "success" ~ !identifier_char }
continue_kw = @{ "continue" ~ !identifier_char }
ok_kw = @{ "ok" ~ !identifier_char }
err_kw = @{ "err" ~ !identifier_char }

// Union of every keyword, used below to keep identifiers from shadowing them.
keyword = @{
(fn_kw | if_kw | else_kw | match_kw | for_kw | while_kw | defer_kw |
when_kw | live_kw | watch_kw | return_kw | const_kw | var_kw |
import_kw | from_kw | extends_kw | in_kw | is_kw | none_kw |
undefined_kw | failure_kw | success_kw | continue_kw | ok_kw | err_kw)
}

// Literals
// Strings recognise \\ and \" escapes; escapes are not decoded here.
string_literal = @{ "\"" ~ (!"\"" ~ ("\\\\" | "\\\"" | ANY))* ~ "\"" }
template_literal = @{ "`" ~ (!"`" ~ ("\\`" | ANY))* ~ "`" }
number_literal = @{ ASCII_DIGIT+ ~ ("." ~ ASCII_DIGIT+)? }
boolean_literal = @{ ("true" | "false") ~ !identifier_char }

// Identifiers
identifier_char = _{ ASCII_ALPHANUMERIC | "_" }
// Any run of word characters that is not a keyword and does not start
// with a digit.
identifier = @{ !(keyword | ASCII_DIGIT) ~ identifier_char+ }
underscore = @{ "_" }

// Operators
// Two-character operators are atomic so e.g. "==" never lexes as "=" "=".
eq = @{ "==" }
ne = @{ "!=" }
le = @{ "<=" }
ge = @{ ">=" }
and = @{ "&&" }
or = @{ "||" }
arrow = @{ "->" }
arrow_fat = @{ "=>" }
plus = { "+" }
minus = { "-" }
multiply = { "*" }
divide = { "/" }
modulo = { "%" }
assign = { "=" }
lt = { "<" }
gt = { ">" }
not = { "!" }
question = { "?" }
colon = { ":" }
dot = { "." }
comma = { "," }
semicolon = { ";" }
pipe = { "|" }

// Range operators
range_inclusive = @{ "..=" }
range_exclusive = @{ "..<" }
range_op = { range_inclusive | range_exclusive }
// Order matters: "<=" / ">=" must be tried before "<" / ">".
comparison_op = { le | ge | eq | ne | lt | gt }

// Increment/Decrement
increment = @{ "++" }
decrement = @{ "--" }

// Types - Fixed to avoid left recursion
primitive_type = { "string" | "number" | "boolean" | "undefined" }
base_type = { primitive_type | identifier }
// Type suffixes
array_suffix = { "[" ~ "]" }
optional_prefix = { question }
// Optional error name before "!" — a bare "!" means the default error type.
error_union_prefix = { identifier? ~ not }
// Type expression - now handles prefixes and suffixes properly
type_expr = {
optional_prefix? ~ error_union_prefix? ~ base_type ~ array_suffix* |
optional_prefix? ~ base_type ~ array_suffix*
}
// Type annotation
type_annotation = { colon ~ type_expr }

// Parameters
optional_suffix = { question }
param = { identifier ~ optional_suffix? ~ type_annotation? }
param_list = { "(" ~ (param ~ (comma ~ param)*)? ~ ")" }

// Return type with named return variable
return_type_simple = { type_annotation }
return_type_named = { arrow_fat ~ "(" ~ identifier ~ type_annotation ~ (assign ~ expression)? ~ ")" }
return_type = { return_type_simple | return_type_named }

// Expressions
// Atoms for the Pratt parser; keyword literals come before `identifier`
// so they are matched first.
primary_expr = {
underscore |
boolean_literal |
number_literal |
string_literal |
template_literal |
none_kw |
undefined_kw |
function_call_expr |
identifier |
grouped_expression |
array_literal |
if_expr |
match_expr |
continue_expr
}
grouped_expression = { "(" ~ expression ~ ")" }
// Function calls including ok() and err()
function_call_expr = { (ok_kw | err_kw | failure_kw | success_kw) ~ "(" ~ expression_list? ~ ")" }
array_literal = { "[" ~ expression_list? ~ "]" }
expression_list = { expression ~ (comma ~ expression)* }

// Member access and indexing
member_access = { dot ~ identifier }
index_access = { "[" ~ expression ~ "]" }
call_suffix = { "(" ~ expression_list? ~ ")" }
postfix_expr = { primary_expr ~ (member_access | index_access | call_suffix | increment | decrement)* }

// Precedence climbing ladder, tightest to loosest; actual precedence is
// re-resolved by the Pratt parser on the Rust side.
unary_expr = { (not | minus | increment | decrement)* ~ postfix_expr }
multiplicative_expr = { unary_expr ~ ((multiply | divide | modulo) ~ unary_expr)* }
additive_expr = { multiplicative_expr ~ ((plus | minus) ~ multiplicative_expr)* }
range_expr = { additive_expr ~ (range_op ~ additive_expr)? }
relational_expr = { range_expr ~ (comparison_op ~ range_expr)* }
equality_expr = { relational_expr ~ (is_kw ~ relational_expr)* }
logical_and_expr = { equality_expr ~ (and ~ equality_expr)* }
logical_or_expr = { logical_and_expr ~ (or ~ logical_and_expr)* }
ternary_expr = { logical_or_expr ~ (question ~ expression ~ colon ~ expression)? }
assignment_expr = { lvalue ~ assign ~ expression }
lvalue = { identifier ~ (dot ~ identifier | "[" ~ expression ~ "]")* }
expression = { assignment_expr | ternary_expr }

// If expression and statement
if_expr = { if_kw ~ expression ~ "{" ~ statement* ~ "}" ~ (else_kw ~ "{" ~ statement* ~ "}")? }
if_expr_short = { if_kw ~ expression ~ colon ~ expression ~ (else_kw ~ colon ~ expression)? }
// Match expression
match_expr = { match_kw ~ expression? ~ "{" ~ match_arm* ~ "}" }
match_arm = { match_pattern ~ arrow ~ (expression | block) }
match_pattern = { expression }
// Continue can be used as an expression in matches
continue_expr = { continue_kw }

// Statements
// Silent (`_`) so statement pairs expose the concrete rule directly.
statement = _{
import_stmt |
function_decl |
variable_decl |
defer_stmt |
watch_stmt |
return_stmt |
if_stmt |
for_stmt |
while_stmt |
continue_stmt |
match_stmt |
expression_stmt
}
import_stmt = { import_kw ~ identifier ~ from_kw ~ string_literal }
function_decl = {
fn_kw ~ identifier ~ param_list ~ (extends_kw ~ identifier)? ~ return_type? ~ block
}
variable_decl = {
(const_kw | var_kw | live_kw) ~ identifier ~ type_annotation? ~ assign ~ expression
}
defer_stmt = {
defer_kw ~ (when_kw ~ expression ~ (pipe ~ identifier ~ pipe)?)? ~ block
}
watch_stmt = {
watch_kw ~ identifier ~ block
}
return_stmt = { return_kw ~ expression? }
// Statement bodies accept either a colon-prefixed single statement or a block.
if_stmt = {
if_kw ~ expression ~ ((colon ~ statement) | block) ~ (else_kw ~ ((colon ~ statement) | block))?
}
for_stmt = {
for_kw ~ identifier ~ (comma ~ identifier)? ~ in_kw ~ expression ~ ((colon ~ statement) | block)
}
while_stmt = {
while_kw ~ expression? ~ ((colon ~ statement) | block)
}
continue_stmt = { continue_kw }
match_stmt = { match_kw ~ expression? ~ "{" ~ match_arm* ~ "}" }
expression_stmt = { expression }
// Blocks
block = { "{" ~ statement* ~ "}" }
// Program
program = { SOI ~ statement* ~ EOI }

View file

@ -1,196 +0,0 @@
//! Solace Grammar
// NOTE(review): this file looks like an earlier draft of the grammar —
// several rules below are left-recursive, which pest rejects when building
// the parser; confirm whether this file ever compiled.
WHITESPACE = _{ " " | "\t" }
COMMENT = _{ "/*" ~ (!"*/" ~ ANY)* ~ "*/" | "//" ~ (!NEWLINE ~ ANY)* }
// Newline handling for automatic semicolon insertion
NEWLINE = _{ "\n" | "\r\n" }
terminator = _{ ";" | NEWLINE }
// Identifiers
// NOTE(review): permits leading digits, excludes "_", and does not reserve
// keywords — likely too permissive; confirm intended.
identifier = @{ ASCII_ALPHANUMERIC+ }
// ========== TYPE SYSTEM ==========
// NOTE(review): `type` is a Rust keyword, and `array_type` /
// `error_union_type` / `optional_type` recurse into `type` in the leftmost
// position (left recursion) — confirm pest accepts any of this.
type = {
primitive_type |
array_type |
map_type |
set_type |
error_union_type |
optional_type |
identifier
}
primitive_type = { "undefined" | "string" | "number" | "boolean" }
array_type = { type ~ "[]" }
map_type = { "Map<" ~ type ~ "," ~ type ~ ">" }
set_type = { "Set<" ~ type ~ ">" }
error_union_type = { ("Error" ~ "!")? ~ type }
optional_type = { type ~ "?" | "?" ~ type }
// ========== EXPRESSIONS (Pratt-ready) ==========
// Base expression atoms
atom = {
literal |
identifier |
"(" ~ expr ~ ")" |
match_expression |
lambda_expression
}
// Literals
literal = {
number_literal |
string_literal |
boolean_literal |
"undefined" |
"none" |
array_literal |
struct_literal
}
number_literal = @{ ("-")? ~ ASCII_DIGIT+ ~ ("." ~ ASCII_DIGIT+)? }
// Double- or single-quoted; no escape sequences are recognised here.
string_literal = @{ "\"" ~ (!"\"" ~ ANY)* ~ "\"" | "'" ~ (!"'" ~ ANY)* ~ "'" }
boolean_literal = { "true" | "false" }
array_literal = { "[" ~ (expr ~ ("," ~ expr)*)? ~ "]" }
struct_literal = { "{" ~ (struct_field ~ ("," ~ struct_field)*)? ~ "}" }
struct_field = { identifier ~ ":" ~ expr }
// Postfix operators (highest precedence)
postfix_expr = {
atom ~
( call |
member_access |
array_access |
range_operator )*
}
call = { "(" ~ (expr ~ ("," ~ expr)*)? ~ ")" }
member_access = { "." ~ identifier }
array_access = { "[" ~ expr ~ "]" }
range_operator = { (range_inclusive | range_exclusive) ~ expr }
// Unary operators
unary_expr = { unary_operator* ~ postfix_expr }
unary_operator = { not | negate }
// Binary operators (will be handled by Pratt parser)
// This is just for grammar completeness - actual precedence handled in Pratt parser
binary_expr = { unary_expr ~ (binary_operator ~ unary_expr)* }
binary_operator = {
add |
substract |
multiply |
divide |
modulo |
eq |
neq |
lt |
gt |
lte |
gte |
and |
or
}
// NOTE(review): single-char "<" / ">" are listed before "<=" / ">=" in the
// alternation above — with these definitions "<=" would match as lt then
// fail; confirm ordering.
or = { "||" }
and = { "&&" }
eq = { "==" }
neq = { "!=" }
lt = { "<" }
gt = { ">" }
lte = { "<=" }
gte = { ">=" }
add = { "+" }
substract = { "-" }
multiply = { "*" }
divide = { "/" }
modulo = { "%" }
not = { "!" }
negate = { "-" }
range_inclusive = { "..=" }
range_exclusive = { "..<" }
// The main expression rule
expr = { binary_expr }
// Special expressions
match_expression = {
"match" ~ (expr)? ~ "{" ~
(match_case ~ ("," ~ match_case)*)? ~
"}"
}
match_case = { match_pattern ~ "->" ~ expr }
match_pattern = { "_" | identifier | expr }
lambda_expression = { "|" ~ (identifier ~ ("," ~ identifier)*)? ~ "|" ~ "->" ~ expr }
// ========== STATEMENTS ==========
// Declarations and blocks may omit the terminator; expression-like
// statements require one.
statement = {
variable_declaration ~ terminator |
function_declaration ~ terminator? |
expr_statement ~ terminator |
return_statement ~ terminator |
if_statement ~ terminator? |
for_statement ~ terminator? |
while_statement ~ terminator? |
defer_statement ~ terminator? |
watch_statement ~ terminator? |
block_statement ~ terminator? |
import_statement ~ terminator
}
variable_declaration = {
("const" | "let" | "live") ~ identifier ~ (":" ~ type)? ~ ("=" ~ expr)?
}
function_declaration = {
"fn" ~ identifier ~ "(" ~ (parameter ~ ("," ~ parameter)*)? ~ ")" ~
("=>" ~ "(" ~ parameter ~ ")")? ~
(":" ~ type)? ~
(block | expr)
}
parameter = { identifier ~ ":" ~ type ~ ("?" | "=" ~ expr)? }
return_statement = { "return" ~ expr? }
expr_statement = { expr }
if_statement = {
"if" ~ expr ~ block ~
("else" ~ (if_statement | block))?
}
block = { "{" ~ statement* ~ "}" }
block_statement = { block }
// Loops
for_statement = {
"for" ~
(identifier ~ ",")? ~ identifier ~ "in" ~
(expr | range_operator) ~
(block | expr_statement)
}
while_statement = {
"while" ~ expr? ~ block
}
// Special statements
defer_statement = {
"defer" ~ ("when" ~ expr)? ~
(lambda_expression | block)
}
watch_statement = {
"watch" ~ expr ~ block
}
// Import statement
import_statement = {
"import" ~ identifier ~ "from" ~ string_literal
}
// ========== PROGRAM ==========
program = { SOI ~ (statement | COMMENT)* ~ EOI }

View file

@ -1,14 +1,25 @@
use crate::parser::ast;
use swc_ecma_ast as swc_ast;
use crate::parser::Func;
use std::collections::HashMap;
use swc_common::DUMMY_SP;
use swc_ecma_ast as js_ast;
pub struct JsTransformer;
impl JsTransformer {
impl<'src> JsTransformer {
pub fn new() -> Self {
Self
}
pub fn transform(&self, program: ast::Program) -> swc_ast::Module {
todo!("Implement Solace AST to SWC AST transformer")
pub fn transform(&self, solace_ast: HashMap<&'src str, Func<'_>>) -> js_ast::Module {
js_ast::Module {
span: DUMMY_SP,
body: solace_ast
.into_iter()
.map(|(name, func)| self.transform_func(name, func))
.collect(),
shebang: None,
}
}
pub fn transform_func(&self, name: &str, func: Func<'_>) -> js_ast::Function {
todo!("Implement me")
}
}

View file

@ -1,4 +1 @@
pub mod js;
// maybe one day:
// pub mod wasm;