From 537244dfce7e470702e0b5f0b34d6f78da73a619 Mon Sep 17 00:00:00 2001
From: Julio Biason
Date: Mon, 15 Jul 2024 15:00:33 -0300
Subject: [PATCH] Some clean ups

---
 logostest/Cargo.toml  |  2 +-
 logostest/src/main.rs | 49 ++++++++++++++++++++-----------------------
 2 files changed, 24 insertions(+), 27 deletions(-)

diff --git a/logostest/Cargo.toml b/logostest/Cargo.toml
index ca4c509..7590e11 100644
--- a/logostest/Cargo.toml
+++ b/logostest/Cargo.toml
@@ -4,4 +4,4 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-logos = "0.14.0"
+logos = { version = "0.14.0" }
diff --git a/logostest/src/main.rs b/logostest/src/main.rs
index 494a027..f622a3e 100644
--- a/logostest/src/main.rs
+++ b/logostest/src/main.rs
@@ -2,20 +2,21 @@ use logos::Logos;
 
 #[derive(Logos, Debug)]
 #[logos(skip r"[ \t\n\r]")]
+#[allow(dead_code)]
 enum Token<'a> {
     #[regex(r#"\/\*[^\/\*]*\*\/"#, |lex| lex.slice())]
     MultilineComment(&'a str),
 
-    #[regex(r#""[^"]+""#, |lex| lex.slice().trim_start_matches('"').trim_end_matches('"'))]
-    #[regex("[a-zA-Z0-9]+", |lex| lex.slice())]
-    Keyword(&'a str),
-
     #[regex(r#"//[^\n]*"#, |lex| lex.slice())]
     Comment(&'a str),
 
-    #[token(";")]
-    End,
+    #[regex(r#""[^"]+""#, |lex| lex.slice().trim_start_matches('"').trim_end_matches('"'))]
+    #[regex("[a-zA-Z0-9]+", |lex| lex.slice())]
+    Keyword(&'a str),
+
+    // #[regex(r#""[^"]+"[ \t\n\r]+\{"#, |lex| lex.slice().trim_start_matches('"').trim_end_matches('"'))]
+    // #[regex(r#"[a-zA-Z0-9]+[ \t\n\r]+\{"#, |lex| lex.slice())]
+    // DictStart(&'a str),
 
     #[token("{")]
     DictStart,
@@ -28,31 +29,27 @@ enum Token<'a> {
 
     #[token(")")]
     ListEnd,
+
+    #[token(";")]
+    End,
 }
 
-fn main() {
-    let lex = Token::lexer("variable \"value is weird(but cool)[not so much]\" ;");
-    let content = lex.collect::<Vec<_>>();
-    println!("{content:#?}");
-
-    let lex = Token::lexer("var1 2;\nvar2 2;");
-    let content = lex.collect::<Vec<_>>();
-    println!("{content:#?}");
-
-    let lex = Token::lexer(r#"var 1 2 3;\nvar2 (1 2 3);"#);
+fn print(source: &str) {
+    let lex = Token::lexer(source);
     let content = lex.collect::<Vec<_>>();
-    println!("{content:#?}");
+    println!("{source}:\n{content:?}");
+    println!();
+}
 
-    let lex = Token::lexer("variables (\"phi\" \"meanT\");\nruns\n( 1 2 3 );");
-    let content = lex.collect::<Vec<_>>();
-    println!("{content:#?}");
+fn main() {
+    print("variable \"value is weird(but cool)[not so much]\" ;");
+    print("dict { dict { var value; } }");
+    print("var1 2;\nvar2 2;");
+    print(r#"var 1 2 3;\nvar2 (1 2 3);"#);
+    print("variables (\"phi\" \"meanT\");\nruns\n( 1 2 3 );");
 
     let source = std::fs::read("src/example.foam").unwrap();
-    let lex = Token::lexer(&std::str::from_utf8(&source).unwrap());
-    let content = lex.collect::<Vec<_>>();
-    println!("{content:#?}");
+    print(&std::str::from_utf8(&source).unwrap());
 
-    let lex = Token::lexer("/* multiline\ncomment*/var value;");
-    let content = lex.collect::<Vec<_>>();
-    println!("{content:#?}");
+    print("/* multiline\ncomment*/var value;");
+    print("## 123;");
 }