Browse Source

Some clean ups

master
Julio Biason 3 months ago
parent
commit
537244dfce
  1. 2
      logostest/Cargo.toml
  2. 49
      logostest/src/main.rs

2
logostest/Cargo.toml

@@ -4,4 +4,4 @@ version = "0.1.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
logos = "0.14.0" logos = { version = "0.14.0" }

49
logostest/src/main.rs

@@ -2,20 +2,21 @@ use logos::Logos;
#[derive(Logos, Debug)] #[derive(Logos, Debug)]
#[logos(skip r"[ \t\n\r]")] #[logos(skip r"[ \t\n\r]")]
#[allow(dead_code)]
enum Token<'a> { enum Token<'a> {
#[regex(r#"\/\*[^\/\*]*\*\/"#, |lex| lex.slice())] #[regex(r#"\/\*[^\/\*]*\*\/"#, |lex| lex.slice())]
MultilineComment(&'a str), MultilineComment(&'a str),
#[regex(r#""[^"]+""#, |lex| lex.slice().trim_start_matches('"').trim_end_matches('"'))]
#[regex("[a-zA-Z0-9]+", |lex| lex.slice())]
Keyword(&'a str),
#[regex(r#"//[^\n]*"#, |lex| lex.slice())] #[regex(r#"//[^\n]*"#, |lex| lex.slice())]
Comment(&'a str), Comment(&'a str),
#[token(";")] #[regex(r#""[^"]+""#, |lex| lex.slice().trim_start_matches('"').trim_end_matches('"'))]
End, #[regex("[a-zA-Z0-9]+", |lex| lex.slice())]
Keyword(&'a str),
// #[regex(r#""[^"]+"[ \t\n\r]+\{"#, |lex| lex.slice().trim_start_matches('"').trim_end_matches('"'))]
// #[regex(r#"[a-zA-Z0-9]+[ \t\n\r]+\{"#, |lex| lex.slice())]
// DictStart(&'a str),
#[token("{")] #[token("{")]
DictStart, DictStart,
@@ -28,31 +29,27 @@ enum Token<'a> {
#[token(")")] #[token(")")]
ListEnd, ListEnd,
#[token(";")]
End,
} }
fn main() { fn print(source: &str) {
let lex = Token::lexer("variable \"value is weird(but cool)[not so much]\" ;"); let lex = Token::lexer(source);
let content = lex.collect::<Vec<_>>();
println!("{content:#?}");
let lex = Token::lexer("var1 2;\nvar2 2;");
let content = lex.collect::<Vec<_>>();
println!("{content:#?}");
let lex = Token::lexer(r#"var 1 2 3;\nvar2 (1 2 3);"#);
let content = lex.collect::<Vec<_>>(); let content = lex.collect::<Vec<_>>();
println!("{content:#?}"); println!("{source}:\n{content:?}");
println!();
}
let lex = Token::lexer("variables (\"phi\" \"meanT\");\nruns\n( 1 2 3 );"); fn main() {
let content = lex.collect::<Vec<_>>(); print("variable \"value is weird(but cool)[not so much]\" ;");
println!("{content:#?}"); print("dict { dict { var value; } }");
print("var1 2;\nvar2 2;");
print(r#"var 1 2 3;\nvar2 (1 2 3);"#);
print("variables (\"phi\" \"meanT\");\nruns\n( 1 2 3 );");
let source = std::fs::read("src/example.foam").unwrap(); let source = std::fs::read("src/example.foam").unwrap();
let lex = Token::lexer(&std::str::from_utf8(&source).unwrap()); print(&std::str::from_utf8(&source).unwrap());
let content = lex.collect::<Vec<_>>();
println!("{content:#?}");
let lex = Token::lexer("/* multiline\ncomment*/var value;"); print("/* multiline\ncomment*/var value;");
let content = lex.collect::<Vec<_>>(); print("## 123;");
println!("{content:#?}");
} }

Loading…
Cancel
Save