diff --git a/schala-lang/language/src/parsing.rs b/schala-lang/language/src/parsing.rs
index 24f2146..7cdd1ee 100644
--- a/schala-lang/language/src/parsing.rs
+++ b/schala-lang/language/src/parsing.rs
@@ -1231,6 +1231,7 @@ mod parse_tests {
   #[test]
   fn parsing_identifiers() {
     parse_test!("a", AST(vec![exst!(val!("a"))]));
+    parse_test!("some_value", AST(vec![exst!(val!("some_value"))]));
     parse_test!("a + b", AST(vec![exst!(binexp!("+", val!("a"), val!("b")))]));
     //parse_test!("a[b]", AST(vec![Expression(
     //parse_test!("a[]", <- TODO THIS NEEDS TO FAIL
diff --git a/schala-lang/language/src/tokenizing.rs b/schala-lang/language/src/tokenizing.rs
index bd1ca56..a5bd19d 100644
--- a/schala-lang/language/src/tokenizing.rs
+++ b/schala-lang/language/src/tokenizing.rs
@@ -217,7 +217,7 @@ fn handle_alphabetic(c: char, input: &mut Peekable>
   loop {
     match input.peek().map(|&(_, _, c)| { c }) {
-      Some(c) if c.is_alphanumeric() => {
+      Some(c) if c.is_alphanumeric() || c == '_' => {
         input.next();
         buf.push(c);
       },
@@ -300,6 +300,9 @@ mod schala_tokenizer_tests {
   fn underscores() {
     let token_types: Vec<TokenType> = tokenize("4_8").into_iter().map(move |t| t.token_type).collect();
     assert_eq!(token_types, vec![digit!("4"), Underscore, digit!("8")]);
+
+    let token_types2: Vec<TokenType> = tokenize("aba_yo").into_iter().map(move |t| t.token_type).collect();
+    assert_eq!(token_types2, vec![ident!("aba_yo")]);
   }
 
   #[test]
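
Note on the change itself: the tokenizer fix amounts to accepting '_' inside an identifier once scanning has started on an alphabetic character, so "some_value" becomes one identifier token instead of identifier / Underscore / identifier. Below is a minimal standalone sketch of that scanning loop; scan_identifier and the plain Peekable<Chars<'_>> input are illustrative stand-ins, not schala's actual signature (schala's iterator yields position-tagged tuples, as the `|&(_, _, c)|` pattern in the diff shows).

    use std::iter::Peekable;
    use std::str::Chars;

    // Sketch of identifier scanning: the first character has already been
    // consumed by the caller; keep pulling characters while they are
    // alphanumeric or an underscore.
    fn scan_identifier(first: char, input: &mut Peekable<Chars<'_>>) -> String {
        let mut buf = String::new();
        buf.push(first);
        loop {
            match input.peek().copied() {
                // The `|| c == '_'` guard is the behavior added in this diff.
                Some(c) if c.is_alphanumeric() || c == '_' => {
                    input.next();
                    buf.push(c);
                },
                _ => break,
            }
        }
        buf
    }

    fn main() {
        // Mimics tokenizing "aba_yo + 1" after the leading 'a' was consumed.
        let mut rest = "ba_yo + 1".chars().peekable();
        assert_eq!(scan_identifier('a', &mut rest), "aba_yo");
    }

Without that guard the loop presumably stops at the underscore, which is exactly what the new assertion on tokenize("aba_yo") in the underscores test guards against, while the existing "4_8" case checks that underscores between digits still tokenize separately.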