// talc/talc-lang/tests/parser.rs

use talc_lang::parser::{Lexer, TokenKind};
use TokenKind as T;
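
/// Runs the lexer over `src` and checks that it produces exactly the
/// expected `(kind, content)` pairs, ending with an `Eof` token.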
fn assert_tokens(src: &str, tokens: &[(TokenKind, &str)]) {
    let lexer = Lexer::new(src);
    for (i, tok) in lexer.enumerate() {
        // The expected list ends with Eof, so a token past its end means the
        // lexer produced extra output; fail with a clear message rather than
        // indexing `tokens[i]` out of bounds below.
        if i >= tokens.len() {
            let tok = tok.expect("token past end of expected list");
            panic!("unexpected extra token {} '{}'", tok.kind, tok.content);
        }
        // A lexer error at this position is a test failure; the message names
        // the token that was expected instead.
        let tok = tok.expect(&format!("token {} {}", tokens[i].0, tokens[i].1));
        assert_eq!(tok.kind, tokens[i].0, "token kind did not match");
        assert_eq!(tok.content, tokens[i].1, "token content did not match");
        if tok.kind == TokenKind::Eof {
            break;
        }
    }
}
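
/// Runs the lexer over `src` and asserts that it reports at least one
/// error before reaching end of input.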
fn assert_error(src: &str) {
    let lexer = Lexer::new(src);
    for tok in lexer {
        match tok {
            Err(_) => return,
            Ok(t) if t.kind == T::Eof => break,
            _ => (),
        }
    }
    panic!("expected error in source '{}'", src);
}
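
// Integer literals: decimal, hex (0x), octal (0o), binary (0b), and what
// appears to be a base-6 "seximal" prefix (0s), with underscores allowed
// as digit separators.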
#[test]
fn int_literals() {
    let src = "1 100 98_765_432 0x0 0x551 0x_12_34 0xabcABC 0o127 0b01100110 0s012345";
    let tokens = vec![
        (T::Integer, "1"),
        (T::Integer, "100"),
        (T::Integer, "98_765_432"),
        (T::Integer, "0x0"),
        (T::Integer, "0x551"),
        (T::Integer, "0x_12_34"),
        (T::Integer, "0xabcABC"),
        (T::Integer, "0o127"),
        (T::Integer, "0b01100110"),
        (T::Integer, "0s012345"),
        (T::Eof, ""),
    ];
    assert_tokens(src, &tokens);
    // Rejected forms: an unknown base prefix, a trailing letter, and digits
    // that are out of range for the given base.
    assert_error("0m123");
    assert_error("55p");
    assert_error("0xabcdefg");
    assert_error("0o178");
    assert_error("0s156");
    assert_error("0b012");
}
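
// Float literals: trailing dot, fractional part, exponent with optional
// sign, and underscores as digit separators.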
#[test]
fn float_literals() {
    let src = "1. 1.0 1e2 1.e2 1.0e2 1e+2 1.e+2 1.0e+2 1e-2 1___2_3_e+_5__";
    let tokens = vec![
        (T::Float, "1."),
        (T::Float, "1.0"),
        (T::Float, "1e2"),
        (T::Float, "1.e2"),
        (T::Float, "1.0e2"),
        (T::Float, "1e+2"),
        (T::Float, "1.e+2"),
        (T::Float, "1.0e+2"),
        (T::Float, "1e-2"),
        (T::Float, "1___2_3_e+_5__"),
        (T::Eof, ""),
    ];
    assert_tokens(src, &tokens);
}