From 1995706a8b0eff655dca40fe0bd42bba8c26895b Mon Sep 17 00:00:00 2001
From: threeoh6000
Date: Wed, 17 Jan 2024 21:03:41 +0000
Subject: [PATCH] add integration tests and stuff

---
 Cargo.lock             |  2 +-
 Cargo.toml             |  2 +-
 src/lexer.rs           |  5 +++++
 src/lexer_tests.rs     | 22 ++++++++++++++++++++++
 src/validator_tests.rs | 21 +++++++++++++++++++++
 tests/integration.rs   | 30 ++++++++++++++++++++++++++++++
 6 files changed, 80 insertions(+), 2 deletions(-)
 create mode 100644 tests/integration.rs

diff --git a/Cargo.lock b/Cargo.lock
index 778e664..909408e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4,4 +4,4 @@ version = 3
 
 [[package]]
 name = "frostwalker"
-version = "0.0.3"
+version = "0.0.5"
diff --git a/Cargo.toml b/Cargo.toml
index cc3aab7..f548fc6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "frostwalker"
-version = "0.0.4"
+version = "0.0.5"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
diff --git a/src/lexer.rs b/src/lexer.rs
index b723abc..5030dbb 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -38,6 +38,11 @@ pub fn tokenize(source: &str) -> Vec<Token> {
             words.insert(i+1, ",");
         }
 
+        if words[i].ends_with("#") && words[i] != "#" && !words[i].starts_with("\"") {
+            words[i] = words[i].chars().next_back().map(|_| &words[i][..words[i].len()-1]).unwrap_or("");
+            words.insert(i+1, "#");
+        }
+
         if words[i] == "," {
             tree.push(Token { class: Class::SEPARATOR, value: Some(",".to_string()) });
             added = true;
diff --git a/src/lexer_tests.rs b/src/lexer_tests.rs
index e81ca62..1c4a499 100644
--- a/src/lexer_tests.rs
+++ b/src/lexer_tests.rs
@@ -11,6 +11,28 @@ fn single_key() {
     assert_eq!(tree, manual_tree);
 }
 
+#[test]
+fn utf_single_key() {
+    let tree = lexer::tokenize("\"ʎǝʞ\" = \"value\"");
+
+    let id = Token { class: Class::IDENTIFIER, value: Some("ʎǝʞ".to_string()) };
+    let op = Token { class: Class::EQUALS, value: None };
+    let strn = Token { class: Class::LITERAL, value: Some("value".to_string()) };
+    let manual_tree = vec![id, op, strn];
+    assert_eq!(tree, manual_tree);
+}
+
+#[test]
+fn comment_in_string() {
+    let tree = lexer::tokenize("key = \"# Should not become comment\"");
+
+    let id = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
+    let op = Token { class: Class::EQUALS, value: None };
+    let strn = Token { class: Class::LITERAL, value: Some("# Should not become comment".to_string()) };
+    let manual_tree = vec![id, op, strn];
+    assert_eq!(tree, manual_tree);
+}
+
 #[test]
 fn single_key_array() {
     let tree = lexer::tokenize("key = [ \"\\\"value\", 6 ]");
diff --git a/src/validator_tests.rs b/src/validator_tests.rs
index 7b31460..738422f 100644
--- a/src/validator_tests.rs
+++ b/src/validator_tests.rs
@@ -11,6 +11,27 @@ fn single_key() {
     assert_eq!(result.is_none(), true);
 }
 
+#[test]
+fn utf_single_key() {
+
+    let id = Token { class: Class::IDENTIFIER, value: Some("ʎǝʞ".to_string()) };
+    let op = Token { class: Class::EQUALS, value: None };
+    let strn = Token { class: Class::LITERAL, value: Some("value".to_string()) };
+    let result = validator::validate(&vec![id, op, strn]);
+
+    assert_eq!(result.is_none(), true);
+}
+
+#[test]
+fn single_key_negative_integer() {
+    let t1 = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
+    let t2 = Token { class: Class::EQUALS, value: None };
+    let t3 = Token { class: Class::LITERAL, value: Some("-10".to_string()) };
+    let result = validator::validate(&vec![t1, t2, t3]);
+
+    assert_eq!(result.is_none(), true);
+}
+
 #[test]
 fn single_key_array() {
     let id = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
diff --git a/tests/integration.rs b/tests/integration.rs
new file mode 100644
index 0000000..e45c1d7
--- /dev/null
+++ b/tests/integration.rs
@@ -0,0 +1,30 @@
+use frostwalker::lexer::{self, Token, Class};
+use frostwalker::validator;
+
+#[test]
+fn lv_single_key() {
+    let tree = lexer::tokenize("key = \"value\"");
+    let result = validator::validate(&tree);
+    assert_eq!(result.is_none(), true);
+}
+
+#[test]
+fn lv_triple_key() {
+    let tree = lexer::tokenize("key = \"value\"\r\nkey1 = 128\nkey2 = [ 6, 7 ]");
+    let result = validator::validate(&tree);
+    assert_eq!(result.is_none(), true);
+}
+
+#[test]
+fn lv_triple_key_missing_newline() {
+    let tree = lexer::tokenize("key = \"value\"key1 = 128\nkey2 = [ 6, 7 ]");
+    let result = validator::validate(&tree);
+    assert_eq!(result.is_some(), true);
+}
+
+#[test]
+fn lv_double_equals() {
+    let tree = lexer::tokenize("key = = \"value\"\r\nkey1 = 128\nkey2 = [ 6, 7 ]");
+    let result = validator::validate(&tree);
+    assert_eq!(result.is_some(), true);
+}
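
Aside, not part of the patch: the new branch in src/lexer.rs trims a trailing "#" off a word and reinserts "#" as its own word, presumably so the later comment handling sees it as a separate token. The same splitting rule can also be written with str::strip_suffix from the standard library, avoiding the manual byte-length slice. A minimal standalone sketch follows; split_trailing_hash is a hypothetical helper name, not something the crate defines.

// Illustrative only: mirrors the guard added in src/lexer.rs.
fn split_trailing_hash(word: &str) -> Option<(&str, &str)> {
    // The word ends with '#', is not a lone "#", and does not open a quoted string literal.
    if word.ends_with('#') && word != "#" && !word.starts_with('"') {
        // strip_suffix removes the final "#" without slicing by byte length.
        word.strip_suffix('#').map(|rest| (rest, "#"))
    } else {
        None
    }
}

fn main() {
    assert_eq!(split_trailing_hash("128#"), Some(("128", "#")));
    assert_eq!(split_trailing_hash("#"), None);    // a lone "#" is left alone
    assert_eq!(split_trailing_hash("\"#"), None);  // a '#' opening a string literal is not split
    println!("all assertions passed");
}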