add integration tests and stuff

abbie 2024-01-17 21:03:41 +00:00
parent c83183e0e8
commit 1995706a8b
Signed by: threeoh6000
GPG key ID: 801FE4AD456E922C
6 changed files with 80 additions and 2 deletions
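The main new piece is tests/integration.rs, which drives the lexer and the validator together through the crate's public API instead of testing each module in isolation. A minimal sketch of that end-to-end flow outside the test harness (the input string and the printed messages are illustrative only, not part of this commit):

use frostwalker::{lexer, validator};

fn main() {
    // Tokenize a small document, then validate the resulting token stream.
    let tokens = lexer::tokenize("key = \"value\"\nkey1 = 128");
    match validator::validate(&tokens) {
        // validate() returns None when the token stream is well formed.
        None => println!("valid: {} tokens", tokens.len()),
        Some(_) => println!("validation failed"),
    }
}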

Cargo.lock (generated)

@@ -4,4 +4,4 @@ version = 3
 [[package]]
 name = "frostwalker"
-version = "0.0.3"
+version = "0.0.5"

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "frostwalker"
-version = "0.0.4"
+version = "0.0.5"
 edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html


@@ -38,6 +38,11 @@ pub fn tokenize(source: &str) -> Vec<Token> {
             words.insert(i+1, ",");
         }
+        if words[i].ends_with("#") && words[i] != "#" && !words[i].starts_with("\"") {
+            words[i] = words[i].chars().next_back().map(|_| &words[i][..words[i].len()-1]).unwrap_or("");
+            words.insert(i+1, "#");
+        }
         if words[i] == "," {
             tree.push(Token { class: Class::SEPARATOR, value: Some(",".to_string()) });
             added = true;
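The new branch above splits a trailing "#" off a word so the comment marker becomes its own word (and later its own token), while leaving a lone "#" and quoted strings untouched; the comment_in_string test further down relies on that quoted-string exception. A standalone sketch of the word-level behaviour (not the crate's own code, just the same condition in isolation):

fn split_trailing_hash(word: &str) -> Vec<&str> {
    // Mirrors the new lexer branch: "value#" -> ["value", "#"],
    // but a lone "#" or a quoted string is returned unchanged.
    if word.ends_with('#') && word != "#" && !word.starts_with('"') {
        vec![&word[..word.len() - 1], "#"]
    } else {
        vec![word]
    }
}

fn main() {
    assert_eq!(split_trailing_hash("value#"), vec!["value", "#"]);
    assert_eq!(split_trailing_hash("#"), vec!["#"]);
    assert_eq!(split_trailing_hash("\"# not a comment\""), vec!["\"# not a comment\""]);
}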


@@ -11,6 +11,28 @@ fn single_key() {
     assert_eq!(tree, manual_tree);
 }
 
+#[test]
+fn utf_single_key() {
+    let tree = lexer::tokenize("\"ʎǝʞ\" = \"value\"");
+    let id = Token { class: Class::IDENTIFIER, value: Some("ʎǝʞ".to_string()) };
+    let op = Token { class: Class::EQUALS, value: None };
+    let strn = Token { class: Class::LITERAL, value: Some("value".to_string()) };
+    let manual_tree = vec![id, op, strn];
+    assert_eq!(tree, manual_tree);
+}
+
+#[test]
+fn comment_in_string() {
+    let tree = lexer::tokenize("key = \"# Should not become comment\"");
+    let id = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
+    let op = Token { class: Class::EQUALS, value: None };
+    let strn = Token { class: Class::LITERAL, value: Some("# Should not become comment".to_string()) };
+    let manual_tree = vec![id, op, strn];
+    assert_eq!(tree, manual_tree);
+}
+
 #[test]
 fn single_key_array() {
     let tree = lexer::tokenize("key = [ \"\\\"value\", 6 ]");


@@ -11,6 +11,27 @@ fn single_key() {
     assert_eq!(result.is_none(), true);
 }
 
+#[test]
+fn utf_single_key() {
+    let id = Token { class: Class::IDENTIFIER, value: Some("ʎǝʞ".to_string()) };
+    let op = Token { class: Class::EQUALS, value: None };
+    let strn = Token { class: Class::LITERAL, value: Some("value".to_string()) };
+    let result = validator::validate(&vec![id, op, strn]);
+    assert_eq!(result.is_none(), true);
+}
+
+#[test]
+fn single_key_negative_integer() {
+    let t1 = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
+    let t2 = Token { class: Class::EQUALS, value: None };
+    let t3 = Token { class: Class::LITERAL, value: Some("-10".to_string()) };
+    let result = validator::validate(&vec![t1, t2, t3]);
+    assert_eq!(result.is_none(), true);
+}
+
 #[test]
 fn single_key_array() {
     let id = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };

tests/integration.rs (new file)

@@ -0,0 +1,30 @@
+use frostwalker::lexer::{self, Token, Class};
+use frostwalker::validator;
+
+#[test]
+fn lv_single_key() {
+    let tree = lexer::tokenize("key = \"value\"");
+    let result = validator::validate(&tree);
+    assert_eq!(result.is_none(), true);
+}
+
+#[test]
+fn lv_triple_key() {
+    let tree = lexer::tokenize("key = \"value\"\r\nkey1 = 128\nkey2 = [ 6, 7 ]");
+    let result = validator::validate(&tree);
+    assert_eq!(result.is_none(), true);
+}
+
+#[test]
+fn lv_triple_key_missing_newline() {
+    let tree = lexer::tokenize("key = \"value\"key1 = 128\nkey2 = [ 6, 7 ]");
+    let result = validator::validate(&tree);
+    assert_eq!(result.is_some(), true);
+}
+
+#[test]
+fn lv_double_equals() {
+    let tree = lexer::tokenize("key = = \"value\"\r\nkey1 = 128\nkey2 = [ 6, 7 ]");
+    let result = validator::validate(&tree);
+    assert_eq!(result.is_some(), true);
+}
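Because this file lives under tests/, Cargo treats it as its own integration test target, so the new suite can be run on its own with the standard command (standard Cargo behaviour, nothing specific to this commit):

cargo test --test integration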