frostwalker/tests/integration.rs
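//! Integration tests for frostwalker: lexer + validator round trips,
//! the full stack through the formatter, and the top-level `parse` helper.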

use frostwalker::{parse, lexer, validator, formatter};

// Lexer + validator round trips: `validate` returns `None` for well-formed
// input and `Some(error)` otherwise.
#[test]
fn lv_single_key() {
    let tree = lexer::tokenize("key = \"value\"");
    let result = validator::validate(&tree);
    assert!(result.is_none());
}

#[test]
fn lv_triple_key() {
    let tree = lexer::tokenize("key = \"value\"\r\nkey1 = 128\nkey2 = [ 6, 7 ]");
    let result = validator::validate(&tree);
    assert!(result.is_none());
}

// Malformed input: no newline separates the first two pairs, so validation
// should fail and the assertion should panic.
#[test]
#[should_panic]
fn lv_triple_key_missing_newline() {
    let tree = lexer::tokenize("key = \"value\"key1 = 128\nkey2 = [ 6, 7 ]");
    let result = validator::validate(&tree);
    assert!(result.is_none());
}

// Malformed input: a doubled equals sign should fail validation.
#[test]
#[should_panic]
fn lv_double_equals() {
    let tree = lexer::tokenize("key = = \"value\"\r\nkey1 = 128\nkey2 = [ 6, 7 ]");
    let result = validator::validate(&tree);
    assert!(result.is_none());
}

// Full stack: tokenize, validate, then format into the flat key/value map.
#[test]
fn full_stack_single_key() {
    let tree = lexer::tokenize("key = \"value\"");
    if let Some(error) = validator::validate(&tree) {
        panic!("{}", error);
    }
    let hashmap = formatter::format(tree);
    assert_eq!(hashmap.get("key").unwrap(), "value", "Formatter error.");
}

#[test]
fn full_stack_boolean() {
    let tree = lexer::tokenize("key = true\r\nkey2 = false");
    if let Some(error) = validator::validate(&tree) {
        panic!("{}", error);
    }
    let hashmap = formatter::format(tree);
    assert_eq!(hashmap.get("key").unwrap(), "true", "Formatter error.");
    assert_eq!(hashmap.get("key2").unwrap(), "false", "Formatter error.");
}

// Arrays flatten to a length entry under the key plus one entry per element
// under "key[i]"; the escaped quote in the last element is preserved.
#[test]
fn full_stack_varied_array() {
    let tree = lexer::tokenize("key = [ \"value\", 150, -30, \"\\\"value\" ]");
    if let Some(error) = validator::validate(&tree) {
        panic!("{}", error);
    }
    let hashmap = formatter::format(tree);
    assert_eq!(hashmap.get("key").unwrap(), "4", "Formatter error.");
    assert_eq!(hashmap.get("key[0]").unwrap(), "value", "Formatter error.");
    assert_eq!(hashmap.get("key[1]").unwrap(), "150", "Formatter error.");
    assert_eq!(hashmap.get("key[2]").unwrap(), "-30", "Formatter error.");
    assert_eq!(hashmap.get("key[3]").unwrap(), "\"value", "Formatter error.");
}

// The doubled equals sign should be rejected by the validator; the explicit
// panic with its error message is what satisfies #[should_panic].
#[test]
#[should_panic]
fn full_stack_single_key_double_equals() {
    let tree = lexer::tokenize("key = = \"value\"");
    if let Some(error) = validator::validate(&tree) {
        panic!("{}", error);
    }
    let hashmap = formatter::format(tree);
    assert_eq!(hashmap.get("key").unwrap(), "value", "Formatter error.");
}

// Keys are not limited to ASCII identifiers.
#[test]
fn full_stack_single_key_emoji() {
    let tree = lexer::tokenize("🦀 = \"value\"");
    if let Some(error) = validator::validate(&tree) {
        panic!("{}", error);
    }
    let hashmap = formatter::format(tree);
    assert_eq!(hashmap.get("🦀").unwrap(), "value", "Formatter error.");
}

// The top-level `parse` entry point is exercised directly from here on.
#[test]
fn parser_single_key() {
    let hashmap = parse("key = \"value\"").unwrap();
    assert_eq!(hashmap.get("key").unwrap(), "value");
}

#[test]
fn parser_single_key_emoji() {
    let hashmap = parse("🦀 = \"🦀🦀\"").unwrap();
    assert_eq!(hashmap.get("🦀").unwrap(), "🦀🦀");
}

// All value types at once: integers, strings, arrays (length plus indexed
// elements) and booleans; `FALSE` is normalised to lowercase "false".
#[test]
fn parser_all_types() {
    let hashmap = parse("key = 1\r\nkey2 = \"abc\"\r\nkey3 = [ 2, 3, 4 ]\r\nkey4 = true\r\nkey5 = FALSE").unwrap();
    assert_eq!(hashmap.get("key").unwrap(), "1");
    assert_eq!(hashmap.get("key2").unwrap(), "abc");
    assert_eq!(hashmap.get("key3").unwrap(), "3");
    assert_eq!(hashmap.get("key3[0]").unwrap(), "2");
    assert_eq!(hashmap.get("key3[1]").unwrap(), "3");
    assert_eq!(hashmap.get("key3[2]").unwrap(), "4");
    assert_eq!(hashmap.get("key4").unwrap(), "true");
    assert_eq!(hashmap.get("key5").unwrap(), "false");
}