frostwalker/tests/integration.rs

use frostwalker::lexer;
use frostwalker::validator;
use frostwalker::formatter;
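
// Lexer + validator: a single key/value pair should validate cleanly (validate returns None on success).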
#[test]
fn lv_single_key() {
    let tree = lexer::tokenize("key = \"value\"");
    let result = validator::validate(&tree);
    assert!(result.is_none());
}
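
// Lexer + validator: three keys separated by a mix of CRLF and LF line endings.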
#[test]
fn lv_triple_key() {
    let tree = lexer::tokenize("key = \"value\"\r\nkey1 = 128\nkey2 = [ 6, 7 ]");
    let result = validator::validate(&tree);
    assert!(result.is_none());
}
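
// Lexer + validator: a missing newline between entries should fail validation.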
#[test]
#[should_panic]
fn lv_triple_key_missing_newline() {
    let tree = lexer::tokenize("key = \"value\"key1 = 128\nkey2 = [ 6, 7 ]");
    let result = validator::validate(&tree);
    assert!(result.is_none());
}
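
// Lexer + validator: a repeated equals sign should fail validation.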
#[test]
#[should_panic]
fn lv_double_equals() {
    let tree = lexer::tokenize("key = = \"value\"\r\nkey1 = 128\nkey2 = [ 6, 7 ]");
    let result = validator::validate(&tree);
    assert!(result.is_none());
}
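
// Full stack: tokenize, validate, and format a single key/value pair.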
#[test]
fn full_stack_single_key() {
    let tree = lexer::tokenize("key = \"value\"");
    let result = validator::validate(&tree);
    if let Some(error) = result {
        panic!("{}", error);
    }

    let hashmap = formatter::format(tree);
    assert_eq!(hashmap.get("key").unwrap(), "value", "Formatter error.");
}
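
// Full stack: an array value is flattened into a length entry under the key plus indexed entries.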
#[test]
fn full_stack_varied_array() {
    let tree = lexer::tokenize("key = [ \"value\", 150, -30, \"\\\"value\" ]");
    let result = validator::validate(&tree);
    if let Some(error) = result {
        panic!("{}", error);
    }

    let hashmap = formatter::format(tree);
    assert_eq!(hashmap.get("key").unwrap(), "4", "Formatter error.");
    assert_eq!(hashmap.get("key[0]").unwrap(), "value", "Formatter error.");
    assert_eq!(hashmap.get("key[1]").unwrap(), "150", "Formatter error.");
    assert_eq!(hashmap.get("key[2]").unwrap(), "-30", "Formatter error.");
    assert_eq!(hashmap.get("key[3]").unwrap(), "\"value", "Formatter error.");
}
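
// Full stack: a repeated equals sign should panic during validation, before formatting is reached.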
#[test]
#[should_panic]
fn full_stack_single_key_double_equals() {
    let tree = lexer::tokenize("key = = \"value\"");
    let result = validator::validate(&tree);
    if let Some(error) = result {
        panic!("{}", error);
    }

    let hashmap = formatter::format(tree);
    assert_eq!(hashmap.get("key").unwrap(), "value", "Formatter error.");
}