last couple of tests

Author: abbie
Date: 2024-01-19 14:22:07 +00:00
Parent: 06aa0d275c
Commit: 5420ea59ae
Signed by: threeoh6000 (GPG key ID: 801FE4AD456E922C)
5 changed files with 67 additions and 5 deletions

Cargo.lock (generated)

@@ -4,4 +4,4 @@ version = 3
 [[package]]
 name = "frostwalker"
-version = "0.0.9"
+version = "0.0.10"

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "frostwalker"
-version = "0.0.9"
+version = "0.0.10"
 edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

Formatter tests

@@ -58,3 +58,15 @@ fn array() {
         panic!();
     }
 }
+
+#[test]
+#[should_panic]
+fn unknown_element() {
+    let t1 = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
+    let t4 = Token { class: Class::EQUALS, value: None };
+    let t3 = Token { class: Class::UNKNOWN, value: Some("10".to_string()) };
+
+    let tree = formatter::format(vec![t1, t4, t3]);
+    tree.get("key").unwrap();
+}
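The #[should_panic] expectation encodes the formatter's behaviour indirectly: the final unwrap() can only panic if format() never inserted "key" when the value token has class UNKNOWN. A minimal sketch of the same check as an explicit assertion, assuming Token, Class and their fields are reachable from the caller the same way they are inside the crate's own test module:

    use frostwalker::{formatter, Class, Token};

    #[test]
    fn unknown_element_is_dropped() {
        // Same hand-built token stream as unknown_element() above.
        let key = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
        let equals = Token { class: Class::EQUALS, value: None };
        let value = Token { class: Class::UNKNOWN, value: Some("10".to_string()) };

        let tree = formatter::format(vec![key, equals, value]);

        // Equivalent to #[should_panic] on unwrap(): the key must be absent from the map.
        assert!(tree.get("key").is_none());
    }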

src/lib.rs

@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 #[derive(Debug)]
 #[derive(PartialEq)]
 pub struct Token {

@@ -21,6 +23,17 @@ pub mod lexer;
 pub mod validator;
 pub mod formatter;
+
+pub fn parse(source: &str) -> Result<HashMap<String, String>, String> {
+    let tree = lexer::tokenize(source);
+    let result = validator::validate(&tree);
+    if result.is_some() {
+        return Err(result.unwrap_or("Error unwrapping result string.".to_string()));
+    }
+
+    let output = formatter::format(tree);
+    return Ok(output);
+}
 
 #[cfg(test)]
 mod lexer_tests;
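The new parse function is the crate's first single-call entry point: it tokenizes the source, hands the token stream to the validator (which returns Some(error) on failure), and otherwise flattens everything into a HashMap<String, String> via the formatter. A minimal usage sketch; only the parse signature comes from this commit, while the input document and the error handling style are illustrative:

    use frostwalker::parse;

    fn main() {
        // Made-up document; the grammar follows the new tests below (key = value, one pair per line).
        let source = "name = \"frostwalker\"\r\nretries = 3";

        // parse() returns Err(message) when the validator rejects the token stream,
        // otherwise Ok with the flattened key/value map.
        match parse(source) {
            Ok(map) => {
                // Every value comes back as a String, whatever its source type.
                println!("name    = {:?}", map.get("name"));
                println!("retries = {:?}", map.get("retries"));
            }
            Err(message) => eprintln!("parse error: {}", message),
        }
    }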

Integration tests

@@ -1,6 +1,4 @@
-use frostwalker::lexer;
-use frostwalker::validator;
-use frostwalker::formatter;
+use frostwalker::{parse, lexer, validator, formatter};
 
 #[test]
 fn lv_single_key() {

@@ -84,3 +82,42 @@ fn full_stack_single_key_double_equals() {
         panic!("Formatter error.");
     }
 }
+
+#[test]
+fn full_stack_single_key_emoji() {
+    let tree = lexer::tokenize("🦀 = \"value\"");
+    let result = validator::validate(&tree);
+    if result.is_some() {
+        panic!("{}", result.unwrap());
+    }
+
+    let hashmap = formatter::format(tree);
+    if !(hashmap.get("🦀").unwrap() == "value") {
+        panic!("Formatter error.");
+    }
+}
+
+#[test]
+fn parser_single_key() {
+    let hashmap = parse("key = \"value\"");
+    assert_eq!(hashmap.unwrap().get("key").unwrap(), "value");
+}
+
+#[test]
+fn parser_single_key_emoji() {
+    let hashmap = parse("🦀 = \"🦀🦀\"");
+    assert_eq!(hashmap.unwrap().get("🦀").unwrap(), "🦀🦀");
+}
+
+#[test]
+fn parser_all_types() {
+    let hashmap = parse("key = 1\r\nkey2 = \"abc\"\r\nkey3 = [ 2, 3, 4 ]\r\nkey4 = true\r\nkey5 = FALSE");
+    assert_eq!(hashmap.clone().unwrap().get("key").unwrap(), "1");
+    assert_eq!(hashmap.clone().unwrap().get("key2").unwrap(), "abc");
+    assert_eq!(hashmap.clone().unwrap().get("key3").unwrap(), "3");
+    assert_eq!(hashmap.clone().unwrap().get("key3[0]").unwrap(), "2");
+    assert_eq!(hashmap.clone().unwrap().get("key3[1]").unwrap(), "3");
+    assert_eq!(hashmap.clone().unwrap().get("key3[2]").unwrap(), "4");
+    assert_eq!(hashmap.clone().unwrap().get("key4").unwrap(), "true");
+    assert_eq!(hashmap.clone().unwrap().get("key5").unwrap(), "false");
+}