diff --git a/Cargo.lock b/Cargo.lock
index 5d9101e..3e5cab7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4,4 +4,4 @@ version = 3
 
 [[package]]
 name = "frostwalker"
-version = "0.0.9"
+version = "0.0.10"
diff --git a/Cargo.toml b/Cargo.toml
index b44bd3e..21a05e2 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "frostwalker"
-version = "0.0.9"
+version = "0.0.10"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
diff --git a/src/formatter_tests.rs b/src/formatter_tests.rs
index ee17122..3c628f9 100644
--- a/src/formatter_tests.rs
+++ b/src/formatter_tests.rs
@@ -58,3 +58,15 @@ fn array() {
         panic!();
     }
 }
+
+#[test]
+#[should_panic]
+fn unknown_element() {
+    let t1 = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
+    let t4 = Token { class: Class::EQUALS, value: None };
+    let t3 = Token { class: Class::UNKNOWN, value: Some("10".to_string()) };
+    let tree = formatter::format(vec![t1, t4, t3]);
+
+    tree.get("key").unwrap();
+}
+
diff --git a/src/lib.rs b/src/lib.rs
index 238f65d..230a6a8 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 #[derive(Debug)]
 #[derive(PartialEq)]
 pub struct Token {
@@ -21,6 +23,17 @@ pub mod lexer;
 pub mod validator;
 pub mod formatter;
 
+pub fn parse(source: &str) -> Result<HashMap<String, String>, String> {
+    let tree = lexer::tokenize(source);
+    let result = validator::validate(&tree);
+    if result.is_some() {
+        return Err(result.unwrap_or("Error unwrapping result string.".to_string()));
+    }
+
+    let output = formatter::format(tree);
+    return Ok(output);
+}
+
 #[cfg(test)]
 mod lexer_tests;
 
diff --git a/tests/integration.rs b/tests/integration.rs
index 32009f9..04807fb 100644
--- a/tests/integration.rs
+++ b/tests/integration.rs
@@ -1,6 +1,4 @@
-use frostwalker::lexer;
-use frostwalker::validator;
-use frostwalker::formatter;
+use frostwalker::{parse, lexer, validator, formatter};
 
 #[test]
 fn lv_single_key() {
@@ -84,3 +82,42 @@ fn full_stack_single_key_double_equals() {
         panic!("Formatter error.");
     }
 }
+
+#[test]
+fn full_stack_single_key_emoji() {
+    let tree = lexer::tokenize("🦀 = \"value\"");
+    let result = validator::validate(&tree);
+    if result.is_some() {
+        panic!("{}", result.unwrap());
+    }
+    let hashmap = formatter::format(tree);
+    if !(hashmap.get("🦀").unwrap() == "value") {
+        panic!("Formatter error.");
+    }
+}
+
+#[test]
+fn parser_single_key() {
+    let hashmap = parse("key = \"value\"");
+    assert_eq!(hashmap.unwrap().get("key").unwrap(), "value");
+}
+
+#[test]
+fn parser_single_key_emoji() {
+    let hashmap = parse("🦀 = \"🦀🦀\"");
+    assert_eq!(hashmap.unwrap().get("🦀").unwrap(), "🦀🦀");
+}
+
+#[test]
+fn parser_all_types() {
+    let hashmap = parse("key = 1\r\nkey2 = \"abc\"\r\nkey3 = [ 2, 3, 4 ]\r\nkey4 = true\r\nkey5 = FALSE");
+    assert_eq!(hashmap.clone().unwrap().get("key").unwrap(), "1");
+    assert_eq!(hashmap.clone().unwrap().get("key2").unwrap(), "abc");
+    assert_eq!(hashmap.clone().unwrap().get("key3").unwrap(), "3");
+    assert_eq!(hashmap.clone().unwrap().get("key3[0]").unwrap(), "2");
+    assert_eq!(hashmap.clone().unwrap().get("key3[1]").unwrap(), "3");
+    assert_eq!(hashmap.clone().unwrap().get("key3[2]").unwrap(), "4");
+    assert_eq!(hashmap.clone().unwrap().get("key4").unwrap(), "true");
+    assert_eq!(hashmap.clone().unwrap().get("key5").unwrap(), "false");
+}
+
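
A minimal usage sketch of the new frostwalker::parse entry point, based on the behaviour exercised by the tests above; the example program and its printed messages are illustrative and not part of this changeset. The input mirrors parser_all_types, where an array stores its element count under the bare key and each element under key[index].

    use frostwalker::parse;

    fn main() {
        // parse() tokenizes, validates, and formats in one call,
        // returning Err(String) when validation fails.
        match parse("key = 1\r\nkey3 = [ 2, 3, 4 ]") {
            Ok(map) => {
                // Scalar values are stored as strings under their key.
                println!("key = {}", map.get("key").unwrap());
                // Arrays store their length under the bare key and the
                // elements under key[index], as the tests above assert.
                println!("key3 has {} elements", map.get("key3").unwrap());
                println!("key3[0] = {}", map.get("key3[0]").unwrap());
            }
            Err(e) => eprintln!("parse error: {}", e),
        }
    }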