last couple of tests
This commit is contained in:
parent
06aa0d275c
commit
5420ea59ae
5 changed files with 67 additions and 5 deletions
2
Cargo.lock
generated
2
Cargo.lock
generated
|
@ -4,4 +4,4 @@ version = 3
|
|||
|
||||
[[package]]
|
||||
name = "frostwalker"
|
||||
version = "0.0.9"
|
||||
version = "0.0.10"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "frostwalker"
|
||||
version = "0.0.9"
|
||||
version = "0.0.10"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
|
|
@ -58,3 +58,15 @@ fn array() {
|
|||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
// Feeding the formatter a token of class UNKNOWN should leave the key
// absent from the output, so the final lookup is expected to panic.
#[test]
#[should_panic]
fn unknown_element() {
    let key_tok = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
    let eq_tok = Token { class: Class::EQUALS, value: None };
    let bad_tok = Token { class: Class::UNKNOWN, value: Some("10".to_string()) };

    let tree = formatter::format(vec![key_tok, eq_tok, bad_tok]);

    tree.get("key").unwrap();
}
|
||||
|
||||
|
|
13
src/lib.rs
13
src/lib.rs
|
@ -1,3 +1,5 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(PartialEq)]
|
||||
pub struct Token {
|
||||
|
@ -21,6 +23,17 @@ pub mod lexer;
|
|||
pub mod validator;
|
||||
pub mod formatter;
|
||||
|
||||
pub fn parse(source: &str) -> Result<HashMap<String, String>, String> {
|
||||
let tree = lexer::tokenize(source);
|
||||
let result = validator::validate(&tree);
|
||||
if result.is_some() {
|
||||
return Err(result.unwrap_or("Error unwrapping result string.".to_string()));
|
||||
}
|
||||
|
||||
let output = formatter::format(tree);
|
||||
return Ok(output);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod lexer_tests;
|
||||
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
use frostwalker::lexer;
|
||||
use frostwalker::validator;
|
||||
use frostwalker::formatter;
|
||||
use frostwalker::{parse, lexer, validator, formatter};
|
||||
|
||||
#[test]
|
||||
fn lv_single_key() {
|
||||
|
@ -84,3 +82,42 @@ fn full_stack_single_key_double_equals() {
|
|||
panic!("Formatter error.");
|
||||
}
|
||||
}
|
||||
|
||||
/// Full pipeline (lexer -> validator -> formatter) with an emoji key.
#[test]
fn full_stack_single_key_emoji() {
    let tree = lexer::tokenize("🦀 = \"value\"");
    // Fail loudly with the validator's own message if validation rejects it.
    if let Some(message) = validator::validate(&tree) {
        panic!("{}", message);
    }
    let hashmap = formatter::format(tree);
    // assert_eq! prints both sides on failure, unlike the former
    // manual `if !(a == b) { panic!("Formatter error.") }`.
    assert_eq!(hashmap.get("🦀").unwrap(), "value");
}
|
||||
|
||||
/// `parse` should surface a single string value under its key.
#[test]
fn parser_single_key() {
    let parsed = parse("key = \"value\"");
    let map = parsed.unwrap();
    assert_eq!(map.get("key").unwrap(), "value");
}
|
||||
|
||||
/// `parse` should handle multi-byte (emoji) keys and values.
#[test]
fn parser_single_key_emoji() {
    let parsed = parse("🦀 = \"🦀🦀\"");
    let map = parsed.unwrap();
    assert_eq!(map.get("🦀").unwrap(), "🦀🦀");
}
|
||||
|
||||
/// Exercises every value type through `parse`: integer, string, array
/// (bare key plus indexed `key[i]` entries), and booleans (case-insensitive
/// input normalized to lowercase).
#[test]
fn parser_all_types() {
    let hashmap = parse("key = 1\r\nkey2 = \"abc\"\r\nkey3 = [ 2, 3, 4 ]\r\nkey4 = true\r\nkey5 = FALSE");
    // Unwrap once instead of cloning the whole Result/HashMap for every assertion.
    let map = hashmap.unwrap();
    assert_eq!(map.get("key").unwrap(), "1");
    assert_eq!(map.get("key2").unwrap(), "abc");
    // NOTE(review): the bare array key maps to "3" — presumably the element
    // count, though it coincides with the middle element here; confirm
    // against the formatter.
    assert_eq!(map.get("key3").unwrap(), "3");
    assert_eq!(map.get("key3[0]").unwrap(), "2");
    assert_eq!(map.get("key3[1]").unwrap(), "3");
    assert_eq!(map.get("key3[2]").unwrap(), "4");
    assert_eq!(map.get("key4").unwrap(), "true");
    assert_eq!(map.get("key5").unwrap(), "false");
}
|
||||
|
||||
|
|
Loading…
Reference in a new issue