diff --git a/Cargo.lock b/Cargo.lock
index 3e5cab7..7ef1dc2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4,4 +4,4 @@ version = 3
 
 [[package]]
 name = "frostwalker"
-version = "0.0.10"
+version = "0.1.0"
diff --git a/Cargo.toml b/Cargo.toml
index 21a05e2..7a5ee06 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "frostwalker"
-version = "0.0.10"
+version = "0.1.0"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
diff --git a/src/formatter.rs b/src/formatter.rs
index 1de4a36..0bfa343 100644
--- a/src/formatter.rs
+++ b/src/formatter.rs
@@ -1,6 +1,21 @@
+//! Module containing the formatter which turns the tokens into a HashMap.
 use super::{Token, Class};
 use std::collections::HashMap;
 
+/// This function creates a HashMap from tokens.
+///
+/// It does not do any verification of the source tokens and may panic if something goes wrong.
+///
+/// ```
+/// use frostwalker::{formatter, Token, Class};
+///
+/// let identifier = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
+/// let equals = Token { class: Class::EQUALS, value: None };
+/// let value = Token { class: Class::LITERAL, value: Some("5".to_string()) };
+///
+/// let hashmap = formatter::format(vec![identifier, equals, value]);
+/// assert_eq!(hashmap.get("key").unwrap(), "5");
+/// ```
 pub fn format(tree: Vec<Token>) -> HashMap<String, String> {
     let mut output = HashMap::new();
     let mut current_key = "".to_string();
diff --git a/src/lexer.rs b/src/lexer.rs
index 4a673b1..c09a5a7 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -1,9 +1,25 @@
+//! Module containing the lexer which turns the source into a Token list.
 use super::{Token, Class};
 
+/// British English spelling for `tokenize()`.
 pub fn tokenise(source: &str) -> Vec<Token> {
     return tokenize(source);
 }
 
+/// Takes in a source string and turns it into a Token list.
+///
+/// Any unknown text will be given the class UNKNOWN and will fail when reaching the validator.
+///
+/// ```
+/// use frostwalker::{lexer, Class, Token};
+///
+/// let list = lexer::tokenize("meaning_of_life = 42");
+/// let ident = Token { class: Class::IDENTIFIER, value: Some("meaning_of_life".to_string()) };
+/// let equals = Token { class: Class::EQUALS, value: None };
+/// let val = Token { class: Class::LITERAL, value: Some("42".to_string()) };
+///
+/// assert_eq!(list, vec![ident, equals, val]);
+/// ```
 pub fn tokenize(source: &str) -> Vec<Token> {
     let lines: Vec<&str> = source.lines().collect();
     let lines_len = lines.len();
diff --git a/src/lib.rs b/src/lib.rs
index 230a6a8..3a392cb 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,5 +1,27 @@
+//! A TOML-like configuration language parser that supports single depth arrays, integers, strings and boolean literals.
+//!
+//! This library isn't intended to compete with `toml`. Frostwalker and `toml` have differing goals and I would recommend you use `toml` over Frostwalker as it supports more features, but Frostwalker has no dependencies apart from the standard library.
+//!
+//! The use of this library is easy and doesn't require much work:
+//! ```
+//! use frostwalker::parse;
+//!
+//! let parsed_output = parse("yes = true\r\nkey = \"value\"\r\narray = [ 1, 5 ]");
+//! let hashmap = parsed_output.unwrap();
+//! assert_eq!(hashmap.get("yes").unwrap(), "true");
+//!
+//! assert_eq!(hashmap.get("key").unwrap(), "value");
+//!
+//! assert_eq!(hashmap.get("array").unwrap(), "2");
+//! assert_eq!(hashmap.get("array[0]").unwrap(), "1");
+//! assert_eq!(hashmap.get("array[1]").unwrap(), "5");
+//! ```
+//!
 use std::collections::HashMap;
 
+/// A structure for a lexical token.
+///
+/// It is used by all components of the parser stack and is here so as to keep each component separate from the others.
 #[derive(Debug)]
 #[derive(PartialEq)]
 pub struct Token {
@@ -7,15 +29,25 @@ pub struct Token {
     pub value: Option<String>,
 }
 
+/// An enumerator for types of lexical tokens.
+///
+/// It is used by all components of the parser stack and is here so as to keep each component separate from the others.
 #[derive(Debug)]
 #[derive(PartialEq)]
 pub enum Class {
+    /// Names and keys assigned by the programmer.
     IDENTIFIER,
+    /// Punctuation and delimiters.
     SEPARATOR,
+    /// Equals.
     EQUALS,
+    /// Strings and integers.
     LITERAL,
+    /// New lines.
     NEWLINE,
+    /// True and false.
     BOOLEAN,
+    /// Tokens that do not fit the lexer's ruleset.
     UNKNOWN,
 }
 
@@ -23,6 +55,9 @@
 pub mod lexer;
 pub mod validator;
 pub mod formatter;
+/// The parsing function. This function takes in a string with configuration in it and either outputs a HashMap containing keys and values or an error message.
+///
+/// The parser wraps around each function of the parser stack (lexer, validator, and formatter) to ensure the configuration text is parsed properly. Errors originate from the validator.
 pub fn parse(source: &str) -> Result<HashMap<String, String>, String> {
     let tree = lexer::tokenize(source);
     let result = validator::validate(&tree);
diff --git a/src/validator.rs b/src/validator.rs
index b45089a..12fd28c 100644
--- a/src/validator.rs
+++ b/src/validator.rs
@@ -1,3 +1,4 @@
+//! Module containing the validator which makes sure that a Token list is correct.
 use super::{Token, Class};
 
 #[derive(Debug)]
@@ -11,6 +12,17 @@ enum ExpectedClass {
     SEPARATOR,
 }
 
+/// Borrows a token list and either outputs None, meaning no error, or Some(String) with an error message inside.
+///
+/// ```
+/// use frostwalker::{validator, Class, Token};
+///
+/// let ident = Token { class: Class::IDENTIFIER, value: Some("friend".to_string()) };
+/// let equals = Token { class: Class::EQUALS, value: None };
+/// let val = Token { class: Class::LITERAL, value: Some("ferris".to_string()) };
+///
+/// assert_eq!(validator::validate(&vec![ident, equals, val]), None);
+/// ```
 pub fn validate(tree: &Vec<Token>) -> Option<String> {
     let mut expected_token: ExpectedClass = ExpectedClass::IDENTIFIER;
     let mut i = 0;