add documentation and finish off.

This commit is contained in:
abbie 2024-01-19 20:05:40 +00:00
parent 5420ea59ae
commit 878f81b1a4
Signed by: threeoh6000
GPG key ID: 801FE4AD456E922C
6 changed files with 80 additions and 2 deletions

2
Cargo.lock generated
View file

@ -4,4 +4,4 @@ version = 3
[[package]]
name = "frostwalker"
version = "0.0.10"
version = "0.1.0"

View file

@ -1,6 +1,6 @@
[package]
name = "frostwalker"
version = "0.0.10"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View file

@ -1,6 +1,21 @@
//! Module containing the formatter which turns the tokens into a HashMap.
use super::{Token, Class};
use std::collections::HashMap;
/// This function creates a HashMap from tokens.
///
/// It does not do any verification of the source tokens and may panic if the token list is malformed (e.g. not previously checked by the validator).
///
/// ```
/// use frostwalker::{formatter, Token, Class};
///
/// let identifier = Token { class: Class::IDENTIFIER, value: Some("key".to_string()) };
/// let equals = Token { class: Class::EQUALS, value: None };
/// let value = Token { class: Class::LITERAL, value: Some("5".to_string()) };
///
/// let hashmap = formatter::format(vec![identifier, equals, value]);
/// assert_eq!(hashmap.get("key").unwrap(), "5");
/// ```
pub fn format(tree: Vec<Token>) -> HashMap<String, String> {
let mut output = HashMap::new();
let mut current_key = "".to_string();

View file

@ -1,9 +1,25 @@
//! Module containing the lexer which turns the source into a Token list.
use super::{Token, Class};
/// British English spelling for [`tokenize()`].
///
/// This is a zero-cost alias: it forwards `source` directly to
/// [`tokenize()`] and returns its token list unchanged.
pub fn tokenise(source: &str) -> Vec<Token> {
    // Trailing expression instead of an explicit `return` (clippy: needless_return).
    tokenize(source)
}
/// Takes in a source string and turns it into a Token list.
///
/// Any unknown text will be given the class UNKNOWN and will fail when reaching the validator.
///
/// ```
/// use frostwalker::{lexer, Class, Token};
///
/// let list = lexer::tokenize("meaning_of_life = 42");
/// let ident = Token { class: Class::IDENTIFIER, value: Some("meaning_of_life".to_string()) };
/// let equals = Token { class: Class::EQUALS, value: None };
/// let val = Token { class: Class::LITERAL, value: Some("42".to_string()) };
///
/// assert_eq!(list, vec![ident, equals, val]);
/// ```
pub fn tokenize(source: &str) -> Vec<Token> {
let lines: Vec<&str> = source.lines().collect();
let lines_len = lines.len();

View file

@ -1,5 +1,27 @@
//! A TOML-like configuration language parser that supports single depth arrays, integers, strings and boolean literals.
//!
//! This library isn't intended to compete with `toml`. Frostwalker and `toml` have differing goals and I would recommend you use `toml` over Frostwalker as it supports more features, but Frostwalker has no dependencies apart from the standard library.
//!
//! The use of this library is easy and doesn't require much work:
//! ```
//! use frostwalker::parse;
//!
//! let parsed_output = parse("yes = true\r\nkey = \"value\"\r\narray = [ 1, 5 ]");
//! let hashmap = parsed_output.unwrap();
//! assert_eq!(hashmap.get("yes").unwrap(), "true");
//!
//! assert_eq!(hashmap.get("key").unwrap(), "value");
//!
//! assert_eq!(hashmap.get("array").unwrap(), "2");
//! assert_eq!(hashmap.get("array[0]").unwrap(), "1");
//! assert_eq!(hashmap.get("array[1]").unwrap(), "5");
//! ```
//!
use std::collections::HashMap;
/// A structure for a lexical token.
///
/// It is used by all components of the parser stack and is defined here so as to keep each component separate from the others.
#[derive(Debug)]
#[derive(PartialEq)]
pub struct Token {
@ -7,15 +29,25 @@ pub struct Token {
pub value: Option<String>,
}
/// An enumerator for types of lexical tokens.
///
/// It is used by all components of the parser stack and is defined here so as
/// to keep each component separate from the others.
//
// A fieldless enum is trivially copyable, so the common traits are derived
// eagerly in a single attribute; adding derives is backward-compatible for
// existing callers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Class {
    /// Names and keys assigned by the programmer.
    IDENTIFIER,
    /// Punctuation and delimiters.
    SEPARATOR,
    /// The equals sign.
    EQUALS,
    /// Strings and integers.
    LITERAL,
    /// New lines.
    NEWLINE,
    /// The boolean literals `true` and `false`.
    BOOLEAN,
    /// Tokens that do not fit the lexer's ruleset.
    UNKNOWN,
}
@ -23,6 +55,9 @@ pub mod lexer;
pub mod validator;
pub mod formatter;
/// The parsing function. This function takes in a string with configuration in it and either outputs a HashMap containing keys and values or an error message.
///
/// The parser wraps each function of the parser stack (lexer, validator, and formatter) so as to ensure the configuration text is parsed properly. Errors originate from the validator.
pub fn parse(source: &str) -> Result<HashMap<String, String>, String> {
let tree = lexer::tokenize(source);
let result = validator::validate(&tree);

View file

@ -1,3 +1,4 @@
//! Module containing the validator which makes sure that a Token list is correct.
use super::{Token, Class};
#[derive(Debug)]
@ -11,6 +12,17 @@ enum ExpectedClass {
SEPARATOR,
}
/// Borrows a token list and either outputs None, meaning no error, or Some(String) with an error message inside.
///
/// ```
/// use frostwalker::{validator, Class, Token};
///
/// let ident = Token { class: Class::IDENTIFIER, value: Some("friend".to_string()) };
/// let equals = Token { class: Class::EQUALS, value: None };
/// let val = Token { class: Class::LITERAL, value: Some("ferris".to_string()) };
///
/// assert_eq!(validator::validate(&vec![ident, equals, val]), None);
/// ```
pub fn validate(tree: &Vec<Token>) -> Option<String> {
let mut expected_token: ExpectedClass = ExpectedClass::IDENTIFIER;
let mut i = 0;