From d9d28bfdcb07946a56f4127c12117d4caa479ea1 Mon Sep 17 00:00:00 2001
From: delta
Date: Sun, 6 Nov 2022 17:42:55 +0100
Subject: [PATCH] Repo init

---
 Cargo.lock       |   7 ++++
 Cargo.toml       |   8 ++++
 src/lib.rs       |   8 ++++
 src/tokenizer.rs | 102 ++++++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 125 insertions(+)
 create mode 100644 Cargo.lock
 create mode 100644 Cargo.toml
 create mode 100644 src/lib.rs
 create mode 100644 src/tokenizer.rs

diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 0000000..5f45e1c
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,7 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "litelighter"
+version = "0.1.0"
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..e8d51dc
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "litelighter"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..4b64d0d
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,8 @@
+#![allow(dead_code)]
+mod tokenizer;
+
+// NOTE(review): `fn main` in src/lib.rs is dead code — binary entry points
+// belong in src/main.rs. Kept as-is, silenced by allow(dead_code) above.
+fn main() {
+    println!("Hello, world!");
+}
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
new file mode 100644
index 0000000..045c47c
--- /dev/null
+++ b/src/tokenizer.rs
@@ -0,0 +1,102 @@
+/// Errors produced by the tokenizer state machine.
+pub enum TokenizerError {
+    /// A push past depth 3 or a pop past depth 0 was attempted.
+    ExceededDepthRange,
+}
+
+mod state {
+    use crate::tokenizer::TokenizerError;
+
+    /// State holds four 8-bit rule ids, one `u8` per nesting level.
+    /// Should never be modified directly. Instead use the push_id and pop_id methods.
+    pub struct State {
+        depth: u8, // Valid range is 0-3
+        id_stack: [u8; 4],
+    }
+
+    impl State {
+        /// Creates an empty state: depth 0, all rule-id slots cleared.
+        pub fn new() -> Self {
+            State {
+                depth: 0,
+                id_stack: [0, 0, 0, 0],
+            }
+        }
+
+        // TODO: presumably meant to pack `id_stack` into one value; still a stub.
+        fn get_state() {}
+
+        /// Descends one nesting level and records `id` for it.
+        ///
+        /// # Errors
+        /// `TokenizerError::ExceededDepthRange` when depth is already 3.
+        ///
+        /// NOTE(review): the first push writes `id_stack[1]` — slot 0 is never
+        /// written, so depth 0 looks reserved for the root state; confirm intent.
+        pub fn push_id(&mut self, id: u8) -> Result<(), TokenizerError> {
+            if self.depth >= 3 {
+                return Err(TokenizerError::ExceededDepthRange);
+            }
+
+            self.depth += 1;
+            self.id_stack[self.depth as usize] = id;
+
+            Ok(())
+        }
+
+        /// Clears and returns the rule id of the current level, ascending one level.
+        ///
+        /// # Errors
+        /// `TokenizerError::ExceededDepthRange` when depth is already 0.
+        pub fn pop_id(&mut self) -> Result<u8, TokenizerError> {
+            if self.depth == 0 {
+                return Err(TokenizerError::ExceededDepthRange);
+            }
+
+            let id = self.id_stack[self.depth as usize];
+            self.id_stack[self.depth as usize] = 0;
+            self.depth -= 1;
+
+            Ok(id)
+        }
+    }
+}
+
+use crate::tokenizer::state::State;
+
+/// Categories a token can be classified as; `Custom` carries a caller-supplied name.
+enum TokenTypes<'t> {
+    Normal,
+    Comment,
+    String,
+    Number,
+    Operator,
+    Symbol,
+    Literal,
+    Whitespace,
+    Function,
+    Keyword,
+    KeywordAlt,
+    Custom(&'t str),
+}
+
+pub struct Tokenizer<'t> {
+    pub syntax: &'t str,
+    syntax_tree: Vec<TokenTypes<'t>>,
+    state: State,
+}
+
+impl<'t> Tokenizer<'t> {
+    pub fn new(syntax: &'t str) -> Self {
+        Tokenizer {
+            syntax,
+            syntax_tree: Vec::new(),
+            state: State::new(),
+        }
+    }
+
+    // TODO: still a stub.
+    fn push_token() {}
+
+    pub fn tokenize() {}
+}