Repo init

commit d9d28bfdcb
Author: delta
Date:   2022-11-06 17:42:55 +01:00
4 changed files with 114 additions and 0 deletions

Cargo.lock (generated, new file)

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "litelighter"
version = "0.1.0"

Cargo.toml (new file)

@@ -0,0 +1,8 @@
[package]
name = "litelighter"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

src/lib.rs (new file)

@@ -0,0 +1,6 @@
#![allow(dead_code)]
mod tokenizer;

fn main() {
    println!("Hello, world!");
}

src/tokenizer.rs (new file)

@@ -0,0 +1,93 @@
#[derive(Debug)]
pub enum TokenizerError {
    ExceededDepthRange,
}

mod state {
    use crate::tokenizer::TokenizerError;

    /// State is divided into four 8-bit ints; each int represents a rule id.
    /// It should never be modified directly. Instead, use the push_id and pop_id methods.
    pub struct State {
        depth: u8,          // Number of ids on the stack; valid range is 0-4
        id_stack: [u8; 4],
    }

    impl State {
        pub fn new() -> Self {
            State {
                depth: 0,
                id_stack: [0, 0, 0, 0],
            }
        }

        fn get_state() {
        }

        pub fn push_id(&mut self, id: u8) -> Result<(), TokenizerError> {
            if self.depth as usize >= self.id_stack.len() {
                return Err(TokenizerError::ExceededDepthRange);
            }
            // Write the id into the next free slot, then grow the stack.
            self.id_stack[self.depth as usize] = id;
            self.depth += 1;
            Ok(())
        }

        pub fn pop_id(&mut self) -> Result<u8, TokenizerError> {
            if self.depth == 0 {
                return Err(TokenizerError::ExceededDepthRange);
            }
            // Shrink the stack, then read and clear the popped slot.
            self.depth -= 1;
            let id = self.id_stack[self.depth as usize];
            self.id_stack[self.depth as usize] = 0;
            Ok(id)
        }
    }
}

use crate::tokenizer::state::State;

enum TokenTypes<'t> {
    Normal,
    Comment,
    String,
    Number,
    Operator,
    Symbol,
    Literal,
    Whitespace,
    Function,
    Keyword,
    KeywordAlt,
    Custom(&'t str),
}

pub struct Tokenizer<'t> {
    pub syntax: &'t str,
    syntax_tree: Vec<TokenTypes<'t>>,
    state: State,
}

impl<'t> Tokenizer<'t> {
    pub fn new(syntax: &'t str) -> Self {
        Tokenizer {
            syntax,
            syntax_tree: Vec::new(),
            state: State::new(),
        }
    }

    fn push_token() {
    }

    pub fn tokenize() {
    }
}
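
Since both the state module and the tokenizer's state field are private, the id stack can only be exercised from inside the crate. Below is a minimal sketch of the intended push_id/pop_id behaviour, written as a hypothetical #[cfg(test)] module that would sit at the bottom of src/tokenizer.rs; the test names and the "rust" syntax string are illustrative, not part of the commit.

#[cfg(test)]
mod tests {
    use super::state::State;
    use super::{Tokenizer, TokenizerError};

    #[test]
    fn ids_come_back_in_lifo_order() {
        let mut s = State::new();
        // All four slots can be filled...
        for id in 1..=4 {
            assert!(s.push_id(id).is_ok());
        }
        // ...and a fifth push exceeds the depth range.
        assert!(matches!(s.push_id(5), Err(TokenizerError::ExceededDepthRange)));
        // Pops return the most recently pushed id first and clear its slot.
        assert_eq!(s.pop_id().unwrap(), 4);
        assert_eq!(s.pop_id().unwrap(), 3);
        assert_eq!(s.pop_id().unwrap(), 2);
        assert_eq!(s.pop_id().unwrap(), 1);
        // Popping an empty stack errors as well.
        assert!(matches!(s.pop_id(), Err(TokenizerError::ExceededDepthRange)));
    }

    #[test]
    fn tokenizer_holds_its_syntax() {
        // "rust" is a placeholder; the commit does not define syntax semantics yet.
        let t = Tokenizer::new("rust");
        assert_eq!(t.syntax, "rust");
    }
}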