properly implement code tracing

Daniella, 2024-01-16 06:39:38 +01:00
parent 72ae4d8ef2, commit d36381bb4e
Signed by: TudbuT (GPG key ID: B3CF345217F202D3)
7 changed files with 58 additions and 41 deletions

src/lib.rs (new file, +5)

@@ -0,0 +1,5 @@
pub mod parser;
pub mod tokenizer;
pub mod ty;
pub mod value;
pub mod word;

src/main.rs

@@ -1,15 +1,13 @@
use std::fs;
use crate::{parser::Parser, tokenizer::Tokenizer, value::Value, word::Word};
pub mod parser;
pub mod tokenizer;
pub mod ty;
pub mod value;
pub mod word;
use aeapa::{parser::Parser, tokenizer::tokenize, value::Value, word::Word};
fn main() {
let tokens = Tokenizer::tokenize(fs::read_to_string("std.aea").unwrap()).unwrap();
let tokens = tokenize(
fs::read_to_string("std.aea").unwrap(),
Some("std.aea".to_owned()),
)
.unwrap();
println!("{tokens:?}");
let words = Parser::parse(&tokens).unwrap();
println!("{words:?}");
@@ -32,8 +30,15 @@ fn main() {
println!("{}", stack.pop().unwrap());
}
}
Word::Function(name, args, ret, words) => {
println!("If i was an interpreter I would define function {name}({args:?}) {ret:?} {{{words:?}}}")
Word::Function {
name,
args,
rets,
code,
file,
line,
} => {
println!("If i was an interpreter I would define function {name}({args:?}) {rets:?} {{{code:?}}} of {file}:{line}")
}
_ => todo!(),
}
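
Taken together, the main.rs changes replace the old `Tokenizer::tokenize(String)` associated function with the free function `tokenize(String, Option<String>)`, which also records the source filename. A minimal, self-contained sketch of the resulting driver flow, assuming the crate is consumed as `aeapa` with exactly the signatures shown in this hunk:

use std::fs;
use aeapa::{parser::Parser, tokenizer::tokenize};

fn main() {
    // Passing the filename lets the tokenizer prepend a Filename token,
    // so later diagnostics can point at "std.aea:<line>".
    let source = fs::read_to_string("std.aea").unwrap();
    let tokens = tokenize(source, Some("std.aea".to_owned())).unwrap();
    let words = Parser::parse(&tokens).unwrap();
    println!("{words:?}");
}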

src/parser.rs

@@ -23,6 +23,7 @@ pub enum ParseError {
UnexpectedToken(CodePos, Token, String),
ExpectedIdentifierAfterColon(CodePos),
ExpectedParen(CodePos),
NeedFileToken,
}
#[derive(Default)]
@@ -34,6 +35,7 @@ struct State {
pub struct Parser<'a> {
slice: &'a [Token],
index: usize,
line: usize,
file: Option<String>,
}
@@ -42,6 +44,7 @@ impl<'a> Parser<'a> {
Parser {
slice: tokens,
index: 0,
line: 0,
file: None,
}
}
@@ -92,20 +95,13 @@ impl<'a> Parser<'a> {
file: self.file.as_ref().cloned(),
char_in_file: None,
token: Some(self.index),
line: Some(
self.slice[0..self.index]
.iter()
.filter(|t| matches!(t, Token::Newline(_)))
.count()
+ 1,
),
line: Some(self.line),
}
}
pub fn read_level_at(&mut self, index: usize) -> Result<Vec<Word>, ParseError> {
self.index = index;
let mut words = Vec::new();
let mut file = None;
let mut state = State {
..Default::default()
@@ -172,11 +168,8 @@ impl<'a> Parser<'a> {
state.paren_mode.push(false);
}
}
Token::Newline(_) => (),
Token::Filename(f) => file = Some(f),
}
if self.file != file {
self.file = file.clone();
Token::Newline(n) => self.line = n,
Token::Filename(f) => self.file = Some(f),
}
}
Ok(words)
@@ -185,6 +178,7 @@ impl<'a> Parser<'a> {
fn try_match_keyword(&mut self, ident: &String) -> Result<Option<Word>, ParseError> {
match ident.as_str() {
"fn" => {
let line = self.line;
let name = self.next_as_ident()?;
self.assert_next_eq(Token::Open)?;
let mut arr = Vec::new();
@@ -220,12 +214,14 @@ impl<'a> Parser<'a> {
))?,
}
}
Ok(Some(Word::Function(
Ok(Some(Word::Function {
name,
args,
arr,
self.read_level_at(self.index)?,
)))
rets: arr,
code: self.read_level_at(self.index)?,
file: self.file.as_ref().ok_or(ParseError::NeedFileToken)?.clone(),
line,
}))
}
"if" => Ok(Some(Word::StartIf)),
"while" => Ok(Some(Word::StartWhile)),

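In summary, the parser now carries the current position itself: self.line is updated from Token::Newline(n), self.file from Token::Filename(f), and both feed CodePos and the new Word::Function fields. As an illustration only (the helper below is hypothetical, not part of this commit), a position captured this way could be rendered as file:line when reporting a ParseError:

// Hypothetical helper, not in the diff: render an optional file/line
// pair the way CodePos now stores it.
fn render_pos(file: Option<&str>, line: Option<usize>) -> String {
    match (file, line) {
        (Some(f), Some(l)) => format!("{f}:{l}"),
        (Some(f), None) => f.to_owned(),
        (None, Some(l)) => format!("<unknown>:{l}"),
        (None, None) => "<unknown position>".to_owned(),
    }
}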
src/runtime.rs (new file, +1)

@@ -0,0 +1 @@
pub struct Runtime {}

src/tokenizer.rs

@@ -7,21 +7,23 @@ use crate::{
word::Token,
};
pub struct Tokenizer<'a> {
struct Tokenizer<'a> {
char_iter: Chars<'a>,
string: String,
file: String,
}
pub fn tokenize(string: String, file: Option<String>) -> Result<Vec<Token>, ParseError> {
let tokenizer = Tokenizer {
// SAFETY: string will not be dropped while Parser is in scope
char_iter: unsafe { deborrow(&string).chars() },
string,
file: file.unwrap_or("<dynamic>".to_owned()),
};
tokenizer.read_tokens()
}
impl<'a> Tokenizer<'a> {
pub fn tokenize(string: String) -> Result<Vec<Token>, ParseError> {
let mut parser = Tokenizer {
// SAFETY: string will not be dropped while Parser is in scope
char_iter: unsafe { deborrow(&string).chars() },
string,
};
parser.read_tokens()
}
fn tpos(&self, tokens: &[Token]) -> CodePos {
CodePos {
file: None,
@@ -37,13 +39,14 @@ impl<'a> Tokenizer<'a> {
}
}
fn read_tokens(&mut self) -> Result<Vec<Token>, ParseError> {
fn read_tokens(mut self) -> Result<Vec<Token>, ParseError> {
let mut ret = Vec::new();
ret.push(Token::Filename(self.file.clone()));
let mut accum = String::new();
let mut in_string = false;
let mut in_escape = false;
let mut line = 0;
let mut line = 1;
while let Some(char) = self.char_iter.next() {
if in_string {
if in_escape {
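
With these tokenizer changes, every token stream now starts with a Token::Filename entry (falling back to "<dynamic>" when no name is supplied), and line counting starts at 1. A small sketch of the observable effect, assuming tokenizing an empty source succeeds:

use aeapa::{tokenizer::tokenize, word::Token};

fn main() {
    // No filename supplied, so the leading Filename token carries the
    // "<dynamic>" placeholder.
    let tokens = tokenize(String::new(), None).unwrap();
    assert!(matches!(tokens.first(), Some(Token::Filename(f)) if f.as_str() == "<dynamic>"));
}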

src/word.rs

@@ -25,7 +25,14 @@ pub enum Word {
StartWhile,
Confirm(bool),
Block(Vec<Word>),
Function(String, Vec<Argdef>, Vec<Argdef>, Vec<Word>),
Function {
name: String,
args: Vec<Argdef>,
rets: Vec<Argdef>,
code: Vec<Word>,
file: String,
line: usize,
},
Return,
}
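
Because Word::Function is now a struct variant carrying file and line, downstream code (for example the new, still-empty Runtime) can report where a function was declared. A hypothetical sketch, not part of this commit, assuming Word is exported from aeapa::word as in the hunk above:

use std::collections::HashMap;
use aeapa::word::Word;

// Hypothetical index: map each function name to its declaration site
// so a future interpreter can print "defined at file:line" in traces.
fn declaration_sites(words: &[Word]) -> HashMap<&str, (&str, usize)> {
    let mut sites = HashMap::new();
    for word in words {
        if let Word::Function { name, file, line, .. } = word {
            sites.insert(name.as_str(), (file.as_str(), *line));
        }
    }
    sites
}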

std.aea

@@ -9,7 +9,7 @@ print("Hi 2");
"hi":print;
fn test(a) ret {
ret= print(a) !
ret= print(a!)!
}
"hiiii" print;