From 32ba3f38c39ebb9887566e444eaec9f4f3978ed9e3b18feed49a787abb6bafba Mon Sep 17 00:00:00 2001
From: Apache
Date: Mon, 10 Jun 2024 20:52:15 -0500
Subject: [PATCH] Functionality laid out

Todo: turn tokens into opcodes!
---
 src/executor.rs  | 47 +++++++++++++++++++++++++++++++++++++++++++++++++++
 src/main.rs      | 11 +++++++++++
 src/parser.rs    | 51 +++++++++++++++++++++++++++++++++++++++++++++++++++
 src/tokenizer.rs |  1 -
 4 files changed, 109 insertions(+), 1 deletion(-)
 create mode 100644 src/executor.rs
 create mode 100644 src/parser.rs

diff --git a/src/executor.rs b/src/executor.rs
new file mode 100644
index 0000000..a4ffee9
--- /dev/null
+++ b/src/executor.rs
@@ -0,0 +1,47 @@
+use std::collections::HashMap;
+
+use crate::{OpCode, LispValue};
+
+pub struct LispState {
+    table: HashMap<String, LispValue>,
+}
+
+impl LispState {
+    pub fn new() -> LispState {
+        let mut table = HashMap::new();
+
+        table.insert(String::from("print"), LispValue::RustFunction(String::from("print"), |x| {
+            let mut strings = Vec::new();
+            for val in x {
+                strings.push(val.to_string());
+            }
+
+            let str = strings.join(" ");
+            println!("{}", str);
+
+            LispValue::Nil
+        }));
+
+        LispState {
+            table
+        }
+    }
+
+    pub fn execute(&self, instructions: Vec<OpCode>) {
+        for op in instructions {
+            match op {
+                OpCode::Call(func, args) => {
+                    let f = self.table.get(&func).unwrap();
+                    if let LispValue::RustFunction(_, f) = f {
+                        f(args);
+                    } else {
+                        todo!();
+                    }
+                },
+                OpCode::Eval(ins) => {
+                    self.execute(ins);
+                }
+            }
+        }
+    }
+}
diff --git a/src/main.rs b/src/main.rs
index 8d261ba..802ed59 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,6 +1,12 @@
 mod tokenizer;
 use tokenizer::*;
 
+mod parser;
+use parser::*;
+
+mod executor;
+use executor::*;
+
 fn main() {
     let source = std::fs::read_to_string("src/test.lisp").unwrap();
     let mut tokenizer = Tokenizer::new(source);
@@ -12,4 +18,9 @@ fn main() {
         }
     };
     println!("{:?}", tokens);
+    let instructions = parse(tokens);
+
+    let state = LispState::new();
+
+    state.execute(instructions);
 }
diff --git a/src/parser.rs b/src/parser.rs
new file mode 100644
index 0000000..f790f16
--- /dev/null
+++ b/src/parser.rs
@@ -0,0 +1,51 @@
+use std::fmt;
+
+use crate::Token;
+
+pub enum LispValue {
+    Nil,
+    String(String),
+    LispFunction(String, Vec<OpCode>),
+    RustFunction(String, fn(Vec<LispValue>) -> LispValue)
+}
+
+impl fmt::Display for LispValue {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            LispValue::Nil => write!(f, "nil"),
+            LispValue::String(str) => write!(f, "{}", str),
+            LispValue::LispFunction(name, _) => write!(f, "<'{}': Lisp Function>", name),
+            LispValue::RustFunction(name, _) => write!(f, "<'{}': Rust Function>", name)
+        }
+    }
+}
+
+impl From<&str> for LispValue {
+    fn from(value: &str) -> Self {
+        LispValue::String(value.to_string())
+    }
+}
+
+impl From<String> for LispValue {
+    fn from(value: String) -> Self {
+        LispValue::String(value)
+    }
+}
+
+pub enum OpCode {
+    Call(String, Vec<LispValue>),
+    Eval(Vec<OpCode>)
+}
+
+
+
+pub fn parse(tokens: Vec<Token>) -> Vec<OpCode> {
+    let mut opcodes = Vec::new();
+
+    // TODO:
+    opcodes.push(OpCode::Call("print".to_string(), vec![
+        LispValue::from("Hello, World!")
+    ]));
+
+    opcodes
+}
\ No newline at end of file
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index ff653d8..605f09a 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -6,7 +6,6 @@ pub enum Token {
     CloseParen,
     Identifier(String),
     String(String),
-    Integer(i32)
 }
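
Note (not part of the patch): below is a minimal sketch of one way the parse TODO could
turn tokens into opcodes. It assumes every form is a flat call such as
(print "Hello, World!") with only string arguments and no nesting yet, and it uses only
the Token, LispValue, and OpCode types introduced above; the committed parse() is still
the hard-coded stub.

// Hypothetical body for parse() in src/parser.rs: a sketch, not the committed
// implementation.
pub fn parse(tokens: Vec<Token>) -> Vec<OpCode> {
    let mut opcodes = Vec::new();
    let mut iter = tokens.into_iter();

    while let Some(token) = iter.next() {
        // Every '(' opens a call form; the identifier after it names the function.
        if let Token::OpenParen = token {
            if let Some(Token::Identifier(name)) = iter.next() {
                let mut args = Vec::new();
                // Collect string arguments until the matching ')'.
                for arg in iter.by_ref() {
                    match arg {
                        Token::String(s) => args.push(LispValue::from(s)),
                        Token::CloseParen => break,
                        _ => todo!("nested forms and other argument kinds"),
                    }
                }
                opcodes.push(OpCode::Call(name, args));
            }
        }
    }

    opcodes
}

Nested forms could later be lowered to OpCode::Eval so the executor's existing recursion
handles them.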