diff --git a/sallyforth/kernel.py b/sallyforth/kernel.py
index 3f512b3..37bf936 100644
--- a/sallyforth/kernel.py
+++ b/sallyforth/kernel.py
@@ -1,7 +1,7 @@
 import sys
 from os import path
 from words import *
-from lex import forth_prompt, read_tokens, is_string
+from lex import forth_prompt, read_tokens, is_string, tokenize
 from stack import Stack
 
 def to_number(token):
@@ -23,9 +23,12 @@ class Forth:
     def __init__(self, startup=None):
         self.stack = Stack()
         self.dictionary = {
+                '*prompt*': const_f("SallyForth>> "),
                 'true': const_f(True),
                 'false': const_f(False),
                 'nil': const_f(None),
+                'def': w_def,
+                'import': w_import,
                 '0': const_f(0),
                 '1': const_f(1),
                 '2': const_f(2),
@@ -56,17 +59,37 @@ class Forth:
         if startup:
             execute_startup(startup)
 
+    def defvar(self, name, value):
+        self.dictionary[name] = const_f(value)
+
+    def evaluate_token(self, token):
+        self.execute_token(token)
+        return self.stack.pop()
+
     def compiling(self):
         return self.compiler
 
-    def process_line(self, readline_f=forth_prompt):
+    def execute_line(self, readline_f=forth_prompt):
         tokens = read_tokens(readline_f)
+        self.execute_tokens(tokens)
+
+    def execute_tokens(self, tokens):
         for token in tokens:
             if not self.compiling():
-                self.interpret_token(token)
+                self.execute_token(token)
             else:
                 self.compile_token(token)
 
+    def execute_file(self, fpath):
+        self.defvar('*source*', fpath)
+        with open(fpath) as f:
+            line = f.readline()
+            while line:
+                tokens = tokenize(line)
+                self.execute_tokens(tokens)
+                line = f.readline()
+        self.defvar('*source*', '')
+
     def compile_token(self, token):
         if self.compiler.name == None:
             self.compiler.name = token
@@ -93,9 +116,9 @@ class Forth:
             else:
                 self.compiler.add_instruction(push_value_f(n))
 
-    def interpret_token(self, token):
+    def execute_token(self, token):
         if is_string(token):
-            self.stack.push(token)
+            self.stack.push(token[1::])
             return
         if token in self.dictionary: