mirror of
https://github.com/russolsen/sallyforth
synced 2024-12-25 21:58:18 +01:00
Add namespaces, fix parser bugs, add compile to python back in.
This commit is contained in:
parent
66a4e271d8
commit
960e4b0033
23 changed files with 746 additions and 330 deletions
@@ -1,6 +1,4 @@
"Hello from 0.sf" p

\ Pull in libs.
/ Pull in libs.

"builtins" load
"time" load

@@ -13,64 +11,66 @@
'builtins import
'time import

\ Basic aliases
/ Basic aliases

: ->compile { *last-word* compile }
: ->inline { *last-word* inline }
: ->optimize { ->inline ->compile }
: ->static { false *last-word* dynamic }
: ->dynamic { true *last-word* dynamic }

: -- { 1 - } ->inline
: ++ { 1 + } ->inline
: =0 { 0 = } ->inline
: pos? { 0 > } ->inline
: neg? { 0 < } ->inline
: zero? { 0 = } ->inline
: drop1 { drop } ->inline
: drop2 { drop drop } ->inline
: drop3 { drop2 drop } ->inline
: -- { 1 - } ->compile
: ++ { 1 + } ->compile
: =0 { 0 = } ->compile
: pos? { 0 > } ->compile
: neg? { 0 < } ->compile
: zero? { 0 = } ->compile
: drop1 { drop } ->compile
: drop2 { drop drop } ->compile
: drop3 { drop2 drop } ->compile

\ List making.
/ List making.

unique 'list-marker =!
: [ list-marker
: ] { list-marker [list] }
: [] { [ ] }
: [ list-marker ->compile
: ] { list-marker [list] } ->compile
: [] { [ ] } ->compile

unique 'map-marker =!
: {{ map-marker
: }} { map-marker [list] list->map }
: {{}} { {{ }} }
: <<= map-marker ->compile
: =>> { map-marker [list] list->map } ->compile
: <<=>> { <<= =>> } ->compile

\ Spelunk thru objects and properties.
/ Spelunk thru objects and properties.

: <. [
: .> { ] @@ }
: $? swap

\ Set the interactive prompt.

: *prompt* "sallySh> "

\ Function calling.
/ Function calling.

: !!0 { [] swap !! }
: !!1 { swap 1 ->list swap !! }
: !!2 { mbt 2 ->list swap !! }

: getattr ( obj attr -- attr-value ) {
/ obj attr -- attr-value
: getattr {
swap 2 ->list builtins/getattr !!
}

: setattr ( obj attr value -- ) {
/ obj attr value
: setattr {
bmt 3 ->list builtins/setattr
}

: .!! (obj args method-name -- result) {
/ obj args method-name -- result
: .!! {
tbm getattr !!
}

\ Handy utilities
/ Handy utilities

: str { builtins/str !!1 }
: type { builtins/type !!1 }

@@ -78,7 +78,12 @@ unique 'map-marker =!
: sleep { time/sleep !!1 drop }
: ctime { time/ctime !!0 }

: assert ( bool msg -- ) {
/ Set the interactive prompt.

: *prompt* { <. *ns* 'name .> str " sf>> " + }

/ bool msg --
: assert {
dup
p
swap

@@ -87,7 +92,24 @@ unique 'map-marker =!
{ builtins/AssertionError !!1 raise }
}

\ Other startup files.
/ Namespaces

: namespaces { <. forth 'namespaces .> }

: namespace {
dup ns? ifelse
{ stack set-ns }
{ dup new-ns set-ns }
}

: require {
dup dup
'.sf + source
ns? not ifelse { "Namespace " . . " not loaded." p }
{ "Namespace " . . " loaded." p }
}

/ Other startup files.

*sallyforth-dir* "/" "io.sf" + + source
*sallyforth-dir* "/" "list.sf" + + source
136 sallyforth/ast_utils.py Normal file
@@ -0,0 +1,136 @@
from ast import *
import ast

forth_stack_ast = Attribute(value=Name(id='forth', ctx=Load()),
                            attr='stack', ctx=Load())

forth_push_ast = Attribute(value=forth_stack_ast, attr='push', ctx=Load())

def push_ast(val_ast):
    return call(func=forth_push_ast, args=[val_ast], keywords=[])

def value_ast(value):
    print("value ast:", value)
    if isinstance(value, str):
        return Str(value)
    elif isinstance(value, int):
        return Num(value)
    elif isinstance(value, float):
        return Num(value)
    else:
        return None

def push_value_ast(value, name='constant'):
    vast = value_ast(value)
    if vast:
        result = FunctionDef(
            name=name,
            vararg=None,
            kw_defaults=[],
            decorator_list=[],
            args=arguments(args=[arg(arg='forth', annotation=None)], vararg=None, kwonlyargs=[], kw_defaults=[], defaults=[]),
            body=[Expr(value=push_ast(vast))])
        fix_missing_locations(result)
        return result
    return None


def dump(x):
    #print("dump", x, type(x))
    if x == None:
        print("None!")
    elif isinstance(x,str):
        print("String:", x)
    elif isinstance(x,list) or isinstance(x, tuple):
        for el in x:
            print("List dump:")
            dump(el)
    else:
        ast.dump(x)

def indent(s, level=0):
    spaces = " " * level
    return spaces + str(s)

def nl(s):
    return s + "\n"

def dump_coll(kind, ast, level):
    n = str(len(ast))
    result = nl(indent(kind + "(" + n + ") =>", level))
    for x in ast:
        result += dump(x, level+1)
    return result

def dump_tuple(ast, level=0):
    return dump_coll("tuple", ast, level)

def dump_list(ast, level=0):
    return dump_coll("list", ast, level)

def dump_plain_str(x, level=0):
    return nl(indent("str:" + x, level))

def dump_expr(x, level=0):
    return nl(indent("expr!!", level))

def dump_name(x, level=0):
    return nl(indent(f'name({x.id})', level))

def ast_dump(x, level=0):
    return nl(indent(ast.dump(x), level))

def dump_expr(x, level=0):
    return nl(indent("Expr:", level)) \
           + dump(x.value, level+1)

def dump_module(m, level=0):
    return nl(indent("Module:", level)) + \
           dump_coll("body", m.body, level+1)

def dump_assign(a, level=0):
    return nl(indent("Assign:", level)) + \
           dump_coll("targets", a.targets, level+1) + \
           dump(a.value, level+1)

def dump_call(c, level=0):
    return nl(indent("Call", level)) + \
           dump(c.func, level+1) + \
           dump_coll("Args:", c.args, level+1)

def dump_fdef(fd, level=0):
    return nl(indent("FunctionDef", level)) + \
           nl(indent(fd.name, level+1)) + \
           dump_coll(str(type(fd.body)), fd.body, level+1)

def dump_attr(a, level=0):
    return nl(indent("Attr", level)) + \
           dump(a.attr, level+1) + \
           dump(a.value, level+1)

switcher = {
    list: dump_list,
    tuple: dump_tuple,
    str: dump_plain_str,
    Name: dump_name,
    Expr: dump_expr,
    FunctionDef: dump_fdef,
    Module: dump_module,
    Assign: dump_assign,
    Attribute: dump_attr,
    Call: dump_call}

def dump(ast, level=0):
    print(">>Dump", ast)
    if ast == None:
        return nl(indent("None", level))
    t = type(ast)
    if t in switcher:
        f = switcher[t]
        return f(ast, level)
    else:
        print("?????", ast)
        return str(ast)
@@ -1,16 +1,34 @@
import tokenstream as ts
from wrappers import noop
from namespace import Namespace
from util import word, native_word
from unique import Unique
import python_compiler as pc
import inliner
import importlib
from pprint import pprint

@word()
def compile(forth):
    name = forth.stack.pop()
    var = forth.ns[name]
    word_f = var.value
    new_f = pc.compile_word_f(word_f, name)
    forth.set(name, new_f)

@word()
def inline(forth):
    name = forth.stack.pop()
    var = forth.ns[name]
    word_f = var.value
    new_f = inliner.compile_word_f(word_f, name)
    forth.set(name, new_f)

@word()
def dynamic(forth):
    name = forth.stack.pop()
    isdyn = forth.stack.pop()
    var = forth.ns[name]
    print(f'name: {name} var: {var} dyn: {isdyn}')
    var.dynamic = isdyn

@word()

@@ -26,6 +44,11 @@ def native(forth):
    wrapped_f = native_word(native_f, name, n, has_return)
    forth.set(name, wrapped_f)

@word("go!")
def exec_word(forth):
    func = forth.stack.pop()
    func(forth)

@word("function")
def function_word(forth):
    name = forth.stack.pop()

@@ -54,13 +77,11 @@ def readtoken(forth):
def w_call(forth):
    func = forth.stack.pop()
    args = forth.stack.pop()
    #print('f', f, 'args', args)
    try:
        result = func(*args)
    except:
        print(f'Error executing {func}({args})')
        raise
    #print('result', result)
    forth.stack.push(result)

@word()

@@ -144,28 +165,48 @@ def splat(forth):
def stack(forth):
    print(forth.stack)

@word()
def ns(forth):
@word('debug-ns')
def debug_ns(forth):
    print('debug ns')
    print(forth.ns.name)
    pprint(forth.ns.includes)
    pprint(forth.ns.contents)

@word('*ns*')
def star_ns_star(forth):
    forth.stack.push(forth.ns)

@word('new-ns')
def new_ns(forth):
    name = forth.stack.pop()
    core = forth.namespaces['core']
    namespace = Namespace(name, [core])
    forth.namespaces[name] = namespace

@word('include')
def include_ns(forth):
    name = forth.stack.pop()
    included = forth.namespaces[name]
    forth.ns.include_ns(included)

@word('set-ns')
def set_ns_word(forth):
    name = forth.stack.pop()
    forth.set_ns(name)

@word('ns?')
def ns_question(forth):
    name = forth.stack.pop()
    forth.stack.push(name in forth.namespaces)

@word(':', True)
def colon(forth):
    name = forth.stream.get_token().value
    body = forth.compile_next()
    forth.set(name, body)
    forth.set_constant('*last-word*', name)
    forth.core.set_constant('*last-word*', name)
    return noop

@word()
def inline(forth):
    name = forth.stack.pop()
    print('name', name)
    var = forth.ns[name]
    value = var.value
    if not value.forth_primitive:
        value.forth_inline = True

@word()
def current_stream(forth):
    forth.stack.push(forth.stream)

@@ -196,27 +237,26 @@ def word_bang(forth):
def w_while(forth):
    cond = forth.compile_next()
    body = forth.compile_next()
    #print("cond:", cond)
    #print("body", body)
    def dowhile(xforth):
        b = fresult(xforth, cond)
        while b:
            body(xforth)
            b = fresult(xforth, cond)
    dowhile.forth_inline = False
    dowhile.forth_primitive = True
    dowhile.forth_immediate = False
    dowhile.operation_type = 'while'
    dowhile.immediate = False
    return dowhile

@word('if', True)
def w_if(forth):
    compiled = forth.compile_next()
    print("compiled", compiled)
    def doif(forth):
        value = forth.stack.pop()
        if value:
            compiled(forth)
    doif.forth_inline = False
    doif.forth_primitive = True
    doif.forth_immediate = False
    doif.operation_type = 'if'
    doif.immediate = False
    return doif

@word('ifelse', True)

@@ -229,7 +269,6 @@ def ifelse(forth):
        compiled_true(forth)
    else:
        compiled_false(forth)
    doif.forth_inline = False
    doif.forth_primitive = True
    doif.forth_immediate = False
    doif.operation_type = 'ifelse'
    doif.immediate = False
    return doif
@@ -1,9 +0,0 @@

: doc { <. $? '__doc__ .> }

'power builtins/pow 2 true native
'abs builtins/abs 1 true native
'round builtins/round 1 true native

\ 'open builtins/open 2 true native
@@ -1,105 +0,0 @@
from tokenstream import Token
from wrappers import value_f, inner_f, inner2_f, inner3_f, noop
from recoder import concat_functions

LBrace = Token('word', '{')
RBrace = Token('word', '}')

def composite_function(contents):
    asts = []
    for f in contents:
        ast = getattr(f, 'ast', None)
        if not ast:
            print("No ast for:", f)
            return None
        asts.append(ast)
    return concat_functions(asts)

def compile_word(forth, w):
    name = w.value
    var = forth.ns[name]
    value = var.value

    if value.forth_immediate:
        return value(forth)
    elif var.dynamic:
        return var
    else:
        return value

def compile_token(forth, t):
    if t.kind in ['number', 'string', 'keyword']:
        f = value_f(t.value)
    elif t.kind == 'word':
        f = compile_word(forth, t)
    else:
        print(f'{n}??')
        raise ValueError()
    return f

def compile_value(contents, v):
    #print("compiling", v, v.__dict__)
    if v.forth_inline and v.forth_contents:
        contents.extend(v.forth_contents)
    else:
        contents.append(v)
    return contents

def compile_block(forth, stream, wrap_block):
    contents = []
    t = stream.get_token()
    while t != RBrace:
        compile_value(contents, compile_next(forth, stream, t))
        t = stream.get_token()

    if len(contents) == 0:
        f = noop
    elif len(contents) == 1:
        f = contents[0]
    elif len(contents) == 2:
        f = inner2_f(contents[0], contents[1])
    elif len(contents) == 3:
        f = inner3_f(contents[0], contents[1], contents[2])
    else:
        f = inner_f(contents)

    if wrap_block:
        f = value_f(f)
    return f

def xxx_compile_block(forth, stream, wrap_block):
    contents = []
    t = stream.get_token()
    while t != RBrace:
        compile_value(contents, compile_next(forth, stream, t))
        t = stream.get_token()

    f = composite_function(contents)
    if not f:
        f = inner_f(contents)

    if wrap_block:
        f = value_f(f)
    return f

def compile_next(forth, stream, current_token=None, wrap_block=False):
    if current_token:
        t = current_token
    else:
        t = stream.get_token()

    if t == None:
        return None

    if t != LBrace:
        return compile_token(forth, t)

    return compile_block(forth, stream, wrap_block)

def eval_stream(forth, stream):
    t = stream.get_token()
    while t:
        compiled = compile_next(forth, stream, t, True)
        #print(f"*** compiled {t} => {compiled}")
        compiled(forth)
        t = stream.get_token()
@@ -5,7 +5,7 @@ from unique import Unique
def w_bounded_list(forth):
    """Create a list from delimited values on the stack.
    [list]
    (marker a b c marker -- [a b c]
    (marker a b c marker -- [a b c])
    """
    marker = forth.stack.pop()
    l = []
19 sallyforth/inliner.py Normal file
@@ -0,0 +1,19 @@
from wrappers import inner_f

def compile_f(contents, name):
    new_contents = []
    for f in contents:
        sub_contents = getattr(f, "contents", None)
        if sub_contents:
            new_contents.extend(sub_contents)
        else:
            new_contents.append(f)
    new_func = inner_f(new_contents)
    new_func.name = name
    return new_func

def compile_word_f(f, name=None):
    contents = getattr(f, 'contents', None)
    if contents and len(contents) > 1:
        return compile_f(contents, name)
    return f
@@ -1,8 +1,8 @@

'io namespace
: open { builtins/open !!1 }
: close { <. $? 'close .> !!0 drop }

: read-file (path -- contents) { open dup <. $? 'read .> !!0 swap close }
: read-lines (path -- contents) { open dup <. $? 'readlines .> !!0 swap close }
: read-file { open dup <. $? 'read .> !!0 swap close }
: read-lines { open dup <. $? 'readlines .> !!0 swap close }

: read-line (prompt -- input-line) { builtins/input !!1 }
: read-line { builtins/input !!1 }
@@ -7,65 +7,136 @@ from namespace import Namespace
#import operator_words
#import data_words
import tokenstream as ts
import compiler
import threaded_compiler as compiler
from wrappers import value_f

class Forth:
    """
    A class to represent a SallyForth execution context.

    An instance of the Forth class is all you need to execute
    SallyForth code.

    Attributes
    ----------
    stack : Stack
        Data stack used by most every word.
    namespaces : String -> Namespace dictionary
        All of the Forth namespaces indexed by ns name.
    ns : Namespace
        The currently active Namespace.
    core : Namespace
        The core namespace. Has all the Forth built-in words.
    user : Namespace
        The more or less empty default namespace.
    """

    def __init__(self):
        """
        Construct a new SallyForth execution environment.
        """
        self.stack = Stack()
        self.stream = None
        self.ns = Namespace('core')
        core = Namespace('core')
        user = Namespace('user', [core])
        user.include_ns(core)
        self.namespaces = {}
        self.namespaces[core.name] = core
        self.namespaces[user.name] = user
        self.ns = core
        self.core = core
        self.set_constant('forth', self)
        self.set_constant('nil', None)
        self.set_constant('true', True)
        self.set_constant('false', False)
        self.set_constant('*source*', '<<input>>')
        self.set_constant('*last-word*', None)
        self.set_constant('*sallyforth-dir*',
                os.path.dirname(os.path.abspath(__file__)))
        sally_dir = os.path.dirname(os.path.abspath(__file__))
        self.set_constant('*sallyforth-dir*', sally_dir)
        self.ns.import_from_module('basic_words')
        self.ns.import_from_module('stack_words')
        self.ns.import_from_module('operator_words')
        self.ns.import_from_module('data_words')
        self.eval_file(f'{sally_dir}/0.sf')
        self.ns = user

    def set_constant(self, name, value):
        return self.ns.set(name, value_f(value))
        """
        Sets name in the current namespace to a function that will push value onto the stack.
        """
        return self.ns.set_constant(name, value)

    def set(self, name, fvalue):
        """
        Sets name in the current namespace to the given function.
        """
        return self.ns.set(name, fvalue)

    def get(self, name, def_value=None):
        """
        Get the value associated with name in the current namespace (and its includes).
        """
        if name in self.ns:
            return self.ns[name]
        return def_value

    def alias(self, new_name, old_name):
        """
        Give an existing value in the current namespace an additional name.
        """
        self.ns.alias(new_name, old_name)

    def set_ns(self, new_ns_name):
        """
        Set the current namespace.
        """
        self.ns = self.namespaces[new_ns_name]

    def compile_next(self, current_token=None):
        """
        Compile the next token, either the one passed in or the next one on the current token stream.
        """
        return compiler.compile_next(self, self.stream, current_token)

    def eval_stream(self, stream):
        """
        Evaluate the contents of the given token stream.
        """
        old_stream = self.stream
        self.stream = stream
        compiler.eval_stream(self, stream)
        self.stream = old_stream

    def eval_file(self, path):
        """
        Evaluate the contents of the given file as Forth source code.
        """
        old_source = self.ns['*source*']
        old_ns = self.ns
        with open(path) as f:
            fns = ts.file_token_stream(f)
            return self.eval_stream(fns)
            result = self.eval_stream(fns)
        self.ns = old_ns
        self.ns['*source*'] = old_source
        return result

    def eval_string(self, s):
        """
        Evaluate a string as Forth source code.
        """
        self.eval_stream(ts.string_token_stream(s))

    def eval_string_r(self, s):
        """
        Evaluate a string and return the top of the resulting stack.
        """
        self.eval_string(s)
        return self.stack.pop()

    def lookup(self, name):
        """
        Return the value of the given name in the current namespace.
        """
        return self.ns[name]

if __name__ == "__main__":
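The Forth class above is the whole embedding API: build an instance, then feed it strings or files. A minimal usage sketch based only on the methods visible in this diff (eval_string, eval_string_r, the data stack); the square definition is illustrative, not from the repository.

from kernel import Forth

forth = Forth()                          # builds the core/user namespaces and evaluates 0.sf
forth.eval_string(": square { dup * }")  # define a word (example definition, not from the repo)
forth.eval_string("7 square")            # run it; the result stays on the data stack
print(forth.stack.pop())                 # expected: 49
print(forth.eval_string_r("3 4 +"))      # eval_string_r pops and returns the top of stack: 7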
@@ -1,6 +1,11 @@
from collections import UserString

class Keyword(UserString):
    """
    A Keyword is more or less a specialized string. The main difference
    between strings and Keywords is that a Keyword, when called as a function
    with a dictionary as an argument, will look itself up in the dictionary.
    """
    def __init__(self, value):
        value = value[1::]
        UserString.__init__(self, value)
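A small sketch of the behavior the docstring describes, assuming the Keyword class above is importable. Only __init__ appears in this hunk, so the dictionary-lookup call is illustrative rather than a guarantee of the implementation.

# Assuming Keyword is imported from its module (the module name is not shown in this diff).
k = Keyword(':name')          # __init__ strips the leading ':' sigil
assert str(k) == 'name'

# Per the docstring, a Keyword called with a dictionary looks itself up.
# __call__ is not part of this hunk, so this line is an assumed example.
env = {'name': 'sally'}
print(k(env))                 # expected: 'sally'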
@@ -1,55 +1,55 @@
\ Index into the x'th item.
/ Index into the x'th item.

: [x] (col key -- value) { 1 ->list '__getitem__ .!! }
: [x] { 1 ->list '__getitem__ .!! }

: first (list -- first-item) { 0 [x] }
: second (list -- second-item) { 1 [x] }
: third (list -- third-item) { 2 [x] }
: fourth (list -- fourth-item) { 3 [x] }
: first { 0 [x] } ->compile
: second { 1 [x] } ->compile
: third { 2 [x] } ->compile
: fourth { 3 [x] } ->compile

: last (list -- last-item) { -1 [x] }
: last { -1 [x] } ->compile

: slice (start stop -- slice-obj) {
: slice {
swap
2 ->list
builtins/slice
!!
}
} ->compile

: take (n list -- first-n-items) {
swap 0 swap slice \ Make the 0..n slice.
[x] \ Do a[0..n].
}
: take {
swap 0 swap slice / Make the 0..n slice.
[x] / Do a[0..n].
} ->compile

: skip (n list -- all-but-first-n-items) {
swap nil slice \ Make the n..None slice.
: skip {
swap nil slice / Make the n..None slice.
[x]
}
} ->compile

: n-of (n x -- list-of-x-repeated-n-times) {
: n-of {
1 ->list *
}
} ->compile

: len { builtins/len !!1 }

: empty? { len zero? }

: rest (list -- all-but-first) { 1 swap skip }
: rrest (list -- rest-of-rest) { rest rest }
: rrrest (list -- all-but-first) { rest rest rest }
: rest { 1 swap skip } ->compile
: rrest { rest rest } ->compile
: rrrest { rest rest rest } ->compile

: ffirst (list -- first-of-first) { first first }
: fffirst (list -- fff-irst) { first first first }
: ffirst { first first } ->compile
: fffirst { first first first } ->compile

: append (x list -- list-with-x-appended) {
: append {
dup tbm
<. $? 'append .> !!1
drop
}
} ->compile

\ Execute a native function in a list.
/ Execute a native function in a list.

: [! [
: !] { ] dup rest swap first !! }
: !] { ] dup rest swap first !! } ->compile
@@ -4,6 +4,7 @@ import ast
import copy
from pprint import pprint
from util import word
#from ast_utils import dump

class FunctionVisitor(ast.NodeVisitor):
    def __init__(self):

@@ -53,12 +54,16 @@ def load_module(name):
    return m

def build_composite_function(function_asts, name='generated_function'):
    print("*** name:", name)
    #dump(function_asts)
    new_body = []
    for other_f in function_asts:
        print("Other f:")
        ast.dump(other_f)
        print("Other f:", type(other_f))
        #dump(other_f.body)
        new_body.extend(other_f.body)
    new_f = copy.deepcopy(function_asts[0])
    new_f.forth_primitive = False
    new_f.forth_immediate = False
    new_f.name = name
    new_f.body = new_body
    return new_f

@@ -72,13 +77,18 @@ def concat_functions(function_asts, name='generated_function'):
    of arguments as the first function on the list.
    Returns None if it's unable to build the new function.
    """
    print(name)
    new_f = build_composite_function(function_asts, name)
    new_m = ast.Module([new_f])
    print("===== ", name, "====")
    #dump(new_m)
    code = compile(new_m, "*generated*", "exec")
    eval(code)
    f = locals()[name]
    f.ast = new_m.body[0]
    f.forth_primitive = True
    f.forth_immediate = False
    f.forth_inline = False
    print("generated function:", f)
    return f

#m = load_module('m1')
@@ -1,50 +1,64 @@
from util import get_attribute
from wrappers import value_f
from recoder import load_module
from module_loader import load_module

class Var:
    """
    A Var is a named container for a value.
    Vars contain the name, the value and a dynamic flag,
    which indicates if the value is static or should be looked
    up anew for each use.

    Since the major use for Vars is to store the functions
    associated with Forth words, Vars can also proxy many
    of the methods of a Forth word.
    """

    def __init__(self, name, value, dynamic=True):
        self.name = name
        self.value = value
        self.dynamic = dynamic

    def __call__(self, forth):
        #print("indirect call on", self.name)
        return self.value(forth)

    @property
    def forth_immediate(self):
        return self.value.forth_immediate
    def immediate(self):
        return self.value.immediate

    @property
    def forth_contents(self):
        #print("indirect contents on", self.name)
        return self.value.forth_contents
    def contents(self):
        return self.value.contents

    @property
    def forth_primitive(self):
        return self.value.forth_primitive

    @property
    def forth_name(self):
        #print("indirect name on", self.name)
        return self.value.forth_name

    @property
    def forth_inline(self):
        return self.value.forth_inline
    def operation_type(self):
        return self.value.operation_type

    def __str__(self):
        return f'[[[[Var({self.name}/{self.dynamic}::{self.value})]]]'
        return f' Var({self.name}/{self.dynamic}::{self.value}) '

    def __repr__(self):
        return str(self)

class Namespace:
    def __init__(self, name):
    """
    A Namespace is basically a string name -> function dictionary.
    Namespaces also know about includes, which are a list of other
    namespaces.

    When you look up a name in a namespace it first looks in its
    own dictionary (contents) and then searches its includes, in
    the order in which they were included.
    """

    def __init__(self, name, includes=[]):
        self.includes = includes.copy()
        self.contents = {}
        self.name = name

    def include_ns(self, other):
        self.includes.append(other)

    def alias(self, new_name, existing_name):
        self.contents[new_name] = self.contents[existing_name]

@@ -55,11 +69,9 @@ class Namespace:
        into this namespace. Removes the prefix.
        """
        m = load_module(module_name)
        #print(m)
        names = dir(m)
        for name in names:
            value = getattr(m, name)
            #print("IMP", name, value, '=>', getattr(value, 'ast', None))
            if get_attribute(value, 'forth_word'):
                forth_name = value.forth_name or name
                var = self.set(forth_name, value, False)

@@ -68,13 +80,11 @@ class Namespace:
        if not alias:
            alias = m.__name__
        alias = alias.replace(".", "/")
        #print(m, alias)

        names = dir(m)
        for name in names:
            localname = f'{alias}/{name}'
            val = getattr(m, name)
            #print("setting", localname)
            var = self.set(localname, value_f(val), False)

    def set(self, key, value, dynamic=True):

@@ -87,12 +97,29 @@ class Namespace:
        var.dynamic = dynamic
        return var

    def set_constant(self, key, value):
        return self.set(key, value_f(value))

    def keys(self):
        return self.contents.keys()

    def __contains__(self, key):
    def all_keys(self):
        result = set(self.contents.keys())
        for h in self.includes:
            result = result.union(set(h.keys()))
        return result

    def private_contains(self, key):
        return self.contents.__contains__(key)

    def __contains__(self, key):
        if key in self.contents:
            return True
        for h in self.includes:
            if key in h:
                return True
        return False

    def __delattr__(self, key):
        return self.contents.__delattr__(key)

@@ -102,9 +129,16 @@ class Namespace:
    def __iter__(self):
        return self.contents.__iter__()

    def private_lookup(self, key):
        return self.contents[key]

    def __getitem__(self, key):
        #print("getitem:", key)
        return self.contents.__getitem__(key)
        if key in self.contents:
            return self.contents[key]
        for h in self.includes:
            if key in h:
                return h[key]
        raise KeyError(key)

    def __str__(self):
        return f'Namespace({self.name})'
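The Namespace docstring above spells out the lookup order: a namespace's own contents first, then each include in the order it was added. A small sketch using only the API visible in this diff (Namespace(name, includes), include_ns, set_constant, __contains__, __getitem__); it assumes set() stores a Var keyed by name, as the visible tail of that method suggests.

from namespace import Namespace

core = Namespace('core')
core.set_constant('answer', 42)     # stores a value_f wrapper that pushes 42

user = Namespace('user', [core])    # 'user' starts out including core
user.set_constant('greeting', 'hi')

print('answer' in user)             # True -- resolved through the core include
print('greeting' in core)           # False -- includes are one-way
print(user['answer'])               # the Var stored in core, found via the include chain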
61 sallyforth/python_compiler.py Normal file
@@ -0,0 +1,61 @@
#from ast import Attribute, Name, Call, dump, Load, fix_missing_locations, Module, parse, Expr, Expression, FunctionDef, arguments, arg, Interactive, Str
from ast import *
from pprint import pprint
import ast_utils

def fdef_ast(name, body):
    return FunctionDef(name=name,
                       args=arguments(args=[arg(arg='forth', annotation=None)],
                                      vararg=None, kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[]),
                       body=body, decorator_list=[], returns=None)

def call_ast(fname):
    r = Expr(
        value=Call(
            func=Name(id=fname, ctx=Load()),
            args=[Name(id='forth', ctx=Load())],
            keywords=[]))
    return r

def print_ast(name):
    name = name or "generated function"
    r = Expr(
        value=Call(
            func=Name(id="print", ctx=Load()),
            args=[Str(s=name)],
            keywords=[]))
    return r

def compile_f(contents, name):
    d = locals().copy()
    exprs = []
    for i, val in enumerate(contents):
        fname = f'f_{i}'
        d[fname] = val
        exprs.append(call_ast(fname))
    f_ast = fdef_ast('generated_function', exprs)
    m = Module(body=[f_ast])
    fix_missing_locations(m)
    code = compile(m, 'source', 'exec')
    exec(code, d)
    f = d['generated_function']
    f.immediate = False
    f.operation_type = 'compiled'
    f.name = name
    f.contents = contents
    return f

def compile_word_f(f, name=None):
    """
    Given a Forth word function return an equivalent function.
    Compile_word_f works by building up a Python AST for a function
    that executes all of the content functions and then compiling
    it.

    The idea is that compiled functions skip all of the overhead
    of running thru the contents array at runtime.
    """
    contents = getattr(f, 'contents', None)
    if contents and len(contents) > 1:
        return compile_f(contents, name)
    return f
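compile_f above binds each content function to a name f_0, f_1, ... in a dict, generates a FunctionDef whose body is just those calls in sequence, and exec's the compiled module, so the resulting function runs the word without the interpreter's dispatch loop. A standalone sketch of the same trick, independent of sallyforth; it parses a template and patches its body to sidestep version differences in hand-built ast nodes, and it assumes at least one callable is supplied.

import ast

def fuse(callables, name='fused'):
    # Build one function that calls each callable in turn -- the same idea as compile_f.
    module = ast.parse(f'def {name}(x):\n    pass')
    fdef = module.body[0]
    fdef.body = [
        ast.Expr(ast.Call(func=ast.Name(id=f'f_{i}', ctx=ast.Load()),
                          args=[ast.Name(id='x', ctx=ast.Load())],
                          keywords=[]))
        for i in range(len(callables))
    ]
    ast.fix_missing_locations(module)
    env = {f'f_{i}': c for i, c in enumerate(callables)}
    exec(compile(module, '<generated>', 'exec'), env)
    return env[name]

stack = []
fused = fuse([lambda s: s.append(1), lambda s: s.append(2)])
fused(stack)
print(stack)   # [1, 2]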
@@ -3,19 +3,23 @@ import sys
import atexit
import readline
import traceback
import argparse
from kernel import Forth
from tokenstream import prompt_token_stream

HistoryFile=".sallyforth"
HistoryFile='.sallyforth'

hist_file = os.path.join(os.path.expanduser("~"), HistoryFile)
hist_file = os.path.join(os.path.expanduser('~'), HistoryFile)

class Completer:
    """
    Supply the list of words available in the current namespace.
    """
    def __init__(self, f):
        self.f = f
    def complete(self, prefix, index):
        self.matching_words = \
            [w for w in self.f.ns.keys() if w.startswith(prefix)]
            [w for w in self.f.ns.all_keys() if w.startswith(prefix)]
        try:
            return self.matching_words[index]
        except IndexError:

@@ -27,40 +31,52 @@ def setup_readline(history_path, f):
        readline.read_history_file(history_path)
    except FileNotFoundError:
        pass
    readline.parse_and_bind("tab: complete")
    readline.set_completer_delims(' \t\n()[{]}\\|;:\'",')
    readline.parse_and_bind('tab: complete')
    readline.set_completer_delims(" \t\n()[{]}\\|;:\"',")
    readline.set_completer(completer.complete)
    def save_history():
        readline.write_history_file(history_path)
    atexit.register(save_history)

def setup_forth():
    source_dir = os.path.dirname(os.path.abspath(__file__))
    startup_file = f'{source_dir}/0.sf'

def setup_forth(run_startups, additional_scripts):
    f = Forth()
    if os.path.exists(startup_file):
        f.eval_file(startup_file)
    for s in additional_scripts:
        f.eval_file(s)
    f.eval_string(": *i-cmd* { 'Icmd: p p }")

    return f

def repl(f):
    print('Sally welcomes you!')
    while True:
        try:
            prompt = f.eval_string_r('*prompt*')
            try:
                line = input(prompt)
                line += "\n"
            except EOFError:
                return
            try:
                if len(line) > 0 and line[0] == '/':
                    print('special handling:', line)
                    f.stack.push(line)
                    f.eval_string('*i-cmd*')
                else:
                    f.eval_string(line)
            except:
                traceback.print_exc()
        except KeyboardInterrupt:
            print()

if __name__ == "__main__":
    f = setup_forth()
def process_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('--nostartup', help='Skip startup scripts', action='store_true')
    parser.add_argument('scripts', nargs='*')
    args = parser.parse_args()
    return (not args.nostartup), args.scripts

if __name__ == '__main__':
    run_startup, scripts = process_args()
    f = setup_forth(run_startup, scripts)
    setup_readline(hist_file, f)
    repl(f)
    print("Bye!")
    print('Bye!')
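process_args above gives the REPL a small command line: --nostartup skips the startup scripts and any positional arguments are evaluated as scripts before the prompt appears. A self-contained re-creation of that parser, showing what the flags produce; the argument list stands in for a hypothetical invocation and is not taken from the repository.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--nostartup', help='Skip startup scripts', action='store_true')
parser.add_argument('scripts', nargs='*')

# Stand-in for: sallyforth --nostartup extra.sf (script name is hypothetical)
args = parser.parse_args(['--nostartup', 'extra.sf'])
print(not args.nostartup, args.scripts)   # -> False ['extra.sf']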
@@ -1,6 +1,9 @@
from collections.abc import Sequence

class Stack(Sequence):
    """
    A Stack is a traditional last in, first out data stack.
    """
    def __init__(self):
        self.contents = []
@@ -1,9 +1,11 @@
: split (delimit str -- tokens) {
/ split (delimit str -- tokens)
: split {
2 ->list
<. builtins/str 'split .>
!!
}

: dot-split (str -- tokens) { "." swap split }
/ dot-split (str -- tokens)
: dot-split { "." swap split }
@@ -1,5 +1,5 @@

\ Test stack and arithmetic.
/ Test stack and arithmetic.

reset stackdepth 0 = "Stack starts empty." assert
reset 111 stackdepth 1 = "One item on stack" assert

@@ -27,7 +27,7 @@ reset 1 2 3 reset stackdepth 0 = "Reset empties the stack." assert
1 ++ 2 = "1++ is two." assert
0 -- -1 = "0-- is -1." assert

\ Booleans
/ Booleans

true "True is true." assert
true not false = "Not true is false" assert

@@ -43,7 +43,7 @@ true false and not "T and F is F." assert
false true and not "F and T is F." assert
false false and not "F and F is F." assert

\ Secondary words
/ Secondary words

: push8 8
push8 8 = "A word can push a number." assert

@@ -54,7 +54,7 @@ push8-again 8 = "A word can call another word." assert
: push64 { push8 push8 * }
push64 64 = "A word can use primitive and sec words." assert

\ Logic
/ Logic

: 1-if-true { if { 1 } }

@@ -66,17 +66,17 @@ reset false 1-if-true stackdepth 0 = "if does not fire on false." assert
reset true 1-or-2 1 = "True part of ifelse fires." assert
reset false 1-or-2 2 = "False part of ifelse fires." assert

\ built in functions
/ built in functions

123 str "123" = "Str turns numbers into strings." assert
"abcd" len 4 = "Len gets length of strings." assert

\ Name lookup and calls
/ Name lookup and calls

"12" <. builtins 'len .> !!1 2 = "Can use bracket dot notation." assert
"12" builtins/len !!1 2 = "Can use sharp lookup notation." assert

\ Lists
/ Lists

0 ->list len 0 = "->list with a length of 0 gives you empty list." assert
44 1 ->list len 1 = "->list with a length of 1 gives you 1 list." assert

@@ -93,9 +93,11 @@ reset false 1-or-2 2 = "False part of ifelse fires." assert
[ 1 ] empty? not "Empty? knows a non-empty list." assert
[ 1 2 ] empty? not "Empty? knows a non-empty list." assert

\ Loop
/ Loop

: test-while ( n -- ) { -999 swap while { dup zero? } { -- } -888 }
'While p

: test-while { -999 swap while { dup zero? } { -- } -888 }

5 test-while 3 ->list [ -999 0 -888 ] "While loop works" assert

@@ -103,6 +105,6 @@ reset false 1-or-2 2 = "False part of ifelse fires." assert

888 zero-trip-while 888 = "While should handle zero trip case." assert

\ Strings
/ Strings

reset "abc.def.h" dot-split [ "abc" "def" "h" ] = "Dot split splits" assert
62 sallyforth/threaded_compiler.py Normal file
@@ -0,0 +1,62 @@
from tokenstream import Token
from wrappers import value_f, inner_f
LBrace = Token('word', '{')
RBrace = Token('word', '}')

def compile_word(forth, w):
    name = w.value
    var = forth.ns[name]
    value = var.value
    if value.immediate:
        result = value(forth)
    elif var.dynamic:
        result = var
    else:
        result = value
    return result

def compile_token(forth, t):
    if t.kind in ['number', 'string', 'keyword']:
        f = value_f(t.value)
    elif t.kind == 'word':
        f = compile_word(forth, t)
    else:
        print(f'{n}??')
        raise ValueError()
    return f

def compile_value(contents, v):
    contents.append(v)
    return contents

def compile_block(forth, stream, wrap_block):
    contents = []
    t = stream.get_token()
    while t != RBrace:
        compile_value(contents, compile_next(forth, stream, t))
        t = stream.get_token()
    f = inner_f(contents)
    if wrap_block:
        f = value_f(f)
    return f

def compile_next(forth, stream, current_token=None, wrap_block=False):
    if current_token:
        t = current_token
    else:
        t = stream.get_token()

    if t == None:
        return None

    if t != LBrace:
        return compile_token(forth, t)

    return compile_block(forth, stream, wrap_block)

def eval_stream(forth, stream):
    t = stream.get_token()
    while t:
        compiled = compile_next(forth, stream, t, True)
        compiled(forth)
        t = stream.get_token()
@@ -10,6 +10,10 @@ def to_number(token):
    return None

class Token:
    """
    A Token consists of a string value, something like "123" or "dup",
    and a kind, also a string, something like "number" or "word".
    """
    def __init__(self, kind, value):
        self.kind = kind
        self.value = value

@@ -50,6 +54,7 @@ def stoken(value):
    return Token('string', value)

class PromptInputStream:
    "A stream of characters from an input prompt."
    def __init__(self, prompt_f):
        self.prompt_f = prompt_f
        self.buffer = []

@@ -67,46 +72,72 @@ class PromptInputStream:
        return ''

class TokenStream:
    """
    A TokenStream reads and returns one token at a time.
    To create a TokenStream instance you supply the constructor
    with a function that returns one character at a time.
    """
    def __init__(self, read_f):
        #print("Tokenstream", read_f)
        self.read_f = read_f
        self.pushed_char = None

    def special(self, ch):
        return ch in ['(', ')', '{', '}']

    def whitespace(self, ch):
        return ch in [' ', '\t', '\n']

    def ender(self, ch):
        return self.whitespace(ch) or self.special(ch)

    def get_token(self):
        t = self.do_get_token()
        #print("GET token:", t)
        return t

    def next_ch(self):
        if self.pushed_char:
            ch = self.pushed_char
            self.pushed_char = None
            return ch
        return self.read_f()

    def unread(self, ch):
        self.pushed_char = ch

    def number_or_word(self, s):
        n = to_number(s)
        if n != None:
            return Token('number', n)
        else:
            return Token('word', s)

    def do_get_token(self):
        state = 'start'
        token = ''
        while True:
            ch = self.read_f()
            #print(f'ch: {ch} typech {type(ch)} state {state}')
            ch = self.next_ch()
            if ch in ['', None]:
                if state in ['sqstring', 'dqstring']:
                    return Token('string', token)
                if state in ['word']:
                    return Token('word', token)
                if state == 'number':
                    return Token('number', token)
                #print("x get returning NONE")
                return self.number_or_word(token)
                return None
            elif state == 'start' and self.special(ch):
                return Token('word', ch)

            elif state == 'start' and ch == ':':
                token = ch
                state = 'keyword'
            elif state == 'start' and ch in "+-0123456789":
                token = ch
                state = 'number'
            elif state == 'start' and ch == '\\':
                state = 'lcomment'
            elif state == 'lcomment' and ch == '\n':
                state = 'start'
            elif state == 'start' and ch == '(':
            elif state == 'start' and ch == '/':
                state = 'icomment'
            elif state == 'icomment' and ch == ')':
            elif state == 'icomment' and ch in ['\n', '/']:
                state = 'start'
            elif state == 'start' and self.whitespace(ch):
                continue

@@ -119,19 +150,17 @@ class TokenStream:
            elif state == 'start':
                state = 'word'
                token += ch
            elif state == 'number' and self.whitespace(ch):
                n = to_number(token)
                if n != None:
                    #print("returning number", n)
                    return Token('number', n)
                else:
                    return Token('word', token)
            elif state == 'word' and self.whitespace(ch):
            elif state == 'number' and self.ender(ch):
                self.unread(ch)
                return self.number_or_word(token)
            elif state == 'word' and self.ender(ch):
                self.unread(ch)
                return Token('word', token)
            elif state == 'sqstring' and self.whitespace(ch):
                self.unread(ch)
                return Token('string', token)
            elif state == 'keyword' and self.whitespace(ch):
                state = 'start'
            elif state == 'keyword' and self.ender(ch):
                self.unread(ch)
                if token in [':']:
                    return Token('word', token)
                return Token('keyword', token)

@@ -139,6 +168,11 @@
                token += ch

class MacroTokenStream:
    """
    MacroTokenStream adds a bit of preprocessing to a regular
    token stream. Specifically it turns tokens of the form #aa.bb.cc
    into a sequence of tokens of the form <. aa 'bb 'cc .>.
    """
    def __init__(self, stream):
        self.stream = stream
        self.tokens = []

@@ -166,7 +200,6 @@ class MacroTokenStream:
        return None

def file_token_stream(f):
    #print("file token stream:", f)
    return MacroTokenStream(TokenStream(lambda : f.read(1)))

def string_token_stream(s):

@@ -179,7 +212,6 @@ def prompt_token_stream(prompt_f):

if __name__ == "__main__":
    x = 0

    def pmt():
        global x
        x += 1
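TokenStream only needs a zero-argument function that returns one character at a time (empty string at end of input), and MacroTokenStream layers the #aa.bb.cc rewriting on top, which is exactly how file_token_stream and string_token_stream build their streams above. A small sketch of driving the tokenizer by hand; the sample source line is illustrative.

from tokenstream import TokenStream, MacroTokenStream

source = iter(': double { 2 * }\n')
stream = MacroTokenStream(TokenStream(lambda: next(source, '')))

token = stream.get_token()
while token:
    print(token.kind, token.value)   # e.g. word ':', word 'double', word '{', number 2, ...
    token = stream.get_token()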
@@ -1,3 +1,6 @@
class Unique:
    """
    Simple utility class that only exists to be different.
    """
    def __str__(self):
        return f'Unique[{id(self)}]'
@@ -12,15 +12,12 @@ class word:
            f.forth_name = self.name
        else:
            f.forth_name = f.__name__
        f.forth_primitive = True
        f.forth_inline = False
        f.forth_immediate = self.immediate
        f.immediate = self.immediate
        return f

def wrap_native_f(f, n, hasreturn):
    if n > 0 and hasreturn:
        def wrapper(forth):
            print("both")
            args = []
            for i in range(n):
                args.append(forth.stack.pop())

@@ -37,7 +34,6 @@ def wrap_native_f(f, n, hasreturn):
            forth.stack.push(f(*args))
    else:
        def wrapper(forth):
            print("nothing")
            f()
    return wrapper

@@ -50,9 +46,8 @@ def determine_nargs(f, n):
def native_word(f, name=None, nargs=None, hasreturn=False):
    nargs = determine_nargs(f, nargs)
    f = wrap_native_f(f, nargs, hasreturn)
    f.forth_type = 'wrapped_primitive'
    f.forth_inline = False
    f.forth_immediate = False
    f.operation_type = 'wrapped_primitive'
    f.immediate = False
    return f
@@ -1,49 +1,67 @@
import ast
import ast_utils

def value_f(value):
    def push_constant(f):
        f.stack.push(value)
    push_constant.forth_inline = False
    push_constant.forth_primitive = True
    push_constant.forth_name = 'pushv'
    push_constant.forth_immediate = False
    push_constant.immediate = False
    push_constant.operation_type = 'pushv'
    push_constant.value = value
    return push_constant

def inner_f(contents):
    def inner(forth):
    if len(contents) == 0:
        f = noop
    elif len(contents) == 1:
        f = contents[0]
    elif len(contents) == 2:
        f = inner_f2(contents)
    elif len(contents) == 3:
        f = inner_f3(contents)
    else:
        f = inner_fn(contents)

    #print("f", f)
    return f


def inner_fn(contents):
    def i_n(forth):
        #print("inner_fn:", contents)
        for fn in contents:
            fn(forth)
    inner.forth_primitive = False
    inner.forth_immediate = False
    inner.forth_contents = contents
    inner.forth_inline = False
    return inner
    #print("i_n", i_n)
    i_n.immediate = False
    i_n.operation_type = 'inner'
    i_n.contents = contents
    return i_n

def inner2_f(f1, f2):
    def inner2(forth):
def inner_f2(contents):
    f1 = contents[0]
    f2 = contents[1]
    def i_2(forth):
        #print('inner2:', f1, f2)
        f1(forth)
        f2(forth)
    inner2.forth_primitive = False
    inner2.forth_contents = [f1, f2]
    inner2.forth_primitive = True
    inner2.forth_immediate = False
    inner2.forth_inline = False
    return inner2
    i_2.immediate = False
    i_2.operation_type = 'inner'
    i_2.contents = contents
    return i_2

def inner3_f(f1, f2, f3):
    def inner3(forth):
def inner_f3(contents):
    f1 = contents[0]
    f2 = contents[1]
    f3 = contents[2]
    def i_3(forth):
        f1(forth)
        f2(forth)
        f3(forth)
    inner3.forth_primitive = False
    inner3.forth_contents = [f1, f2, f3]
    inner3.forth_immediate = False
    inner3.forth_inline = False
    return inner3
    i_3.immediate = False
    i_3.operation_type = 'inner'
    i_3.contents = contents
    return i_3

def noop(value):
    pass
noop.forth_inline = False
noop.forth_primitive = True
noop.forth_immediate = False

noop.immediate = False
noop.operation_type = 'noop'