Add back support for the #foo.bar form. Add support for inlining words. Very WIP work on expanding words to their Python code and expanding that code inside inlined words.

This commit is contained in:
Russ Olsen 2020-05-15 14:03:03 -04:00
parent 5cdae26aeb
commit 3fb272156c
11 changed files with 282 additions and 50 deletions

View file

@ -15,14 +15,18 @@
\ Basic aliases
: -- { 1 - }
: ++ { 1 + }
: =0 { 0 = }
: pos? { 0 > }
: neg? { 0 < }
: zero? { 0 = }
\ ->inline marks the most recently defined word (*last-word*) as
\ inlinable, so the compiler may splice its body into callers.
: ->inline { *last-word* inline }
\ Small arithmetic / predicate / stack aliases, all marked inlinable.
: -- { 1 - } ->inline
: ++ { 1 + } ->inline
: =0 { 0 = } ->inline
: pos? { 0 > } ->inline
: neg? { 0 < } ->inline
: zero? { 0 = } ->inline
: drop1 { drop } ->inline
: drop2 { drop drop } ->inline
: drop3 { drop2 drop } ->inline
\ List making.
unique 'list-marker =!

View file

@ -5,6 +5,13 @@ from unique import Unique
import importlib
from pprint import pprint
@word()
def dynamic(forth):
    """( flag name -- ) Set the `dynamic` attribute of the var bound to name.

    Pops the word name first, then the flag value, and stores the flag
    on the Var object looked up in the current namespace.
    """
    target = forth.stack.pop()
    flag = forth.stack.pop()
    forth.ns[target].dynamic = flag
@word()
def native(forth):
has_return = forth.stack.pop()
@ -62,8 +69,7 @@ def w_import(f):
@word()
def lexicon(f):
    # ( name -- ) Import the word definitions of the named Python module
    # into the current namespace.
    name = f.stack.pop()
    m = importlib.import_module(name)
    f.ns.import_from_module(m)
    # NOTE(review): the call below repeats the import with the raw module
    # name — these look like old/new diff lines rendered together; confirm
    # which of the two import_from_module calls is intended.
    f.ns.import_from_module(name)
@word('source')
def w_source(f):
@ -142,7 +148,9 @@ def colon(forth):
def inline(forth):
    # ( name -- ) Mark the named word as inlinable so the compiler may
    # splice its contents into callers.
    name = forth.stack.pop()
    var = forth.ns[name]
    var.value.forth_inline = True
    value = var.value
    # NOTE(review): the unconditional assignment above duplicates the
    # guarded one below — looks like old/new diff lines rendered together;
    # the guarded version (primitives are never inlined) appears intended.
    if not value.forth_primitive:
        value.forth_inline = True
@word()
def current_stream(forth):
@ -165,6 +173,11 @@ def fresult(forth, f):
def compilenext(forth):
    # Compile the next value from the current input stream and push the
    # resulting function onto the data stack.
    forth.stack.push(forth.compile_next())
@word('word!')
def word_bang(forth):
    """( fn -- ) Pop a word function from the stack and invoke it,
    passing the interpreter itself as the argument."""
    fn = forth.stack.pop()
    fn(forth)
@word('while', True)
def w_while(forth):
cond = forth.compile_next()

View file

@ -1,9 +1,20 @@
from tokenstream import Token
from wrappers import value_f, inner_f, ref_f
from wrappers import value_f, inner_f, inner2_f, inner3_f, ref_f, noop
from recoder import concat_functions
LBrace = Token('word', '{')
RBrace = Token('word', '}')
def composite_function(contents):
    """Try to fuse the functions in *contents* into one generated function.

    Collects the `.ast` attribute of each function and hands the list to
    concat_functions. Returns None (after reporting the culprit) as soon
    as any function has no recorded AST.
    """
    collected = []
    for fn in contents:
        fn_ast = getattr(fn, 'ast', None)
        if fn_ast is None:
            print("No ast for:", fn)
            return None
        collected.append(fn_ast)
    return concat_functions(collected)
def compile_word(forth, w):
name = w.value
var = forth.ns[name]
@ -27,14 +38,51 @@ def compile_token(forth, t):
return f
def compile_value(contents, v):
    # Append compiled value v to the contents list being built for a block.
    #print("compiling", v, v.__dict__)
    # Debug tracing left enabled during the WIP inlining work.
    print("compiling", v, v.__dict__)
    # Inlinable composite words are spliced in piecewise; everything else
    # is appended as a single callable.
    if v.forth_inline and v.forth_contents:
        contents.extend(v.forth_contents)
    else:
        contents.append(v)
    return contents
def compile_next(forth, stream, current_token=None):
def compile_block(forth, stream, wrap_block):
    """Compile tokens up to the closing `}` into a single callable.

    Small blocks (0-3 items) get specialized wrappers (noop / the bare
    function / inner2_f / inner3_f); longer ones fall back to inner_f.
    When wrap_block is true the result is wrapped in value_f so executing
    it pushes the block instead of running it.
    """
    compiled = []
    token = stream.get_token()
    while token != RBrace:
        compile_value(compiled, compile_next(forth, stream, token))
        token = stream.get_token()
    count = len(compiled)
    if count == 0:
        result = noop
    elif count == 1:
        result = compiled[0]
    elif count == 2:
        result = inner2_f(*compiled)
    elif count == 3:
        result = inner3_f(*compiled)
    else:
        result = inner_f(compiled)
    return value_f(result) if wrap_block else result
def xxx_compile_block(forth, stream, wrap_block):
    # Experimental variant of compile_block: tries to fuse the compiled
    # values into one generated Python function via composite_function,
    # falling back to the interpreted inner_f wrapper when fusion fails.
    # Appears unused in this file (note the xxx_ prefix); compile_next
    # calls compile_block instead.
    contents = []
    t = stream.get_token()
    while t != RBrace:
        compile_value(contents, compile_next(forth, stream, t))
        t = stream.get_token()
    f = composite_function(contents)
    if not f:
        # AST fusion failed (some value had no .ast) — interpret instead.
        f = inner_f(contents)
    if wrap_block:
        # Block used as a value: wrap so execution pushes it, not runs it.
        f = value_f(f)
    return f
def compile_next(forth, stream, current_token=None, wrap_block=False):
if current_token:
t = current_token
else:
@ -45,19 +93,13 @@ def compile_next(forth, stream, current_token=None):
if t != LBrace:
return compile_token(forth, t)
contents = []
t = stream.get_token()
while t != RBrace:
compile_value(contents, compile_next(forth, stream, t))
t = stream.get_token()
f = inner_f(contents)
return f
return compile_block(forth, stream, wrap_block)
def eval_stream(forth, stream):
    # Read, compile, and immediately execute values until the stream ends.
    t = stream.get_token()
    while t:
        compiled = compile_next(forth, stream, t)
        # NOTE(review): the line above is immediately shadowed by the call
        # below and, taken literally, consumes stream tokens twice per
        # iteration — these look like old/new diff lines rendered together;
        # keep only the wrap_block=True call once confirmed.
        compiled = compile_next(forth, stream, t, True)
        #print(f"*** compiled {t} => {compiled}")
        compiled(forth)
        t = stream.get_token()

View file

@ -2,10 +2,10 @@ import sys
import os
from stack import Stack
from namespace import Namespace
import basic_words
import stack_words
import operator_words
import data_words
#import basic_words
#import stack_words
#import operator_words
#import data_words
import tokenstream as ts
import compiler
from wrappers import value_f
@ -20,12 +20,13 @@ class Forth:
self.set_constant('true', True)
self.set_constant('false', False)
self.set_constant('*source*', '<<input>>')
self.set_constant('*last-word*', None)
self.set_constant('*sallyforth-dir*',
os.path.dirname(os.path.abspath(__file__)))
self.ns.import_from_module(basic_words)
self.ns.import_from_module(stack_words)
self.ns.import_from_module(operator_words)
self.ns.import_from_module(data_words)
self.ns.import_from_module('basic_words')
self.ns.import_from_module('stack_words')
self.ns.import_from_module('operator_words')
self.ns.import_from_module('data_words')
def set_constant(self, name, value):
    # Bind `name` in the namespace to a function that pushes `value`
    # onto the stack (constants are just value-pushing words).
    return self.ns.set(name, value_f(value))

View file

@ -1,5 +1,6 @@
from util import get_attribute
from wrappers import value_f
from recoder import load_module
class Var:
def __init__(self, name, value, dynamic=True):
@ -21,22 +22,25 @@ class Namespace:
def alias(self, new_name, existing_name):
    # Make new_name refer to the very same Var object as existing_name,
    # so later redefinition through either name affects both.
    self.contents[new_name] = self.contents[existing_name]
def import_from_module(self, m):
def import_from_module(self, module_name):
"""
Import all of the word defining functions in
module m whose function names start with prefix
into this namespace. Removes the prefix.
"""
m = load_module(module_name)
print(m)
names = dir(m)
for name in names:
value = getattr(m, name)
print("IMP", name, value, '=>', getattr(value, 'ast', None))
if get_attribute(value, 'forth_word'):
forth_name = value.forth_name or name
var = self.set(forth_name, value, False)
var.immediate = value.forth_immediate
#var.immediate = value.forth_immediate
#print(var)
if var.immediate:
print(name, 'immediate')
#if var.immediate:
# print(name, 'immediate')
def import_native_module(self, m, alias=None):
if not alias:

93
sallyforth/recoder.py Normal file
View file

@ -0,0 +1,93 @@
import imp
import importlib
import ast
import copy
from pprint import pprint
from util import word
class FunctionVisitor(ast.NodeVisitor):
    """AST visitor that indexes every visited FunctionDef node by name."""

    def __init__(self):
        # name -> ast.FunctionDef node
        self.index = {}

    def visit_FunctionDef(self, node):
        # Does not call generic_visit, so defs nested inside functions
        # are deliberately not indexed.
        self.index[node.name] = node
def find_module(name):
    """Return the filesystem path of the source file defining module *name*.

    Uses importlib.util.find_spec instead of the deprecated
    `imp.find_module` (removed in Python 3.12). This also works for
    packages, where imp returned no open file object and the old
    `[0].name` access would fail.

    Raises ImportError if the module cannot be found.
    """
    import importlib.util  # local: guarantees the util submodule is loaded
    spec = importlib.util.find_spec(name)
    if spec is None:
        raise ImportError(f"No module named {name!r}")
    return spec.origin
def parse_module(path):
    """Read the Python source file at *path* and return its AST,
    with location info filled in."""
    with open(path) as src:
        source_text = src.read()
    module_tree = ast.parse(source_text)
    ast.fix_missing_locations(module_tree)
    return module_tree
def index_functions(tree):
    """Walk *tree* and return a dict mapping function name -> FunctionDef."""
    visitor = FunctionVisitor()
    visitor.visit(tree)
    return visitor.index
def function_index(module_name):
    """Locate, parse, and index the function defs of module *module_name*."""
    return index_functions(parse_module(find_module(module_name)))
def add_ast(m, function_index):
    """Attach each indexed AST node as the `.ast` attribute of the
    same-named function in module *m*; return *m*."""
    for attr_name in dir(m):
        if attr_name in function_index:
            getattr(m, attr_name).ast = function_index[attr_name]
    return m
def load_module(name):
    """
    Import and return the module called *name*, additionally attaching
    the Python AST of each of its functions (as `.ast`) along the way.
    """
    index = function_index(name)
    module = importlib.import_module(name)
    return add_ast(module, index)
def build_composite_function(function_asts, name='generated_function'):
    """Build a FunctionDef AST whose body is the concatenation of the
    bodies of *function_asts*.

    The signature (and any decorators) are copied from the first
    function in the list, so the result takes the same arguments as
    that function.
    """
    new_body = []
    for other_f in function_asts:
        print("Other f:")
        # Fix: ast.dump returns a string; the original discarded it.
        print(ast.dump(other_f))
        new_body.extend(other_f.body)
    new_f = copy.deepcopy(function_asts[0])
    new_f.name = name
    new_f.body = new_body
    return new_f


def concat_functions(function_asts, name='generated_function'):
    """
    Given a list of function AST nodes, produce a new function object
    whose body is the concatenation of the existing function bodies.
    The new function takes the same arguments as the first function in
    the list. Compilation errors propagate to the caller.
    """
    print(name)
    new_f = build_composite_function(function_asts, name)
    # type_ignores is a required Module field on Python 3.8+.
    new_m = ast.Module(body=[new_f], type_ignores=[])
    ast.fix_missing_locations(new_m)
    code = compile(new_m, "*generated*", "exec")
    # Execute into an explicit namespace: eval(code) + locals()[name]
    # inside a function is unreliable (locals() is a snapshot).
    namespace = dict(globals())
    exec(code, namespace)
    f = namespace[name]
    f.ast = new_m.body[0]
    return f
#m = load_module('m1')
#
#a = m.do1.ast
#b = m.do2.ast
#c = m.do3.ast
#
#f = concat_functions([a,b,c])
#print(f(9))
#
#

View file

@ -1,33 +1,37 @@
class Stack:
from collections.abc import Sequence
class Stack(Sequence):
def __init__(self):
self.stack = []
self.contents = []
def push(self, x):
self.stack.append(x)
self.contents.append(x)
return x
def pop(self):
return self.stack.pop()
return self.contents.pop()
def __iter__(self):
for x in self.stack:
yield x
def __getitem__(self, i):
    # Sequence protocol: index straight into the backing list.
    return self.contents[i]
def __len__(self):
    # Sequence protocol: number of items on the stack.
    # Fix: was `len(self.contents[i])` — `i` is undefined here and would
    # raise NameError on any len()/Sequence use.
    return len(self.contents)
def depth(self):
return len(self.stack)
return len(self.contents)
def empty(self):
return len(self.stack) == 0
return len(self.contents) == 0
def peek(self):
return self.stack[-1]
return self.contents[-1]
def reset(self):
self.stack = []
self.contents = []
def __str__(self):
result = ''
for x in self.stack:
for x in self.contents:
result += str(x)
result += ' '
return result

View file

@ -24,6 +24,18 @@ def swap(f):
f.stack.push(a)
f.stack.push(b)
@word()
def t(f):
    # Push a copy of the (t)op stack item; delegates to dup.
    dup(f)
@word()
def m(f):
    # Push a copy of the second-from-top item (the 'middle' of t/m/b).
    f.stack.push(f.stack[-2])
@word()
def b(f):
    # Push a copy of the third-from-top item (the 'bottom' of t/m/b).
    f.stack.push(f.stack[-3])
@word()
def tmb(f): # A noop
    # Top-middle-bottom is already the stack's order — nothing to do.
    pass

View file

@ -43,6 +43,11 @@ class Token:
def __str__(self):
return f'Token {self.kind} => {self.value}'
def wtoken(value):
    """Make a 'word' Token carrying *value*."""
    return Token('word', value)
def stoken(value):
    """Make a 'string' Token carrying *value*."""
    return Token('string', value)
class PromptInputStream:
def __init__(self, prompt_f):
@ -70,11 +75,11 @@ class TokenStream:
return ch in [' ', '\t', '\n']
def get_token(self):
t = self.x_get_token()
t = self.do_get_token()
#print("GET token:", t)
return t
def x_get_token(self):
def do_get_token(self):
state = 'start'
token = ''
while True:
@ -133,9 +138,36 @@ class TokenStream:
elif state in ['word', 'dqstring', 'sqstring', 'number', 'keyword']:
token += ch
class MacroTokenStream:
    """
    Token-stream wrapper that expands '#name.attr...' word tokens into
    the token sequence  <. name "attr" ... .>  before handing them out.
    All other tokens pass through unchanged.
    """
    def __init__(self, stream):
        self.stream = stream   # underlying raw TokenStream
        self.tokens = []       # pending tokens, kept in reverse order

    def get_more_tokens(self):
        # Pull one raw token; expand it if it is a '#'-prefixed word.
        raw_token = self.stream.get_token()
        if raw_token \
           and raw_token.isword() \
           and raw_token.value[0] == '#':
            # '#foo.bar.baz' -> <. foo "bar" "baz" .>
            parts = raw_token.value[1::].split('.')
            result = [wtoken('<.'), wtoken(parts[0])]
            for p in parts[1::]:
                result.append(stoken(p))
            result.append(wtoken('.>'))
            # Reversed so pop() (which takes from the end) yields the
            # expansion in source order.
            result.reverse()
            self.tokens.extend(result)
        else:
            # Pass through unchanged (including None at end of stream).
            self.tokens.append(raw_token)

    def get_token(self):
        # Refill the buffer only when empty; returns None once the
        # underlying stream is exhausted.
        if len(self.tokens) == 0:
            self.get_more_tokens()
        if len(self.tokens):
            return self.tokens.pop()
        return None
def file_token_stream(f):
    # Build a character-at-a-time token stream over open file f.
    #print("file token stream:", f)
    return TokenStream(lambda : f.read(1))
    # NOTE(review): unreachable — the two returns look like old/new diff
    # lines rendered together; the MacroTokenStream-wrapped version below
    # appears to be the intended one. Confirm and drop the other.
    return MacroTokenStream(TokenStream(lambda : f.read(1)))
def string_token_stream(s):
sio = io.StringIO(s)
@ -143,7 +175,7 @@ def string_token_stream(s):
def prompt_token_stream(prompt_f):
    # Build an interactive token stream that reads via prompt_f.
    pis = PromptInputStream(prompt_f)
    return TokenStream(pis.getc)
    # NOTE(review): unreachable — old/new diff lines rendered together;
    # the MacroTokenStream-wrapped version below appears intended.
    return MacroTokenStream(TokenStream(pis.getc))
if __name__ == "__main__":
x = 0

View file

@ -12,7 +12,7 @@ class word:
f.forth_name = self.name
else:
f.forth_name = f.__name__
f.forth_type = 'primitive'
f.forth_primitive = True
f.forth_inline = False
f.forth_immediate = self.immediate
return f

View file

@ -48,5 +48,32 @@ def inner_f(contents):
inner.forth_inline = False
return inner
def inner2_f(f1, f2):
    """Compose two word functions into one that runs them in order.

    The returned function carries the standard forth_* flags plus
    forth_contents so the compiler can splice it when inlining.
    """
    def inner2(forth):
        f1(forth)
        f2(forth)
    # Fix: the original assigned forth_primitive = False and then = True,
    # leaving True. inner3_f sets False, and the `inline` word only marks
    # non-primitives inlinable, so composites must be non-primitive.
    inner2.forth_primitive = False
    inner2.forth_contents = [f1, f2]
    inner2.forth_immediate = False
    inner2.forth_inline = False
    return inner2
def inner3_f(f1, f2, f3):
    """Compose three word functions into one that runs them in sequence,
    tagged with the standard forth_* flags and its contents list."""
    def inner3(forth):
        for fn in (f1, f2, f3):
            fn(forth)
    inner3.forth_primitive = False
    inner3.forth_contents = [f1, f2, f3]
    inner3.forth_immediate = False
    inner3.forth_inline = False
    return inner3
def noop(value):
    """Ignore *value* and do nothing — the compiled form of an empty block."""
    return None

noop.forth_primitive = True
noop.forth_immediate = False
noop.forth_inline = False