Mirror of https://github.com/russolsen/sallyforth (synced 2024-12-26 21:58:32 +01:00)
Add support for #foo.bar form back in. Add support for inlining words. Very WIP work on expanding words to their Python code and expanding that code in inlined words.
This commit is contained in: parent 5cdae26aeb, commit 3fb272156c
11 changed files with 282 additions and 50 deletions
@@ -15,14 +15,18 @@
 
 \ Basic aliases
 
-: -- { 1 - }
-: ++ { 1 + }
-: =0 { 0 = }
-: pos? { 0 > }
-: neg? { 0 < }
-: zero? { 0 = }
 : ->inline { *last-word* inline }
+
+: -- { 1 - } ->inline
+: ++ { 1 + } ->inline
+: =0 { 0 = } ->inline
+: pos? { 0 > } ->inline
+: neg? { 0 < } ->inline
+: zero? { 0 = } ->inline
+: drop1 { drop } ->inline
+: drop2 { drop drop } ->inline
+: drop3 { drop2 drop } ->inline
 
 \ List making.
 
 unique 'list-marker =!

@@ -5,6 +5,13 @@ from unique import Unique
 import importlib
 from pprint import pprint
 
+@word()
+def dynamic(forth):
+    name = forth.stack.pop()
+    isdyn = forth.stack.pop()
+    var = forth.ns[name]
+    var.dynamic = isdyn
+
 @word()
 def native(forth):
     has_return = forth.stack.pop()

@@ -62,8 +69,7 @@ def w_import(f):
 @word()
 def lexicon(f):
     name = f.stack.pop()
-    m = importlib.import_module(name)
-    f.ns.import_from_module(m)
+    f.ns.import_from_module(name)
 
 @word('source')
 def w_source(f):

@@ -142,7 +148,9 @@ def colon(forth):
 def inline(forth):
     name = forth.stack.pop()
     var = forth.ns[name]
-    var.value.forth_inline = True
+    value = var.value
+    if not value.forth_primitive:
+        value.forth_inline = True
 
 @word()
 def current_stream(forth):

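The new guard means only colon-defined (non-primitive) words can be marked inlinable, which fits the startup change above: each `: xx { ... } ->inline` definition presumably pushes its own name via the new *last-word* constant and then calls `inline`. A minimal sketch of that flagging, using stand-in stack and namespace objects rather than the real sallyforth classes:

    # Sketch only: names ending in Stub/_stub are not part of sallyforth.
    class VarStub:
        def __init__(self, value):
            self.value = value

    def make_word_stub(contents):
        def runner(forth):
            for f in contents:
                f(forth)
        runner.forth_primitive = False      # colon-defined, not a Python primitive
        runner.forth_inline = False         # off until `inline` is applied
        runner.forth_contents = contents    # what the compiler can splice
        return runner

    class ForthStub:
        def __init__(self):
            self.stack = []
            self.ns = {}

    def inline_stub(forth):
        # Mirrors the updated `inline` word: only non-primitives get the flag.
        name = forth.stack.pop()
        value = forth.ns[name].value
        if not value.forth_primitive:
            value.forth_inline = True

    forth = ForthStub()
    forth.ns['drop2'] = VarStub(make_word_stub([lambda f: None, lambda f: None]))
    forth.stack.append('drop2')   # what *last-word* would presumably push
    inline_stub(forth)
    print(forth.ns['drop2'].value.forth_inline)   # True

Primitives are skipped because they carry no forth_contents to splice; inlining them is presumably the job of the still-WIP AST path in recoder.py.
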
@@ -165,6 +173,11 @@ def fresult(forth, f):
 def compilenext(forth):
     forth.stack.push(forth.compile_next())
 
+@word('word!')
+def word_bang(forth):
+    f = forth.stack.pop()
+    f(forth)
+
 @word('while', True)
 def w_while(forth):
     cond = forth.compile_next()

@@ -1,9 +1,20 @@
 from tokenstream import Token
-from wrappers import value_f, inner_f, ref_f
+from wrappers import value_f, inner_f, inner2_f, inner3_f, ref_f, noop
+from recoder import concat_functions
 
 LBrace = Token('word', '{')
 RBrace = Token('word', '}')
 
+def composite_function(contents):
+    asts = []
+    for f in contents:
+        ast = getattr(f, 'ast', None)
+        if not ast:
+            print("No ast for:", f)
+            return None
+        asts.append(ast)
+    return concat_functions(asts)
+
 def compile_word(forth, w):
     name = w.value
     var = forth.ns[name]

@@ -27,14 +38,51 @@ def compile_token(forth, t):
     return f
 
 def compile_value(contents, v):
-    #print("compiling", v, v.__dict__)
+    print("compiling", v, v.__dict__)
     if v.forth_inline and v.forth_contents:
         contents.extend(v.forth_contents)
     else:
         contents.append(v)
     return contents
 
-def compile_next(forth, stream, current_token=None):
+def compile_block(forth, stream, wrap_block):
+    contents = []
+    t = stream.get_token()
+    while t != RBrace:
+        compile_value(contents, compile_next(forth, stream, t))
+        t = stream.get_token()
+
+    if len(contents) == 0:
+        f = noop
+    elif len(contents) == 1:
+        f = contents[0]
+    elif len(contents) == 2:
+        f = inner2_f(contents[0], contents[1])
+    elif len(contents) == 3:
+        f = inner3_f(contents[0], contents[1], contents[2])
+    else:
+        f = inner_f(contents)
+
+    if wrap_block:
+        f = value_f(f)
+    return f
+
+def xxx_compile_block(forth, stream, wrap_block):
+    contents = []
+    t = stream.get_token()
+    while t != RBrace:
+        compile_value(contents, compile_next(forth, stream, t))
+        t = stream.get_token()
+
+    f = composite_function(contents)
+    if not f:
+        f = inner_f(contents)
+
+    if wrap_block:
+        f = value_f(f)
+    return f
+
+def compile_next(forth, stream, current_token=None, wrap_block=False):
     if current_token:
         t = current_token
     else:

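compile_block now specializes on block length: an empty block compiles to noop, a single word is returned as-is, two- and three-word blocks get the new dedicated inner2_f/inner3_f closures, and only longer blocks fall back to the generic inner_f loop; value_f wraps the result when the caller wants a pushable block. A standalone sketch of that dispatch (the helpers below imitate, but are not, the ones in wrappers.py):

    # Stand-in helpers mimicking wrappers.noop / inner2_f / inner3_f / inner_f.
    def noop(forth):
        pass

    def inner2(f1, f2):
        def run(forth):
            f1(forth)
            f2(forth)
        return run

    def inner3(f1, f2, f3):
        def run(forth):
            f1(forth)
            f2(forth)
            f3(forth)
        return run

    def inner_n(fs):
        def run(forth):
            for f in fs:
                f(forth)
        return run

    def specialize(contents):
        # Same shape as the new compile_block dispatch.
        if len(contents) == 0:
            return noop
        if len(contents) == 1:
            return contents[0]
        if len(contents) == 2:
            return inner2(*contents)
        if len(contents) == 3:
            return inner3(*contents)
        return inner_n(contents)

    calls = []
    block = specialize([lambda f: calls.append(1), lambda f: calls.append(2)])
    block(None)
    print(calls)   # [1, 2]: a two-word block runs without the generic loop

The xxx_compile_block variant keeps the AST route (composite_function and concat_functions) around as the experimental alternative mentioned in the commit message.
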
@@ -46,18 +94,12 @@ def compile_next(forth, stream, current_token=None):
     if t != LBrace:
         return compile_token(forth, t)
 
-    contents = []
-    t = stream.get_token()
-    while t != RBrace:
-        compile_value(contents, compile_next(forth, stream, t))
-        t = stream.get_token()
-    f = inner_f(contents)
-    return f
+    return compile_block(forth, stream, wrap_block)
 
 def eval_stream(forth, stream):
     t = stream.get_token()
     while t:
-        compiled = compile_next(forth, stream, t)
+        compiled = compile_next(forth, stream, t, True)
         #print(f"*** compiled {t} => {compiled}")
         compiled(forth)
         t = stream.get_token()

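The other half of inlining lives in compile_value, unchanged here apart from the debug print: when a compiled word carries forth_inline and forth_contents, its contents are spliced into the enclosing block instead of being appended as one nested call. A small self-contained sketch of that splice:

    # Sketch of the splice in compile_value: inlined words contribute their
    # contents directly; everything else is appended as one callable.
    def compile_value_sketch(contents, v):
        if getattr(v, 'forth_inline', False) and getattr(v, 'forth_contents', None):
            contents.extend(v.forth_contents)
        else:
            contents.append(v)
        return contents

    def plus1(forth): pass
    def plus2(forth): pass

    def double(forth): pass            # pretend colon definition { plus1 plus1 }
    double.forth_inline = True
    double.forth_contents = [plus1, plus1]

    block = []
    compile_value_sketch(block, plus2)
    compile_value_sketch(block, double)
    print([f.__name__ for f in block])   # ['plus2', 'plus1', 'plus1']
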
@@ -2,10 +2,10 @@ import sys
 import os
 from stack import Stack
 from namespace import Namespace
-import basic_words
-import stack_words
-import operator_words
-import data_words
+#import basic_words
+#import stack_words
+#import operator_words
+#import data_words
 import tokenstream as ts
 import compiler
 from wrappers import value_f

@@ -20,12 +20,13 @@ class Forth:
         self.set_constant('true', True)
         self.set_constant('false', False)
         self.set_constant('*source*', '<<input>>')
+        self.set_constant('*last-word*', None)
         self.set_constant('*sallyforth-dir*',
                           os.path.dirname(os.path.abspath(__file__)))
-        self.ns.import_from_module(basic_words)
-        self.ns.import_from_module(stack_words)
-        self.ns.import_from_module(operator_words)
-        self.ns.import_from_module(data_words)
+        self.ns.import_from_module('basic_words')
+        self.ns.import_from_module('stack_words')
+        self.ns.import_from_module('operator_words')
+        self.ns.import_from_module('data_words')
 
     def set_constant(self, name, value):
         return self.ns.set(name, value_f(value))

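Switching these imports to module names (strings) rather than module objects looks like groundwork for recoder.load_module below: given just a name, the loader can both import the module and locate its source file so the function ASTs can be parsed and attached. A hedged illustration of the same idea with importlib.util from the standard library (not the imp-based helper recoder.py actually uses):

    import importlib
    import importlib.util
    import ast

    # Given only a module name we can import it *and* find its source to parse.
    name = 'json'                                 # any pure-Python stdlib module
    module = importlib.import_module(name)
    spec = importlib.util.find_spec(name)
    tree = ast.parse(open(spec.origin, encoding='utf-8').read())
    print(module.__name__, spec.origin, len(tree.body))
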
@@ -1,5 +1,6 @@
 from util import get_attribute
 from wrappers import value_f
+from recoder import load_module
 
 class Var:
     def __init__(self, name, value, dynamic=True):

@@ -21,22 +22,25 @@ class Namespace:
     def alias(self, new_name, existing_name):
         self.contents[new_name] = self.contents[existing_name]
 
-    def import_from_module(self, m):
+    def import_from_module(self, module_name):
         """
         Import all of the word defining functions in
         module m whose function names start with prefix
         into this namespace. Removes the prefix.
         """
+        m = load_module(module_name)
+        print(m)
         names = dir(m)
         for name in names:
             value = getattr(m, name)
+            print("IMP", name, value, '=>', getattr(value, 'ast', None))
             if get_attribute(value, 'forth_word'):
                 forth_name = value.forth_name or name
                 var = self.set(forth_name, value, False)
-                var.immediate = value.forth_immediate
+                #var.immediate = value.forth_immediate
                 #print(var)
-                if var.immediate:
-                    print(name, 'immediate')
+                #if var.immediate:
+                #    print(name, 'immediate')
 
     def import_native_module(self, m, alias=None):
         if not alias:

sallyforth/recoder.py (new file, 93 lines)
@@ -0,0 +1,93 @@
+import imp
+import importlib
+import ast
+import copy
+from pprint import pprint
+from util import word
+
+class FunctionVisitor(ast.NodeVisitor):
+    def __init__(self):
+        self.index = {}
+    def visit_FunctionDef(self, node):
+        self.index[node.name] = node
+
+def find_module(name):
+    minfo = imp.find_module(name)[0]
+    return minfo.name
+
+def parse_module(path):
+    with open(path) as f:
+        text = f.read()
+    tree = ast.parse(text)
+    ast.fix_missing_locations(tree)
+    return tree
+
+def index_functions(tree):
+    fv = FunctionVisitor()
+    fv.visit(tree)
+    return fv.index
+
+def function_index(module_name):
+    path = find_module(module_name)
+    tree = parse_module(path)
+    return index_functions(tree)
+
+def add_ast(m, function_index):
+    names = dir(m)
+    for name in names:
+        if name in function_index:
+            f = getattr(m, name)
+            f.ast = function_index[name]
+    return m
+
+def load_module(name):
+    """
+    Loads and returns the module with name.
+    The difference is that this function also adds
+    the Python ast to each function in the module
+    along the way.
+    """
+    findex = function_index(name)
+    m = importlib.import_module(name)
+    add_ast(m, findex)
+    return m
+
+def build_composite_function(function_asts, name='generated_function'):
+    new_body = []
+    for other_f in function_asts:
+        print("Other f:")
+        ast.dump(other_f)
+        new_body.extend(other_f.body)
+    new_f = copy.deepcopy(function_asts[0])
+    new_f.name = name
+    new_f.body = new_body
+    return new_f
+
+def concat_functions(function_asts, name='generated_function'):
+    """
+    Given an array of function AST objects,
+    attempt to produce a new function whose
+    body is the concatenation of the existing functions.
+    Note that the new function will take the same number
+    of arguments as the first function on the list.
+    Returns None if it's unable to build the new function.
+    """
+    print(name)
+    new_f = build_composite_function(function_asts, name)
+    new_m = ast.Module([new_f])
+    code = compile(new_m, "*generated*", "exec")
+    eval(code)
+    f = locals()[name]
+    f.ast = new_m.body[0]
+    return f
+
+#m = load_module('m1')
+#
+#a = m.do1.ast
+#b = m.do2.ast
+#c = m.do3.ast
+#
+#f = concat_functions([a,b,c])
+#print(f(9))
+#
+#

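recoder.py is the WIP heart of "expanding words to their Python code": it parses a module's source, attaches each function's AST to the live function object, and concat_functions then builds one new function whose body is the concatenation of several bodies. Below is a self-contained sketch of that AST-splicing technique; it is an illustration of the idea rather than the repo code, and it uses ast.Module(body=..., type_ignores=[]) plus exec into an explicit namespace, which current Python requires:

    import ast
    import copy

    SRC = """
    def add_one(x):
        x = x + 1

    def double(x):
        x = x * 2
    """

    tree = ast.parse(SRC.replace('\n    ', '\n'))   # dedent the sample source
    funcs = {node.name: node for node in tree.body
             if isinstance(node, ast.FunctionDef)}

    def concat(asts, name='generated'):
        # New function: first function's signature, all bodies back to back,
        # plus a trailing return so the result is observable.
        new_f = copy.deepcopy(asts[0])
        new_f.name = name
        new_f.body = [stmt for a in asts for stmt in copy.deepcopy(a.body)]
        new_f.body.append(ast.Return(value=ast.Name(id='x', ctx=ast.Load())))
        module = ast.Module(body=[new_f], type_ignores=[])
        ast.fix_missing_locations(module)
        env = {}
        exec(compile(module, '<generated>', 'exec'), env)
        return env[name]

    f = concat([funcs['add_one'], funcs['double']])
    print(f(3))   # 8: (3 + 1) * 2, the two bodies run as one function

The commented-out m1 experiment at the bottom of the new file suggests the intended flow: load a module with ASTs attached, pick a few word functions, and concatenate them into one.
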
@@ -1,33 +1,37 @@
-class Stack:
+from collections.abc import Sequence
+
+class Stack(Sequence):
     def __init__(self):
-        self.stack = []
+        self.contents = []
 
     def push(self, x):
-        self.stack.append(x)
+        self.contents.append(x)
         return x
 
     def pop(self):
-        return self.stack.pop()
+        return self.contents.pop()
 
-    def __iter__(self):
-        for x in self.stack:
-            yield x
+    def __getitem__(self, i):
+        return self.contents[i]
+
+    def __len__(self):
+        return len(self.contents[i])
 
     def depth(self):
-        return len(self.stack)
+        return len(self.contents)
 
     def empty(self):
-        return len(self.stack) == 0
+        return len(self.contents) == 0
 
     def peek(self):
-        return self.stack[-1]
+        return self.contents[-1]
 
     def reset(self):
-        self.stack = []
+        self.contents = []
 
     def __str__(self):
         result = ''
-        for x in self.stack:
+        for x in self.contents:
             result += str(x)
             result += ' '
         return result

@@ -24,6 +24,18 @@ def swap(f):
     f.stack.push(a)
     f.stack.push(b)
 
+@word()
+def t(f):
+    dup(f)
+
+@word()
+def m(f):
+    f.stack.push(f.stack[-2])
+
+@word()
+def b(f):
+    f.stack.push(f.stack[-3])
+
 @word()
 def tmb(f): # A noop
     pass

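Making Stack a collections.abc.Sequence backed by a contents list is what lets words index below the top of the stack, which the new m and b words rely on (f.stack[-2] and f.stack[-3]); t, m, and b presumably push copies of the top, middle, and bottom cells of a three-cell picture, with tmb staying a no-op placeholder. A stand-in sketch of the same shape, not importing sallyforth:

    from collections.abc import Sequence

    # Stand-in for sallyforth's Stack: Sequence only needs __getitem__ and
    # __len__, and then indexing, iteration, and membership come for free.
    class MiniStack(Sequence):
        def __init__(self):
            self.contents = []
        def push(self, x):
            self.contents.append(x)
            return x
        def pop(self):
            return self.contents.pop()
        def __getitem__(self, i):
            return self.contents[i]
        def __len__(self):
            return len(self.contents)

    s = MiniStack()
    for v in ('bottom', 'middle', 'top'):
        s.push(v)

    print(s[-1], s[-2], s[-3])      # the cells t / m / b push copies of
    print(list(s), 'middle' in s)   # Sequence supplies iteration and `in`
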
@@ -43,6 +43,11 @@ class Token:
     def __str__(self):
         return f'Token {self.kind} => {self.value}'
 
+def wtoken(value):
+    return Token('word', value)
+
+def stoken(value):
+    return Token('string', value)
 
 class PromptInputStream:
     def __init__(self, prompt_f):

@@ -70,11 +75,11 @@ class TokenStream:
         return ch in [' ', '\t', '\n']
 
     def get_token(self):
-        t = self.x_get_token()
+        t = self.do_get_token()
         #print("GET token:", t)
         return t
 
-    def x_get_token(self):
+    def do_get_token(self):
         state = 'start'
         token = ''
         while True:

@@ -133,9 +138,36 @@ class TokenStream:
             elif state in ['word', 'dqstring', 'sqstring', 'number', 'keyword']:
                 token += ch
 
+class MacroTokenStream:
+    def __init__(self, stream):
+        self.stream = stream
+        self.tokens = []
+
+    def get_more_tokens(self):
+        raw_token = self.stream.get_token()
+        if raw_token \
+           and raw_token.isword() \
+           and raw_token.value[0] == '#':
+            parts = raw_token.value[1::].split('.')
+            result = [wtoken('<.'), wtoken(parts[0])]
+            for p in parts[1::]:
+                result.append(stoken(p))
+            result.append(wtoken('.>'))
+            result.reverse()
+            self.tokens.extend(result)
+        else:
+            self.tokens.append(raw_token)
+
+    def get_token(self):
+        if len(self.tokens) == 0:
+            self.get_more_tokens()
+        if len(self.tokens):
+            return self.tokens.pop()
+        return None
+
 def file_token_stream(f):
     #print("file token stream:", f)
-    return TokenStream(lambda : f.read(1))
+    return MacroTokenStream(TokenStream(lambda : f.read(1)))
 
 def string_token_stream(s):
     sio = io.StringIO(s)

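MacroTokenStream is what brings the #foo.bar form back: when a word token starts with '#', it is rewritten into a small token sequence, the word '<.', the leading name as a word, each remaining dotted part as a string token, and a closing '.>' (presumably the attribute-access words), and the reversed list plus pop-from-the-end queue hands them out in exactly that order. A sketch of the expansion with stand-in token objects, since only kind, value, and isword() matter here:

    # Stand-in token; mirrors just the parts of tokenstream.Token the macro uses.
    class Tok:
        def __init__(self, kind, value):
            self.kind, self.value = kind, value
        def isword(self):
            return self.kind == 'word'
        def __repr__(self):
            return f'{self.kind}:{self.value}'

    def expand(token):
        # Same shape as MacroTokenStream.get_more_tokens for '#...' words,
        # shown in delivery order (the real code reverses and pops instead).
        parts = token.value[1:].split('.')
        out = [Tok('word', '<.'), Tok('word', parts[0])]
        out += [Tok('string', p) for p in parts[1:]]
        out.append(Tok('word', '.>'))
        return out

    print(expand(Tok('word', '#foo.bar.baz')))
    # [word:<., word:foo, string:bar, string:baz, word:.>]
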
@@ -143,7 +175,7 @@ def string_token_stream(s):
 
 def prompt_token_stream(prompt_f):
     pis = PromptInputStream(prompt_f)
-    return TokenStream(pis.getc)
+    return MacroTokenStream(TokenStream(pis.getc))
 
 if __name__ == "__main__":
     x = 0

@@ -12,7 +12,7 @@ class word:
             f.forth_name = self.name
         else:
             f.forth_name = f.__name__
-        f.forth_type = 'primitive'
+        f.forth_primitive = True
        f.forth_inline = False
        f.forth_immediate = self.immediate
        return f

@@ -48,5 +48,32 @@ def inner_f(contents):
     inner.forth_inline = False
     return inner
 
+def inner2_f(f1, f2):
+    def inner2(forth):
+        f1(forth)
+        f2(forth)
+    inner2.forth_primitive = False
+    inner2.forth_contents = [f1, f2]
+    inner2.forth_primitive = True
+    inner2.forth_immediate = False
+    inner2.forth_inline = False
+    return inner2
+
+def inner3_f(f1, f2, f3):
+    def inner3(forth):
+        f1(forth)
+        f2(forth)
+        f3(forth)
+    inner3.forth_primitive = False
+    inner3.forth_contents = [f1, f2, f3]
+    inner3.forth_immediate = False
+    inner3.forth_inline = False
+    return inner3
+
 def noop(value):
     pass
+noop.forth_inline = False
+noop.forth_primitive = True
+noop.forth_immediate = False
+
+