mirror of
https://github.com/russolsen/sallyforth
synced 2024-11-16 19:48:49 +01:00
Added (comments)
This commit is contained in:
parent
f8ceb9b3a3
commit
c74f666a19
1 changed file with 8 additions and 4 deletions
|
@ -1,4 +1,5 @@
|
|||
import sys
|
||||
import readline
|
||||
from os import path
|
||||
|
||||
def is_string(token):
|
||||
|
@ -14,7 +15,13 @@ def tokenize(s):
|
|||
tokens = []
|
||||
for ch in s:
|
||||
#print(f'Loop state {state} token {token} ch {ch}')
|
||||
if state == 'start' and is_space(ch):
|
||||
if state == 'start' and ch == '(':
|
||||
state = 'comment'
|
||||
elif state == 'comment' and ch == ')':
|
||||
state = 'start'
|
||||
elif state == 'comment':
|
||||
continue
|
||||
elif state == 'start' and is_space(ch):
|
||||
continue
|
||||
elif state == 'start' and ch == '"':
|
||||
token = ch
|
||||
|
@ -39,9 +46,6 @@ def tokenize(s):
|
|||
tokens.append(token)
|
||||
return tokens
|
||||
|
||||
def read_ch():
    """Return the next single character read from standard input."""
    ch = sys.stdin.read(1)
    return ch
|
||||
|
||||
def read_tokens(read_f):
    """Read one chunk of input via *read_f* and return its token list."""
    return tokenize(read_f())
|
||||
|
|
Loading…
Reference in a new issue