This repository has been archived by the owner on Jun 27, 2019. It is now read-only.

Commit

Finished umm I don't even know what, something that passes functions to other things that make functions to pass to other functions
Arghnews committed Feb 13, 2017
1 parent 5600649 commit 9e93a6c
Showing 6 changed files with 64 additions and 48 deletions.
Binary file modified: .actual_grammar.swp (not shown)
Binary file modified: .baba.py.swp (not shown)
Binary file modified: .example.lua.swp (not shown)
6 changes: 3 additions & 3 deletions actual_grammar
@@ -34,23 +34,23 @@ exp_back ::= `[´ exp `]´ | `.´ Name

args_back ::= args | `:´ Name args

namelist ::= Name { `,´ Name }

explist ::= { exp `,´ } exp

exp ::= nil exp_p | false exp_p | true exp_p | Number exp_p | String exp_p | `...´ exp_p | function exp_p |
prefixexp exp_p | tableconstructor exp_p | unop exp exp_p

exp_p ::= binop exp exp_p | epsilon

args ::= `(´ [ explist ] `)´ | tableconstructor | String

function ::= function funcbody

funcbody ::= `(´ [ parlist ] `)´ block end

parlist ::= namelist [ `,´ `...´ ] | `...´

namelist ::= Name { `,´ Name }

tableconstructor ::= `{´ [ fieldlist ] `}´

fieldlist ::= field { fieldsep field } [ fieldsep ]
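The grammar above is left-factored for a recursive-descent parser (note the exp / exp_p split). As a rough standalone sketch of how a production such as namelist ::= Name { `,´ Name } is consumed token by token, assuming only a minimal stand-in Token type (the repository's own Token class also carries line and column information) and an illustrative parse_namelist helper that is not part of the repo:

from collections import namedtuple

# Stand-in token type for this sketch only.
Token = namedtuple("Token", "type value")

def parse_namelist(i, tokens):
    """Consume Name { ',' Name } starting at index i; return the new index."""
    if tokens[i].type != "Name":
        raise SyntaxError("expected Name")
    i += 1
    # Two-token lookahead: only consume a ',' if a Name follows it.
    while (i + 1 < len(tokens)
           and tokens[i].value == ","
           and tokens[i + 1].type == "Name"):
        i += 2
    return i

toks = [Token("Name", "hugh"), Token("Op", ","), Token("Name", "joe")]
print(parse_namelist(0, toks))  # prints 3: all three tokens consumed

The matchType / matchValue / optional machinery in baba.py below generalises this hand-written pattern.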
103 changes: 60 additions & 43 deletions baba.py
@@ -81,6 +81,11 @@ def tokenize(code):
btick = r'`'
ftick = r'´'

# used when telling the lookahead machinery what to match against,
# i.e. tuples such as (",", MATCH_VALUE) or ("Name", MATCH_TYPE)
MATCH_TYPE = "type"
MATCH_VALUE = "value"
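# For example, the optional `,´ Name tail of namelist is written below as
#   [(",", MATCH_VALUE), ("Name", MATCH_TYPE)]
# i.e. first a token whose value is "," and then a token whose type is "Name"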

def parse(fname):
# ^([0-9]*)(\.[0-9]+)?([eE]-?[0-9]+)?$|^([0-9]+)(\.[0-9]*)?([eE]-?[0-9]+)?$|^0x([0-9a-fA-F]*)(\.[0-9a-fA-F]+)?([pP]-?[0-9]+)?$|^0x([0-9a-fA-F]+)(\.[0-9a-fA-F]*)?([pP]-?[0-9]+)?$
program = ""
@@ -94,7 +99,7 @@ def parse(fname):
for token in tokenize(program):
tokens.append(token)
# append EOF token
EOF = Token("EOF","EOF",tokens[-1].line+1,0)
EOF = Token("EOF","EOF",0 if len(tokens) == 0 else tokens[-1].line+1,0)
tokens.append(EOF)

for i, t in enumerate(tokens):
@@ -109,9 +114,14 @@ def parse(fname):
#i, tokens = namelist(i, tokens)
#print("Consumed in namelist",[str(t.type)+": "+str(t.value) for t in tokens[i_b:i]])

def namelist(i, tokens):
i, tokens = matchType("Name")(i,tokens)
i, tokens = optional(i, tokens, [(",",MATCH_VALUE), ("Name",MATCH_TYPE)], 2)
return i, tokens

def matchType(type):
def f(i, tokens):
if match_t(tokens[i],type):
if match_t(tokens,i,type):
i += 1
return i, tokens
# for nice error print
@@ -120,70 +130,77 @@ def f(i, tokens):

def matchValue(value):
def f(i, tokens):
if match_v(tokens[i],value):
if match_v(tokens,i,value):
i += 1
return i, tokens
# for nice error print
f.__name__ = value
return f

def name_suffix(i, tokens):
if match_v(tokens[i],","):
if match_v(tokens,i,","):
i += 1
i, tokens = name(i, tokens)
else:
pass
return i, tokens

def star(i, tokens, funcs):
cont = True
while cont:
original_i = i
def star_do(i, tokens, funcs):
while True:
for f in funcs:
last_i = i
i, tokens = f(i, tokens)
cont = last_i != i
if i == original_i:
# fine, not another instance of a in
# a*
print("Fine return",tokens[i])
return original_i, tokens
elif not cont:
# error, as for a -> b { c d}
# got b c q as d != q
print("Error, expected ",f.__name__)
return original_i, tokens
return i, tokens
yield i, tokens

def optional(i, tokens, funcs):
cont = True
original_i = i
def optional_do(i, tokens, funcs):
for f in funcs:
last_i = i
i, tokens = f(i, tokens)
cont = last_i != i
if i == original_i:
# fine, not another instance of a in
# a*
print("Fine return",tokens[i])
return original_i, tokens
elif not cont:
# error, as for a -> b { c d}
# got b c q as d != q
print("Error, expected ",f.__name__)
return original_i, tokens
yield i, tokens

def parlist(i, tokens):
i, tokens = namelist(i, tokens)
i, tokens = optional(i, tokens, [matchValue(","),matchValue("...")])
return i, tokens

def namelist(i, tokens):
i, tokens = matchType("Name")(i,tokens)
i, tokens = optional(i, tokens, [matchValue(","),matchType("Name")])
def something(i, tokens, func_tuples, lookahead, repeater):
# build the list of matcher functions for the grammar
# construct currently being parsed
funcs = []
for j, (match, match_type) in enumerate(func_tuples):
if match_type == MATCH_VALUE:
funcs.append(matchValue(match))
elif match_type == MATCH_TYPE:
funcs.append(matchType(match))

# lookahead function: peek ahead and check the next tokens match the expected tuples
def cont():
for j in range(0, min(len(func_tuples),lookahead)):
b = False
if func_tuples[j][1] == MATCH_VALUE:
b = match_v(tokens, i+j, func_tuples[j][0])
elif func_tuples[j][1] == MATCH_TYPE:
b = match_t(tokens, i+j, func_tuples[j][0])
if not b:
return False
return True

if cont():
for i, tokens in repeater(i, tokens, funcs):
if not cont():
break
return i, tokens

def match_t(token,type):
return token.type == type

def match_v(token, val):
return token.value == val
def optional(i, tokens, func_tuples, lookahead):
return something(i, tokens, func_tuples, lookahead, optional_do)

def star(i, tokens, func_tuples, lookahead):
return something(i, tokens, func_tuples, lookahead, star_do)
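# Assumed usage sketch (mirroring the optional() call in namelist above):
# matching zero or more `,´ Name pairs with a two-token lookahead would be
#   i, tokens = star(i, tokens, [(",", MATCH_VALUE), ("Name", MATCH_TYPE)], 2)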

def match_t(tokens,i,type):
return i < len(tokens) and tokens[i].type == type

def match_v(tokens,i,val):
return i < len(tokens) and tokens[i].value == val

if __name__ == "__main__":
parse(sys.argv[1])
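Going by the __main__ guard above, parse() takes a filename, so a minimal (assumed) way to drive the parser from another script would be:

# Hypothetical driver; assumes baba.py is importable from the current directory.
# parse() reads the file, tokenizes it, appends an EOF token and walks the list.
import baba

baba.parse("example.lua")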
3 changes: 1 addition & 2 deletions example.lua
@@ -1,3 +1,2 @@
hugh,joe,justin,

hugh,joe,ben,patrick,,,.
