cli menu done, and input/output parsing
This commit is contained in:
parent
5948922c15
commit
acd8a6a8c7
4 changed files with 96 additions and 24 deletions
|
@ -17,7 +17,6 @@ tokens = [
|
||||||
"BOOL",
|
"BOOL",
|
||||||
"INF",
|
"INF",
|
||||||
"NAN",
|
"NAN",
|
||||||
"COMMENT",
|
|
||||||
"NEWLINE",
|
"NEWLINE",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
|
@ -1,11 +1,12 @@
|
||||||
from ply import yacc
|
from ply import yacc
|
||||||
from lexer import tokens
|
from lexer import tokens
|
||||||
import json
|
import json
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
# Top-level grammar rule: a TOML document is optional leading newlines
# followed by content.  NOTE: in ply, the docstring IS the grammar
# production — it is consumed by yacc, not documentation text.
def p_toml(p):
    """toml : newlines content
    | content"""
|
||||||
|
|
||||||
|
|
||||||
def p_content_multi(p):
|
def p_content_multi(p):
|
||||||
|
@ -23,9 +24,11 @@ def p_tomlEntries_table(p):
|
||||||
# Grammar rule: an entry line consisting of a key/value object followed by
# newline(s).  The docstring is the ply production — do not edit as prose.
def p_tomlEntries_object(p):
    """tomlEntries : object newlines"""
|
||||||
|
|
||||||
|
|
||||||
# Grammar rule: one or more NEWLINE tokens (left-recursive list).
# No action body needed — the rule exists only to absorb blank lines.
def p_newlines(p):
    """newlines : newlines NEWLINE
    | NEWLINE"""
|
||||||
|
|
||||||
|
|
||||||
def p_table_simple(p):
|
def p_table_simple(p):
|
||||||
"""table : TABLE"""
|
"""table : TABLE"""
|
||||||
|
@ -40,7 +43,7 @@ def p_table_simple(p):
|
||||||
if isinstance(temp[header], list):
|
if isinstance(temp[header], list):
|
||||||
temp = temp[header][-1]
|
temp = temp[header][-1]
|
||||||
elif isinstance(temp[header], dict):
|
elif isinstance(temp[header], dict):
|
||||||
temp = temp[header]
|
temp = temp[header]
|
||||||
if headers[-1] not in temp:
|
if headers[-1] not in temp:
|
||||||
temp[headers[-1]] = {}
|
temp[headers[-1]] = {}
|
||||||
temp = temp[headers[-1]]
|
temp = temp[headers[-1]]
|
||||||
|
@ -57,7 +60,7 @@ def p_table_array(p):
|
||||||
p.parser.current_inline_tables = []
|
p.parser.current_inline_tables = []
|
||||||
p.parser.current_tables = []
|
p.parser.current_tables = []
|
||||||
p.parser.syntax_error = False
|
p.parser.syntax_error = False
|
||||||
path = '.'.join(p.parser.current_header_name + p[1])
|
path = ".".join(p.parser.current_header_name + p[1])
|
||||||
headers = p[1]
|
headers = p[1]
|
||||||
temp = p.parser.root_dict
|
temp = p.parser.root_dict
|
||||||
for header in headers[:-1]:
|
for header in headers[:-1]:
|
||||||
|
@ -84,18 +87,18 @@ def p_object(p):
|
||||||
return
|
return
|
||||||
headers = p[1]
|
headers = p[1]
|
||||||
temp = p.parser.current_header
|
temp = p.parser.current_header
|
||||||
path = '.'.join(p.parser.current_header_name + p[1])
|
path = ".".join(p.parser.current_header_name + p[1])
|
||||||
is_table = False
|
is_table = False
|
||||||
|
|
||||||
if len(p[1])>1:
|
if len(p[1]) > 1:
|
||||||
for table in p.parser.current_inline_tables:
|
for table in p.parser.current_inline_tables:
|
||||||
if p[1][:len(table)]==table:
|
if p[1][: len(table)] == table:
|
||||||
print(f"Error, trying to redefine {path}, an inline table")
|
print(f"Error, trying to redefine {path}, an inline table")
|
||||||
return
|
return
|
||||||
is_table=True
|
is_table = True
|
||||||
if isinstance(p[3],dict):
|
if isinstance(p[3], dict):
|
||||||
for table in p.parser.current_tables:
|
for table in p.parser.current_tables:
|
||||||
if table[:len(p[1])]==p[1]:
|
if table[: len(p[1])] == p[1]:
|
||||||
print(f"Error, trying to redefine {path}, a table")
|
print(f"Error, trying to redefine {path}, a table")
|
||||||
return
|
return
|
||||||
p.parser.current_inline_tables.append(p[1])
|
p.parser.current_inline_tables.append(p[1])
|
||||||
|
@ -105,7 +108,7 @@ def p_object(p):
|
||||||
if header not in temp:
|
if header not in temp:
|
||||||
temp[header] = {}
|
temp[header] = {}
|
||||||
temp = temp[header]
|
temp = temp[header]
|
||||||
if not isinstance(temp,dict):
|
if not isinstance(temp, dict):
|
||||||
print("Error, cannot add {p[3]} to a {type(temp)} variable")
|
print("Error, cannot add {p[3]} to a {type(temp)} variable")
|
||||||
return
|
return
|
||||||
if headers[-1] in temp:
|
if headers[-1] in temp:
|
||||||
|
@ -159,7 +162,7 @@ def p_dictCont_multiple(p):
|
||||||
duplicate_list = [k for k in p[1] if k in p[3]]
|
duplicate_list = [k for k in p[1] if k in p[3]]
|
||||||
for dup in duplicate_list:
|
for dup in duplicate_list:
|
||||||
print(f"Duplicate inline-table key {dup}")
|
print(f"Duplicate inline-table key {dup}")
|
||||||
if len(duplicate_list)==0:
|
if len(duplicate_list) == 0:
|
||||||
p[1].update(p[3])
|
p[1].update(p[3])
|
||||||
p[0] = p[1]
|
p[0] = p[1]
|
||||||
|
|
||||||
|
@ -188,6 +191,7 @@ def p_key_id(p):
|
||||||
"""key : ID"""
|
"""key : ID"""
|
||||||
p[0] = p[1]
|
p[0] = p[1]
|
||||||
|
|
||||||
|
|
||||||
# The rest of the cases are the specific cases where the key has the same
# format as a float/int/etc, so we need to make them a singleton list.
|
||||||
def p_key_rest(p):
|
def p_key_rest(p):
|
||||||
|
@ -203,6 +207,7 @@ def p_key_rest(p):
|
||||||
| NAN"""
|
| NAN"""
|
||||||
p[0] = [p[1]]
|
p[0] = [p[1]]
|
||||||
|
|
||||||
|
|
||||||
# Grammar rule: a string value.  The token value is used as-is.
def p_value_str(p):
    """value : STR"""
    p[0] = p[1]
|
||||||
|
@ -262,10 +267,12 @@ def p_value_nan(p):
|
||||||
"""value : NAN"""
|
"""value : NAN"""
|
||||||
p[0] = p[1]
|
p[0] = p[1]
|
||||||
|
|
||||||
|
|
||||||
def p_value_bool(p):
    """value : BOOL"""
    # NOTE(review): bool(p[1]) is truthiness, not parsing — if the lexer
    # yields the raw strings "true"/"false", then bool("false") is True
    # because any non-empty string is truthy.  Confirm the BOOL token's
    # value type in the lexer; if it is a string, this needs p[1] == "true".
    p[0] = bool(p[1])
|
||||||
|
|
||||||
|
|
||||||
# Ply's error hook: called with the offending LexToken on a syntax error,
# or None at unexpected end of input.  Currently just prints it.
def p_error(p):
    print(p)
|
||||||
|
|
||||||
|
@ -279,7 +286,29 @@ parser.syntax_error = False
|
||||||
parser.current_inline_tables = []
|
parser.current_inline_tables = []
|
||||||
parser.current_tables = []
|
parser.current_tables = []
|
||||||
|
|
||||||
f = open("example.toml", "r")
|
|
||||||
parser.parse(f.read())
|
|
||||||
|
|
||||||
#print(json.dumps(parser.root_dict, indent=2))
|
def parse(in_file, out_file):
    """Parse TOML from *in_file* (or stdin when None) and emit JSON.

    The JSON rendering of the module-level parser's root dict is written
    to *out_file* when given, otherwise printed to stdout.  Read/write
    failures are reported with an "Error: ..." line and abort silently
    (the function returns without raising).
    """
    if in_file is not None:
        try:
            # 'with' guarantees the descriptor is closed even if read() raises
            # (the previous open/read/close left the file open on failure).
            with open(in_file, "r") as in_fd:
                in_content = in_fd.read()
        except Exception as e:
            print(f"Error: {e}")
            return
    else:
        # No input path: behave like a filter and consume stdin.
        in_content = sys.stdin.read()

    parser.parse(in_content)

    output = json.dumps(parser.root_dict, indent=2)
    if out_file is not None:
        try:
            with open(out_file, "w") as out_fd:
                out_fd.write(output)
        except Exception as e:
            # Same message shape as the input path (was f"{str(e)}", which
            # prints identically to f"{e}").
            print(f"Error: {e}")
            return
    else:
        print(output)
|
||||||
|
|
23
src/pltoml.py
Executable file
23
src/pltoml.py
Executable file
|
@ -0,0 +1,23 @@
|
||||||
|
#! /usr/bin/env python3
|
||||||
|
from parser import parse
|
||||||
|
from tokenizer import tokenizer
|
||||||
|
import sys
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: dispatch to the debug tokenizer or the TOML->JSON parser."""
    # (Removed a dead no-op statement: a bare `sys.argv` expression that did nothing.)
    argv_parser = argparse.ArgumentParser(prog="PLTOML",description="A command line tool to convert toml files into json using ply.")
    argv_parser.add_argument("-i","--input",help="The filepath to the target input file")
    argv_parser.add_argument("-o","--output",help="The filepath to the target output file")
    # NOTE(review): -t currently takes a value even though only its presence is
    # checked below; action="store_true" would be the conventional shape, but
    # changing it would alter the CLI contract, so it is kept as-is.
    argv_parser.add_argument("-t","--tokenizer",help="This feature allows you to inspect all the tokens captured by the lexer (should only be used for debugging)")

    args = argv_parser.parse_args()
    if args.tokenizer is not None:
        tokenizer(args.input,args.output)
    else:
        parse(args.input,args.output)


if __name__ == "__main__":
    main()
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,29 @@
|
||||||
from lexer import lexer
|
from lexer import lexer
|
||||||
|
import sys
|
||||||
f = open("example.toml","r")
|
def tokenizer(in_file, out_file):
    """Tokenize *in_file* (or stdin when None) and emit one token per line.

    The rendered token stream is written to *out_file* when given,
    otherwise printed to stdout.  Read/write failures are reported with an
    "Error: ..." line and the function returns early.
    """
    if in_file is not None:
        try:
            # 'with' guarantees the descriptor is closed even if read() raises.
            with open(in_file, "r") as in_fd:
                in_content = in_fd.read()
        except Exception as e:
            print(f"Error: {e}")
            return
    else:
        in_content = sys.stdin.read()

    lexer.input(in_content)
    # BUG FIX: the tokens yielded by the lexer are LexToken objects, not
    # strings, so `output += tok + "\n"` raised TypeError.  Stringify each
    # token explicitly; join builds the output in one pass instead of
    # quadratic `+=` concatenation.
    output = "".join(str(tok) + "\n" for tok in lexer)
    if out_file is not None:
        try:
            with open(out_file, "w") as out_fd:
                out_fd.write(output)
        except Exception as e:
            print(f"Error: {e}")
            return
    else:
        print(output)
|
||||||
|
|
Loading…
Reference in a new issue