cli menu done, and input/output parsing

This commit is contained in:
Tiago Sousa 2023-05-28 16:23:14 +01:00
parent 5948922c15
commit acd8a6a8c7
4 changed files with 96 additions and 24 deletions

View file

@ -17,7 +17,6 @@ tokens = [
"BOOL",
"INF",
"NAN",
"COMMENT",
"NEWLINE",
]

View file

@ -1,11 +1,12 @@
from ply import yacc
from lexer import tokens
import json
import sys
def p_toml(p):
"""toml : newlines content
| content"""
| content"""
def p_content_multi(p):
@ -23,9 +24,11 @@ def p_tomlEntries_table(p):
def p_tomlEntries_object(p):
"""tomlEntries : object newlines"""
def p_newlines(p):
"""newlines : newlines NEWLINE
| NEWLINE"""
| NEWLINE"""
def p_table_simple(p):
"""table : TABLE"""
@ -40,7 +43,7 @@ def p_table_simple(p):
if isinstance(temp[header], list):
temp = temp[header][-1]
elif isinstance(temp[header], dict):
temp = temp[header]
temp = temp[header]
if headers[-1] not in temp:
temp[headers[-1]] = {}
temp = temp[headers[-1]]
@ -57,7 +60,7 @@ def p_table_array(p):
p.parser.current_inline_tables = []
p.parser.current_tables = []
p.parser.syntax_error = False
path = '.'.join(p.parser.current_header_name + p[1])
path = ".".join(p.parser.current_header_name + p[1])
headers = p[1]
temp = p.parser.root_dict
for header in headers[:-1]:
@ -84,18 +87,18 @@ def p_object(p):
return
headers = p[1]
temp = p.parser.current_header
path = '.'.join(p.parser.current_header_name + p[1])
path = ".".join(p.parser.current_header_name + p[1])
is_table = False
if len(p[1])>1:
if len(p[1]) > 1:
for table in p.parser.current_inline_tables:
if p[1][:len(table)]==table:
if p[1][: len(table)] == table:
print(f"Error, trying to redefine {path}, an inline table")
return
is_table=True
if isinstance(p[3],dict):
is_table = True
if isinstance(p[3], dict):
for table in p.parser.current_tables:
if table[:len(p[1])]==p[1]:
if table[: len(p[1])] == p[1]:
print(f"Error, trying to redefine {path}, a table")
return
p.parser.current_inline_tables.append(p[1])
@ -105,7 +108,7 @@ def p_object(p):
if header not in temp:
temp[header] = {}
temp = temp[header]
if not isinstance(temp,dict):
if not isinstance(temp, dict):
print("Error, cannot add {p[3]} to a {type(temp)} variable")
return
if headers[-1] in temp:
@ -159,7 +162,7 @@ def p_dictCont_multiple(p):
duplicate_list = [k for k in p[1] if k in p[3]]
for dup in duplicate_list:
print(f"Duplicate inline-table key {dup}")
if len(duplicate_list)==0:
if len(duplicate_list) == 0:
p[1].update(p[3])
p[0] = p[1]
@ -188,6 +191,7 @@ def p_key_id(p):
"""key : ID"""
p[0] = p[1]
# the rest of the cases are the specific cases where the key as the same format as a float/int/etc
# so we need make them a singleton list.
def p_key_rest(p):
@ -203,6 +207,7 @@ def p_key_rest(p):
| NAN"""
p[0] = [p[1]]
def p_value_str(p):
"""value : STR"""
p[0] = p[1]
@ -262,10 +267,12 @@ def p_value_nan(p):
"""value : NAN"""
p[0] = p[1]
def p_value_bool(p):
"""value : BOOL"""
p[0] = bool(p[1])
def p_error(p):
print(p)
@ -279,7 +286,29 @@ parser.syntax_error = False
parser.current_inline_tables = []
parser.current_tables = []
f = open("example.toml", "r")
parser.parse(f.read())
#print(json.dumps(parser.root_dict, indent=2))
def parse(in_file, out_file):
    """Parse TOML input and emit the equivalent JSON document.

    Reads TOML text from *in_file* (or stdin when it is None), feeds it to
    the module-level ply parser, and writes the JSON rendering of
    ``parser.root_dict`` to *out_file* (or stdout when it is None).
    File errors are reported on stdout and abort the conversion.
    """
    if in_file is not None:
        try:
            # `with` guarantees the descriptor is closed even if read() fails
            # (the original open/read/close leaked the handle on error).
            with open(in_file, "r") as in_fd:
                in_content = in_fd.read()
        except OSError as e:
            print(f"Error: {e}")
            return
    else:
        in_content = sys.stdin.read()
    # Grammar actions populate parser.root_dict as a side effect.
    parser.parse(in_content)
    output = json.dumps(parser.root_dict, indent=2)
    if out_file is not None:
        try:
            with open(out_file, "w") as out_fd:
                out_fd.write(output)
        except OSError as e:
            # f"{e}" renders identically to the original f"{str(e)}".
            print(f"Error: {e}")
            return
    else:
        print(output)

23
src/pltoml.py Executable file
View file

@ -0,0 +1,23 @@
#! /usr/bin/env python3
from parser import parse
from tokenizer import tokenizer
import sys
import argparse
def main():
    """CLI entry point: convert TOML to JSON, or dump lexer tokens with -t."""
    # NOTE: the dead bare `sys.argv` expression from the original was removed;
    # argparse reads sys.argv itself.
    argv_parser = argparse.ArgumentParser(
        prog="PLTOML",
        description="A command line tool to convert toml files into json using ply.",
    )
    argv_parser.add_argument("-i", "--input", help="The filepath to the target input file")
    argv_parser.add_argument("-o", "--output", help="The filepath to the target output file")
    argv_parser.add_argument(
        "-t",
        "--tokenizer",
        help="This feature allows you to inspect all the tokens captured by the lexer (should only be used for debugging)",
    )
    args = argv_parser.parse_args()
    # Passing any value to -t selects token-dump mode; otherwise parse to JSON.
    if args.tokenizer is not None:
        tokenizer(args.input, args.output)
    else:
        parse(args.input, args.output)


if __name__ == "__main__":
    main()

View file

@ -1,8 +1,29 @@
from lexer import lexer
f = open("example.toml","r")
lexer.input(f.read())
for tok in lexer:
print(tok)
import sys
def tokenizer(in_file, out_file):
    """Dump the lexer's token stream, one token per line.

    Reads TOML text from *in_file* (or stdin when it is None), runs the ply
    lexer over it, and writes the token dump to *out_file* (or stdout when
    it is None). File errors are reported on stdout and abort the run.
    """
    if in_file is not None:
        try:
            # `with` guarantees the descriptor is closed even if read() fails.
            with open(in_file, "r") as in_fd:
                in_content = in_fd.read()
        except OSError as e:
            print(f"Error: {e}")
            return
    else:
        in_content = sys.stdin.read()
    lexer.input(in_content)
    # BUG FIX: ply yields LexToken objects, and `tok + "\n"` raises TypeError
    # (LexToken does not support str concatenation). Render each token via
    # its str() form and join once instead of quadratic `+=`.
    output = "".join(f"{tok}\n" for tok in lexer)
    if out_file is not None:
        try:
            with open(out_file, "w") as out_fd:
                out_fd.write(output)
        except OSError as e:
            print(f"Error: {e}")
            return
    else:
        print(output)