Refactor HLDS to avoid always loading the definition files on import
This commit is contained in:
parent
1bfbbfde6d
commit
edb1677523
4 changed files with 97 additions and 52 deletions
59
app/hlds/__init__.py
Executable file → Normal file
59
app/hlds/__init__.py
Executable file → Normal file
|
@ -1,54 +1,9 @@
|
||||||
#!/usr/bin/env python3
|
"""
|
||||||
|
If you want to access the definitions, then just do
|
||||||
|
>>> from hlds.definitions import location_definitions
|
||||||
|
|
||||||
from glob import glob
|
These are not imported in this module's init, to avoid opening the definition files and running the
|
||||||
from os import path, walk
|
parser on them when testing other code in this module, or when testing the parser on other files.
|
||||||
from tatsu import parse as tatsu_parse
|
"""
|
||||||
import itertools
|
|
||||||
from .models import Location
|
|
||||||
|
|
||||||
|
from .models import Location, Choice, Option
|
||||||
# TODO Use proper way to get resources, see https://stackoverflow.com/a/10935674
|
|
||||||
with open(path.join(path.dirname(__file__), "hlds.tatsu")) as fh:
|
|
||||||
GRAMMAR = fh.read()
|
|
||||||
|
|
||||||
|
|
||||||
def kind_comparer(compare_to):
    """Return a predicate that is true for items whose "kind" equals *compare_to*."""
    def matches(item):
        return item["kind"] == compare_to
    return matches
|
|
||||||
|
|
||||||
|
|
||||||
def parse(menu):
    """Parse *menu* (HLDS source text) with the module grammar.

    Returns the raw tatsu AST unchanged. The dict-building code that
    previously followed the ``return`` statement was unreachable dead
    code and has been removed; restore it deliberately if a normalized
    dict representation is ever wanted.
    """
    return tatsu_parse(GRAMMAR, menu)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_file(filename):
    """Read the HLDS file at *filename* and return its parsed form."""
    with open(filename, "r") as menu_file:
        contents = menu_file.read()
    return parse(contents)
|
|
||||||
|
|
||||||
|
|
||||||
def load_all():
    """Parse every top-level .hlds file in the project data directory.

    Returns one flat list with the entries of all parsed menus.
    """
    # TODO Use proper way to get resources, see https://stackoverflow.com/a/10935674
    data_dir = path.join(path.dirname(__file__), "..", "..", "data")
    parsed_menus = [parse_file(name) for name in glob(path.join(data_dir, "**.hlds"))]
    return [entry for menu in parsed_menus for entry in menu]
|
|
||||||
|
|
||||||
|
|
||||||
def main(filename):
    """Parse *filename* as HLDS and pretty-print the resulting AST as JSON."""
    # Imported lazily so the module can be used without these dependencies.
    import json
    from tatsu.util import asjson

    tree = parse_file(filename)
    print(json.dumps(asjson(tree), indent="\t"))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    import sys

    # Forward command-line arguments to main (expects a single filename).
    main(*sys.argv[1:])
|
|
||||||
|
|
13
app/hlds/definitions.py
Normal file
13
app/hlds/definitions.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Import this module to load the standard HLDS definitions.
# Kept separate from the package __init__ so that importing other parts of
# the package does not open and parse the definition files.
from os import path

import itertools

from .loader import parse_all_directory

# Bug fix: __all__ previously listed "definitions", a name this module does
# not define; the public name is location_definitions.
__all__ = ["location_definitions"]

# TODO Use proper way to get resources, see https://stackoverflow.com/a/10935674
data_dir = path.join(path.dirname(__file__), "..", "..", "data")

# Parsed at import time, by design: importing this module loads everything.
location_definitions = parse_all_directory(data_dir)
|
44
app/hlds/loader.py
Normal file
44
app/hlds/loader.py
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
from glob import glob
|
||||||
|
from os import path, walk
|
||||||
|
from tatsu import parse as tatsu_parse
|
||||||
|
import itertools
|
||||||
|
from .models import Location
|
||||||
|
|
||||||
|
|
||||||
|
# TODO Use proper way to get resources, see https://stackoverflow.com/a/10935674
# The tatsu grammar lives next to this module; read it once at import time.
_GRAMMAR_FILE = path.join(path.dirname(__file__), "hlds.tatsu")
with open(_GRAMMAR_FILE) as grammar_fh:
    GRAMMAR = grammar_fh.read()
|
||||||
|
|
||||||
|
|
||||||
|
def kind_equal_to(compare_to):
    """Return a predicate that is true for items whose "kind" equals *compare_to*."""
    def matches(item):
        return item["kind"] == compare_to
    return matches
|
||||||
|
|
||||||
|
|
||||||
|
def parse(menu):
    """Parse *menu* (HLDS source text) with the module grammar.

    Returns the raw tatsu AST unchanged. The dict-building code that
    previously followed the ``return`` statement was unreachable dead
    code — and had additionally lost its ``filter(...)`` wrappers around
    the ``kind_equal_to`` predicates, leaving bare tuples — so it has
    been removed rather than kept in a silently broken state.
    """
    return tatsu_parse(GRAMMAR, menu)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_file(filename):
    """Read the HLDS file at *filename* and return its parsed form."""
    with open(filename, "r") as menu_file:
        source = menu_file.read()
    return parse(source)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_files(files):
    """Parse each HLDS file in *files* and flatten the results into one list."""
    return [entry for filename in files for entry in parse_file(filename)]
|
||||||
|
|
||||||
|
|
||||||
|
def parse_all_directory(directory):
    """Parse all .hlds files under *directory*, recursively, into one flat list.

    Bug fix: the previous pattern ``directory/**.hlds`` never recursed —
    ``**`` is only recursive when it forms a whole path component — so
    despite ``recursive=True`` only top-level files were matched. The
    ``**/*.hlds`` form matches files at the top level and in every
    subdirectory.
    """
    # TODO Use proper way to get resources, see https://stackoverflow.com/a/10935674
    files = glob(path.join(directory, "**", "*.hlds"), recursive=True)
    return parse_files(files)
|
33
parse_hlds.py
Executable file
33
parse_hlds.py
Executable file
|
@ -0,0 +1,33 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import json
|
||||||
|
from tatsu.util import asjson
|
||||||
|
from app.hlds.loader import parse_files
|
||||||
|
|
||||||
|
|
||||||
|
# Usage text; "{0}" is replaced with the program name.
# Bug fix: the last placeholder was "{}" while the first was "{0}"; mixing
# manual and automatic field numbering makes str.format raise
# "ValueError: cannot switch from manual field specification to automatic
# field numbering", so USAGE.format(sys.argv[0]) always crashed.
USAGE = """{0} [filename]...

Parse HLDS files, print as JSON

Without arguments, parse the default definitions.
With filenames as arguments, parse those files as HLDS.

{0} --help Print this help text"""
|
||||||
|
|
||||||
|
|
||||||
|
def definitions():
    """Return the standard location definitions.

    The import happens inside the function on purpose: merely importing
    this script must not trigger parsing of the definition files.
    """
    from app.hlds.definitions import location_definitions

    return location_definitions
|
||||||
|
|
||||||
|
|
||||||
|
def main(filenames):
    """Parse *filenames* (or, if empty, the default definitions) and print JSON."""
    if filenames:
        locations = parse_files(filenames)
    else:
        locations = definitions()
    print(json.dumps(asjson(locations), indent="\t"))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    import sys

    cli_args = sys.argv[1:]
    # -h/--help anywhere in the arguments prints usage to stderr and exits.
    if "-h" in cli_args or "--help" in cli_args:
        print(USAGE.format(sys.argv[0]), file=sys.stderr)
    else:
        main(cli_args)
|
Loading…
Reference in a new issue