path: root/src/generate.py
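"""Generate TDLib bindings from the td_api.tl schema.

The schema is parsed with a lark grammar; the parse tree can be cached
with pickle between runs and is finally handed to render() together with
the list of requested entities.
"""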
from __future__ import annotations
from typing import Dict, Optional
from entity import Type, Enum, Struct, Method, Field
from util import to_camel_case, parse_param
from render import render

def get_logger():
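    """Return the 'tdlib-autogen' logger, writing DEBUG-and-above records to stderr."""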
    import logging
    logger = logging.getLogger('tdlib-autogen')

    formatter = logging.Formatter(
        "[%(asctime)s] [%(name)s] %(levelname)s: %(message)s",
        datefmt="%d-%b-%y %H:%M:%S",
    )
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    return logger


log = get_logger()
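
# Lark grammar for the TL schema in td_api.tl: type declarations (plus //@class
# lines that introduce abstract classes) come first, then a "---functions---"
# separator, then the function declarations. Each declaration is preceded by a
# doc comment made of //@name descriptions, optionally continued on //- lines.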

GRAMMAR = r'''
start: type_decls "---functions---" "\n"* decls
type_decls: (decl | class_decl)+
decls: decl+

decl: docstring CNAME params "=" type ";" "\n"+
params: param*
param: CNAME ":" type

class_decl: "//@class" CNAME nameddoc "\n"+

docdescr: ((LF "//-")? DOCTEXT)*
nameddoc: "@" CNAME " " docdescr
docstring: ("//" nameddoc+ "\n"?)+

?type: CNAME | vector
vector: "vector" "<" type ">"


DOCTEXT: /[^\n@]+/
WHITESPACE: (" ")

%import common.CNAME
%ignore WHITESPACE
%import common.LF
'''
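
# For reference, a declaration this grammar is written to accept looks roughly
# like the following (illustrative, paraphrased from the td_api.tl format):
#
#   //@description Returns information about a user @user_id Identifier of the user
#   getUser user_id:int53 = User;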

if __name__ == '__main__':
    import argparse

    argparser = argparse.ArgumentParser(description='Generate bindings')
    argparser.add_argument('entities', metavar='entity', action='extend', type=str, nargs='*',
                        help='entities in addition to target.json')
    argparser.add_argument('--target', dest='target', action='store', default='target.json',
                        help='file with target entities (default: target.json)')
    argparser.add_argument('--no-write-cache', dest='write_cache', action='store_false',
                        help='do not write cache')
    argparser.add_argument('--no-read-cache', dest='read_cache', action='store_false',
                        help='do not read cache')
    argparser.add_argument('--cache', dest='cache', action='store', default='cache.pkl',
                        help='cache path (default: cache.pkl)')
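
    # Example invocation (illustrative):
    #   python src/generate.py --no-read-cache --target target.json user chat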

    args = argparser.parse_args()


    def try_read_cache(cache_path: str):
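        """Return a previously pickled parse tree from cache_path, or None if it cannot be read."""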
        try:
            import pickle
            with open(cache_path, "rb") as cache_file:
                return pickle.load(cache_file)
        except Exception as e:
            log.debug(f'could not read cache: {e}')
            return None

    def try_true_parse(cache_path: Optional[str] = None):
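        """Parse td_api.tl with the lark grammar above.

        If cache_path is given, the parse tree is also pickled there.
        Returns the tree, or None if parsing fails.
        """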
        from lark import Lark
        import pickle

        parser = Lark(GRAMMAR)
        try:
            with open("td_api.tl") as f:
                # Skip the first 14 lines of td_api.tl (the builtin-type
                # prologue), which the grammar does not cover.
                for _ in range(14):
                    f.readline()
                text = f.read()
            parsed = parser.parse(text)
        except Exception as e:
            log.debug(f'could not parse tdlib api: {e}')
            return None

        # A failed cache write should not discard a successful parse.
        if cache_path is not None:
            try:
                with open(cache_path, "wb") as cache_file:
                    pickle.dump(parsed, cache_file)
                log.debug(f'written to cache: {cache_path}')
            except Exception as e:
                log.debug(f'could not write cache: {e}')

        return parsed

    entities = args.entities
    try:
        import json
        with open(args.target, 'r') as f:
            entities.extend(json.load(f))
    except Exception as e:
        log.debug(f'could not read entities from file: {e}')

    # Try the cached parse tree first (unless --no-read-cache); fall back to a
    # full parse, which writes the cache unless --no-write-cache was given.
    parsed = None
    if args.read_cache:
        parsed = try_read_cache(args.cache)

    if parsed is None:
        write_cache_path = args.cache if args.write_cache else None
        parsed = try_true_parse(write_cache_path)

    if parsed is None:
        log.error('could not obtain a parse tree; nothing to generate')
        raise SystemExit(1)

    render(parsed, entities, log)