Passed
Push — main (ced03c...e8bc85) by Eran, created 01:33

tokens.tokenize_file(), rated A

Complexity: Conditions 1
Size: Total Lines 4, Code Lines 4
Duplication: Lines 0, Ratio 0%
Importance: Changes 0
Metric  Value
cc      1
eloc    4
nop     1
dl      0
loc     4
rs      10
c       0
b       0
f       0
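
The report does not say which analyzer produced these figures. As an illustration only (radon is an assumption here, not necessarily the tool behind this page), comparable per-function complexity and raw line counts for tokens.py can be computed like this:

# Sketch: reproduce per-function cyclomatic complexity and raw line counts
# with radon. Illustrative only; the report does not name its analyzer.
from radon.complexity import cc_visit
from radon.raw import analyze

with open("tokens.py") as f:
    source = f.read()

for block in cc_visit(source):
    print(block.name, block.complexity)  # e.g. "tokenize_file 1"

raw = analyze(source)
print(raw.loc, raw.sloc)  # total vs. source lines of code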
import itertools
import operator
import sys

from pygments import lex
from pygments.lexers import guess_lexer_for_filename

import graphinate


def load_file(file_path):
    # Read the file's full contents as text.
    with open(file_path) as file:
        return file.read()


def tokenize_file(file_path):
    # Lex the file with a Pygments lexer guessed from its name and content.
    content = load_file(file_path)
    lexer = guess_lexer_for_filename(file_path, content)
    return lex(content, lexer)


def token_graph_model(file_path):
    # Build a graphinate model: one node per token, one edge per pair of
    # consecutive tokens in the lexed stream.
    graph_model = graphinate.model(name="Token Graph")

    def token_type(v):
        # Node type label, e.g. Token.Name -> "Token_Name".
        return str(v[0]).replace('.', '_')

    def token_key(v):
        # Node key combining the token type and its text.
        return f"{v[0]}-{v[1]}"

    @graph_model.node(token_type, key=token_key)
    def token():
        yield from tokenize_file(file_path)

    @graph_model.edge(source=operator.itemgetter(0), target=operator.itemgetter(1))
    def edge():
        yield from itertools.pairwise(token_key(t) for t in tokenize_file(file_path))

    return graph_model


if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("Usage: python tokens.py <file_path>")
        sys.exit(1)

    file_path = sys.argv[1]
    token_model = token_graph_model(file_path)
    graphinate.materialize(
        token_model,
        builder=graphinate.builders.GraphQLBuilder,
        builder_output_handler=graphinate.graphql
    )
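
For context on the node and edge callbacks above: Pygments' lex() yields (token_type, value) tuples, so token_key() combines both fields and itertools.pairwise() links each token to the one that follows it. A minimal standalone sketch of that token stream (using PythonLexer directly instead of guessing the lexer from a filename):

from itertools import pairwise

from pygments import lex
from pygments.lexers import PythonLexer

# Each lexed item is a (token_type, value) tuple.
tokens = list(lex("x = 1\n", PythonLexer()))

# Consecutive tokens become source -> target key pairs, as in edge() above.
for a, b in pairwise(tokens):
    print(f"{a[0]}-{a[1]}", "->", f"{b[0]}-{b[1]}")

The script itself is run as python tokens.py <file_path>, per its usage message, and materializes the resulting token graph through graphinate's GraphQLBuilder.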