"""Build a token graph from a source file with Pygments and Graphinate."""

import itertools
import operator
import sys

from pygments import lex
from pygments.lexers import guess_lexer_for_filename

import graphinate


def load_file(file_path):
    with open(file_path) as file:
        return file.read()


def tokenize_file(file_path):
    """Lex a file into (token_type, value) pairs, guessing the lexer from the filename."""
    content = load_file(file_path)
    lexer = guess_lexer_for_filename(file_path, content)
    return lex(content, lexer)


def token_graph_model(file_path):
    graph_model = graphinate.model(name="Token Graph")

    def token_type(v):
        # Pygments token types stringify as e.g. "Token.Keyword.Namespace";
        # replace the dots to get an identifier-safe node type.
        return str(v[0]).replace('.', '_')

    def token_key(v):
        # Key each node by its token type and text, e.g. "Token.Keyword-import".
        return f"{v[0]}-{v[1]}"

    @graph_model.node(token_type, key=token_key)
    def token():
        # Every lexed token becomes a node.
        yield from tokenize_file(file_path)

    @graph_model.edge(source=operator.itemgetter(0), target=operator.itemgetter(1))
    def edge():
        # Connect each token to the next one in the stream
        # (itertools.pairwise requires Python 3.10+).
        yield from itertools.pairwise(token_key(t) for t in tokenize_file(file_path))

    return graph_model
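

# Sketch: an optional helper for eyeballing the raw token stream before
# graphing it. Not part of the original script — the name preview_tokens and
# its default limit are assumptions for illustration; it only reuses
# tokenize_file() and the standard library.
def preview_tokens(file_path, limit=10):
    """Print the first `limit` (token_type, value) pairs Pygments yields."""
    for ttype, value in itertools.islice(tokenize_file(file_path), limit):
        print(ttype, repr(value))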


if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("Usage: python tokens.py <file_path>")
        sys.exit(1)

    file_path = sys.argv[1]
    token_model = token_graph_model(file_path)
    # Materialize the model with the GraphQL builder and hand the resulting
    # schema to graphinate.graphql as the output handler.
    graphinate.materialize(
        token_model,
        builder=graphinate.builders.GraphQLBuilder,
        builder_output_handler=graphinate.graphql
    )
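
# Example run (sketch): `python tokens.py tokens.py` graphs this very file's
# token stream; any readable source file whose language Pygments can guess
# from its filename should work.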