import json
import logging
import optparse
import os
import sys
import textwrap
import time

from pyramid.paster import bootstrap, setup_logging

from atramhasis.errors import SkosRegistryNotFoundException

log = logging.getLogger(__name__)


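# Console script: generate a config file for a Linked Data Fragments (LDF)
# server based on the settings in the given Pyramid ini file.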
def main():
    description = """\
    Generate a config file for a LDF server.
    """
    usage = "usage: %prog config_uri"
    parser = optparse.OptionParser(usage=usage, description=textwrap.dedent(description))
    parser.add_option(
        "-l",
        "--location",
        dest="config_location",
        type="string",
        help=(
            "Specify where to put the config file. If not specified, this is set to the"
            " atramhasis.ldf.config_location from your ini file."
        ),
    )

    options, args = parser.parse_args(sys.argv[1:])

    if not args:
        log.error("You must provide at least one argument.")
        return 2

    config_uri = args[0]

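    # Bootstrap the Pyramid environment from the ini file and set up logging.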
    env = bootstrap(config_uri)
    setup_logging(config_uri)

    config_location = options.config_location
    if config_location is None:
        config_location = env["registry"].settings.get(
            "atramhasis.ldf.config_location", os.path.abspath(os.path.dirname(config_uri))
        )

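    # The RDF dumps (HDT or Turtle) for every conceptscheme are expected in dump_location.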
    dump_location = env["registry"].settings.get(
        "atramhasis.dump_location", os.path.abspath(os.path.dirname(config_uri))
    )

    ldf_baseurl = env["registry"].settings.get("atramhasis.ldf.baseurl", None)

    ldf_protocol = env["registry"].settings.get("atramhasis.ldf.protocol", None)

    request = env["request"]

    if hasattr(request, "skos_registry") and request.skos_registry is not None:
        skos_registry = request.skos_registry
    else:
        raise SkosRegistryNotFoundException()  # pragma: no cover

    start_time = time.time()
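    # Base LDF server configuration; one datasource per conceptscheme is appended below.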
    ldfconfig = {
        "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@ldf/server/^3.0.0/components/context.jsonld",
        "@id": "urn:ldf-server:my",
        "import": "preset-qpf:config-defaults.json",
        "title": "Atramhasis LDF server",
        "datasources": [],
        "prefixes": [
            {"prefix": "rdf", "uri": "http://www.w3.org/1999/02/22-rdf-syntax-ns#"},
            {"prefix": "rdfs", "uri": "http://www.w3.org/2000/01/rdf-schema#"},
            {"prefix": "owl", "uri": "http://www.w3.org/2002/07/owl#"},
            {"prefix": "xsd", "uri": "http://www.w3.org/2001/XMLSchema#"},
            {"prefix": "hydra", "uri": "http://www.w3.org/ns/hydra/core#"},
            {"prefix": "void", "uri": "http://rdfs.org/ns/void#"},
            {"prefix": "skos", "uri": "http://www.w3.org/2004/02/skos/core#"},
            {"prefix": "skos-thes", "uri": "http://purl.org/iso25964/skos-thes#"},
        ],
    }

    if ldf_baseurl:
        ldfconfig["baseURL"] = ldf_baseurl

    if ldf_protocol:
        ldfconfig["protocol"] = ldf_protocol

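    # Add a datasource for every provider in the SKOS registry, skipping external ones.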
    pids = []
    for p in skos_registry.get_providers():
        if any(not_shown in p.get_metadata()["subject"] for not_shown in ["external"]):
            continue
        pid = p.get_metadata()["id"]
        title = p.concept_scheme.label().label if p.concept_scheme.label() else pid
        pids.append(pid)
        filename = os.path.join(dump_location, f"{pid}-full")
        dumptype = "HdtDatasource"
        filetype = "hdtFile"
        dumpfile = filename + ".hdt"

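        # Fall back to a Turtle dump when no HDT file is present for this conceptscheme.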
        if not os.path.isfile(dumpfile):
            dumptype = "TurtleDatasource"
            filetype = "file"
            dumpfile = filename + ".ttl"

        sourceconfig = {
            "@id": f"urn:ldf-server:myDatasource{pid}",
            "@type": dumptype,
            "quads": False,  # TODO
            "datasourcePath": pid,
            "datasourceTitle": title,
            filetype: dumpfile,
        }

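        # Use the conceptscheme's definition or scopeNote, if any, as the datasource description.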
        for n in p.concept_scheme.notes:
            if n.type in ["definition", "scopeNote"]:
                sourceconfig["description"] = n.note
                break

        ldfconfig["datasources"].append(sourceconfig)

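    # Add a composite datasource that combines all conceptschemes.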
    if pids:
        composite_sourceconfig = {
            "@id": "urn:ldf-server:myDatasourcecomposite",
            "@type": "CompositeDatasource",
            "quads": False,  # TODO
            "datasourcePath": "composite",
            "datasourceTitle": "All conceptschemes",
            "description": (
                "All conceptschemes contained in this Atramhasis instance together."
            ),
            "compose": [f"urn:ldf-server:myDatasource{pid}" for pid in pids],
        }
        ldfconfig["datasources"].append(composite_sourceconfig)

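    # Write the generated configuration to config_location.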
    config_filename = os.path.join(config_location, "ldf_server_config.json")

    with open(config_filename, "w") as fp:
        json.dump(ldfconfig, fp, indent=4)

    log.info(f"Config written to {config_filename}.")

    log.info(f"--- {time.time() - start_time} seconds ---")

    env["closer"]()