1
|
|
|
""" |
2
|
|
|
Command-line interface for mandos. |
3
|
|
|
""" |
4
|
|
|
|
5
|
|
|
from __future__ import annotations |
6
|
|
|
|
7
|
|
|
import os |
8
|
|
|
from pathlib import Path |
9
|
|
|
from typing import Optional |
10
|
|
|
|
11
|
|
|
import decorateme |
|
|
|
|
12
|
|
|
import pandas as pd |
|
|
|
|
13
|
|
|
import typer |
|
|
|
|
14
|
|
|
from pocketutils.core.chars import Chars |
|
|
|
|
15
|
|
|
from pocketutils.core.exceptions import XValueError |
|
|
|
|
16
|
|
|
from pocketutils.tools.string_tools import StringTools |
|
|
|
|
17
|
|
|
from typeddfs import CompressionFormat, FileFormat |
|
|
|
|
18
|
|
|
from typeddfs.df_errors import InvalidDfError |
|
|
|
|
19
|
|
|
from typeddfs.utils import Utils as TdfUtils |
|
|
|
|
20
|
|
|
from typeddfs.utils.cli_help import DfCliHelp |
|
|
|
|
21
|
|
|
|
22
|
|
|
from mandos.analysis.filtration import Filtration |
23
|
|
|
from mandos.analysis.reification import Reifier |
24
|
|
|
from mandos.entry import entry |
25
|
|
|
from mandos.entry.tools.docs import Documenter |
26
|
|
|
from mandos.entry.tools.fillers import CompoundIdFiller, IdMatchDf |
27
|
|
|
from mandos.entry.tools.multi_searches import MultiSearch, SearchConfigDf |
28
|
|
|
from mandos.entry.tools.searchers import InputCompoundsDf |
29
|
|
|
from mandos.entry.utils._arg_utils import Arg, ArgUtils, EntryUtils, Opt |
30
|
|
|
from mandos.entry.utils._common_args import CommonArgs |
31
|
|
|
from mandos.entry.utils._common_args import CommonArgs as Ca |
|
|
|
|
32
|
|
|
from mandos.model.apis.g2p_api import CachingG2pApi |
33
|
|
|
from mandos.model.hit_dfs import HitDf |
34
|
|
|
from mandos.model.settings import SETTINGS |
35
|
|
|
from mandos.model.taxonomy import TaxonomyDf |
36
|
|
|
from mandos.model.taxonomy_caches import TaxonomyFactories |
37
|
|
|
from mandos.model.utils import unlink |
38
|
|
|
from mandos.model.utils.globals import Globals |
39
|
|
|
from mandos.model.utils.setup import LOG_SETUP, logger |
40
|
|
|
|
41
|
|
|
# Filename suffix (table format + compression) used when building default output paths.
DEF_SUFFIX = SETTINGS.table_suffix
# Paragraph separator handed to the help-text builders (see get_short_text(nl=nl) calls below).
nl = "\n\n"
|
|
|
|
43
|
|
|
|
44
|
|
|
|
45
|
|
|
class _InsertedCommandListSingleton:
    # Holds the full list of CLI commands; read by ``MiscCommands.document``.
    # None until the CLI wiring code inserts the list at startup.
    commands = None
47
|
|
|
|
48
|
|
|
|
49
|
|
|
@decorateme.auto_utils()
class MiscCommands:
    """
    Miscellaneous top-level mandos CLI commands: setup, caching, search, and export.
    """

    @staticmethod
    @entry()
    def search(
        path: Path = Ca.in_compound_table,
        config: Path = Opt.in_file(
            r"""
            TOML config file. See the docs.
            """,
            default=...,
        ),
        to: Path = Ca.out_wildcard,
        log: Optional[Path] = Ca.log,
        stderr: str = CommonArgs.stderr,
        replace: bool = Opt.flag(r"""Overwrite completed and partially completed searches."""),
        proceed: bool = Opt.flag(r"""Continue partially completed searches."""),
        check: bool = Opt.flag("Check and write docs file only; do not run"),
    ) -> None:
        r"""
        Run multiple searches.
        """
        LOG_SETUP(log, stderr)
        # Default output dir is a sibling of the input, stamped with the run date.
        default = path.parent / ("search-" + Globals.start_time.strftime("%Y-%m-%d"))
        # TODO: , suffixes=FileFormat.from_path
        out_dir, suffix = EntryUtils.adjust_dir_name(to, default)
        logger.notice(f"Will write {suffix} to {out_dir}{os.sep}")
        config_fmt = FileFormat.from_path(config)
        if config_fmt is not FileFormat.toml:
            # Non-TOML configs are not rejected; the read below may still succeed.
            logger.caution(f"Config format is {config_fmt}, not toml; trying anyway")
        # NOTE(review): ``config`` is rebound from Path to SearchConfigDf here;
        # consider a distinct name for clarity.
        config = SearchConfigDf.read_file(config)
        search = MultiSearch(config, path, out_dir, suffix, replace, proceed, log)
        # --check performs the setup/validation above without executing searches.
        if not check:
            search.run()
83
|
|
|
|
84
|
|
|
@staticmethod |
85
|
|
|
@entry() |
86
|
|
|
def init( |
87
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
88
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
89
|
|
|
): |
90
|
|
|
""" |
91
|
|
|
Initializes mandos, creating directories, etc. |
92
|
|
|
""" |
93
|
|
|
LOG_SETUP(log, stderr) |
94
|
|
|
Globals.mandos_path.mkdir(exist_ok=True, parents=True) |
95
|
|
|
typer.echo(f"Mandos home dir is {Globals.mandos_path}") |
96
|
|
|
if Globals.settings_path.exists(): |
97
|
|
|
typer.echo(f"Settings found at {Globals.settings_path}") |
98
|
|
|
else: |
99
|
|
|
typer.echo("No settings file found") |
100
|
|
|
typer.echo(f"Log level for stderr is level {logger.current_stderr_log_level}") |
101
|
|
|
|
102
|
|
|
@staticmethod |
103
|
|
|
@entry() |
104
|
|
|
def list_settings( |
105
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
106
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
107
|
|
|
): |
108
|
|
|
r""" |
109
|
|
|
Write the settings to stdout. |
110
|
|
|
""" |
111
|
|
|
LOG_SETUP(log, stderr) |
112
|
|
|
defaults = SETTINGS.defaults() |
113
|
|
|
width = max((len(k) + 2 + len(v) + 1 for k, v in SETTINGS.items())) |
|
|
|
|
114
|
|
|
for k, v in SETTINGS.as_dict(): |
|
|
|
|
115
|
|
|
msg = f"{k} = {v}".ljust(width) |
116
|
|
|
if v != defaults[k]: |
117
|
|
|
msg += f" (default: {defaults[k]})" |
118
|
|
|
typer.echo(msg) |
119
|
|
|
|
120
|
|
|
@staticmethod |
121
|
|
|
@entry() |
122
|
|
|
def document( |
|
|
|
|
123
|
|
|
to: Path = Opt.out_file( |
|
|
|
|
124
|
|
|
rf""" |
125
|
|
|
The path to write command documentation to. |
126
|
|
|
|
127
|
|
|
` For machine-readable output: {DfCliHelp.list_formats().get_short_text()}. |
128
|
|
|
For formatted output: .txt or .rst [{"/".join([str(c) for c in CompressionFormat.list_non_empty()])} |
|
|
|
|
129
|
|
|
|
130
|
|
|
[default: "commands-level<level>.rst"] |
131
|
|
|
""" |
132
|
|
|
), |
133
|
|
|
style: str = Opt.val( |
|
|
|
|
134
|
|
|
rf""" |
135
|
|
|
The format for formatted text output. |
136
|
|
|
|
137
|
|
|
Use "table" for machine-readable output, "docs" for long-form reStructuredText, |
138
|
|
|
or {TdfUtils.join_to_str(TdfUtils.table_formats(), last="or")} |
139
|
|
|
""", |
140
|
|
|
"--style", |
141
|
|
|
default="docs", |
142
|
|
|
), |
143
|
|
|
width: int = Opt.val( |
|
|
|
|
144
|
|
|
r""" |
145
|
|
|
Max number of characters for a cell before wrap. |
146
|
|
|
|
147
|
|
|
[default: 0 (none) for machine-readable; 100 for formatted] |
148
|
|
|
""", |
149
|
|
|
default=None, |
150
|
|
|
show_default=False, |
151
|
|
|
), |
152
|
|
|
level: int = Opt.val( |
|
|
|
|
153
|
|
|
r""" |
154
|
|
|
The amount of detail to output. |
155
|
|
|
(1): 1-line description |
156
|
|
|
(2): + params |
157
|
|
|
(3) + full description |
158
|
|
|
(4) + param 1-line descriptions |
159
|
|
|
(5) + param full descriptions |
160
|
|
|
(6) + --hidden --common |
161
|
|
|
""", |
162
|
|
|
default=3, |
163
|
|
|
min=1, |
164
|
|
|
max=6, |
165
|
|
|
), |
166
|
|
|
no_main: bool = Opt.flag(r"Exclude main commands."), |
|
|
|
|
167
|
|
|
no_search: bool = Opt.flag(r"Exclude search commands."), |
|
|
|
|
168
|
|
|
hidden: bool = Opt.flag(r"Show hidden commands."), |
|
|
|
|
169
|
|
|
common: bool = Opt.flag( |
|
|
|
|
170
|
|
|
r""" |
171
|
|
|
Show common arguments and options. |
172
|
|
|
|
173
|
|
|
Includes --log and --stderr, along with path, --key, --to, etc. for searches. |
174
|
|
|
""" |
175
|
|
|
), |
176
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
177
|
|
|
log: Optional[Path] = Ca.log, |
|
|
|
|
178
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
179
|
|
|
): |
180
|
|
|
r""" |
181
|
|
|
Write documentation on commands to a file. |
182
|
|
|
""" |
183
|
|
|
LOG_SETUP(log, stderr) |
184
|
|
|
if level == 5: |
185
|
|
|
hidden = common = True |
186
|
|
|
if width is None and style != "table": |
187
|
|
|
width = 100 |
188
|
|
|
elif width == 0: |
189
|
|
|
width = None |
190
|
|
|
default = f"commands-level{level}.rst" |
191
|
|
|
to = EntryUtils.adjust_filename(to, default, replace=replace) |
192
|
|
|
Documenter( |
193
|
|
|
level=level, |
194
|
|
|
main=not no_main, |
195
|
|
|
search=not no_search, |
196
|
|
|
hidden=hidden, |
197
|
|
|
common=common, |
198
|
|
|
width=width, |
199
|
|
|
).document(_InsertedCommandListSingleton.commands, to, style) |
200
|
|
|
|
201
|
|
|
    @staticmethod
    @entry()
    def fill(
        path: Path = Arg.in_file(
            rf"""
            {DfCliHelp.help(InputCompoundsDf).get_short_text(nl=nl)}
            """,
        ),
        to: Path = Opt.out_path(
            rf"""
            {DfCliHelp.help(IdMatchDf).get_short_text(nl=nl)}

            [default: <path>-ids-<start-time>{DEF_SUFFIX}]
            """
        ),
        no_pubchem: bool = Opt.flag("Do not use PubChem.", "--no-pubchem"),
        no_chembl: bool = Opt.flag("Do not use ChEMBL.", "--no-chembl"),
        replace: bool = Ca.replace,
        log: Optional[Path] = Ca.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        r"""
        Fill in missing IDs from existing compound data.

        The idea is to find a ChEMBL ID, a PubChem ID, and parent-compound InChI/InChI Key.
        Useful to check compound/ID associations before running a search.

        To be filled, each row should have a non-null value for
        "inchikey", "chembl_id", and/or "pubchem_id".
        "inchi" will be used but not to match to PubChem and ChEMBL.

        No existing columns will be dropped or modified.
        Any conflicting column will be renamed to 'origin_<column>'.
        E.g. 'inchikey' will be renamed to 'origin_inchikey'.
        (Do not include a column beginning with 'origin_').

        Final columns (without --no-chembl or --no-pubchem) will include:
        inchikey, inchi, pubchem_id, chembl_id, pubchem_inchi, chembl_inchi,
        pubchem_inchikey, and chembl_inchikey.
        The "inchi" and "inchikey" columns will be the "best" available:
        chembl (preferred), then pubchem, then your source inchikey column.
        In cases where PubChem and ChEMBL differ, an error will be logged.
        You can always check the columns "origin_inchikey" (yours),
        chembl_inchikey, and pubchem_inchikey.

        The steps are:

        - If "chembl_id" or "pubchem_id" is non-null, uses that to find an InChI Key (for each).

        - Otherwise, if only "inchikey" is non-null, uses it to find ChEMBL and PubChem records.

        - Log an error if the inchikeys or inchis differ between PubChem and ChEMBL.

        - Set the final "inchi" and "inchikey" to the best choice,
          falling back to the input inchi and inchikey if they are missing.
        """
        LOG_SETUP(log, stderr)
        # Default output name: same as input with "-filled" inserted before the suffixes.
        default = str(Path(path).with_suffix("")) + "-filled" + "".join(path.suffixes)
        to = EntryUtils.adjust_filename(to, default, replace=replace)
        df = IdMatchDf.read_file(path)
        # Filling queries ChEMBL/PubChem unless disabled by the flags.
        df = CompoundIdFiller(chembl=not no_chembl, pubchem=not no_pubchem).fill(df)
        df.write_file(to)
263
|
|
|
|
264
|
|
|
@staticmethod |
265
|
|
|
@entry() |
266
|
|
|
def cache_data( |
267
|
|
|
path: Path = Ca.in_compound_table, |
|
|
|
|
268
|
|
|
no_pubchem: bool = Opt.flag(r"Do not download data from PubChem", "--no-pubchem"), |
|
|
|
|
269
|
|
|
no_chembl: bool = Opt.flag(r"Do not fetch IDs from ChEMBL", "--no_chembl"), |
|
|
|
|
270
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
271
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
272
|
|
|
) -> None: |
273
|
|
|
r""" |
274
|
|
|
Fetch and cache compound data. |
275
|
|
|
|
276
|
|
|
Useful to freeze data before running a search. |
277
|
|
|
""" |
278
|
|
|
LOG_SETUP(log, stderr) |
279
|
|
|
df = IdMatchDf.read_file(path) |
|
|
|
|
280
|
|
|
CompoundIdFiller(chembl=not no_chembl, pubchem=not no_pubchem).fill(df) |
281
|
|
|
logger.notice(f"Done caching") |
|
|
|
|
282
|
|
|
|
283
|
|
|
    @staticmethod
    @entry()
    def export_taxa(
        taxa: str = Ca.taxa,
        to: Path = Opt.out_path(
            rf"""
            {DfCliHelp.help(TaxonomyDf).get_short_text(nl=nl)}

            [default: ./<taxa>-<datetime>{DEF_SUFFIX}]
            """
        ),
        replace: bool = Ca.replace,
        in_cache: bool = CommonArgs.in_cache,
        log: Optional[Path] = CommonArgs.log,
        stderr: str = CommonArgs.stderr,
    ):
        """
        Export a taxonomic tree to a table.

        Writes a taxonomy of given taxa and their descendants to a table.
        """
        LOG_SETUP(log, stderr)
        # Default filename embeds the requested taxa and the run timestamp.
        default = taxa + "-" + Globals.start_timestamp_filesys + DEF_SUFFIX
        to = EntryUtils.adjust_filename(to, default, replace=replace)
        # NOTE(review): ``.get`` is accessed, not called -- presumably a property
        # that returns the loaded taxonomy; confirm against ArgUtils.get_taxonomy.
        tax = ArgUtils.get_taxonomy(taxa, local_only=in_cache, allow_forbid=False).get
        tax.to_df().write_file(to, mkdirs=True, file_hash=True)
|
|
|
|
309
|
|
|
|
310
|
|
|
@staticmethod |
311
|
|
|
@entry() |
312
|
|
|
def cache_taxa( |
313
|
|
|
taxa: str = Opt.val( |
|
|
|
|
314
|
|
|
r""" |
315
|
|
|
Either "@all" or a comma-separated list of UniProt taxon IDs. |
316
|
|
|
|
317
|
|
|
"@all" is only valid when --replace is passed; |
318
|
|
|
this will regenerate all taxonomy files that are found in the cache. |
319
|
|
|
Aliases "vertebrata", "cellular", and "viral" are permitted. |
320
|
|
|
""", |
321
|
|
|
default="", |
322
|
|
|
), |
323
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
324
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
325
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
326
|
|
|
) -> None: |
327
|
|
|
""" |
328
|
|
|
Prep a new taxonomy file for use in mandos. |
329
|
|
|
|
330
|
|
|
With --replace set, will delete any existing file. |
331
|
|
|
This can be useful to make sure your cached taxonomy is up-to-date before running. |
332
|
|
|
|
333
|
|
|
Downloads and converts a tab-separated file from UniProt. |
334
|
|
|
(To find manually, follow the ``All lower taxonomy nodes`` link and click ``Download``.) |
335
|
|
|
Then applies fixes and reduces the file size, creating a new file alongside. |
336
|
|
|
Puts both the raw data and fixed data in the cache under ``~/.mandos/taxonomy/``. |
337
|
|
|
""" |
338
|
|
|
LOG_SETUP(log, stderr) |
339
|
|
|
if taxa == "@all" and not replace: |
340
|
|
|
raise XValueError(f"Use --replace with '@all'") |
|
|
|
|
341
|
|
|
# we're good to go: |
342
|
|
|
factory = TaxonomyFactories.main() |
343
|
|
|
if taxa == "@all": |
344
|
|
|
taxa = TaxonomyFactories.list_cached_files().keys() |
345
|
|
|
else: |
346
|
|
|
taxa = ArgUtils.parse_taxa_ids(taxa) |
347
|
|
|
factory.rebuild(taxa, replace=replace) |
348
|
|
|
|
349
|
|
|
@staticmethod |
350
|
|
|
@entry() |
351
|
|
|
def cache_g2p( |
352
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
353
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
354
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
355
|
|
|
) -> None: |
356
|
|
|
""" |
357
|
|
|
Caches GuideToPharmacology data. |
358
|
|
|
|
359
|
|
|
With --replace set, will overwrite existing cached data. |
360
|
|
|
Data will generally be stored under``~/.mandos/g2p/``. |
361
|
|
|
""" |
362
|
|
|
LOG_SETUP(log, stderr) |
363
|
|
|
api = CachingG2pApi(SETTINGS.g2p_cache_path) |
364
|
|
|
api.download(force=replace) |
365
|
|
|
|
366
|
|
|
@staticmethod |
367
|
|
|
@entry() |
368
|
|
|
def cache_clear( |
369
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
370
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
371
|
|
|
yes: bool = CommonArgs.yes, |
|
|
|
|
372
|
|
|
) -> None: |
373
|
|
|
""" |
374
|
|
|
Deletes all cached data. |
375
|
|
|
""" |
376
|
|
|
LOG_SETUP(log, stderr) |
377
|
|
|
typer.echo(f"Will recursively delete all of these paths:") |
|
|
|
|
378
|
|
|
for p in SETTINGS.all_cache_paths: |
|
|
|
|
379
|
|
|
typer.echo(f" {p}") |
380
|
|
|
if not yes: |
381
|
|
|
typer.confirm("Delete?", abort=True) |
382
|
|
|
for p in SETTINGS.all_cache_paths: |
|
|
|
|
383
|
|
|
unlink(missing_ok=True) |
|
|
|
|
384
|
|
|
logger.notice("Deleted all cached data") |
385
|
|
|
|
386
|
|
|
@staticmethod |
387
|
|
|
@entry() |
388
|
|
|
def concat( |
|
|
|
|
389
|
|
|
path: Path = Arg.in_dir( |
|
|
|
|
390
|
|
|
rf""" |
391
|
|
|
Directory containing results from a mandos search. |
392
|
|
|
|
393
|
|
|
{DfCliHelp.list_formats().get_short_text()} |
394
|
|
|
""" |
395
|
|
|
), |
396
|
|
|
to: Optional[Path] = Ca.out_annotations_file, |
|
|
|
|
397
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
398
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
399
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
400
|
|
|
) -> None: |
401
|
|
|
r""" |
402
|
|
|
Concatenate Mandos annotation files into one. |
403
|
|
|
|
404
|
|
|
Note that ``:search`` automatically performs this; |
405
|
|
|
this is needed only if you want to combine results from multiple independent searches. |
406
|
|
|
""" |
407
|
|
|
LOG_SETUP(log, stderr) |
408
|
|
|
files_ = [] |
409
|
|
|
for file in path.iterdir(): |
410
|
|
|
ff = FileFormat.from_path_or_none(file) |
|
|
|
|
411
|
|
|
if ff not in [None, FileFormat.json, FileFormat.toml]: |
412
|
|
|
files_.append(file) |
413
|
|
|
logger.notice(f"Looking under {path} (NOT recursive)") |
414
|
|
|
logger.info(f"Found {len(files_)} potential input files: {[f.name for f in files_]}") |
415
|
|
|
files, names, dfs = [], [], [] |
416
|
|
|
for file in files_: |
417
|
|
|
try: |
418
|
|
|
df: HitDf = HitDf.read_file(file, attrs=True) |
|
|
|
|
419
|
|
|
except InvalidDfError: |
420
|
|
|
logger.warning(f"Skipping {file} {Chars.en} not a valid hit list") |
421
|
|
|
logger.opt(exception=True).debug(f"Error reading {file}") |
422
|
|
|
continue |
423
|
|
|
files.append(file) |
424
|
|
|
names.append(FileFormat.strip(file).name) |
425
|
|
|
dfs.append(df) |
426
|
|
|
default = path / (",".join(names) + DEF_SUFFIX) |
427
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
428
|
|
|
logger.notice(f"Concatenated {len(files):,} files") |
429
|
|
|
for f_, df_ in zip(files, dfs): |
|
|
|
|
430
|
|
|
logger.success(f"Included: {f_.name} with {len(df_):,} rows") |
431
|
|
|
df = HitDf.of(dfs, keys=names) |
|
|
|
|
432
|
|
|
counts = {k: v for k, v in df.group_by("universal_id").count().to_dict() if v > 0} |
433
|
|
|
if len(counts) > 0: |
434
|
|
|
logger.error( |
435
|
|
|
f"There are {len(counts):,} universal IDs with duplicates!" |
436
|
|
|
+ f": {StringTools.join_kv(counts)}" |
437
|
|
|
) |
438
|
|
|
logger.notice(f"Wrote {len(df):,} rows to {to}") |
439
|
|
|
df.write_file(to, mkdirs=True, attrs=True, file_hash=True) |
440
|
|
|
|
441
|
|
|
    @staticmethod
    @entry()
    def filter(
        # NOTE(review): this is the INPUT annotations file but reuses
        # Ca.out_annotations_file; Ca.in_annotations_file looks intended -- confirm.
        path: Path = Ca.out_annotations_file,
        by: Optional[Path] = Arg.in_file(
            r"""
            Path to a file containing filters.

            See the docs for more info.
            """
        ),
        to: Optional[Path] = Ca.out_annotations_file,
        replace: bool = Ca.replace,
        log: Optional[Path] = CommonArgs.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        """
        Filters by simple expressions.
        """
        LOG_SETUP(log, stderr)
        # Default output name embeds the filter file's stem: <path>-filter-<by-stem><suffix>.
        default = str(path) + "-filter-" + by.stem + DEF_SUFFIX
        to = EntryUtils.adjust_filename(to, default, replace)
        df = HitDf.read_file(path)
        # Apply the filter expressions and write the surviving rows.
        Filtration.from_file(by).apply(df).write_file(to)
465
|
|
|
|
466
|
|
|
@staticmethod |
467
|
|
|
@entry() |
468
|
|
|
def export_state( |
|
|
|
|
469
|
|
|
path: Path = Ca.in_annotations_file, |
|
|
|
|
470
|
|
|
to: Optional[Path] = Opt.out_path( |
|
|
|
|
471
|
|
|
""" |
472
|
|
|
Path to the output file. |
473
|
|
|
|
474
|
|
|
Valid formats and filename suffixes are .nt and .txt with an optional .gz, .zip, or .xz. |
475
|
|
|
If only a filename suffix is provided, will use that suffix with the default directory. |
476
|
|
|
If no suffix is provided, will interpret the path as a directory and use the default filename. |
|
|
|
|
477
|
|
|
Will fail if the file exists and ``--replace`` is not set. |
478
|
|
|
|
479
|
|
|
[default: <path>-statements.nt] |
480
|
|
|
""" |
481
|
|
|
), |
482
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
483
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
484
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
485
|
|
|
) -> None: |
486
|
|
|
""" |
487
|
|
|
Output simple N-triples statements. |
488
|
|
|
|
489
|
|
|
Each statement is of this form, where the InChI Key refers to the input data: |
490
|
|
|
|
491
|
|
|
`"InChI Key" "predicate" "object" .` |
492
|
|
|
""" |
493
|
|
|
LOG_SETUP(log, stderr) |
494
|
|
|
default = f"{path}-statements.nt" |
495
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
496
|
|
|
hits = HitDf.read_file(path).to_hits() |
497
|
|
|
with to.open() as f: |
|
|
|
|
498
|
|
|
for hit in hits: |
499
|
|
|
f.write(hit.to_triple.n_triples) |
500
|
|
|
|
501
|
|
|
@staticmethod |
502
|
|
|
@entry() |
503
|
|
|
def export_reify( |
|
|
|
|
504
|
|
|
path: Path = Ca.in_annotations_file, |
|
|
|
|
505
|
|
|
to: Optional[Path] = Opt.out_path( |
|
|
|
|
506
|
|
|
r""" |
507
|
|
|
Path to the output file. |
508
|
|
|
|
509
|
|
|
The filename suffix should be either .nt (N-triples) or .ttl (Turtle), |
510
|
|
|
with an optional .gz, .zip, or .xz. |
511
|
|
|
If only a filename suffix is provided, will use that suffix with the default directory. |
512
|
|
|
If no suffix is provided, will interpret the path as a directory but use the default filename. |
|
|
|
|
513
|
|
|
Will fail if the file exists and ``--replace`` is not set. |
514
|
|
|
|
515
|
|
|
[default: <path>-reified.nt] |
516
|
|
|
""" |
517
|
|
|
), |
518
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
519
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
520
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
521
|
|
|
) -> None: |
522
|
|
|
""" |
523
|
|
|
Outputs reified semantic triples. |
524
|
|
|
""" |
525
|
|
|
LOG_SETUP(log, stderr) |
526
|
|
|
default = f"{path}-reified.nt" |
527
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
528
|
|
|
hits = HitDf.read_file(path).to_hits() |
529
|
|
|
with to.open() as f: |
|
|
|
|
530
|
|
|
for triple in Reifier().reify(hits): |
531
|
|
|
f.write(triple.n_triples) |
532
|
|
|
|
533
|
|
|
@staticmethod |
534
|
|
|
@entry() |
535
|
|
|
def export_copy( |
|
|
|
|
536
|
|
|
path: Path = Ca.in_annotations_file, |
|
|
|
|
537
|
|
|
to: Optional[Path] = Opt.out_path( |
|
|
|
|
538
|
|
|
rf""" |
539
|
|
|
Path to the output file. |
540
|
|
|
|
541
|
|
|
{DfCliHelp.list_formats().get_short_text()} |
542
|
|
|
|
543
|
|
|
[default: <path.parent>/export{DEF_SUFFIX}] |
544
|
|
|
""" |
545
|
|
|
), |
546
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
547
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
548
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
549
|
|
|
) -> None: |
550
|
|
|
""" |
551
|
|
|
Copies and/or converts annotation files. |
552
|
|
|
|
553
|
|
|
Example: ``:export:copy --to .snappy`` to highly compress a data set. |
554
|
|
|
""" |
555
|
|
|
LOG_SETUP(log, stderr) |
556
|
|
|
default = path.parent / DEF_SUFFIX |
557
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
558
|
|
|
df = HitDf.read_file(path) |
|
|
|
|
559
|
|
|
df.write_file(to) |
560
|
|
|
|
561
|
|
|
    @staticmethod
    @entry()
    def serve(
        port: int = Opt.val(r"Port to serve on", default=1540),
        db: str = Opt.val("Name of the MySQL database", default="mandos"),
        log: Optional[Path] = Ca.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        r"""
        Start a REST server.

        The connection information is stored in your global settings file.
        """
        # NOTE(review): stub -- only logging is configured; no server is started yet.
        LOG_SETUP(log, stderr)
575
|
|
|
|
576
|
|
|
    @staticmethod
    @entry()
    def export_db(
        path: Path = Ca.in_annotations_file,
        db: str = Opt.val(r"Name of the MySQL database", default="mandos"),
        host: str = Opt.val(
            r"Database hostname (ignored if ``--socket`` is passed", default="127.0.0.1"
        ),
        socket: Optional[str] = Opt.val("Path to a Unix socket (if set, ``--host`` is ignored)"),
        user: Optional[str] = Opt.val("Database username (empty if not set)"),
        password: Optional[str] = Opt.val("Database password (empty if not set)"),
        as_of: Optional[str] = CommonArgs.as_of,
        log: Optional[Path] = CommonArgs.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        r"""
        Export to a relational database.

        Saves data from Mandos search commands to a database for serving via REST.

        See also: ``:serve``.
        """
        # NOTE(review): stub -- only logging is configured; no export is performed yet.
        LOG_SETUP(log, stderr)
599
|
|
|
|
600
|
|
|
    @staticmethod
    @entry()
    def init_db(
        db: str = Opt.val(r"Name of the MySQL database", default="mandos"),
        host: str = Opt.val(
            r"Database hostname (ignored if ``--socket`` is passed", default="127.0.0.1"
        ),
        socket: Optional[str] = Opt.val("Path to a Unix socket (if set, ``--host`` is ignored)"),
        user: Optional[str] = Opt.val("Database username (empty if not set)"),
        password: Optional[str] = Opt.val("Database password (empty if not set)"),
        overwrite: bool = Opt.flag(r"Delete the database if it exists"),
        yes: bool = Ca.yes,
        log: Optional[Path] = CommonArgs.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        r"""
        Initialize an empty database.
        """
        # NOTE(review): stub -- only logging is configured; no database is created yet.
        LOG_SETUP(log, stderr)
619
|
|
|
|
620
|
|
|
|
621
|
|
|
# Public API of this module: only the command-collection class.
__all__ = ["MiscCommands"]
622
|
|
|
|