1
|
|
|
""" |
2
|
|
|
Command-line interface for mandos. |
3
|
|
|
""" |
4
|
|
|
|
5
|
|
|
from __future__ import annotations

import os
import shutil
from pathlib import Path
from typing import Optional

import decorateme
import pandas as pd
import typer
from pocketutils.core.chars import Chars
from pocketutils.core.exceptions import XValueError
from pocketutils.tools.string_tools import StringTools
from typeddfs import CompressionFormat, FileFormat
from typeddfs.df_errors import InvalidDfError
from typeddfs.utils import Utils as TdfUtils
from typeddfs.utils.cli_help import DfCliHelp

from mandos.analysis.filtration import Filtration
from mandos.analysis.reification import Reifier
from mandos.entry.tools.docs import Documenter
from mandos.entry.tools.fillers import CompoundIdFiller, IdMatchDf
from mandos.entry.tools.multi_searches import MultiSearch, SearchConfigDf
from mandos.entry.tools.searchers import InputCompoundsDf
from mandos.entry.utils._arg_utils import Arg, ArgUtils, EntryUtils, Opt
from mandos.entry.utils._common_args import CommonArgs
from mandos.entry.utils._common_args import CommonArgs as Ca
from mandos.model.apis.g2p_api import CachingG2pApi
from mandos.model.hit_dfs import HitDf
from mandos.model.settings import SETTINGS
from mandos.model.taxonomy import TaxonomyDf
from mandos.model.taxonomy_caches import TaxonomyFactories
from mandos.model.utils.globals import Globals
from mandos.model.utils.setup import LOG_SETUP, logger
38
|
|
|
|
39
|
|
|
# Default filename suffix for written tables (e.g. ".csv.gz"), taken from global settings
DEF_SUFFIX = SETTINGS.table_suffix
# Paragraph separator passed to DfCliHelp when embedding generated help in docstrings
nl = "\n\n"
|
|
|
|
41
|
|
|
|
42
|
|
|
|
43
|
|
|
class _InsertedCommandListSingleton:
    # Populated externally (presumably by the CLI wiring at startup -- TODO confirm)
    # with the full command list; read by ``MiscCommands.document``. None until injected.
    commands = None
45
|
|
|
|
46
|
|
|
|
47
|
|
|
@decorateme.auto_utils() |
|
|
|
|
48
|
|
|
class MiscCommands: |
49
|
|
|
@staticmethod |
50
|
|
|
def search( |
|
|
|
|
51
|
|
|
path: Path = Ca.in_compound_table, |
|
|
|
|
52
|
|
|
config: Path = Opt.in_file( |
|
|
|
|
53
|
|
|
r""" |
54
|
|
|
TOML config file. See the docs. |
55
|
|
|
""", |
56
|
|
|
default=..., |
57
|
|
|
), |
58
|
|
|
to: Path = Ca.out_wildcard, |
|
|
|
|
59
|
|
|
log: Optional[Path] = Ca.log, |
|
|
|
|
60
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
61
|
|
|
replace: bool = Opt.flag(r"""Overwrite completed and partially completed searches."""), |
|
|
|
|
62
|
|
|
proceed: bool = Opt.flag(r"""Continue partially completed searches."""), |
|
|
|
|
63
|
|
|
check: bool = Opt.flag("Check and write docs file only; do not run"), |
|
|
|
|
64
|
|
|
) -> None: |
65
|
|
|
r""" |
66
|
|
|
Run multiple searches. |
67
|
|
|
""" |
68
|
|
|
LOG_SETUP(log, stderr) |
69
|
|
|
default = path.parent / ("search-" + Globals.start_time.strftime("%Y-%m-%d")) |
70
|
|
|
# TODO: , suffixes=FileFormat.from_path |
|
|
|
|
71
|
|
|
out_dir, suffix = EntryUtils.adjust_dir_name(to, default) |
72
|
|
|
logger.notice(f"Will write {suffix} to {out_dir}{os.sep}") |
73
|
|
|
config_fmt = FileFormat.from_path(config) |
74
|
|
|
if config_fmt is not FileFormat.toml: |
75
|
|
|
logger.caution(f"Config format is {config_fmt}, not toml; trying anyway") |
76
|
|
|
config = SearchConfigDf.read_file(config) |
77
|
|
|
search = MultiSearch(config, path, out_dir, suffix, replace, proceed, log) |
78
|
|
|
if not check: |
79
|
|
|
search.run() |
80
|
|
|
|
81
|
|
|
@staticmethod |
82
|
|
|
def init( |
83
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
84
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
85
|
|
|
): |
86
|
|
|
""" |
87
|
|
|
Initializes mandos, creating directories, etc. |
88
|
|
|
""" |
89
|
|
|
LOG_SETUP(log, stderr) |
90
|
|
|
Globals.mandos_path.mkdir(exist_ok=True, parents=True) |
91
|
|
|
typer.echo(f"Mandos home dir is {Globals.mandos_path}") |
92
|
|
|
if Globals.settings_path.exists(): |
93
|
|
|
typer.echo(f"Settings found at {Globals.settings_path}") |
94
|
|
|
else: |
95
|
|
|
typer.echo("No settings file found") |
96
|
|
|
typer.echo(f"Log level for stderr is level {logger.current_stderr_log_level}") |
97
|
|
|
|
98
|
|
|
@staticmethod |
99
|
|
|
def list_settings( |
100
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
101
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
102
|
|
|
): |
103
|
|
|
r""" |
104
|
|
|
Write the settings to stdout. |
105
|
|
|
""" |
106
|
|
|
LOG_SETUP(log, stderr) |
107
|
|
|
defaults = SETTINGS.defaults() |
108
|
|
|
width = max((len(k) + 2 + len(v) + 1 for k, v in SETTINGS.items())) |
|
|
|
|
109
|
|
|
for k, v in SETTINGS.as_dict(): |
|
|
|
|
110
|
|
|
msg = f"{k} = {v}".ljust(width) |
111
|
|
|
if v != defaults[k]: |
112
|
|
|
msg += f" (default: {defaults[k]})" |
113
|
|
|
typer.echo(msg) |
114
|
|
|
|
115
|
|
|
@staticmethod |
116
|
|
|
def document( |
|
|
|
|
117
|
|
|
to: Path = Opt.out_file( |
|
|
|
|
118
|
|
|
rf""" |
119
|
|
|
The path to write command documentation to. |
120
|
|
|
|
121
|
|
|
` For machine-readable output: {DfCliHelp.list_formats().get_short_text()}. |
122
|
|
|
For formatted output: .txt or .rst [{"/".join([str(c) for c in CompressionFormat.list_non_empty()])} |
|
|
|
|
123
|
|
|
|
124
|
|
|
[default: "commands-level<level>.rst"] |
125
|
|
|
""" |
126
|
|
|
), |
127
|
|
|
style: str = Opt.val( |
|
|
|
|
128
|
|
|
rf""" |
129
|
|
|
The format for formatted text output. |
130
|
|
|
|
131
|
|
|
Use "table" for machine-readable output, "docs" for long-form reStructuredText, |
132
|
|
|
or {TdfUtils.join_to_str(TdfUtils.table_formats(), last="or")} |
133
|
|
|
""", |
134
|
|
|
"--style", |
135
|
|
|
default="docs", |
136
|
|
|
), |
137
|
|
|
width: int = Opt.val( |
|
|
|
|
138
|
|
|
r""" |
139
|
|
|
Max number of characters for a cell before wrap. |
140
|
|
|
|
141
|
|
|
[default: 0 (none) for machine-readable; 100 for formatted] |
142
|
|
|
""", |
143
|
|
|
default=None, |
144
|
|
|
show_default=False, |
145
|
|
|
), |
146
|
|
|
level: int = Opt.val( |
|
|
|
|
147
|
|
|
r""" |
148
|
|
|
The amount of detail to output. |
149
|
|
|
(1): 1-line description |
150
|
|
|
(2): + params |
151
|
|
|
(3) + full description |
152
|
|
|
(4) + param 1-line descriptions |
153
|
|
|
(5) + param full descriptions |
154
|
|
|
(6) + --hidden --common |
155
|
|
|
""", |
156
|
|
|
default=3, |
157
|
|
|
min=1, |
158
|
|
|
max=6, |
159
|
|
|
), |
160
|
|
|
no_main: bool = Opt.flag(r"Exclude main commands."), |
|
|
|
|
161
|
|
|
no_search: bool = Opt.flag(r"Exclude search commands."), |
|
|
|
|
162
|
|
|
hidden: bool = Opt.flag(r"Show hidden commands."), |
|
|
|
|
163
|
|
|
common: bool = Opt.flag( |
|
|
|
|
164
|
|
|
r""" |
165
|
|
|
Show common arguments and options. |
166
|
|
|
|
167
|
|
|
Includes --log and --stderr, along with path, --key, --to, etc. for searches. |
168
|
|
|
""" |
169
|
|
|
), |
170
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
171
|
|
|
log: Optional[Path] = Ca.log, |
|
|
|
|
172
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
173
|
|
|
): |
174
|
|
|
r""" |
175
|
|
|
Write documentation on commands to a file. |
176
|
|
|
""" |
177
|
|
|
LOG_SETUP(log, stderr) |
178
|
|
|
if level == 5: |
179
|
|
|
hidden = common = True |
180
|
|
|
if width is None and style != "table": |
181
|
|
|
width = 100 |
182
|
|
|
elif width == 0: |
183
|
|
|
width = None |
184
|
|
|
default = f"commands-level{level}.rst" |
185
|
|
|
to = EntryUtils.adjust_filename(to, default, replace=replace) |
186
|
|
|
Documenter( |
187
|
|
|
level=level, |
188
|
|
|
main=not no_main, |
189
|
|
|
search=not no_search, |
190
|
|
|
hidden=hidden, |
191
|
|
|
common=common, |
192
|
|
|
width=width, |
193
|
|
|
).document(_InsertedCommandListSingleton.commands, to, style) |
194
|
|
|
|
195
|
|
|
    @staticmethod
    def fill(
        path: Path = Arg.in_file(
            rf"""
            {DfCliHelp.help(InputCompoundsDf).get_short_text(nl=nl)}
            """,
        ),
        to: Path = Opt.out_path(
            rf"""
            {DfCliHelp.help(IdMatchDf).get_short_text(nl=nl)}

            [default: <path>-ids-<start-time>{DEF_SUFFIX}]
            """
        ),
        no_pubchem: bool = Opt.flag("Do not use PubChem.", "--no-pubchem"),
        no_chembl: bool = Opt.flag("Do not use ChEMBL.", "--no-chembl"),
        replace: bool = Ca.replace,
        log: Optional[Path] = Ca.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        r"""
        Fill in missing IDs from existing compound data.

        The idea is to find a ChEMBL ID, a PubChem ID, and parent-compound InChI/InChI Key.
        Useful to check compound/ID associations before running a search.

        To be filled, each row should have a non-null value for
        "inchikey", "chembl_id", and/or "pubchem_id".
        "inchi" will be used but not to match to PubChem and ChEMBL.

        No existing columns will be dropped or modified.
        Any conflicting column will be renamed to 'origin_<column>'.
        E.g. 'inchikey' will be renamed to 'origin_inchikey'.
        (Do not include a column beginning with 'origin_').

        Final columns (unless --no-chembl or --no-pubchem is set) will include:
        inchikey, inchi, pubchem_id, chembl_id, pubchem_inchi, chembl_inchi,
        pubchem_inchikey, and chembl_inchikey.
        The "inchi" and "inchikey" columns will be the "best" available:
        chembl (preferred), then pubchem, then your source inchikey column.
        In cases where PubChem and ChEMBL differ, an error will be logged.
        You can always check the columns "origin_inchikey" (yours),
        chembl_inchikey, and pubchem_inchikey.

        The steps are:

        - If "chembl_id" or "pubchem_id" is non-null, uses that to find an InChI Key (for each).

        - Otherwise, if only "inchikey" is non-null, uses it to find ChEMBL and PubChem records.

        - Log an error if the inchikeys or inchis differ between PubChem and ChEMBL.

        - Set the final "inchi" and "inchikey" to the best choice,
          falling back to the input inchi and inchikey if they are missing.
        """
        LOG_SETUP(log, stderr)
        # NOTE(review): only the *last* suffix is stripped before "-filled", but *all*
        # suffixes are re-appended, so "a.csv.gz" becomes "a.csv-filled.csv.gz" -- the
        # repeated ".csv" looks unintended; confirm the desired default name
        default = str(Path(path).with_suffix("")) + "-filled" + "".join(path.suffixes)
        to = EntryUtils.adjust_filename(to, default, replace=replace)
        df = IdMatchDf.read_file(path)
        # Queries ChEMBL/PubChem (unless disabled) and returns the augmented table
        df = CompoundIdFiller(chembl=not no_chembl, pubchem=not no_pubchem).fill(df)
        df.write_file(to)
256
|
|
|
|
257
|
|
|
@staticmethod |
258
|
|
|
def cache_data( |
259
|
|
|
path: Path = Ca.in_compound_table, |
|
|
|
|
260
|
|
|
no_pubchem: bool = Opt.flag(r"Do not download data from PubChem", "--no-pubchem"), |
|
|
|
|
261
|
|
|
no_chembl: bool = Opt.flag(r"Do not fetch IDs from ChEMBL", "--no_chembl"), |
|
|
|
|
262
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
263
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
264
|
|
|
) -> None: |
265
|
|
|
r""" |
266
|
|
|
Fetch and cache compound data. |
267
|
|
|
|
268
|
|
|
Useful to freeze data before running a search. |
269
|
|
|
""" |
270
|
|
|
LOG_SETUP(log, stderr) |
271
|
|
|
df = IdMatchDf.read_file(path) |
|
|
|
|
272
|
|
|
CompoundIdFiller(chembl=not no_chembl, pubchem=not no_pubchem).fill(df) |
273
|
|
|
logger.notice(f"Done caching") |
|
|
|
|
274
|
|
|
|
275
|
|
|
@staticmethod |
276
|
|
|
def export_taxa( |
|
|
|
|
277
|
|
|
taxa: str = Ca.taxa, |
|
|
|
|
278
|
|
|
to: Path = Opt.out_path( |
|
|
|
|
279
|
|
|
rf""" |
280
|
|
|
{DfCliHelp.help(TaxonomyDf).get_short_text(nl=nl)} |
281
|
|
|
|
282
|
|
|
[default: ./<taxa>-<datetime>{DEF_SUFFIX}] |
283
|
|
|
""" |
284
|
|
|
), |
285
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
286
|
|
|
in_cache: bool = CommonArgs.in_cache, |
|
|
|
|
287
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
288
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
289
|
|
|
): |
290
|
|
|
""" |
291
|
|
|
Export a taxonomic tree to a table. |
292
|
|
|
|
293
|
|
|
Writes a taxonomy of given taxa and their descendants to a table. |
294
|
|
|
""" |
295
|
|
|
LOG_SETUP(log, stderr) |
296
|
|
|
default = taxa + "-" + Globals.start_timestamp_filesys + DEF_SUFFIX |
297
|
|
|
to = EntryUtils.adjust_filename(to, default, replace=replace) |
298
|
|
|
tax = ArgUtils.get_taxonomy(taxa, local_only=in_cache, allow_forbid=False) |
299
|
|
|
tax.to_df().write_file(to, mkdirs=True, file_hash=True) |
300
|
|
|
|
301
|
|
|
@staticmethod |
302
|
|
|
def cache_taxa( |
303
|
|
|
taxa: str = Opt.val( |
|
|
|
|
304
|
|
|
r""" |
305
|
|
|
Either "@all" or a comma-separated list of UniProt taxon IDs. |
306
|
|
|
|
307
|
|
|
"@all" is only valid when --replace is passed; |
308
|
|
|
this will regenerate all taxonomy files that are found in the cache. |
309
|
|
|
Aliases "vertebrata", "cellular", and "viral" are permitted. |
310
|
|
|
""", |
311
|
|
|
default="", |
312
|
|
|
), |
313
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
314
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
315
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
316
|
|
|
) -> None: |
317
|
|
|
""" |
318
|
|
|
Prep a new taxonomy file for use in mandos. |
319
|
|
|
|
320
|
|
|
With --replace set, will delete any existing file. |
321
|
|
|
This can be useful to make sure your cached taxonomy is up-to-date before running. |
322
|
|
|
|
323
|
|
|
Downloads and converts a tab-separated file from UniProt. |
324
|
|
|
(To find manually, follow the ``All lower taxonomy nodes`` link and click ``Download``.) |
325
|
|
|
Then applies fixes and reduces the file size, creating a new file alongside. |
326
|
|
|
Puts both the raw data and fixed data in the cache under ``~/.mandos/taxonomy/``. |
327
|
|
|
""" |
328
|
|
|
LOG_SETUP(log, stderr) |
329
|
|
|
if taxa == "@all" and not replace: |
330
|
|
|
raise XValueError(f"Use --replace with '@all'") |
|
|
|
|
331
|
|
|
# we're good to go: |
332
|
|
|
factory = TaxonomyFactories.main() |
333
|
|
|
if taxa == "@all": |
334
|
|
|
taxa = TaxonomyFactories.list_cached_files().keys() |
335
|
|
|
else: |
336
|
|
|
taxa = ArgUtils.parse_taxa_ids(taxa) |
337
|
|
|
factory.rebuild(taxa, replace=replace) |
338
|
|
|
|
339
|
|
|
@staticmethod |
340
|
|
|
def cache_g2p( |
341
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
342
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
343
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
344
|
|
|
) -> None: |
345
|
|
|
""" |
346
|
|
|
Caches GuideToPharmacology data. |
347
|
|
|
|
348
|
|
|
With --replace set, will overwrite existing cached data. |
349
|
|
|
Data will generally be stored under``~/.mandos/g2p/``. |
350
|
|
|
""" |
351
|
|
|
LOG_SETUP(log, stderr) |
352
|
|
|
api = CachingG2pApi(SETTINGS.g2p_cache_path) |
353
|
|
|
api.download(force=replace) |
354
|
|
|
|
355
|
|
|
@staticmethod |
356
|
|
|
def cache_clear( |
357
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
358
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
359
|
|
|
yes: bool = CommonArgs.yes, |
|
|
|
|
360
|
|
|
) -> None: |
361
|
|
|
""" |
362
|
|
|
Deletes all cached data. |
363
|
|
|
""" |
364
|
|
|
LOG_SETUP(log, stderr) |
365
|
|
|
typer.echo(f"Will recursively delete all of these paths:") |
|
|
|
|
366
|
|
|
for p in SETTINGS.all_cache_paths: |
|
|
|
|
367
|
|
|
typer.echo(f" {p}") |
368
|
|
|
if not yes: |
369
|
|
|
typer.confirm("Delete?", abort=True) |
370
|
|
|
for p in SETTINGS.all_cache_paths: |
|
|
|
|
371
|
|
|
p.unlink(missing_ok=True) |
372
|
|
|
logger.notice("Deleted all cached data") |
373
|
|
|
|
374
|
|
|
@staticmethod |
375
|
|
|
def concat( |
|
|
|
|
376
|
|
|
path: Path = Arg.in_dir( |
|
|
|
|
377
|
|
|
rf""" |
378
|
|
|
Directory containing results from a mandos search. |
379
|
|
|
|
380
|
|
|
{DfCliHelp.list_formats().get_short_text()} |
381
|
|
|
""" |
382
|
|
|
), |
383
|
|
|
to: Optional[Path] = Ca.out_annotations_file, |
|
|
|
|
384
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
385
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
386
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
387
|
|
|
) -> None: |
388
|
|
|
r""" |
389
|
|
|
Concatenate Mandos annotation files into one. |
390
|
|
|
|
391
|
|
|
Note that ``:search`` automatically performs this; |
392
|
|
|
this is needed only if you want to combine results from multiple independent searches. |
393
|
|
|
""" |
394
|
|
|
LOG_SETUP(log, stderr) |
395
|
|
|
files_ = [] |
396
|
|
|
for file in path.iterdir(): |
397
|
|
|
ff = FileFormat.from_path_or_none(file) |
|
|
|
|
398
|
|
|
if ff not in [None, FileFormat.json, FileFormat.toml] and not ff.name.endswith( |
399
|
|
|
".doc.tsv" |
|
|
|
|
400
|
|
|
): |
401
|
|
|
files_.append(file) |
402
|
|
|
logger.info(f"Looking under {path} (NOT recursive)") |
403
|
|
|
logger.debug(f"Found {len(files_)} potential input files: {[f.name for f in files_]}") |
404
|
|
|
files, dfs = [], [] |
405
|
|
|
for file in files_: |
406
|
|
|
try: |
407
|
|
|
df: HitDf = HitDf.read_file(file, attrs=True) |
|
|
|
|
408
|
|
|
except InvalidDfError: |
409
|
|
|
logger.warning(f"Skipping {file} {Chars.en} not a valid hit list") |
410
|
|
|
logger.debug(f"Error reading {file}", exc_info=True) |
411
|
|
|
continue |
412
|
|
|
df = df.set_attrs({file.name: df.attrs}) |
|
|
|
|
413
|
|
|
dfs.append(df) |
414
|
|
|
files.append(file) |
415
|
|
|
names = [CompressionFormat.strip_suffix(f).name for f in files] |
416
|
|
|
default = path / (",".join(names) + DEF_SUFFIX) |
417
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
418
|
|
|
logger.notice(f"Concatenated {len(files)} files") |
419
|
|
|
for f_, df_ in zip(files, dfs): |
|
|
|
|
420
|
|
|
logger.success(f"Included: {f_.name} with {len(df_)} rows") |
421
|
|
|
df = HitDf.of(pd.concat(dfs)) |
|
|
|
|
422
|
|
|
counts = {k: v for k, v in df.group_by("universal_id").count().to_dict() if v > 0} |
423
|
|
|
if len(counts) > 0: |
424
|
|
|
logger.error( |
425
|
|
|
f"There are {len(counts)} universal IDs with duplicates!" |
426
|
|
|
+ f": {StringTools.join_kv(counts)}" |
427
|
|
|
) |
428
|
|
|
logger.notice(f"Wrote {len(df)} rows to {to}") |
429
|
|
|
df.write_file(to) |
430
|
|
|
|
431
|
|
|
@staticmethod |
432
|
|
|
def filter( |
|
|
|
|
433
|
|
|
path: Path = Ca.out_annotations_file, |
|
|
|
|
434
|
|
|
by: Optional[Path] = Arg.in_file( |
|
|
|
|
435
|
|
|
r""" |
436
|
|
|
Path to a file containing filters. |
437
|
|
|
|
438
|
|
|
See the docs for more info. |
439
|
|
|
""" |
440
|
|
|
), |
441
|
|
|
to: Optional[Path] = Ca.out_annotations_file, |
|
|
|
|
442
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
443
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
444
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
445
|
|
|
) -> None: |
446
|
|
|
""" |
447
|
|
|
Filters by simple expressions. |
448
|
|
|
""" |
449
|
|
|
LOG_SETUP(log, stderr) |
450
|
|
|
default = str(path) + "-filter-" + by.stem + DEF_SUFFIX |
451
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
452
|
|
|
df = HitDf.read_file(path) |
|
|
|
|
453
|
|
|
Filtration.from_file(by).apply(df).write_file(to) |
454
|
|
|
|
455
|
|
|
@staticmethod |
456
|
|
|
def export_state( |
|
|
|
|
457
|
|
|
path: Path = Ca.in_annotations_file, |
|
|
|
|
458
|
|
|
to: Optional[Path] = Opt.out_path( |
|
|
|
|
459
|
|
|
""" |
460
|
|
|
Path to the output file. |
461
|
|
|
|
462
|
|
|
Valid formats and filename suffixes are .nt and .txt with an optional .gz, .zip, or .xz. |
463
|
|
|
If only a filename suffix is provided, will use that suffix with the default directory. |
464
|
|
|
If no suffix is provided, will interpret the path as a directory and use the default filename. |
|
|
|
|
465
|
|
|
Will fail if the file exists and ``--replace`` is not set. |
466
|
|
|
|
467
|
|
|
[default: <path>-statements.nt] |
468
|
|
|
""" |
469
|
|
|
), |
470
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
471
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
472
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
473
|
|
|
) -> None: |
474
|
|
|
""" |
475
|
|
|
Output simple N-triples statements. |
476
|
|
|
|
477
|
|
|
Each statement is of this form, where the InChI Key refers to the input data: |
478
|
|
|
|
479
|
|
|
`"InChI Key" "predicate" "object" .` |
480
|
|
|
""" |
481
|
|
|
LOG_SETUP(log, stderr) |
482
|
|
|
default = f"{path}-statements.nt" |
483
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
484
|
|
|
hits = HitDf.read_file(path).to_hits() |
485
|
|
|
with to.open() as f: |
|
|
|
|
486
|
|
|
for hit in hits: |
487
|
|
|
f.write(hit.to_triple.n_triples) |
488
|
|
|
|
489
|
|
|
@staticmethod |
490
|
|
|
def export_reify( |
|
|
|
|
491
|
|
|
path: Path = Ca.in_annotations_file, |
|
|
|
|
492
|
|
|
to: Optional[Path] = Opt.out_path( |
|
|
|
|
493
|
|
|
r""" |
494
|
|
|
Path to the output file. |
495
|
|
|
|
496
|
|
|
The filename suffix should be either .nt (N-triples) or .ttl (Turtle), |
497
|
|
|
with an optional .gz, .zip, or .xz. |
498
|
|
|
If only a filename suffix is provided, will use that suffix with the default directory. |
499
|
|
|
If no suffix is provided, will interpret the path as a directory but use the default filename. |
|
|
|
|
500
|
|
|
Will fail if the file exists and ``--replace`` is not set. |
501
|
|
|
|
502
|
|
|
[default: <path>-reified.nt] |
503
|
|
|
""" |
504
|
|
|
), |
505
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
506
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
507
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
508
|
|
|
) -> None: |
509
|
|
|
""" |
510
|
|
|
Outputs reified semantic triples. |
511
|
|
|
""" |
512
|
|
|
LOG_SETUP(log, stderr) |
513
|
|
|
default = f"{path}-reified.nt" |
514
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
515
|
|
|
hits = HitDf.read_file(path).to_hits() |
516
|
|
|
with to.open() as f: |
|
|
|
|
517
|
|
|
for triple in Reifier().reify(hits): |
518
|
|
|
f.write(triple.n_triples) |
519
|
|
|
|
520
|
|
|
@staticmethod |
521
|
|
|
def export_copy( |
|
|
|
|
522
|
|
|
path: Path = Ca.in_annotations_file, |
|
|
|
|
523
|
|
|
to: Optional[Path] = Opt.out_path( |
|
|
|
|
524
|
|
|
rf""" |
525
|
|
|
Path to the output file. |
526
|
|
|
|
527
|
|
|
{DfCliHelp.list_formats().get_short_text()} |
528
|
|
|
|
529
|
|
|
[default: <path.parent>/export{DEF_SUFFIX}] |
530
|
|
|
""" |
531
|
|
|
), |
532
|
|
|
replace: bool = Ca.replace, |
|
|
|
|
533
|
|
|
log: Optional[Path] = CommonArgs.log, |
|
|
|
|
534
|
|
|
stderr: str = CommonArgs.stderr, |
|
|
|
|
535
|
|
|
) -> None: |
536
|
|
|
""" |
537
|
|
|
Copies and/or converts annotation files. |
538
|
|
|
|
539
|
|
|
Example: ``:export:copy --to .snappy`` to highly compress a data set. |
540
|
|
|
""" |
541
|
|
|
LOG_SETUP(log, stderr) |
542
|
|
|
default = path.parent / DEF_SUFFIX |
543
|
|
|
to = EntryUtils.adjust_filename(to, default, replace) |
544
|
|
|
df = HitDf.read_file(path) |
|
|
|
|
545
|
|
|
df.write_file(to) |
546
|
|
|
|
547
|
|
|
    @staticmethod
    def serve(
        port: int = Opt.val(r"Port to serve on", default=1540),
        db: str = Opt.val("Name of the MySQL database", default="mandos"),
        log: Optional[Path] = Ca.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        r"""
        Start a REST server.

        The connection information is stored in your global settings file.
        """
        LOG_SETUP(log, stderr)
        # NOTE(review): no server is actually started here -- the body only configures
        # logging; ``port`` and ``db`` are unused. Presumably the implementation is
        # pending; confirm intent before relying on this command.
560
|
|
|
|
561
|
|
|
    @staticmethod
    def export_db(
        path: Path = Ca.in_annotations_file,
        db: str = Opt.val(r"Name of the MySQL database", default="mandos"),
        host: str = Opt.val(
            r"Database hostname (ignored if ``--socket`` is passed", default="127.0.0.1"
        ),
        socket: Optional[str] = Opt.val("Path to a Unix socket (if set, ``--host`` is ignored)"),
        user: Optional[str] = Opt.val("Database username (empty if not set)"),
        password: Optional[str] = Opt.val("Database password (empty if not set)"),
        as_of: Optional[str] = CommonArgs.as_of,
        log: Optional[Path] = CommonArgs.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        r"""
        Export to a relational database.

        Saves data from Mandos search commands to a database for serving via REST.

        See also: ``:serve``.
        """
        LOG_SETUP(log, stderr)
        # NOTE(review): no export is performed -- the body only configures logging;
        # every connection parameter is unused. Presumably unimplemented; confirm.
583
|
|
|
|
584
|
|
|
    @staticmethod
    def init_db(
        db: str = Opt.val(r"Name of the MySQL database", default="mandos"),
        host: str = Opt.val(
            r"Database hostname (ignored if ``--socket`` is passed", default="127.0.0.1"
        ),
        socket: Optional[str] = Opt.val("Path to a Unix socket (if set, ``--host`` is ignored)"),
        user: Optional[str] = Opt.val("Database username (empty if not set)"),
        password: Optional[str] = Opt.val("Database password (empty if not set)"),
        overwrite: bool = Opt.flag(r"Delete the database if it exists"),
        yes: bool = Ca.yes,
        log: Optional[Path] = CommonArgs.log,
        stderr: str = CommonArgs.stderr,
    ) -> None:
        r"""
        Initialize an empty database.
        """
        LOG_SETUP(log, stderr)
        # NOTE(review): no database is created -- the body only configures logging;
        # all parameters (including --overwrite and --yes) are unused. Presumably
        # unimplemented; confirm.
602
|
|
|
|
603
|
|
|
|
604
|
|
|
# Public API: only the command collection class is exported
__all__ = ["MiscCommands"]
605
|
|
|
|