|
1
|
|
|
import gzip |
|
|
|
|
|
|
2
|
|
|
import hashlib |
|
3
|
|
|
import importlib.metadata |
|
|
|
|
|
|
4
|
|
|
import json |
|
5
|
|
|
import locale |
|
6
|
|
|
import logging |
|
7
|
|
|
import os |
|
8
|
|
|
import platform |
|
9
|
|
|
import shutil |
|
10
|
|
|
import socket |
|
11
|
|
|
import stat |
|
12
|
|
|
import struct |
|
13
|
|
|
import sys |
|
14
|
|
|
import tempfile |
|
15
|
|
|
import warnings |
|
16
|
|
|
from contextlib import contextmanager |
|
17
|
|
|
from datetime import datetime, timezone |
|
18
|
|
|
from getpass import getuser |
|
19
|
|
|
from pathlib import Path, PurePath |
|
20
|
|
|
from typing import Any, Generator, Iterable, Mapping, Optional, Sequence, SupportsBytes, Type, Union |
|
21
|
|
|
|
|
22
|
|
|
import numpy as np |
|
|
|
|
|
|
23
|
|
|
import pandas as pd |
|
|
|
|
|
|
24
|
|
|
import regex |
|
|
|
|
|
|
25
|
|
|
|
|
26
|
|
|
from pocketutils.core import JsonEncoder |
|
27
|
|
|
from pocketutils.core.exceptions import ( |
|
28
|
|
|
AlreadyUsedError, |
|
29
|
|
|
ContradictoryRequestError, |
|
30
|
|
|
FileDoesNotExistError, |
|
31
|
|
|
ParsingError, |
|
32
|
|
|
) |
|
33
|
|
|
from pocketutils.core.hashers import * |
|
|
|
|
|
|
34
|
|
|
from pocketutils.core.input_output import OpenMode, PathLike, Writeable |
|
35
|
|
|
from pocketutils.core.web_resource import * |
|
|
|
|
|
|
36
|
|
|
from pocketutils.tools.base_tools import BaseTools |
|
37
|
|
|
from pocketutils.tools.path_tools import PathTools |
|
38
|
|
|
|
|
39
|
|
|
# Module-level logger shared by all pocketutils tools.
logger = logging.getLogger("pocketutils")
# gzip compression level used by open_file (9 = maximum compression).
COMPRESS_LEVEL = 9
# Text encoding used by open_file for all non-binary modes.
ENCODING = "utf8"
|
42
|
|
|
|
|
43
|
|
|
# Optional dependency: jsonpickle (plus its numpy/pandas extensions) enables
# round-tripping richer objects through JSON. On ImportError all three names
# are set to None so callers can feature-test before using them.
try:
    import jsonpickle
    import jsonpickle.ext.numpy as jsonpickle_numpy

    jsonpickle_numpy.register_handlers()
    import jsonpickle.ext.pandas as jsonpickle_pandas

    jsonpickle_pandas.register_handlers()
except ImportError:
    # zero them all out
    jsonpickle, jsonpickle_numpy, jsonpickle_pandas = None, None, None
    logger.debug("Could not import jsonpickle", exc_info=True)
|
55
|
|
|
|
|
56
|
|
|
|
|
57
|
|
|
# Prefer defusedxml, which defends against XML attacks (entity expansion,
# external entities); fall back to the stdlib parser with a warning.
try:
    from defusedxml import ElementTree
except ImportError:
    logger.warning("Could not import defusedxml; falling back to xml")
    from xml.etree import ElementTree
|
62
|
|
|
|
|
63
|
|
|
|
|
64
|
|
|
class FilesysTools(BaseTools):
    """
    Tools for file/directory creation, reading and writing simple file formats,
    hashing, deleting/trashing, and temporary files/directories.

    .. caution::
        Some functions may be insecure.
    """
|
71
|
|
|
|
|
72
|
|
|
@classmethod |
|
73
|
|
|
def new_hasher(cls, algorithm: str = "sha1") -> Hasher: |
|
|
|
|
|
|
74
|
|
|
return Hasher(algorithm) |
|
75
|
|
|
|
|
76
|
|
|
@classmethod |
|
77
|
|
|
def new_webresource( |
|
|
|
|
|
|
78
|
|
|
cls, url: str, archive_member: Optional[str], local_path: PathLike |
|
|
|
|
|
|
79
|
|
|
) -> WebResource: |
|
80
|
|
|
return WebResource(url, archive_member, local_path) |
|
81
|
|
|
|
|
82
|
|
|
@classmethod |
|
83
|
|
|
def is_linux(cls) -> bool: |
|
|
|
|
|
|
84
|
|
|
return sys.platform == "linux" |
|
85
|
|
|
|
|
86
|
|
|
@classmethod |
|
87
|
|
|
def is_windows(cls) -> bool: |
|
|
|
|
|
|
88
|
|
|
return sys.platform == "win32" |
|
89
|
|
|
|
|
90
|
|
|
@classmethod |
|
91
|
|
|
def is_macos(cls) -> bool: |
|
|
|
|
|
|
92
|
|
|
return sys.platform == "darwin" |
|
93
|
|
|
|
|
94
|
|
|
@classmethod |
|
95
|
|
|
def get_env_info(cls, *, include_insecure: bool = False) -> Mapping[str, str]: |
|
96
|
|
|
""" |
|
97
|
|
|
Get a dictionary of some system and environment information. |
|
98
|
|
|
Includes os_release, hostname, username, mem + disk, shell, etc. |
|
99
|
|
|
|
|
100
|
|
|
Args: |
|
101
|
|
|
include_insecure: Include data like hostname and username |
|
102
|
|
|
|
|
103
|
|
|
.. caution :: |
|
104
|
|
|
Even with ``include_insecure=False``, avoid exposing this data to untrusted |
|
105
|
|
|
sources. For example, this includes the specific OS release, which could |
|
106
|
|
|
be used in attack. |
|
107
|
|
|
""" |
|
108
|
|
|
try: |
|
109
|
|
|
import psutil |
|
|
|
|
|
|
110
|
|
|
except ImportError: |
|
111
|
|
|
psutil = None |
|
112
|
|
|
logger.warning("psutil is not installed, so cannot get extended env info") |
|
113
|
|
|
|
|
114
|
|
|
now = datetime.now(timezone.utc).astimezone().isoformat() |
|
115
|
|
|
uname = platform.uname() |
|
116
|
|
|
language_code, encoding = locale.getlocale() |
|
|
|
|
|
|
117
|
|
|
# build up this dict: |
|
118
|
|
|
data = {} |
|
119
|
|
|
|
|
120
|
|
|
def _try(os_fn, k: str, *args): |
|
121
|
|
|
if any((a is None for a in args)): |
|
|
|
|
|
|
122
|
|
|
return None |
|
123
|
|
|
try: |
|
124
|
|
|
v = os_fn(*args) |
|
|
|
|
|
|
125
|
|
|
data[k] = v |
|
126
|
|
|
return v |
|
127
|
|
|
except (OSError, ImportError): |
|
128
|
|
|
return None |
|
129
|
|
|
|
|
130
|
|
|
data.update( |
|
131
|
|
|
dict( |
|
132
|
|
|
platform=platform.platform(), |
|
133
|
|
|
python=".".join(str(i) for i in sys.version_info), |
|
134
|
|
|
os=uname.system, |
|
135
|
|
|
os_release=uname.release, |
|
136
|
|
|
os_version=uname.version, |
|
137
|
|
|
machine=uname.machine, |
|
138
|
|
|
byte_order=sys.byteorder, |
|
139
|
|
|
processor=uname.processor, |
|
140
|
|
|
build=sys.version, |
|
141
|
|
|
python_bits=8 * struct.calcsize("P"), |
|
142
|
|
|
environment_info_capture_datetime=now, |
|
143
|
|
|
encoding=encoding, |
|
144
|
|
|
locale=locale, |
|
145
|
|
|
recursion_limit=sys.getrecursionlimit(), |
|
146
|
|
|
float_info=sys.float_info, |
|
147
|
|
|
int_info=sys.int_info, |
|
148
|
|
|
flags=sys.flags, |
|
149
|
|
|
hash_info=sys.hash_info, |
|
150
|
|
|
implementation=sys.implementation, |
|
151
|
|
|
switch_interval=sys.getswitchinterval(), |
|
152
|
|
|
filesystem_encoding=sys.getfilesystemencoding(), |
|
153
|
|
|
) |
|
154
|
|
|
) |
|
155
|
|
|
if "LANG" in os.environ: |
|
156
|
|
|
data["lang"] = os.environ["LANG"] |
|
157
|
|
|
if "SHELL" in os.environ: |
|
158
|
|
|
data["shell"] = os.environ["SHELL"] |
|
159
|
|
|
if "LC_ALL" in os.environ: |
|
160
|
|
|
data["lc_all"] = os.environ["LC_ALL"] |
|
161
|
|
|
if hasattr(sys, "winver"): |
|
162
|
|
|
data["win_ver"] = (sys.getwindowsversion(),) |
|
|
|
|
|
|
163
|
|
|
if hasattr(sys, "macver"): |
|
164
|
|
|
data["mac_ver"] = (sys.mac_ver(),) |
|
|
|
|
|
|
165
|
|
|
if hasattr(sys, "linux_distribution"): |
|
166
|
|
|
data["linux_distribution"] = (sys.linux_distribution(),) |
|
|
|
|
|
|
167
|
|
|
if include_insecure: |
|
168
|
|
|
_try(getuser, "username") |
|
169
|
|
|
_try(os.getlogin, "login") |
|
170
|
|
|
_try(socket.gethostname, "hostname") |
|
171
|
|
|
_try(os.getcwd, "cwd") |
|
172
|
|
|
pid = _try(os.getpid, "pid") |
|
173
|
|
|
ppid = _try(os.getppid, "parent_pid") |
|
174
|
|
|
if hasattr(os, "getpriority"): |
|
175
|
|
|
_try(os.getpriority, "priority", os.PRIO_PROCESS, pid) |
|
176
|
|
|
_try(os.getpriority, "parent_priority", os.PRIO_PROCESS, ppid) |
|
177
|
|
|
if psutil is not None: |
|
178
|
|
|
data.update( |
|
179
|
|
|
dict( |
|
180
|
|
|
disk_used=psutil.disk_usage(".").used, |
|
181
|
|
|
disk_free=psutil.disk_usage(".").free, |
|
182
|
|
|
memory_used=psutil.virtual_memory().used, |
|
183
|
|
|
memory_available=psutil.virtual_memory().available, |
|
184
|
|
|
) |
|
185
|
|
|
) |
|
186
|
|
|
return {k: str(v) for k, v in dict(data).items()} |
|
187
|
|
|
|
|
188
|
|
|
@classmethod |
|
189
|
|
|
def list_package_versions(cls) -> Mapping[str, str]: |
|
190
|
|
|
""" |
|
191
|
|
|
Returns installed packages and their version numbers. |
|
192
|
|
|
Reliable; uses importlib (Python 3.8+). |
|
193
|
|
|
""" |
|
194
|
|
|
# calling .metadata reads the metadata file |
|
195
|
|
|
# and .version is an alias to .metadata["version"] |
|
196
|
|
|
# so make sure to only read once |
|
197
|
|
|
# TODO: get installed extras? |
|
|
|
|
|
|
198
|
|
|
dct = {} |
|
199
|
|
|
for d in importlib.metadata.distributions(): |
|
|
|
|
|
|
200
|
|
|
meta = d.metadata |
|
201
|
|
|
dct[meta["name"]] = meta["version"] |
|
202
|
|
|
return dct |
|
203
|
|
|
|
|
204
|
|
|
    @classmethod
    def delete_surefire(cls, path: PathLike) -> Optional[Exception]:
        """
        Deletes files or directories cross-platform, but working around multiple issues in Windows.

        Args:
            path: File or directory to delete permanently

        Returns:
            None, or an Exception for minor warnings (a failed chmod)

        Raises:
            IOError: If it can't delete
        """
        # we need this because of Windows
        path = Path(path)
        logger.debug(f"Permanently deleting {path} ...")
        chmod_err = None
        try:
            # Grant ourselves full permissions first so deletion can proceed.
            os.chmod(str(path), stat.S_IRWXU)
        except Exception as e:
            chmod_err = e
        # another reason for returning exception:
        # We don't want to interrupt the current line being printed like in slow_delete
        if path.is_dir():
            shutil.rmtree(str(path), ignore_errors=True)  # ignore_errors because of Windows
            try:
                path.unlink(missing_ok=True)  # again, because of Windows
            except IOError:
                pass  # almost definitely because it doesn't exist
        else:
            path.unlink(missing_ok=True)
        logger.debug(f"Permanently deleted {path}")
        return chmod_err
|
235
|
|
|
|
|
236
|
|
|
@classmethod |
|
237
|
|
|
def trash(cls, path: PathLike, trash_dir: Optional[PathLike] = None) -> None: |
|
238
|
|
|
""" |
|
239
|
|
|
Trash a file or directory. |
|
240
|
|
|
|
|
241
|
|
|
Args: |
|
242
|
|
|
path: The path to move to the trash |
|
243
|
|
|
trash_dir: If None, uses :meth:`pocketutils.tools.path_tools.PathTools.guess_trash` |
|
244
|
|
|
""" |
|
245
|
|
|
if trash_dir is None: |
|
246
|
|
|
trash_dir = PathTools.guess_trash() |
|
247
|
|
|
logger.debug(f"Trashing {path} to {trash_dir} ...") |
|
|
|
|
|
|
248
|
|
|
shutil.move(str(path), str(trash_dir)) |
|
249
|
|
|
logger.debug(f"Trashed {path} to {trash_dir}") |
|
|
|
|
|
|
250
|
|
|
|
|
251
|
|
|
@classmethod |
|
252
|
|
|
def try_cleanup(cls, path: Path, *, bound: Type[Exception] = PermissionError) -> None: |
|
253
|
|
|
""" |
|
254
|
|
|
Try to delete a file (probably temp file), if it exists, and log any PermissionError. |
|
255
|
|
|
""" |
|
256
|
|
|
path = Path(path) |
|
257
|
|
|
# noinspection PyBroadException |
|
258
|
|
|
try: |
|
259
|
|
|
path.unlink(missing_ok=True) |
|
260
|
|
|
except bound: |
|
261
|
|
|
logger.error(f"Permission error preventing deleting {path}") |
|
|
|
|
|
|
262
|
|
|
|
|
263
|
|
|
@classmethod |
|
264
|
|
|
def read_lines_file(cls, path: PathLike, ignore_comments: bool = False) -> Sequence[str]: |
|
265
|
|
|
""" |
|
266
|
|
|
Returns a list of lines in the file. |
|
267
|
|
|
Optionally skips lines starting with '#' or that only contain whitespace. |
|
268
|
|
|
""" |
|
269
|
|
|
lines = [] |
|
270
|
|
|
with FilesysTools.open_file(path, "r") as f: |
|
|
|
|
|
|
271
|
|
|
for line in f.readlines(): |
|
272
|
|
|
line = line.strip() |
|
273
|
|
|
if not ignore_comments or not line.startswith("#") and not len(line.strip()) == 0: |
|
274
|
|
|
lines.append(line) |
|
275
|
|
|
return lines |
|
276
|
|
|
|
|
277
|
|
|
@classmethod |
|
278
|
|
|
def read_properties_file(cls, path: PathLike) -> Mapping[str, str]: |
|
279
|
|
|
""" |
|
280
|
|
|
Reads a .properties file. |
|
281
|
|
|
A list of lines with key=value pairs (with an equals sign). |
|
282
|
|
|
Lines beginning with # are ignored. |
|
283
|
|
|
Each line must contain exactly 1 equals sign. |
|
284
|
|
|
|
|
285
|
|
|
Args: |
|
286
|
|
|
path: Read the file at this local path |
|
287
|
|
|
|
|
288
|
|
|
Returns: |
|
289
|
|
|
A dict mapping keys to values, both with surrounding whitespace stripped |
|
290
|
|
|
""" |
|
291
|
|
|
dct = {} |
|
292
|
|
|
with FilesysTools.open_file(path, "r") as f: |
|
|
|
|
|
|
293
|
|
|
for i, line in enumerate(f.readlines()): |
|
294
|
|
|
line = line.strip() |
|
295
|
|
|
if len(line) == 0 or line.startswith("#"): |
|
296
|
|
|
continue |
|
297
|
|
|
if line.count("=") != 1: |
|
298
|
|
|
raise ParsingError(f"Bad line {i} in {path}", resource=path) |
|
299
|
|
|
k, v = line.split("=") |
|
|
|
|
|
|
300
|
|
|
k, v = k.strip(), v.strip() |
|
|
|
|
|
|
301
|
|
|
if k in dct: |
|
302
|
|
|
raise AlreadyUsedError(f"Duplicate property {k} (line {i})", key=k) |
|
303
|
|
|
dct[k] = v |
|
304
|
|
|
return dct |
|
305
|
|
|
|
|
306
|
|
|
@classmethod |
|
307
|
|
|
def write_properties_file( |
|
|
|
|
|
|
308
|
|
|
cls, properties: Mapping[Any, Any], path: Union[str, PurePath], mode: str = "o" |
|
|
|
|
|
|
309
|
|
|
): |
|
310
|
|
|
if not OpenMode(mode).write: |
|
311
|
|
|
raise ContradictoryRequestError(f"Cannot write text to {path} in mode {mode}") |
|
312
|
|
|
with FilesysTools.open_file(path, mode) as f: |
|
|
|
|
|
|
313
|
|
|
bads = [] |
|
314
|
|
|
for k, v in properties.items(): |
|
|
|
|
|
|
315
|
|
|
if "=" in k or "=" in v or "\n" in k or "\n" in v: |
|
316
|
|
|
bads.append(k) |
|
317
|
|
|
f.write( |
|
318
|
|
|
str(k).replace("=", "--").replace("\n", "\\n") |
|
319
|
|
|
+ "=" |
|
320
|
|
|
+ str(v).replace("=", "--").replace("\n", "\\n") |
|
321
|
|
|
+ "\n" |
|
322
|
|
|
) |
|
323
|
|
|
if 0 < len(bads) <= 10: |
|
324
|
|
|
logger.warning( |
|
|
|
|
|
|
325
|
|
|
"At least one properties entry contains an equals sign or newline (\\n)." |
|
326
|
|
|
f"These were escaped: {', '.join(bads)}" |
|
327
|
|
|
) |
|
328
|
|
|
elif len(bads) > 0: |
|
329
|
|
|
logger.warning( |
|
330
|
|
|
"At least one properties entry contains an equals sign or newline (\\n)," |
|
331
|
|
|
"which were escaped." |
|
332
|
|
|
) |
|
333
|
|
|
|
|
334
|
|
|
@classmethod |
|
335
|
|
|
def save_json(cls, data: Any, path: PathLike, mode: str = "w") -> None: |
|
|
|
|
|
|
336
|
|
|
warnings.warn("save_json will be removed; use orjson instead", DeprecationWarning) |
|
337
|
|
|
with cls.open_file(path, mode) as f: |
|
|
|
|
|
|
338
|
|
|
json.dump(data, f, ensure_ascii=False, cls=JsonEncoder) |
|
339
|
|
|
|
|
340
|
|
|
@classmethod |
|
341
|
|
|
def load_json(cls, path: PathLike): |
|
|
|
|
|
|
342
|
|
|
warnings.warn("save_json will be removed; use orjson instead", DeprecationWarning) |
|
343
|
|
|
return json.loads(Path(path).read_text(encoding="utf8")) |
|
344
|
|
|
|
|
345
|
|
|
@classmethod |
|
346
|
|
|
def read_any( |
|
|
|
|
|
|
347
|
|
|
cls, path: PathLike |
|
|
|
|
|
|
348
|
|
|
) -> Union[ |
|
349
|
|
|
str, |
|
350
|
|
|
bytes, |
|
351
|
|
|
Sequence[str], |
|
352
|
|
|
pd.DataFrame, |
|
353
|
|
|
Sequence[int], |
|
354
|
|
|
Sequence[float], |
|
355
|
|
|
Sequence[str], |
|
356
|
|
|
Mapping[str, str], |
|
357
|
|
|
]: |
|
358
|
|
|
""" |
|
359
|
|
|
Reads a variety of simple formats based on filename extension. |
|
360
|
|
|
Includes '.txt', 'csv', .xml', '.properties', '.json'. |
|
361
|
|
|
Also reads '.data' (binary), '.lines' (text lines). |
|
362
|
|
|
And formatted lists: '.strings', '.floats', and '.ints' (ex: "[1, 2, 3]"). |
|
363
|
|
|
""" |
|
364
|
|
|
path = Path(path) |
|
365
|
|
|
ext = path.suffix.lstrip(".") |
|
366
|
|
|
|
|
367
|
|
|
def load_list(dtype): |
|
368
|
|
|
return [ |
|
369
|
|
|
dtype(s) |
|
370
|
|
|
for s in FilesysTools.read_lines_file(path)[0] |
|
371
|
|
|
.replace(" ", "") |
|
372
|
|
|
.replace("[", "") |
|
373
|
|
|
.replace("]", "") |
|
374
|
|
|
.split(",") |
|
375
|
|
|
] |
|
376
|
|
|
|
|
377
|
|
|
if ext == "lines": |
|
|
|
|
|
|
378
|
|
|
return FilesysTools.read_lines_file(path) |
|
379
|
|
|
elif ext == "txt": |
|
380
|
|
|
return path.read_text("utf-8") |
|
381
|
|
|
elif ext == "data": |
|
382
|
|
|
return path.read_bytes() |
|
383
|
|
|
elif ext == "json": |
|
384
|
|
|
return FilesysTools.load_json(path) |
|
385
|
|
|
elif ext in ["npy", "npz"]: |
|
386
|
|
|
return np.load(str(path), allow_pickle=False) |
|
387
|
|
|
elif ext == "properties": |
|
388
|
|
|
return FilesysTools.read_properties_file(path) |
|
389
|
|
|
elif ext == "csv": |
|
390
|
|
|
return pd.read_csv(path) |
|
391
|
|
|
elif ext == "ints": |
|
392
|
|
|
return load_list(int) |
|
393
|
|
|
elif ext == "floats": |
|
394
|
|
|
return load_list(float) |
|
395
|
|
|
elif ext == "strings": |
|
396
|
|
|
return load_list(str) |
|
397
|
|
|
elif ext == "xml": |
|
398
|
|
|
ElementTree.parse(path).getroot() |
|
399
|
|
|
else: |
|
400
|
|
|
raise TypeError(f"Did not recognize resource file type for file {path}") |
|
401
|
|
|
|
|
402
|
|
|
    @classmethod
    @contextmanager
    def open_file(cls, path: PathLike, mode: str):
        """
        Opens a file in a safer way, always using the encoding set in Kale (utf8) by default.
        This avoids the problems of accidentally overwriting, forgetting to set mode, and not setting the encoding.
        Note that the default encoding on open() is not UTF on Windows.
        Raises specific informative errors.
        Cannot set overwrite in append mode.

        Args:
            path: The file to open
            mode: An :class:`OpenMode` string (read/write/append, text/binary/gzip)

        Yields:
            An open file handle (gzip, binary, or utf8 text depending on ``mode``)
        """
        path = Path(path)
        mode = OpenMode(mode)
        # Refuse to clobber an existing file in "safe" write modes.
        # NOTE(review): FileDoesNotExistError looks misnamed for an
        # "already exists" condition — confirm the intended exception type.
        if mode.write and mode.safe and path.exists():
            raise FileDoesNotExistError(f"Path {path} already exists")
        if not mode.read:
            # Create parent directories (etc.) before writing or appending.
            PathTools.prep_file(path, exist_ok=mode.overwrite or mode.append)
        if mode.gzipped:
            yield gzip.open(path, mode.internal, compresslevel=COMPRESS_LEVEL)
        elif mode.binary:
            yield open(path, mode.internal)
        else:
            yield open(path, mode.internal, encoding=ENCODING)
|
424
|
|
|
|
|
425
|
|
|
@classmethod |
|
426
|
|
|
def write_lines(cls, iterable: Iterable[Any], path: PathLike, mode: str = "w") -> int: |
|
427
|
|
|
""" |
|
428
|
|
|
Just writes an iterable line-by-line to a file, using '\n'. |
|
429
|
|
|
Makes the parent directory if needed. |
|
430
|
|
|
Checks that the iterable is a "true iterable" (not a string or bytes). |
|
431
|
|
|
|
|
432
|
|
|
Returns: |
|
433
|
|
|
The number of lines written (the same as len(iterable) if iterable has a length) |
|
434
|
|
|
|
|
435
|
|
|
Raises: |
|
436
|
|
|
FileExistsError: If the path exists and append is False |
|
437
|
|
|
PathIsNotFileError: If append is True, and the path exists but is not a file |
|
438
|
|
|
""" |
|
439
|
|
|
path = Path(path) |
|
440
|
|
|
mode = OpenMode(mode) |
|
441
|
|
|
if not mode.overwrite or mode.binary: |
|
442
|
|
|
raise ContradictoryRequestError(f"Wrong mode for writing a text file: {mode}") |
|
443
|
|
|
if not cls.is_true_iterable(iterable): |
|
444
|
|
|
raise TypeError("Not a true iterable") # TODO include iterable if small |
|
|
|
|
|
|
445
|
|
|
PathTools.prep_file(path, exist_ok=mode.overwrite or mode.append) |
|
446
|
|
|
n = 0 |
|
|
|
|
|
|
447
|
|
|
with cls.open_file(path, mode) as f: |
|
|
|
|
|
|
448
|
|
|
for x in iterable: |
|
|
|
|
|
|
449
|
|
|
f.write(str(x) + "\n") |
|
450
|
|
|
n += 1 |
|
|
|
|
|
|
451
|
|
|
return n |
|
452
|
|
|
|
|
453
|
|
|
@classmethod |
|
454
|
|
|
def hash_hex(cls, x: SupportsBytes, algorithm: str) -> str: |
|
|
|
|
|
|
455
|
|
|
""" |
|
456
|
|
|
Returns the hex-encoded hash of the object (converted to bytes). |
|
457
|
|
|
""" |
|
458
|
|
|
m = hashlib.new(algorithm) |
|
|
|
|
|
|
459
|
|
|
m.update(bytes(x)) |
|
460
|
|
|
return m.hexdigest() |
|
461
|
|
|
|
|
462
|
|
|
    @classmethod
    def replace_in_file(cls, path: PathLike, changes: Mapping[str, str]) -> None:
        """
        Uses re.sub repeatedly to modify (AND REPLACE) a file's content.

        Args:
            path: File rewritten in place; read and written as UTF-8
            changes: Maps regex patterns to their replacement strings
        """
        path = Path(path)
        data = path.read_text(encoding="utf-8")
        for key, value in changes.items():
            # V1 selects the regex module's newer matching behavior;
            # DOTALL makes '.' span newlines, MULTILINE anchors per-line.
            data = regex.sub(key, value, data, flags=regex.V1 | regex.MULTILINE | regex.DOTALL)
        path.write_text(data, encoding="utf-8")
|
472
|
|
|
|
|
473
|
|
|
@classmethod |
|
474
|
|
|
def tmppath(cls, path: Optional[PathLike] = None, **kwargs) -> Generator[Path, None, None]: |
|
475
|
|
|
""" |
|
476
|
|
|
Makes a temporary Path. Won't create ``path`` but will delete it at the end. |
|
477
|
|
|
If ``path`` is None, will use ``tempfile.mkstemp``. |
|
478
|
|
|
""" |
|
479
|
|
|
if path is None: |
|
480
|
|
|
_, path = tempfile.mkstemp() |
|
481
|
|
|
try: |
|
482
|
|
|
yield Path(path, **kwargs) |
|
483
|
|
|
finally: |
|
484
|
|
|
Path(path).unlink() |
|
485
|
|
|
|
|
486
|
|
|
@classmethod |
|
487
|
|
|
def tmpfile( |
|
488
|
|
|
cls, path: Optional[PathLike] = None, *, spooled: bool = False, **kwargs |
|
|
|
|
|
|
489
|
|
|
) -> Generator[Writeable, None, None]: |
|
490
|
|
|
""" |
|
491
|
|
|
Simple wrapper around tempfile.TemporaryFile, tempfile.NamedTemporaryFile, and tempfile.SpooledTemporaryFile. |
|
|
|
|
|
|
492
|
|
|
""" |
|
493
|
|
|
if spooled: |
|
494
|
|
|
with tempfile.SpooledTemporaryFile(**kwargs) as x: |
|
|
|
|
|
|
495
|
|
|
yield x |
|
496
|
|
|
elif path is None: |
|
497
|
|
|
with tempfile.TemporaryFile(**kwargs) as x: |
|
|
|
|
|
|
498
|
|
|
yield x |
|
499
|
|
|
else: |
|
500
|
|
|
with tempfile.NamedTemporaryFile(str(path), **kwargs) as x: |
|
|
|
|
|
|
501
|
|
|
yield x |
|
502
|
|
|
|
|
503
|
|
|
@classmethod |
|
504
|
|
|
def tmpdir(cls, **kwargs) -> Generator[Path, None, None]: |
|
|
|
|
|
|
505
|
|
|
with tempfile.TemporaryDirectory(**kwargs) as x: |
|
|
|
|
|
|
506
|
|
|
yield Path(x) |
|
507
|
|
|
|
|
508
|
|
|
|
|
509
|
|
|
__all__ = ["FilesysTools"] |
|
510
|
|
|
|