1 | # -*- coding: utf-8 -*- |
||
2 | # Copyright (C) 2012 Anaconda, Inc |
||
3 | # SPDX-License-Identifier: BSD-3-Clause |
||
4 | from __future__ import absolute_import, division, print_function, unicode_literals |
||
5 | |||
6 | 7 | from collections import defaultdict |
|
7 | from logging import getLogger |
||
8 | 7 | import os |
|
9 | 7 | from os import listdir, lstat, walk |
|
10 | 7 | from os.path import getsize, isdir, join |
|
11 | 7 | import sys |
|
12 | 7 | ||
13 | from ..base.constants import CONDA_TARBALL_EXTENSION |
||
14 | 7 | from ..base.context import context |
|
15 | 7 | ||
16 | 7 | log = getLogger(__name__) |
|
17 | 7 | ||
18 | 7 | ||
def find_tarballs():
    """Locate conda tarballs (including partial '.part' downloads) in every
    writable package cache.

    Returns a tuple ``(pkgs_dirs, totalsize)`` where ``pkgs_dirs`` maps each
    cache directory to the list of tarball filenames found directly in it,
    and ``totalsize`` is the combined size in bytes of all those files.
    """
    from ..core.package_cache_data import PackageCacheData

    part_ext = CONDA_TARBALL_EXTENSION + '.part'
    found = defaultdict(list)
    total = 0
    for package_cache in PackageCacheData.writable_caches(context.pkgs_dirs):
        cache_dir = package_cache.pkgs_dir
        if not isdir(cache_dir):
            continue
        # Only the top level of the cache holds tarballs; don't recurse.
        root, _, filenames = next(os.walk(cache_dir))
        for fn in filenames:
            if fn.endswith((CONDA_TARBALL_EXTENSION, part_ext)):
                found[cache_dir].append(fn)
                total += getsize(join(root, fn))

    return found, total
||
35 | |||
36 | |||
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """Delete the tarball files previously collected by ``find_tarballs()``.

    Prints a summary and asks for confirmation unless running with both
    json and always_yes; honors ``args.dry_run`` in json mode.

    :param args: parsed CLI namespace (only ``dry_run`` is read here)
    :param pkgs_dirs: mapping of cache dir -> list of tarball filenames
    :param totalsize: combined size in bytes, used for the summary line
    :param verbose: when False, suppress all console output
    """
    from .common import confirm_yn
    from ..gateways.disk.delete import rm_rf
    from ..utils import human_bytes

    if verbose:
        for cache_dir in pkgs_dirs:
            print('Cache location: %s' % cache_dir)

    if not any(pkgs_dirs[cache_dir] for cache_dir in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return

    if verbose:
        print("Will remove the following tarballs:")
        print()

        for cache_dir in pkgs_dirs:
            print(cache_dir)
            print('-' * len(cache_dir))
            fmt = "%-40s %10s"
            for fn in pkgs_dirs[cache_dir]:
                size = getsize(join(cache_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51)  # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    # Skip the prompt only when both json and always_yes are set.
    if not context.json or not context.always_yes:
        confirm_yn()
    if context.json and args.dry_run:
        return

    for cache_dir in pkgs_dirs:
        for fn in pkgs_dirs[cache_dir]:
            try:
                if rm_rf(os.path.join(cache_dir, fn)):
                    if verbose:
                        print("Removed %s" % fn)
                elif verbose:
                    print("WARNING: cannot remove, file permissions: %s" % fn)
            except (IOError, OSError) as e:
                if verbose:
                    print("WARNING: cannot remove, file permissions: %s\n%r" % (fn, e))
                else:
                    log.info("%r", e)
||
86 | |||
87 | |||
def _pkg_in_use(pkg_path, st_nlink, warnings):
    """Return True if any file under *pkg_path* has a hard-link count > 1,
    i.e. the extracted package is still linked into some environment.

    Per-file OSErrors are recorded in *warnings* as (filename, exception)
    tuples and the file is skipped.
    """
    for root, _dirs, files in walk(pkg_path):
        for fn in files:
            try:
                nlink = st_nlink(join(root, fn))
            except OSError as e:
                warnings.append((fn, e))
                continue
            if nlink > 1:
                return True
    return False


def find_pkgs():
    """Find extracted packages in the caches that are no longer linked into
    any environment, and measure their disk usage.

    Returns ``(pkgs_dirs, warnings, totalsize, pkgsizes)``:
    - pkgs_dirs: mapping of cache dir -> list of unused package names
    - warnings: list of (filename, OSError) pairs hit while stat-ing files
    - totalsize: combined size in bytes of all unused packages
    - pkgsizes: mapping of cache dir -> per-package sizes, parallel to
      the lists in pkgs_dirs
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    warnings = []

    from ..gateways.disk.link import CrossPlatformStLink
    cross_platform_st_nlink = CrossPlatformStLink()
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in context.pkgs_dirs:
        if not os.path.exists(pkgs_dir):
            if not context.json:
                print("WARNING: {0} does not exist".format(pkgs_dir))
            continue
        # Only directories containing an 'info' subdir are actual packages.
        pkgs = [name for name in listdir(pkgs_dir)
                if (isdir(join(pkgs_dir, name)) and
                    isdir(join(pkgs_dir, name, 'info')))]
        for pkg in pkgs:
            if not _pkg_in_use(join(pkgs_dir, pkg), cross_platform_st_nlink, warnings):
                pkgs_dirs[pkgs_dir].append(pkg)

    totalsize = 0
    pkgsizes = defaultdict(list)
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            pkgsize = 0
            for root, _dirs, files in walk(join(pkgs_dir, pkg)):
                for fn in files:
                    # We don't have to worry about counting things twice: by
                    # definition these files all have a link count of 1!
                    size = lstat(join(root, fn)).st_size
                    totalsize += size
                    pkgsize += size
            pkgsizes[pkgs_dir].append(pkgsize)

    return pkgs_dirs, warnings, totalsize, pkgsizes
||
138 | |||
139 | |||
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes, verbose=True):
    """Delete the unused extracted packages collected by ``find_pkgs()``.

    Prints a per-cache summary and asks for confirmation unless running
    with both json and always_yes; honors ``args.dry_run`` in json mode.

    :param args: parsed CLI namespace (only ``dry_run`` is read here)
    :param pkgs_dirs: mapping of cache dir -> list of package names
    :param warnings: (filename, exception) pairs from the scan, echoed here
    :param totalsize: combined size in bytes, used for the summary line
    :param pkgsizes: mapping of cache dir -> sizes parallel to pkgs_dirs
    :param verbose: when False, suppress all console output
    """
    from .common import confirm_yn
    from ..gateways.disk.delete import rm_rf
    from ..utils import human_bytes

    if verbose:
        for cache_dir in pkgs_dirs:
            print('Cache location: %s' % cache_dir)
        for _fn, exception in warnings:
            print(exception)

    if not any(pkgs_dirs[cache_dir] for cache_dir in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return

    if verbose:
        print("Will remove the following packages:")
        for cache_dir in pkgs_dirs:
            print(cache_dir)
            print('-' * len(cache_dir))
            print()
            fmt = "%-40s %10s"
            for pkg, pkgsize in zip(pkgs_dirs[cache_dir], pkgsizes[cache_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51)  # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    # Skip the prompt only when both json and always_yes are set.
    if not context.json or not context.always_yes:
        confirm_yn()
    if context.json and args.dry_run:
        return

    for cache_dir in pkgs_dirs:
        for pkg in pkgs_dirs[cache_dir]:
            if verbose:
                print("removing %s" % pkg)
            rm_rf(join(cache_dir, pkg))
||
179 | |||
180 | |||
def rm_index_cache():
    """Remove the repodata 'cache' subdirectory from every writable package cache."""
    from ..core.package_cache_data import PackageCacheData
    from ..gateways.disk.delete import rm_rf

    for package_cache in PackageCacheData.writable_caches():
        rm_rf(join(package_cache.pkgs_dir, 'cache'))
||
186 | |||
187 | |||
def find_source_cache():
    """Measure the conda-build source/vcs caches configured on the context.

    Returns a dict with keys ``warnings`` (always empty here), ``cache_dirs``
    (label -> directory), ``cache_sizes`` (label -> bytes), and
    ``total_size`` (combined bytes across all caches).
    """
    cache_dirs = {
        'source cache': context.src_cache,
        'git cache': context.git_cache,
        'hg cache': context.hg_cache,
        'svn cache': context.svn_cache,
    }

    sizes = {}
    grand_total = 0
    for cache_type, cache_dir in cache_dirs.items():
        subtotal = 0
        for root, _dirs, files in walk(cache_dir):
            for fn in files:
                nbytes = lstat(join(root, fn)).st_size
                grand_total += nbytes
                subtotal += nbytes
        sizes[cache_type] = subtotal

    return {
        'warnings': [],
        'cache_dirs': cache_dirs,
        'cache_sizes': sizes,
        'total_size': grand_total,
    }
||
213 | |||
214 | |||
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """Delete the source/vcs cache directories reported by ``find_source_cache()``.

    If any warnings were collected, they are printed to stderr and nothing
    is removed. Otherwise prints a summary, asks for confirmation unless
    running with both json and always_yes, and honors ``args.dry_run`` in
    json mode.
    """
    from .common import confirm_yn
    from ..gateways.disk.delete import rm_rf
    from ..utils import human_bytes

    verbose = not (context.json or context.quiet)
    if warnings:
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        # Warnings mean the scan was incomplete; bail out without deleting.
        return

    if verbose:
        for cache_type in cache_dirs:
            print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
            print("%-40s %10s" % ("Size:", human_bytes(cache_sizes[cache_type])))
            print()

        print("%-40s %10s" % ("Total:", human_bytes(total_size)))

    if not context.json or not context.always_yes:
        confirm_yn()
    if context.json and args.dry_run:
        return

    for cache_dir in cache_dirs.values():
        if verbose:
            print("Removing %s" % cache_dir)
        rm_rf(cache_dir)
||
244 | |||
245 | |||
def execute(args, parser):
    """Entry point for ``conda clean``: run each requested cleanup action,
    accumulate a JSON summary, and emit it when in json mode.

    Raises ArgumentError when no cleanup flag was supplied.
    """
    from .common import stdout_json

    json_result = {
        'success': True
    }

    if args.tarballs or args.all:
        tarball_dirs, tarball_total = find_tarballs()
        first = sorted(tarball_dirs)[0] if tarball_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(tarball_dirs),
            'files': tarball_dirs[first],  # Backwards compatibility
            'total_size': tarball_total
        }
        rm_tarballs(args, tarball_dirs, tarball_total,
                    verbose=not (context.json or context.quiet))

    if args.index_cache or args.all:
        json_result['index_cache'] = {
            'files': [join(context.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()

    if args.packages or args.all:
        pkg_dirs, warnings, pkg_total, pkg_sizes = find_pkgs()
        first = sorted(pkg_dirs)[0] if pkg_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkg_dirs),
            'files': pkg_dirs[first],  # Backwards compatibility
            'total_size': pkg_total,
            'warnings': warnings,
            'pkg_sizes': {d: dict(zip(pkg_dirs[d], pkg_sizes[d])) for d in pkg_dirs},
        }
        rm_pkgs(args, pkg_dirs, warnings, pkg_total, pkg_sizes,
                verbose=not (context.json or context.quiet))

    if args.source_cache or args.all:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])

    if not any((args.lock, args.tarballs, args.index_cache, args.packages,
                args.source_cache, args.all)):
        from ..exceptions import ArgumentError
        raise ArgumentError("One of {--lock, --tarballs, --index-cache, --packages, "
                            "--source-cache, --all} required")

    if context.json:
        stdout_json(json_result)
||
295 |