diff_for_unlink_link_precs()   F
last analyzed

Complexity

Conditions 22

Size

Total Lines 36

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
cc 22
dl 0
loc 36
rs 0
c 0
b 0
f 0

1 Method

Rating   Name   Duplication   Size   Complexity  
A _add_to_unlink_and_link() 0 4 2

How to fix   Complexity   

Complexity

Complex functions like diff_for_unlink_link_precs() often do a lot of different things. To break such a function down, we need to identify a cohesive step within it. A common approach to finding such a step is to look for local variables and helpers that share the same prefixes, or suffixes.

Once you have determined the statements that belong together, you can apply the Extract Method refactoring. If the extracted logic makes sense as a stand-alone helper function, Extract Function is also a candidate, and is often faster.

1
# -*- coding: utf-8 -*-
2
# Copyright (C) 2012 Anaconda, Inc
3
# SPDX-License-Identifier: BSD-3-Clause
4
from __future__ import absolute_import, division, print_function, unicode_literals
5
6
from genericpath import exists
7
from logging import DEBUG, getLogger
8
from os.path import join
9
import sys
10
from textwrap import dedent
11
12
from .index import get_reduced_index
13
from .link import PrefixSetup, UnlinkLinkTransaction
14
from .prefix_data import PrefixData
15
from .subdir_data import SubdirData
16
from .. import CondaError, __version__ as CONDA_VERSION
17
from .._vendor.auxlib.ish import dals
18
from .._vendor.boltons.setutils import IndexedSet
19
from ..base.constants import DepsModifier, UNKNOWN_CHANNEL, UpdateModifier
20
from ..base.context import context
21
from ..common.compat import iteritems, itervalues, odict, text_type
22
from ..common.constants import NULL
23
from ..common.io import Spinner
24
from ..common.path import get_major_minor_version, paths_equal
25
from ..exceptions import PackagesNotFoundError
26
from ..gateways.logging import TRACE
27
from ..history import History
28
from ..models.channel import Channel
29
from ..models.enums import NoarchType
30
from ..models.match_spec import MatchSpec
31
from ..models.prefix_graph import PrefixGraph
32
from ..models.version import VersionOrder
33
from ..resolve import Resolve, dashlist
34
35
try:
36
    from cytoolz.itertoolz import concat, concatv, groupby
37
except ImportError:
38
    from .._vendor.toolz.itertoolz import concat, concatv, groupby  # NOQA
39
40
log = getLogger(__name__)
41
42
43
class Solver(object):
44
    """
45
    A high-level API to conda's solving logic. Three public methods are provided to access a
46
    solution in various forms.
47
48
      * :meth:`solve_final_state`
49
      * :meth:`solve_for_diff`
50
      * :meth:`solve_for_transaction`
51
52
    """
53
54
    def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=()):
        """Initialize a Solver for a single conda prefix.

        Args:
            prefix (str):
                The conda prefix / environment location for which the :class:`Solver`
                is being instantiated.
            channels (Sequence[:class:`Channel`]):
                A prioritized list of channels for the solution; falls back to
                ``context.channels`` when empty.
            subdirs (Sequence[str]):
                A prioritized list of subdirs for the solution; falls back to
                ``context.subdirs`` when empty.
            specs_to_add (Set[:class:`MatchSpec`]):
                The set of package specs to add to the prefix.
            specs_to_remove (Set[:class:`MatchSpec`]):
                The set of package specs to remove from the prefix.
        """
        self.prefix = prefix
        self.channels = IndexedSet(Channel(chn) for chn in (channels or context.channels))
        self.subdirs = tuple(subdir for subdir in (subdirs or context.subdirs))
        self.specs_to_add = frozenset(MatchSpec.merge(spec for spec in specs_to_add))
        self.specs_to_remove = frozenset(MatchSpec.merge(spec for spec in specs_to_remove))

        assert all(subdir in context.known_subdirs for subdir in self.subdirs)

        # Lazily-built solver state; populated by _prepare().
        self._index = None
        self._r = None
        self._prepared = False
80
81
    def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL,
                          ignore_pinned=NULL, force_remove=NULL):
        """Gives the final, solved state of the environment.

        Args:
            update_modifier (UpdateModifier):
                An optional flag directing how updates are handled regarding packages already
                existing in the environment.

            deps_modifier (DepsModifier):
                An optional flag indicating special solver handling for dependencies. The
                default solver behavior is to be as conservative as possible with dependency
                updates (in the case the dependency already exists in the environment), while
                still ensuring all dependencies are satisfied.  Options include
                    * NO_DEPS
                    * ONLY_DEPS
                    * UPDATE_DEPS
                    * UPDATE_DEPS_ONLY_DEPS
                    * FREEZE_INSTALLED
            prune (bool):
                If ``True``, the solution will not contain packages that were
                previously brought into the environment as dependencies but are no longer
                required as dependencies and are not user-requested.
            ignore_pinned (bool):
                If ``True``, the solution will ignore pinned package configuration
                for the prefix.
            force_remove (bool):
                Forces removal of a package without removing packages that depend on it.

        Returns:
            Tuple[PackageRef]:
                In sorted dependency order from roots to leaves, the package references for
                the solved state of the environment.

        """
        # NULL-valued arguments fall back to the corresponding context configuration;
        # explicit values are normalized through their enum types.
        if update_modifier is NULL:
            update_modifier = context.update_modifier
        else:
            update_modifier = UpdateModifier(text_type(update_modifier).lower())
        if deps_modifier is NULL:
            deps_modifier = context.deps_modifier
        else:
            deps_modifier = DepsModifier(text_type(deps_modifier).lower())
        prune = context.prune if prune is NULL else prune
        ignore_pinned = context.ignore_pinned if ignore_pinned is NULL else ignore_pinned
        force_remove = context.force_remove if force_remove is NULL else force_remove
        specs_to_remove = self.specs_to_remove
        specs_to_add = self.specs_to_add

        # force_remove is a special case where we return early
        if specs_to_remove and force_remove:
            if specs_to_add:
                raise NotImplementedError()
            solution = tuple(prec for prec in PrefixData(self.prefix).iter_records()
                             if not any(spec.match(prec) for spec in specs_to_remove))
            return IndexedSet(PrefixGraph(solution).graph)

        log.debug("solving prefix %s\n"
                  "  specs_to_remove: %s\n"
                  "  specs_to_add: %s\n"
                  "  prune: %s", self.prefix, specs_to_remove, specs_to_add, prune)

        # declare starting point, the initial state of the environment
        # `solution` and `specs_map` are mutated throughout this method
        prefix_data = PrefixData(self.prefix)
        solution = tuple(prec for prec in prefix_data.iter_records())

        # Check if specs are satisfied by current environment. If they are, exit early.
        if (update_modifier == UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE
                and not specs_to_remove and not prune):
            for spec in specs_to_add:
                if not next(prefix_data.query(spec), None):
                    break
            else:
                # All specs match a package in the current environment.
                # Return early, with a solution that should just be PrefixData().iter_records()
                return IndexedSet(PrefixGraph(solution).graph)

        specs_from_history_map = History(self.prefix).get_requested_specs_map()
        if prune:  # or update_modifier == UpdateModifier.UPDATE_ALL  # pending conda/constructor#138  # NOQA
            # Users are struggling with the prune functionality in --update-all, due to
            # https://github.com/conda/constructor/issues/138.  Until that issue is resolved,
            # and for the foreseeable future, it's best to be more conservative with --update-all.

            # Start with empty specs map for UPDATE_ALL because we're optimizing the update
            # only for specs the user has requested; it's ok to remove dependencies.
            specs_map = odict()

            # However, because of https://github.com/conda/constructor/issues/138, we need
            # to hard-code keeping conda, conda-build, and anaconda, if they're already in
            # the environment.
            solution_pkg_names = set(d.name for d in solution)
            ensure_these = (pkg_name for pkg_name in {
                'anaconda', 'conda', 'conda-build',
            } if pkg_name not in specs_from_history_map and pkg_name in solution_pkg_names)
            for pkg_name in ensure_these:
                specs_from_history_map[pkg_name] = MatchSpec(pkg_name)
        else:
            specs_map = odict((d.name, MatchSpec(d.name)) for d in solution)

        # add in historically-requested specs
        specs_map.update(specs_from_history_map)

        # let's pretend for now that this is the right place to build the index
        prepared_specs = set(concatv(
            specs_to_remove,
            specs_to_add,
            itervalues(specs_from_history_map),
        ))

        index, r = self._prepare(prepared_specs)

        if specs_to_remove:
            # In a previous implementation, we invoked SAT here via `r.remove()` to help with
            # spec removal, and then later invoking SAT again via `r.solve()`. Rather than invoking
            # SAT for spec removal determination, we can use the PrefixGraph and simple tree
            # traversal if we're careful about how we handle features. We still invoke sat via
            # `r.solve()` later.
            _track_fts_specs = (spec for spec in specs_to_remove if 'track_features' in spec)
            feature_names = set(concat(spec.get_raw_value('track_features')
                                       for spec in _track_fts_specs))
            graph = PrefixGraph(solution, itervalues(specs_map))

            all_removed_records = []
            no_removed_records_specs = []
            for spec in specs_to_remove:
                # If the spec was a track_features spec, then we need to also remove every
                # package with a feature that matches the track_feature. The
                # `graph.remove_spec()` method handles that for us.
                log.trace("using PrefixGraph to remove records for %s", spec)
                removed_records = graph.remove_spec(spec)
                if removed_records:
                    all_removed_records.extend(removed_records)
                else:
                    no_removed_records_specs.append(spec)

            # ensure that each spec in specs_to_remove is actually associated with removed records
            unmatched_specs_to_remove = tuple(
                spec for spec in no_removed_records_specs
                if not any(spec.match(rec) for rec in all_removed_records)
            )
            if unmatched_specs_to_remove:
                raise PackagesNotFoundError(
                    tuple(sorted(str(s) for s in unmatched_specs_to_remove))
                )

            for rec in all_removed_records:
                # We keep specs (minus the feature part) for the non provides_features packages
                # if they're in the history specs.  Otherwise, we pop them from the specs_map.
                rec_has_a_feature = set(rec.features or ()) & feature_names
                if rec_has_a_feature and rec.name in specs_from_history_map:
                    spec = specs_map.get(rec.name, MatchSpec(rec.name))
                    spec._match_components.pop('features', None)
                    specs_map[spec.name] = spec
                else:
                    specs_map.pop(rec.name, None)

            solution = tuple(graph.graph)

        # We handle as best as possible environments in inconsistent states. To do this,
        # we remove now from consideration the set of packages causing inconsistencies,
        # and then we add them back in following the main SAT call.
        _, inconsistent_precs = r.bad_installed(solution, ())
        add_back_map = {}  # name: (prec, spec)
        if log.isEnabledFor(DEBUG):
            log.debug("inconsistent precs: %s",
                      dashlist(inconsistent_precs) if inconsistent_precs else 'None')
        if inconsistent_precs:
            for prec in inconsistent_precs:
                # pop and save matching spec in specs_map
                add_back_map[prec.name] = (prec, specs_map.pop(prec.name, None))
            solution = tuple(prec for prec in solution if prec not in inconsistent_precs)

        # For the remaining specs in specs_map, add target to each spec. `target` is a reference
        # to the package currently existing in the environment. Setting target instructs the
        # solver to not disturb that package if it's not necessary.
        # If the spec.name is being modified by inclusion in specs_to_add, we don't set `target`,
        # since we *want* the solver to modify/update that package.
        #
        # TLDR: when working with MatchSpec objects,
        #  - to minimize the version change, set MatchSpec(name=name, target=prec.dist_str())
        #  - to freeze the package, set all the components of MatchSpec individually
        for pkg_name, spec in iteritems(specs_map):
            matches_for_spec = tuple(prec for prec in solution if spec.match(prec))
            if matches_for_spec:
                if len(matches_for_spec) != 1:
                    # A single name-only spec should never match more than one installed
                    # record; more than one indicates a corrupted environment.
                    raise CondaError(dals("""
                    Conda encountered an error with your environment.  Please report an issue
                    at https://github.com/conda/conda/issues/new.  In your report, please include
                    the output of 'conda info' and 'conda list' for the active environment, along
                    with the command you invoked that resulted in this error.
                      pkg_name: %s
                      spec: %s
                      matches_for_spec: %s
                    """) % (pkg_name, spec,
                            dashlist((text_type(s) for s in matches_for_spec), indent=4)))
                target_prec = matches_for_spec[0]
                if update_modifier == UpdateModifier.FREEZE_INSTALLED:
                    new_spec = MatchSpec(target_prec)
                else:
                    target = target_prec.dist_str()
                    new_spec = MatchSpec(spec, target=target)
                specs_map[pkg_name] = new_spec
        if log.isEnabledFor(TRACE):
            log.trace("specs_map with targets: %s", specs_map)

        # If we're in UPDATE_ALL mode, we need to drop all the constraints attached to specs,
        # so they can all float and the solver can find the most up-to-date solution. In the case
        # of UPDATE_ALL, `specs_map` wasn't initialized with packages from the current environment,
        # but *only* historically-requested specs.  This lets UPDATE_ALL drop dependencies if
        # they're no longer needed, and their presence would otherwise prevent the updated solution
        # the user most likely wants.
        if update_modifier == UpdateModifier.UPDATE_ALL:
            specs_map = {pkg_name: MatchSpec(spec.name, optional=spec.optional)
                         for pkg_name, spec in iteritems(specs_map)}

        # As a business rule, we never want to update python beyond the current minor version,
        # unless that's requested explicitly by the user (which we actively discourage).
        if 'python' in specs_map:
            python_prefix_rec = prefix_data.get('python')
            if python_prefix_rec:
                python_spec = specs_map['python']
                if not python_spec.get('version'):
                    pinned_version = get_major_minor_version(python_prefix_rec.version) + '.*'
                    specs_map['python'] = MatchSpec(python_spec, version=pinned_version)

        # For the aggressive_update_packages configuration parameter, we strip any target
        # that's been set.
        if not context.offline:
            for spec in context.aggressive_update_packages:
                if spec.name in specs_map:
                    specs_map[spec.name] = spec
            if (context.auto_update_conda and paths_equal(self.prefix, context.root_prefix)
                    and any(prec.name == "conda" for prec in solution)):
                specs_map["conda"] = MatchSpec("conda")

        # add in explicitly requested specs from specs_to_add
        # this overrides any name-matching spec already in the spec map
        specs_map.update((s.name, s) for s in specs_to_add)

        # collect additional specs to add to the solution
        track_features_specs = pinned_specs = ()
        if context.track_features:
            track_features_specs = tuple(MatchSpec(x + '@') for x in context.track_features)
        if not ignore_pinned:
            pinned_specs = get_pinned_specs(self.prefix)

        final_environment_specs = IndexedSet(concatv(
            itervalues(specs_map),
            track_features_specs,
            pinned_specs,
        ))

        # We've previously checked `solution` for consistency (which at that point was the
        # pre-solve state of the environment). Now we check our compiled set of
        # `final_environment_specs` for the possibility of a solution.  If there are conflicts,
        # we can often avoid them by neutering specs that have a target (e.g. removing version
        # constraint) and also making them optional. The result here will be less cases of
        # `UnsatisfiableError` handed to users, at the cost of more packages being modified
        # or removed from the environment.
        conflicting_specs = r.get_conflicting_specs(tuple(final_environment_specs))
        if log.isEnabledFor(DEBUG):
            log.debug("conflicting specs: %s", dashlist(conflicting_specs))
        for spec in conflicting_specs:
            if spec.target:
                final_environment_specs.remove(spec)
                neutered_spec = MatchSpec(spec.name, target=spec.target, optional=True)
                final_environment_specs.add(neutered_spec)

        # Finally! We get to call SAT.
        if log.isEnabledFor(DEBUG):
            log.debug("final specs to add: %s",
                      dashlist(sorted(text_type(s) for s in final_environment_specs)))
        solution = r.solve(tuple(final_environment_specs))  # return value is List[PackageRecord]

        # add back inconsistent packages to solution
        if add_back_map:
            for name, (prec, spec) in iteritems(add_back_map):
                if not any(d.name == name for d in solution):
                    solution.append(prec)
                    if spec:
                        final_environment_specs.add(spec)

        # Special case handling for various DepsModifier flags. Maybe this block could be pulled
        # out into its own non-public helper method?
        if deps_modifier == DepsModifier.NO_DEPS:
            # In the NO_DEPS case, we need to start with the original list of packages in the
            # environment, and then only modify packages that match specs_to_add or
            # specs_to_remove.
            _no_deps_solution = IndexedSet(prefix_data.iter_records())
            only_remove_these = set(prec
                                    for spec in specs_to_remove
                                    for prec in _no_deps_solution
                                    if spec.match(prec))
            _no_deps_solution -= only_remove_these

            only_add_these = set(prec
                                 for spec in specs_to_add
                                 for prec in solution
                                 if spec.match(prec))
            remove_before_adding_back = set(prec.name for prec in only_add_these)
            _no_deps_solution = IndexedSet(prec for prec in _no_deps_solution
                                           if prec.name not in remove_before_adding_back)
            _no_deps_solution |= only_add_these
            solution = _no_deps_solution
        elif (deps_modifier == DepsModifier.ONLY_DEPS
                and update_modifier != UpdateModifier.UPDATE_DEPS):
            # Using a special instance of PrefixGraph to remove youngest child nodes that match
            # the original specs_to_add.  It's important to remove only the *youngest* child nodes,
            # because a typical use might be `conda install --only-deps python=2 flask`, and in
            # that case we'd want to keep python.
            graph = PrefixGraph(solution, specs_to_add)
            graph.remove_youngest_descendant_nodes_with_specs()
            solution = tuple(graph.graph)

        elif update_modifier == UpdateModifier.UPDATE_DEPS:
            # Here we have to SAT solve again :(  It's only now that we know the dependency
            # chain of specs_to_add.
            specs_to_add_names = set(spec.name for spec in specs_to_add)
            update_names = set()
            graph = PrefixGraph(solution, final_environment_specs)
            for spec in specs_to_add:
                node = graph.get_node_by_name(spec.name)
                for ancestor_record in graph.all_ancestors(node):
                    ancestor_name = ancestor_record.name
                    if ancestor_name not in specs_to_add_names:
                        update_names.add(ancestor_name)
            grouped_specs = groupby(lambda s: s.name in update_names, final_environment_specs)
            new_final_environment_specs = set(grouped_specs.get(False, ()))
            update_specs = set(MatchSpec(spec.name, optional=spec.optional)
                               for spec in grouped_specs.get(True, ()))
            final_environment_specs = new_final_environment_specs | update_specs
            solution = r.solve(final_environment_specs)

            if deps_modifier == DepsModifier.ONLY_DEPS:
                # duplicated from DepsModifier.ONLY_DEPS
                graph = PrefixGraph(solution, specs_to_add)
                graph.remove_youngest_descendant_nodes_with_specs()
                solution = tuple(graph.graph)

        if prune:
            graph = PrefixGraph(solution, final_environment_specs)
            graph.prune()
            solution = tuple(graph.graph)

        self._check_solution(solution, pinned_specs)

        solution = IndexedSet(PrefixGraph(solution).graph)
        log.debug("solved prefix %s\n"
                  "  solved_linked_dists:\n"
                  "    %s\n",
                  self.prefix, "\n    ".join(prec.dist_str() for prec in solution))
        return solution
434
435
    def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL,
436
                       ignore_pinned=NULL, force_remove=NULL, force_reinstall=NULL):
437
        """Gives the package references to remove from an environment, followed by
438
        the package references to add to an environment.
439
440
        Args:
441
            deps_modifier (DepsModifier):
442
                See :meth:`solve_final_state`.
443
            prune (bool):
444
                See :meth:`solve_final_state`.
445
            ignore_pinned (bool):
446
                See :meth:`solve_final_state`.
447
            force_remove (bool):
448
                See :meth:`solve_final_state`.
449
            force_reinstall (bool):
450
                For requested specs_to_add that are already satisfied in the environment,
451
                    instructs the solver to remove the package and spec from the environment,
452
                    and then add it back--possibly with the exact package instance modified,
453
                    depending on the spec exactness.
454
455
        Returns:
456
            Tuple[PackageRef], Tuple[PackageRef]:
457
                A two-tuple of PackageRef sequences.  The first is the group of packages to
458
                remove from the environment, in sorted dependency order from leaves to roots.
459
                The second is the group of packages to add to the environment, in sorted
460
                dependency order from roots to leaves.
461
462
        """
463
        final_precs = self.solve_final_state(update_modifier, deps_modifier, prune, ignore_pinned,
464
                                             force_remove)
465
        unlink_precs, link_precs = diff_for_unlink_link_precs(
466
            self.prefix, final_precs, self.specs_to_add, force_reinstall
467
        )
468
        return unlink_precs, link_precs
469
470
    def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL,
                              ignore_pinned=NULL, force_remove=NULL, force_reinstall=NULL):
        """Gives an UnlinkLinkTransaction instance that can be used to execute the solution
        on an environment.

        Args:
            update_modifier (UpdateModifier):
                See :meth:`solve_final_state`.
            deps_modifier (DepsModifier):
                See :meth:`solve_final_state`.
            prune (bool):
                See :meth:`solve_final_state`.
            ignore_pinned (bool):
                See :meth:`solve_final_state`.
            force_remove (bool):
                See :meth:`solve_final_state`.
            force_reinstall (bool):
                See :meth:`solve_for_diff`.

        Returns:
            UnlinkLinkTransaction:

        """
        if self.prefix == context.root_prefix and context.enable_private_envs:
            # This path has the ability to generate a multi-prefix transaction. The basic logic
            # is in the commented out get_install_transaction() function below. Exercised at
            # the integration level in the PrivateEnvIntegrationTests in test_create.py.
            raise NotImplementedError()

        show_spinner = not context.verbosity and not context.quiet
        with Spinner("Solving environment", show_spinner, context.json):
            unlink_precs, link_precs = self.solve_for_diff(
                update_modifier, deps_modifier, prune, ignore_pinned,
                force_remove, force_reinstall,
            )
            setup = PrefixSetup(self.prefix, unlink_precs, link_precs,
                                self.specs_to_remove, self.specs_to_add)
            # TODO: Only explicitly requested remove and update specs are being included in
            #   History right now. Do we need to include other categories from the solve?

        self._notify_conda_outdated(link_precs)
        return UnlinkLinkTransaction(setup)
509
510
    def _notify_conda_outdated(self, link_precs):
        """Print a warning to stderr when a newer conda is available.

        Only the channel that conda is currently installed from is consulted.
        The notice is suppressed when disabled by configuration
        (``notify_outdated_conda`` off, or ``quiet``), or when this very
        transaction is already updating conda within conda's own prefix.
        """
        if not context.notify_outdated_conda or context.quiet:
            return
        current_conda_prefix_rec = PrefixData(context.conda_prefix).get('conda', None)
        if current_conda_prefix_rec:
            channel_name = current_conda_prefix_rec.channel.canonical_name
            if channel_name == UNKNOWN_CHANNEL:
                channel_name = "defaults"

            # only look for a newer conda in the channel conda is currently installed from
            conda_newer_spec = MatchSpec('%s::conda>%s' % (channel_name, CONDA_VERSION))

            # If this solve already links a newer conda into conda's own prefix,
            # there's nothing to warn about.
            if paths_equal(self.prefix, context.conda_prefix):
                if any(conda_newer_spec.match(prec) for prec in link_precs):
                    return

            conda_newer_precs = sorted(
                SubdirData.query_all(conda_newer_spec, self.channels, self.subdirs),
                key=lambda x: VersionOrder(x.version)
                # VersionOrder is fine here rather than r.version_key because all precs
                # should come from the same channel
            )
            if conda_newer_precs:
                latest_version = conda_newer_precs[-1].version
                # If conda comes from defaults, ensure we're giving instructions to users
                # that should resolve release timing issues between defaults and conda-forge.
                add_channel = "-c defaults " if channel_name == "defaults" else ""
                print(dedent("""

                ==> WARNING: A newer version of conda exists. <==
                  current version: %s
                  latest version: %s

                Please update conda by running

                    $ conda update -n base %sconda

                """) % (CONDA_VERSION, latest_version, add_channel), file=sys.stderr)
548
549
    def _prepare(self, prepared_specs):
        """Build (or reuse) the reduced package index and Resolve object.

        Caches ``self._index`` / ``self._r`` on the instance; a repeated call
        with the same spec set returns the cached pair without rebuilding.
        Someday we may want to further abstract away the use of `index` or
        the Resolve object here.
        """
        # Cache hit: same specs as the previous call, nothing to rebuild.
        if self._prepared and prepared_specs == self._prepared_specs:
            return self._index, self._r

        if hasattr(self, '_index') and self._index:
            # An index was injected externally (install_actions does this for
            # conda-build back-compat); just wrap it in a Resolve object.
            self._prepared_specs = prepared_specs
            self._r = Resolve(self._index, channels=self.channels)
        else:
            # Fold in channels named explicitly on the specs but absent from the
            # configured channel list.  (For correctness we should probably also
            # add any channel given by PrefixData(self.prefix).all_subdir_urls(),
            # but that causes usability problems with bad / expired tokens.)
            extra_channels = set()
            for spec in self.specs_to_add:
                # TODO: correct handling for subdir isn't yet done
                channel_value = spec.get_exact_value('channel')
                if channel_value:
                    extra_channels.add(Channel(channel_value))
            self.channels.update(extra_channels)

            reduced_index = get_reduced_index(self.prefix, self.channels,
                                              self.subdirs, prepared_specs)
            self._prepared_specs = prepared_specs
            self._index = reduced_index
            self._r = Resolve(reduced_index, channels=self.channels)

        self._prepared = True
        return self._index, self._r
582
583
    def _check_solution(self, solution, pinned_specs):
        """Sanity-check that `solution` is consistent with every pinned spec.

        Each pin must either be satisfied by some record in the solution, or the
        pinned package name must be entirely absent from the solution.
        """
        for pinned in pinned_specs:
            required = MatchSpec(pinned, optional=False)
            if any(required.match(prec) for prec in solution):
                continue
            # The pin isn't satisfied outright; that's only acceptable when no
            # package by that name made it into the solution at all.
            assert not any(prec.name == required.name for prec in solution)

        # NOTE: protection of conda (and its dependencies) within conda's own
        # environment is deliberately left to txn.verify().
602
603
604
def get_pinned_specs(prefix):
    """Find pinned specs from file and return a tuple of MatchSpec."""
    pinned_file = join(prefix, 'conda-meta', 'pinned')
    file_specs = ()
    if exists(pinned_file):
        with open(pinned_file) as fh:
            lines = fh.read().strip().splitlines()
        # Keep non-empty lines that are not comments.
        file_specs = tuple(ln for ln in lines
                           if ln and not ln.strip().startswith('#'))
    # Pins from the context configuration come first, then pins from the file;
    # all are optional so an absent package does not violate its pin.
    return tuple(MatchSpec(spec_str, optional=True)
                 for spec_str in concatv(context.pinned_packages, file_specs))
616
617
618
def diff_for_unlink_link_precs(prefix, final_precs, specs_to_add=(), force_reinstall=NULL):
    """Compute which package records must be unlinked from and linked into *prefix*
    to move it to the state described by *final_precs*.

    :param prefix: path of the environment being transacted.
    :param final_precs: IndexedSet of the desired final package records.
    :param specs_to_add: specs the user explicitly requested; used with
        ``force_reinstall`` to force matching packages to be relinked.
    :param force_reinstall: when NULL, falls back to ``context.force_reinstall``.
    :return: ``(unlink_precs, link_precs)`` — unlink ordered in reverse
        topological (dependency) order, link in forward topological order.
    """
    assert isinstance(final_precs, IndexedSet)
    # Topologically sort the currently-installed records so index order below
    # reflects dependency order.
    previous_records = IndexedSet(PrefixGraph(PrefixData(prefix).iter_records()).graph)
    force_reinstall = context.force_reinstall if force_reinstall is NULL else force_reinstall

    unlink_precs = previous_records - final_precs
    link_precs = final_precs - previous_records

    def _add_to_unlink_and_link(rec):
        # Force `rec` to be (re)linked; if it is already installed, also
        # schedule it for unlinking so it is effectively reinstalled.
        link_precs.add(rec)
        # BUGFIX: previously tested `prec` (the enclosing loop variable)
        # instead of the parameter `rec` — a latent closure-shadowing bug.
        if rec in previous_records:
            unlink_precs.add(rec)

    # If force_reinstall is enabled, make sure any package in specs_to_add is unlinked then
    # re-linked
    if force_reinstall:
        for spec in specs_to_add:
            prec = next((rec for rec in final_precs if spec.match(rec)), None)
            assert prec
            _add_to_unlink_and_link(prec)

    # add back 'noarch: python' packages to unlink and link if python version changes
    python_spec = MatchSpec('python')
    prev_python = next((rec for rec in previous_records if python_spec.match(rec)), None)
    curr_python = next((rec for rec in final_precs if python_spec.match(rec)), None)
    gmm = get_major_minor_version
    if prev_python and curr_python and gmm(prev_python.version) != gmm(curr_python.version):
        noarch_python_precs = (p for p in final_precs if p.noarch == NoarchType.python)
        for prec in noarch_python_precs:
            _add_to_unlink_and_link(prec)

    # Unlink in reverse dependency order (dependents before dependencies);
    # link in forward dependency order.
    unlink_precs = IndexedSet(reversed(sorted(unlink_precs,
                                              key=lambda x: previous_records.index(x))))
    link_precs = IndexedSet(sorted(link_precs, key=lambda x: final_precs.index(x)))
    return unlink_precs, link_precs
654
655
656
# NOTE: The remaining code in this module is being left for development reference until
657
#  the context.enable_private_envs portion is implemented in :meth:`solve_for_transaction`.
658
659
# def solve_prefix(prefix, r, specs_to_remove=(), specs_to_add=(), prune=False):
660
#     # this function gives a "final state" for an existing prefix given just these simple inputs
661
#     prune = context.prune or prune
662
#     log.debug("solving prefix %s\n"
663
#               "  specs_to_remove: %s\n"
664
#               "  specs_to_add: %s\n"
665
#               "  prune: %s", prefix, specs_to_remove, specs_to_add, prune)
666
#
667
#     # declare starting point
668
#     solved_linked_dists = () if prune else tuple(iterkeys(linked_data(prefix)))
669
#     # TODO: to change this whole function from working with dists to working with records, just
670
#     #       change iterkeys to itervalues
671
#
672
#     if solved_linked_dists and specs_to_remove:
673
#         solved_linked_dists = r.remove(tuple(text_type(s) for s in specs_to_remove),
674
#                                        solved_linked_dists)
675
#
676
#     specs_from_history = _get_relevant_specs_from_history(prefix, specs_to_remove, specs_to_add)
677
#     augmented_specs_to_add = augment_specs(prefix, concatv(specs_from_history, specs_to_add))
678
#
679
#     log.debug("final specs to add:\n    %s\n",
680
#               "\n    ".join(text_type(s) for s in augmented_specs_to_add))
681
#     solved_linked_dists = r.install(augmented_specs_to_add,
682
#                                     solved_linked_dists,
683
#                                     update_deps=context.update_dependencies)
684
#
685
#     if not context.ignore_pinned:
686
#         # TODO: assert all pinned specs are compatible with what's in solved_linked_dists
687
#         pass
688
#
689
#     # TODO: don't uninstall conda or its dependencies, probably need to check elsewhere
690
#
691
#     solved_linked_dists = IndexedSet(r.dependency_sort({d.name: d for d in solved_linked_dists}))
692
#
693
#     log.debug("solved prefix %s\n"
694
#               "  solved_linked_dists:\n"
695
#               "    %s\n",
696
#               prefix, "\n    ".join(text_type(d) for d in solved_linked_dists))
697
#
698
#     return solved_linked_dists, specs_to_add
699
700
701
# def solve_for_actions(prefix, r, specs_to_remove=(), specs_to_add=(), prune=False):
702
#     # this is not for force-removing packages, which doesn't invoke the solver
703
#
704
#     solved_dists, _specs_to_add = solve_prefix(prefix, r, specs_to_remove, specs_to_add, prune)
705
#     # TODO: this _specs_to_add part should be refactored when we can better pin package channel
706
#     #     origin  # NOQA
707
#     dists_for_unlinking, dists_for_linking = sort_unlink_link_from_solve(prefix, solved_dists,
708
#                                                                          _specs_to_add)
709
#
710
#     def remove_non_matching_dists(dists_set, specs_to_match):
711
#         _dists_set = IndexedSet(dists_set)
712
#         for dist in dists_set:
713
#             for spec in specs_to_match:
714
#                 if spec.match(dist):
715
#                     break
716
#             else:  # executed if the loop ended normally (no break)
717
#                 _dists_set.remove(dist)
718
#         return _dists_set
719
#
720
#     if context.no_dependencies:
721
#         # for `conda create --no-deps python=3 flask`, do we install python? yes
722
#         # the only dists we touch are the ones that match a specs_to_add
723
#         dists_for_linking = remove_non_matching_dists(dists_for_linking, specs_to_add)
724
#         dists_for_unlinking = remove_non_matching_dists(dists_for_unlinking, specs_to_add)
725
#     elif context.only_dependencies:
726
#         # for `conda create --only-deps python=3 flask`, do we install python? yes
727
#         # remove all dists that match a specs_to_add, as long as that dist isn't a dependency
728
#         #   of other specs_to_add
729
#         _index = r.index
730
#         _match_any = lambda spec, dists: next((dist for dist in dists
731
#                                                if spec.match(_index[dist])),
732
#                                               None)
733
#         _is_dependency = lambda spec, dist: any(r.depends_on(s, dist.name)
734
#                                                 for s in specs_to_add if s != spec)
735
#         for spec in specs_to_add:
736
#             link_matching_dist = _match_any(spec, dists_for_linking)
737
#             if link_matching_dist:
738
#                 if not _is_dependency(spec, link_matching_dist):
739
#                     # as long as that dist isn't a dependency of other specs_to_add
740
#                     dists_for_linking.remove(link_matching_dist)
741
#                     unlink_matching_dist = _match_any(spec, dists_for_unlinking)
742
#                     if unlink_matching_dist:
743
#                         dists_for_unlinking.remove(unlink_matching_dist)
744
#
745
#     if context.force:
746
#         dists_for_unlinking, dists_for_linking = forced_reinstall_specs(prefix, solved_dists,
747
#                                                                         dists_for_unlinking,
748
#                                                                         dists_for_linking,
749
#                                                                         specs_to_add)
750
#
751
#     dists_for_unlinking = IndexedSet(reversed(dists_for_unlinking))
752
#     return dists_for_unlinking, dists_for_linking
753
754
755
# def sort_unlink_link_from_solve(prefix, solved_dists, remove_satisfied_specs):
756
#     # solved_dists should be the return value of solve_prefix()
757
#     old_linked_dists = IndexedSet(iterkeys(linked_data(prefix)))
758
#
759
#     dists_for_unlinking = old_linked_dists - solved_dists
760
#     dists_for_linking = solved_dists - old_linked_dists
761
#
762
#     # TODO: add back 'noarch: python' to unlink and link if python version changes
763
#
764
#     # r_linked = Resolve(linked_data(prefix))
765
#     # for spec in remove_satisfied_specs:
766
#     #     if r_linked.find_matches(spec):
767
#     #         spec_name = spec.name
768
#     #         unlink_dist = next((d for d in dists_for_unlinking if d.name == spec_name), None)
769
#     #         link_dist = next((d for d in dists_for_linking if d.name == spec_name), None)
770
#     #         if unlink_dist:
771
#     #             dists_for_unlinking.discard(unlink_dist)
772
#     #         if link_dist:
773
#     #             dists_for_linking.discard(link_dist)
774
#
775
#     return dists_for_unlinking, dists_for_linking
776
777
778
# def get_install_transaction(prefix, index, spec_strs, force=False, only_names=None,
779
#                             always_copy=False, pinned=True, update_deps=True,
780
#                             prune=False, channel_priority_map=None, is_update=False):
781
#     # type: (str, Dict[Dist, Record], List[str], bool, Optional[List[str]], bool, bool, bool,
782
#     #        bool, bool, bool, Dict[str, Sequence[str, int]]) -> List[Dict[weird]]
783
#
784
#     # split out specs into potentially multiple preferred envs if:
785
#     #  1. the user default env (root_prefix) is the prefix being considered here
786
#     #  2. the user has not specified the --name or --prefix command-line flags
787
#     if (prefix == context.root_prefix
788
#             and not context.prefix_specified
789
#             and prefix_is_writable(prefix)
790
#             and context.enable_private_envs):
791
#
792
#         # a registered package CANNOT be installed in the root env
793
#         # if ANY package requesting a private env is required in the root env, all packages for
794
#         #   that requested env must instead be installed in the root env
795
#
796
#         root_r = get_resolve_object(index.copy(), context.root_prefix)
797
#
798
#         def get_env_for_spec(spec):
799
#             # use resolve's get_dists_for_spec() to find the "best" matching record
800
#             record_for_spec = root_r.index[root_r.get_dists_for_spec(spec, emptyok=False)[-1]]
801
#             return ensure_pad(record_for_spec.preferred_env)
802
#
803
#         # specs grouped by target env, the 'None' key holds the specs for the root env
804
#         env_add_map = groupby(get_env_for_spec, (MatchSpec(s) for s in spec_strs))
805
#         requested_root_specs_to_add = {s for s in env_add_map.pop(None, ())}
806
#
807
#         ed = EnvsDirectory(join(context.root_prefix, 'envs'))
808
#         registered_packages = ed.get_registered_packages_keyed_on_env_name()
809
#
810
#         if len(env_add_map) == len(registered_packages) == 0:
811
#             # short-circuit the rest of this logic
812
#             return get_install_transaction_single(prefix, index, spec_strs, force, only_names,
813
#                                                   always_copy, pinned, update_deps,
814
#                                                   prune, channel_priority_map, is_update)
815
#
816
#         root_specs_to_remove = set(MatchSpec(s.name) for s in concat(itervalues(env_add_map)))
817
#         required_root_dists, _ = solve_prefix(context.root_prefix, root_r,
818
#                                               specs_to_remove=root_specs_to_remove,
819
#                                               specs_to_add=requested_root_specs_to_add,
820
#                                               prune=True)
821
#
822
#         required_root_package_names = tuple(d.name for d in required_root_dists)
823
#
824
#         # first handle pulling back requested specs to root
825
#         forced_root_specs_to_add = set()
826
#         pruned_env_add_map = defaultdict(list)
827
#         for env_name, specs in iteritems(env_add_map):
828
#             for spec in specs:
829
#                 spec_name = MatchSpec(spec).name
830
#                 if spec_name in required_root_package_names:
831
#                     forced_root_specs_to_add.add(spec)
832
#                 else:
833
#                     pruned_env_add_map[env_name].append(spec)
834
#         env_add_map = pruned_env_add_map
835
#
836
#         # second handle pulling back registered specs to root
837
#         env_remove_map = defaultdict(list)
838
#         for env_name, registered_package_entries in iteritems(registered_packages):
839
#             for rpe in registered_package_entries:
840
#                 if rpe['package_name'] in required_root_package_names:
841
#                     # ANY registered packages in this environment need to be pulled back
842
#                     for pe in registered_package_entries:
843
#                         # add an entry in env_remove_map
844
#                         # add an entry in forced_root_specs_to_add
845
#                         pname = pe['package_name']
846
#                         env_remove_map[env_name].append(MatchSpec(pname))
847
#                         forced_root_specs_to_add.add(MatchSpec(pe['requested_spec']))
848
#                 break
849
#
850
#         unlink_link_map = odict()
851
#
852
#         # solve all needed preferred_env prefixes
853
#         for env_name in set(concatv(env_add_map, env_remove_map)):
854
#             specs_to_add = env_add_map[env_name]
855
#             spec_to_remove = env_remove_map[env_name]
856
#             pfx = ed.preferred_env_to_prefix(env_name)
857
#             unlink, link = solve_for_actions(pfx, get_resolve_object(index.copy(), pfx),
858
#                                              specs_to_remove=spec_to_remove,
859
#                                              specs_to_add=specs_to_add,
860
#                                              prune=True)
861
#             unlink_link_map[env_name] = unlink, link, specs_to_add
862
#
863
#         # now solve root prefix
864
#         # we have to solve root a second time in all cases, because this time we don't prune
865
#         root_specs_to_add = set(concatv(requested_root_specs_to_add, forced_root_specs_to_add))
866
#         root_unlink, root_link = solve_for_actions(context.root_prefix, root_r,
867
#                                                    specs_to_remove=root_specs_to_remove,
868
#                                                    specs_to_add=root_specs_to_add)
869
#         if root_unlink or root_link:
870
#             # this needs to be added to odict last; the private envs need to be updated first
871
#             unlink_link_map[None] = root_unlink, root_link, root_specs_to_add
872
#
873
#         def make_txn_setup(pfx, unlink, link, specs):
874
#             # TODO: this index here is probably wrong; needs to be per-prefix
875
#             return PrefixSetup(index, pfx, unlink, link, 'INSTALL',
876
#                                tuple(specs))
877
#
878
#         txn_args = tuple(make_txn_setup(ed.to_prefix(ensure_pad(env_name)), *oink)
879
#                          for env_name, oink in iteritems(unlink_link_map))
880
#         txn = UnlinkLinkTransaction(*txn_args)
881
#         return txn
882
#
883
#     else:
884
#         # disregard any requested preferred env
885
#         return get_install_transaction_single(prefix, index, spec_strs, force, only_names,
886
#                                               always_copy, pinned, update_deps,
887
#                                               prune, channel_priority_map, is_update)
888