# root/galaxy-central/lib/pkg_resources.py @ revision 2
#
# Revision 2, 83.4 KB (committer: hatakeyama, 14 years ago)
#
# [Repository-browser page header; the original page also offered an
# "import galaxy-central" link and a "line numbers" column.]
"""Package resource API
--------------------

A resource is a logical file contained within a package, or a logical
subdirectory thereof.  The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is.  Do not use os.path operations to manipulate resource
names being passed into the API.

The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files.  It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""
15
16import sys, os, zipimport, time, re, imp
17
18try:
19    frozenset
20except NameError:
21    from sets import ImmutableSet as frozenset
22
23# capture these to bypass sandboxing
24from os import utime, rename, unlink, mkdir
25from os import open as os_open
26from os.path import isdir, split
27
28
29def _bypass_ensure_directory(name, mode=0777):
30    # Sandbox-bypassing version of ensure_directory()
31    dirname, filename = split(name)
32    if dirname and filename and not isdir(dirname):
33        _bypass_ensure_directory(dirname)
34        mkdir(dirname, mode)
35
36
37
38
39
40
41
_state_vars = {}   # registered global name -> handler type ('dict', 'object', ...)

def _declare_state(vartype, **kw):
    # Install each keyword as a module global and remember its handler type
    # so __getstate__()/__setstate__() can snapshot and restore it later.
    module_globals = globals()
    for name, value in kw.items():
        module_globals[name] = value
        _state_vars[name] = vartype
49
def __getstate__():
    # Snapshot every registered module global through its type-specific
    # ``_sget_<type>`` helper, returning a name -> snapshot mapping.
    module_globals = globals()
    state = {}
    for name, vartype in _state_vars.items():
        getter = module_globals['_sget_' + vartype]
        state[name] = getter(module_globals[name])
    return state
56
def __setstate__(state):
    # Restore each registered module global in place through its
    # type-specific ``_sset_<type>`` helper.
    module_globals = globals()
    for name, value in state.items():
        setter = module_globals['_sset_' + _state_vars[name]]
        setter(name, module_globals[name], value)
    return state
62
63def _sget_dict(val):
64    return val.copy()
65
66def _sset_dict(key, ob, state):
67    ob.clear()
68    ob.update(state)
69
70def _sget_object(val):
71    return val.__getstate__()
72
73def _sset_object(key, ob, state):
74    ob.__setstate__(state)
75
76_sget_none = _sset_none = lambda *args: None
77
78
79
80
81
82
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*.  To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is None or sys.platform != "darwin":
        return plat
    try:
        # Substitute the *running* OS X version for the build-time minimum.
        running = '.'.join(_macosx_vers()[:2])
        plat = 'macosx-%s-%s' % (running, m.group(3))
    except ValueError:
        pass    # not Mac OS X
    return plat
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
# Names exported by ``from pkg_resources import *``; grouped by purpose.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider',  'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
    'ExtractionError',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. "DistributionNotFound('foo',)": subclass name plus args tuple.
        return self.__class__.__name__ + repr(self.args)
168
class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version

    Raised when the active distribution for a project does not satisfy a
    ``Requirement`` being resolved.
    """
171
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found

    Raised during resolution when no installed (or obtainable) distribution
    satisfies a requirement.
    """
174
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name

    Raised when a requirement names an extra that the matched distribution
    does not declare.
    """
# loader type/class -> factory producing an IResourceProvider for a module;
# populated via register_loader_type().
_provider_factories = {}

# "major.minor" of the running interpreter, e.g. "2.4".  Built from
# sys.version_info rather than slicing sys.version, because ``sys.version[:3]``
# silently truncates two-digit minor versions ("3.10" would become "3.1").
PY_MAJOR = '%s.%s' % sys.version_info[:2]

# Distribution "precedence" constants: when several distributions for the
# same project are available, higher values are preferred.
EGG_DIST    = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
184
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` as the provider factory for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``;
    `provider_factory` is a callable that, given a *module* object, returns
    an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
193
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # For a Requirement, use the active distribution, activating one
        # (via require()) if none is active yet.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # Otherwise treat the argument as a module name, importing on demand.
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    factory = _find_adapter(_provider_factories, loader)
    return factory(module)
205
206def _macosx_vers(_cache=[]):
207    if not _cache:
208        from platform import mac_ver
209        _cache.append(mac_ver()[0].split('.'))
210    return _cache[0]
211
212def _macosx_arch(machine):
213    return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
214
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (
            int(version[0]), int(version[1]), _macosx_arch(machine)
        )
    except ValueError:
        # Non-Mac darwin system: fall back to the distutils value.
        return plat
234
# Patterns for platform strings such as "macosx-10.3-fat" and
# "darwin-8.0.1-ppc" (the latter is the pre-setuptools-0.6 legacy form).
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform   # XXX backward compat
238
239
240
241
242
243
244
245
246
def compatible_platforms(provided,required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided==required:
        return True     # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macosx designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                # Fix: compare versions numerically.  The previous string
                # comparison ("%s.%s" >= "10.3") sorts lexicographically,
                # which wrongly makes "10.10" < "10.3".
                macosversion = (int(reqMac.group(1)), int(reqMac.group(2)))
                if dversion == 7 and macosversion >= (10, 3) or \
                    dversion == 8 and macosversion >= (10, 4):
                    # darwin-7 eggs run on 10.3+, darwin-8 eggs on 10.4+
                    return True
            return False    # egg isn't macosx or legacy darwin

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or \
            provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False
296
297
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Reuse the *caller's* global namespace for the script: capture it from
    # the calling frame, wipe everything except __name__, then execute the
    # script in it so the script behaves like a top-level module.  The order
    # matters: __name__ must be saved before ns.clear().
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script   # backward compatibility
307
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Coerce step by step: string -> Requirement -> Distribution.
    if isinstance(dist, basestring):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
315
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
319
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
323
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
327
328
class IMetadataProvider:
    # NOTE: this class is an interface *specification*, not a usable base
    # class -- the method stubs below deliberately omit ``self`` and have no
    # bodies beyond their docstrings.  Concrete providers implement them.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

       Leading and trailing whitespace is stripped from each line, and lines
       with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # Interface specification only: as with IMetadataProvider, the stubs
    # below intentionally omit ``self``.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        self.entries = []        # path entries, in order (duplicates allowed)
        self.entry_keys = {}     # path entry -> list of project keys found there
        self.by_key = {}         # project key -> the single active Distribution
        self.callbacks = []      # subscribe() listeners, called per new dist

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)


    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)


    def __contains__(self,dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist


    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist,req)     # XXX add more info
        else:
            return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Clear the caller's globals (keeping only __name__) so the script
        # executes in a fresh, __main__-like namespace; see module-level
        # run_script() for the same technique.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)


    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key]=1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set.  If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry)

        if entry is None:
            entry = dist.location
        # Record the dist's key under both the supplied entry and its own
        # location (they may differ, e.g. for eggs added from another entry).
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return      # ignore hidden distros

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        """

        requirements = list(requirements)[::-1]  # set up the stack
        processed = {}  # set of processed requirements
        best = {}  # key -> dist
        to_activate = []

        while requirements:
            req = requirements.pop(0)   # process dependencies breadth-first
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None:
                    if env is None:
                        env = Environment(self.entries)
                    dist = best[req.key] = env.best_match(req, self, installer)
                    if dist is None:
                        raise DistributionNotFound(req)  # XXX put more info here
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                raise VersionConflict(dist,req) # XXX put more info here
            # Push the chosen dist's own requirements (reversed so they pop
            # in declaration order) and mark this requirement as done.
            requirements.extend(dist.requires(req.extras)[::-1])
            processed[req] = True

        return to_activate    # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
    ):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions)  # add plugins+libs to sys.path
            print "Couldn't load", errors        # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        plugin_projects.sort()  # scan project names in alphabetic order

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # Resolve against a scratch copy of this working set so failed
        # plugin resolutions don't disturb the real one.
        shadow_set = self.__class__([])
        map(shadow_set.add, self)   # put all our entries in shadow_set

        for project_name in plugin_projects:

            # plugin_env[project_name] is newest-to-oldest, so the first
            # version that resolves wins.
            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError,v:
                    error_info[dist] = v    # save error info
                    if fallback:
                        continue    # try the next older version of project
                    else:
                        break       # give up on this project, keep going

                else:
                    map(shadow_set.add, resolvees)
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info


    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # Notify every subscriber about a newly activated distribution.
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # Snapshot via copies so later mutation doesn't affect the state.
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    # Python 2 tuple-parameter unpacking in the signature.
    def __setstate__(self, (entries, keys, by_key, callbacks)):
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
696
697
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    # NOTE: the `platform` default below is evaluated once, at class
    # definition time, not per call.
    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        self._distmap = {}   # project key -> list of Distributions
        self._cache = {}     # project key -> sorted list (filled lazily)
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
           and compatible_platforms(dist.platform,self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        try:
            # Fast path: already cached under the name as given.
            return self._cache[project_name]
        except KeyError:
            # Normalize the name, then fall through to (possibly) build
            # and cache the sorted list below.
            project_name = project_name.lower()
            if project_name not in self._distmap:
                return []

        if project_name not in self._cache:
            # Cache the distmap list itself (same object), sorted in place.
            dists = self._cache[project_name] = self._distmap[project_name]
            _sort_dists(dists)

        return self._cache[project_name]

    def add(self,dist):
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key,[])
            if dist not in dists:
                dists.append(dist)
                # Keep any cached list sorted newest-first.
                if dist.key in self._cache:
                    _sort_dists(self._cache[dist.key])


    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        # self[req.key] is newest-to-oldest, so the first match is newest.
        for dist in self[req.key]:
            if dist in req:
                return dist
        return self.obtain(req, installer) # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Only yield keys that still have at least one usable dist.
            if self[key]: yield key


    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other,Distribution):
            self.add(other)
        elif isinstance(other,Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # The result accepts everything (platform/python None), then each
        # operand's own filters have already been applied to its contents.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
839
840
AvailableDistributions = Environment    # XXX backward compatibility (pre-0.6 name)
842
843
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
    # Instances are constructed (and the attributes above attached) by
    # ResourceManager.extraction_error().
858
859
860
861
862class ResourceManager:
863    """Manage resource extraction and packages"""
864    extraction_path = None
865
    def __init__(self):
        # Bookkeeping of files this manager has extracted; populated by the
        # extraction machinery (not shown in this chunk) -- TODO confirm the
        # exact key/value semantics against the full file.
        self.cached_files = {}
868
869    def resource_exists(self, package_or_requirement, resource_name):
870        """Does the named resource exist?"""
871        return get_provider(package_or_requirement).has_resource(resource_name)
872
873    def resource_isdir(self, package_or_requirement, resource_name):
874        """Is the named resource an existing directory?"""
875        return get_provider(package_or_requirement).resource_isdir(
876            resource_name
877        )
878
879    def resource_filename(self, package_or_requirement, resource_name):
880        """Return a true filesystem path for specified resource"""
881        return get_provider(package_or_requirement).get_resource_filename(
882            self, resource_name
883        )
884
885    def resource_stream(self, package_or_requirement, resource_name):
886        """Return a readable file-like object for specified resource"""
887        return get_provider(package_or_requirement).get_resource_stream(
888            self, resource_name
889        )
890
891    def resource_string(self, package_or_requirement, resource_name):
892        """Return specified resource as a string"""
893        return get_provider(package_or_requirement).get_resource_string(
894            self, resource_name
895        )
896
897    def resource_listdir(self, package_or_requirement, resource_name):
898        """List the contents of the named resource directory"""
899        return get_provider(package_or_requirement).resource_listdir(
900            resource_name
901        )
902
903    def extraction_error(self):
904        """Give an error message for problems extracting file(s)"""
905
906        old_exc = sys.exc_info()[1]
907        cache_path = self.extraction_path or get_default_cache()
908
909        err = ExtractionError("""Can't extract file(s) to egg cache
910
911The following error occurred while trying to extract file(s) to the Python egg
912cache:
913
914  %s
915
916The Python egg cache directory is currently set to:
917
918  %s
919
920Perhaps your account does not have write access to this directory?  You can
921change the cache directory by setting the PYTHON_EGG_CACHE environment
922variable to point to an accessible directory.
923"""         % (old_exc, cache_path)
924        )
925        err.manager        = self
926        err.cache_path     = cache_path
927        err.original_error = old_exc
928        raise err
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944    def get_cache_path(self, archive_name, names=()):
945        """Return absolute location in cache for `archive_name` and `names`
946
947        The parent directory of the resulting path will be created if it does
948        not already exist.  `archive_name` should be the base filename of the
949        enclosing egg (which may not be the name of the enclosing zipfile!),
950        including its ".egg" extension.  `names`, if provided, should be a
951        sequence of path name parts "under" the egg's extraction location.
952
953        This method should only be called by resource providers that need to
954        obtain an extraction location, and only for names they intend to
955        extract, as it tracks the generated names for possible cleanup later.
956        """
957        extract_path = self.extraction_path or get_default_cache()
958        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
959        try:
960            _bypass_ensure_directory(target_path)
961        except:
962            self.extraction_error()
963
964        self.cached_files[target_path] = 1
965        return target_path
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985    def postprocess(self, tempname, filename):
986        """Perform any platform-specific postprocessing of `tempname`
987
988        This is where Mac header rewrites should be done; other platforms don't
989        have anything special they should do.
990
991        Resource providers should call this method ONLY after successfully
992        extracting a compressed resource.  They must NOT call it on resources
993        that are already in the filesystem.
994
995        `tempname` is the current (temporary) name of the file, and `filename`
996        is the name it will be renamed to by the caller after this routine
997        returns.
998        """
999
1000        if os.name == 'posix':
1001            # Make the resource executable
1002            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
1003            os.chmod(tempname, mode)
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026    def set_extraction_path(self, path):
1027        """Set the base path where resources will be extracted to, if needed.
1028
1029        If you do not call this routine before any extractions take place, the
1030        path defaults to the return value of ``get_default_cache()``.  (Which
1031        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
1032        platform-specific fallbacks.  See that routine's documentation for more
1033        details.)
1034
1035        Resources are extracted to subdirectories of this path based upon
1036        information given by the ``IResourceProvider``.  You may set this to a
1037        temporary directory, but then you must call ``cleanup_resources()`` to
1038        delete the extracted files when done.  There is no guarantee that
1039        ``cleanup_resources()`` will be able to remove all extracted files.
1040
1041        (Note: you may not change the extraction path for a given resource
1042        manager once resources have been extracted, unless you first call
1043        ``cleanup_resources()``.)
1044        """
1045        if self.cached_files:
1046            raise ValueError(
1047                "Can't change extraction path, files already extracted"
1048            )
1049
1050        self.extraction_path = path
1051
1052    def cleanup_resources(self, force=False):
1053        """
1054        Delete all extracted resource files and directories, returning a list
1055        of the file and directory names that could not be successfully removed.
1056        This function does not have any concurrency protection, so it should
1057        generally only be called when the extraction path is a temporary
1058        directory exclusive to a single process.  This method is not
1059        automatically called; you must call it explicitly or register it as an
1060        ``atexit`` function if you wish to ensure cleanup of a temporary
1061        directory used for extractions.
1062        """
1063        # XXX
1064
1065
1066
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory.  On all other systems, it's "~/.python-eggs".

    Raises RuntimeError on Windows when none of the candidate environment
    variables are set.
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name!='nt':
        return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data'   # XXX this may be locale-specific!
    # Candidate (environment-variable combination, subdirectory) pairs,
    # listed best-first; the first fully-available combination wins.
    app_homes = [
        (('APPDATA',), None),       # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE','HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        (('WINDIR',), app_data),    # 95/98/ME
    ]

    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                break
        else:
            # All variables for this candidate were present; build the path.
            if subdir:
                dirname = os.path.join(dirname,subdir)
            return os.path.join(dirname, 'Python-Eggs')
    else:
        # BUGFIX: the original message misspelled "environment".
        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
        )
1107
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name.

    Every maximal run of characters outside [A-Za-z0-9.] collapses to a
    single '-'.
    """
    # Split on the disallowed runs and rejoin with dashes; equivalent to
    # re.sub('[^A-Za-z0-9.]+', '-', name).
    return '-'.join(re.split('[^A-Za-z0-9.]+', name))
1114
1115
def safe_version(version):
    """Convert an arbitrary string to a standard version string.

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    dotted = version.replace(' ', '.')
    # Collapse each run of disallowed characters into one dash.
    return '-'.join(re.split('[^A-Za-z0-9.]+', dotted))
1124
1125
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name.

    Runs of characters outside [A-Za-z0-9.] are each replaced with a
    single '_', and the result is always lowercased.
    """
    underscored = re.sub('[^A-Za-z0-9.]+', '_', extra)
    return underscored.lower()
1133
1134
def to_filename(name):
    """Convert a project or version name to its filename-escaped form.

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1141
1142
1143
1144
1145
1146
1147
1148
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Defaults; EggProvider._setup_prefix fills in egg_name/egg_info when
    # the module lives inside a ".egg" path component.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # Capture the module's PEP 302 loader (if any) and the directory
        # containing the module's source file.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        # Base implementation just maps the resource name to a path;
        # providers that need extraction override this.
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Wrap the resource bytes in an in-memory file object.
        return StringIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        # Metadata queries only make sense when an egg-info location is known.
        return self.egg_info and self._has(self._fn(self.egg_info,name))

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        return self._get(self._fn(self.egg_info,name))

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self,resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self,name):
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))


    def resource_listdir(self,resource_name):
        return self._listdir(self._fn(self.module_path,resource_name))

    def metadata_listdir(self,name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info,name))
        return []

    def run_script(self,script_name,namespace):
        """Execute the named metadata script in `namespace`."""
        # Scripts are stored under the 'scripts/' metadata directory.
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Script exists as a real file on disk: execute it directly.
            execfile(script_filename, namespace, namespace)
        else:
            # No real file (e.g. zipped egg): prime linecache so tracebacks
            # can display the source, then compile and exec the text.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text,script_filename,'exec')
            exec script_code in namespace, namespace

    # The primitives below must be supplied by a provider registered for
    # the concrete loader type; NullProvider itself cannot implement them.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Join a '/'-separated resource name onto `base` using the local
        # path separator; an empty name means `base` itself.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        # Delegate to the loader's optional PEP 302 get_data() extension.
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1242
1243register_loader_type(object, NullProvider)
1244
1245
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # The metadata may be nested inside a "basket" of multiple eggs,
        # so walk upward from module_path (rather than using .archive)
        # until a path component ending in '.egg' is found.
        current = self.module_path
        previous = None
        while current != previous:
            if current.lower().endswith('.egg'):
                self.egg_name = os.path.basename(current)
                self.egg_info = os.path.join(current, 'EGG-INFO')
                self.egg_root = current
                return
            previous = current
            current, _tail = os.path.split(current)
1266
1267
1268
1269
1270
1271
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Binary mode so callers get the resource bytes untranslated.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        # Read the whole file, closing the handle even if read() fails.
        stream = open(path, 'rb')
        try:
            data = stream.read()
        finally:
            stream.close()
        return data
1293
1294register_loader_type(type(None), DefaultProvider)
1295
1296
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

# Shared stateless singleton for callers that just need "no resources".
empty_provider = EmptyProvider()
1309
1310
1311
1312
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Cached combined contents of native_libs.txt / eager_resources.txt.
    eagers = None

    def __init__(self, module):
        EggProvider.__init__(self,module)
        # Reuse zipimport's per-archive directory cache rather than
        # re-reading the zip's central directory.
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)
        )

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path  # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)
        )

    def get_resource_filename(self, manager, resource_name):
        """Extract the resource to the egg cache and return its real path."""
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # Requesting any eager resource triggers extraction of them all.
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        # Extract `zip_path` (file or directory) into the egg cache,
        # returning its filesystem location.
        if zip_path in self._index():
            # Directory: extract each child, then return the directory.
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            return os.path.dirname(last)  # return the extracted directory name

        zip_stat = self.zipinfo[zip_path]
        # zipimport's cache entry holds DOS-packed time (index 5),
        # date (index 6) and file size (index 3).
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        date_time = (
            (d>>9)+1980, (d>>5)&0xF, d&0x1F,                      # ymd
            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1   # hms, etc.
        )
        timestamp = time.mktime(date_time)

        try:
            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if os.path.isfile(real_path):
                stat = os.stat(real_path)
                if stat.st_size==size and stat.st_mtime==timestamp:
                    # size and stamp match, don't bother extracting
                    return real_path

            # Write to a unique temp name, then rename into place, so
            # concurrent extractors never observe a half-written file.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            # Stamp the file with the archive's recorded mtime so the
            # freshness check above works on subsequent calls.
            utime(tmpnam, (timestamp,timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    stat = os.stat(real_path)

                    if stat.st_size==size and stat.st_mtime==timestamp:
                        # size and stamp match, somebody did it just ahead of
                        # us, so we're done
                        return real_path
                    elif os.name=='nt':     # Windows, del old file and retry
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            manager.extraction_error()  # report a user-friendly error

        return real_path

    def _get_eager_resources(self):
        # Lazily read the metadata files that list resources which must be
        # extracted together; cache the combined list on first use.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Map each directory subpath in the archive to a list of its direct
        # children; built once and cached as self._dirindex.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        # A path exists if it is a file in the archive or an implied directory.
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        # Resolve an eager-resource name relative to the egg root.
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        # Resolve a resource name relative to the requesting module's path.
        return self._zipinfo_name(self._fn(self.module_path,resource_name))
1451
1452register_loader_type(zipimport.zipimporter, ZipProvider)
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self,path):
        # Filesystem path of the standalone PKG-INFO file.
        self.path = path

    def has_metadata(self,name):
        """Only the 'PKG-INFO' metadata name exists for this provider."""
        return name=='PKG-INFO'

    def get_metadata(self,name):
        """Return the PKG-INFO file's contents; KeyError for anything else."""
        if name=='PKG-INFO':
            # BUGFIX: close the file handle explicitly instead of leaking
            # it until the garbage collector runs.
            f = open(self.path,'rU')
            try:
                return f.read()
            finally:
                f.close()
        raise KeyError("No metadata except PKG-INFO is available")

    def get_metadata_lines(self,name):
        return yield_lines(self.get_metadata(name))
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the directory containing the package's code; `egg_info`
        # is the .egg-info (or EGG-INFO) directory holding its metadata.
        self.module_path = path
        self.egg_info = egg_info
1541
1542
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        archive = importer.archive
        self.loader = importer
        # Reuse zipimport's directory cache for this archive.
        self.zipinfo = zipimport._zip_directory_cache[archive]
        self.zip_pre = archive + os.sep
        # A non-empty prefix means the importer points inside the archive.
        self.module_path = archive
        if importer.prefix:
            self.module_path = os.path.join(archive, importer.prefix)
        self._setup_prefix()
1557
1558
class ImpWrapper:
    """PEP 302 Importer that wraps Python's "normal" import algorithm"""

    def __init__(self, path=None):
        # None means "search the default path"; otherwise restrict the
        # search to this single directory.
        self.path = path

    def find_module(self, fullname, path=None):
        subname = fullname.split(".")[-1]
        # A dotted name can only be resolved by a per-directory instance.
        if subname != fullname and self.path is None:
            return None
        if self.path is None:
            search_path = None
        else:
            search_path = [self.path]
        try:
            fileobj, pathname, description = imp.find_module(subname, search_path)
        except ImportError:
            return None
        return ImpLoader(fileobj, pathname, description)
1578
1579
class ImpLoader:
    """PEP 302 Loader that wraps Python's "normal" import algorithm"""

    def __init__(self, file, filename, etc):
        # Keep the open file handle and the find_module() description
        # tuple around until load_module() consumes them.
        self.file = file
        self.filename = filename
        self.etc = etc

    def load_module(self, fullname):
        try:
            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        finally:
            # imp.load_module does not close the file for us.
            if self.file:
                self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
        return mod
1596
1597
1598
1599
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery.  The returned importer is only cached if it was created by a
    path hook.
    """
    try:
        importer = sys.path_importer_cache[path_item]
    except KeyError:
        # Not cached yet: ask each registered path hook in turn.
        importer = None
        for hook in sys.path_hooks:
            try:
                importer = hook(path_item)
            except ImportError:
                continue
            else:
                break

    # Cache whatever a hook produced (possibly None), but never cache
    # the ImpWrapper fallback created below.
    sys.path_importer_cache.setdefault(path_item, importer)
    if importer is None:
        try:
            importer = ImpWrapper(path_item)
        except ImportError:
            pass
    return importer
1627
# Prefer the stdlib's versions of get_importer/ImpImporter when available,
# replacing the local fallbacks defined above.
try:
    from pkgutil import get_importer, ImpImporter
except ImportError:
    pass    # Python 2.3 or 2.4, use our own implementation
else:
    ImpWrapper = ImpImporter    # Python 2.5, use pkgutil's implementation
    del ImpLoader, ImpImporter
1635
1636
1637
1638
1639
1640
1641_declare_state('dict', _distribution_finders = {})
1642
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (a sys.path
    item handler), and `distribution_finder` is a callable that, given a path
    item plus the importer instance, yields the ``Distribution`` objects found
    on that path item.  ``pkg_resources.find_on_path`` is an example finder.
    """
    _distribution_finders[importer_type] = distribution_finder
1651
1652
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Look up the finder registered for this path item's importer type
    # and delegate the actual search to it.
    importer = get_importer(path_item)
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
1658
def find_in_zip(importer, path_item, only=False):
    """Yield distributions found in the zip archive at `path_item`."""
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # Caller wants only the top-level distribution, not nested eggs.
        return
    for subitem in metadata.resource_listdir('/'):
        if not subitem.endswith('.egg'):
            continue
        subpath = os.path.join(path_item, subitem)
        for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
            yield dist
1670
1671register_finder(zipimport.zipimporter, find_in_zip)
1672
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand"""
    # On first call, rebind the module-level name to the real class
    # (preferring the C implementation), then delegate to it.
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args, **kw)
1681
def find_nothing(importer, path_item, only=False):
    """Finder for importer types that can never contain distributions."""
    return ()
1684register_finder(object,find_nothing)
1685
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # single-file .egg-info; treat as a PKG-INFO file
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item,entry,metadata,precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    # nested egg (file or directory): let the registered
                    # finder for its importer type handle it
                    for dist in find_distributions(os.path.join(path_item, entry)):
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # .egg-link: only the first non-blank line is used, and
                    # it is taken as a path relative to this directory
                    for line in file(os.path.join(path_item, entry)):
                        if not line.strip(): continue
                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
                            yield item
                        break
1721register_finder(ImpWrapper,find_on_path)
1722
# Registries for namespace-package support, declared via _declare_state so
# they participate in this module's __getstate__/__setstate__ handling.
_declare_state('dict', _namespace_handlers = {})
_declare_state('dict', _namespace_packages = {})
1725
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (a sys.path
    item handler), and `namespace_handler` is a callable with this signature::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    A namespace handler is invoked only after the importer object has agreed
    that it can handle the relevant path item, and it should only return a
    subpath if the module __path__ does not already contain an equivalent
    subpath.  ``pkg_resources.file_ns_handler`` is an example handler.
    """
    _namespace_handlers[importer_type] = namespace_handler
1742
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    importer = get_importer(path_item)
    if importer is None:
        # Nothing can be imported from this path entry.
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        # This path entry doesn't contain the package.
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create an empty namespace module and attach it to its parent.
        module = sys.modules[packageName] = imp.new_module(packageName)
        module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    # Ask the importer-type-specific handler whether a subpath should be added.
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        # Append the subpath, then re-assert __path__ after load_module()
        # in case loading replaced it.
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
    return subpath
1763
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # The import lock guards _namespace_packages and sys.modules updates.
    imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # Declare all ancestor packages first, and use the immediate
            # parent's __path__ as the search path for this package.
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        imp.release_lock()
1794
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    imp.acquire_lock()
    try:
        # recurse through the declared children of `parent`, giving each
        # namespace package a chance to pick up its piece of `path_item`
        for package in _namespace_packages.get(parent,()):
            subpath = _handle_ns(package, path_item)
            if subpath: fixup_namespace_packages(subpath,package)
    finally:
        imp.release_lock()
1804
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    candidate = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(candidate)
    # Only report the candidate if an equivalent path isn't already present
    for existing in module.__path__:
        if _normalize_cached(existing) == normalized:
            return None
    return candidate
1816
# Filesystem and zipfile importers both use the filesystem-style handler
register_namespace_handler(ImpWrapper,file_ns_handler)
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
1819
1820
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: contributes no subpath for any importer."""
    return None
1823
# catch-all for any other importer type: contribute no namespace subpath
register_namespace_handler(object,null_ns_handler)
1825
1826
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # resolve symlinks first, then apply platform case-folding
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
1830
def _normalize_cached(filename,_cache={}):
    """Memoized normalize_path(); the mutable default arg is the cache."""
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]
1837
1838def _set_parent_ns(packageName):
1839    parts = packageName.split('.')
1840    name = parts.pop()
1841    if parts:
1842        parent = '.'.join(parts)
1843        setattr(sys.modules[parent], name, sys.modules[packageName])
1844
1845
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
    if isinstance(strs,basestring):
        for raw in strs.splitlines():
            stripped = raw.strip()
            # skip blank lines and comment-only lines
            if stripped and not stripped.startswith('#'):
                yield stripped
    else:
        # a (possibly nested) iterable of strings: flatten recursively
        for element in strs:
            for line in yield_lines(element):
                yield line
1857
# Tokenizer matchers for the requirement/entry-point mini-language; each
# name is the bound ``match`` method of a compiled pattern.
LINE_END = re.compile(r"\s*(#.*)?$").match         # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match    # line continuation
DISTRO   = re.compile(r"\s*((\w|[-.])+)").match    # Distribution or extra
VERSION  = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
COMMA    = re.compile(r"\s*,").match               # comma between items
OBRACKET = re.compile(r"\s*\[").match              # open extras list
CBRACKET = re.compile(r"\s*\]").match              # close extras list
MODULE   = re.compile(r"\w+(\.\w+)*$").match       # dotted module/group name
EGG_NAME = re.compile(
    # egg basename: name[-version[-pyX.Y[-platform]]]
    r"(?P<name>[^-]+)"
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE
).match

# splits a version string into digit runs, alpha runs, '.' and '-'
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
# canonical respellings of pre-release/patch markers (see parse_version)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
1874
def _parse_version_parts(s):
    """Yield comparable part-strings for version string `s` (see parse_version)."""
    for fragment in component_re.split(s):
        # canonicalize pre/preview/rc/dev/- spellings via the `replace` map
        fragment = replace(fragment,fragment)
        if not fragment or fragment=='.':
            continue    # separators carry no ordering information
        if fragment[:1] in '0123456789':
            # numeric part: zero-pad so string comparison acts numerically
            yield fragment.zfill(8)
        else:
            # alpha part: '*' prefix keeps it distinct from padded numbers
            yield '*'+fragment

    yield '*final'  # ensure that alpha/beta/candidate are before final
1886
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    Roughly a cross between distutils' StrictVersion and LooseVersion:
    strings StrictVersion accepts sort the same way here; anything else is
    handled like a smarter LooseVersion.  The result is a tuple of strings.

    Numeric parts are zero-padded to 8 digits so they compare numerically
    without relying on number-vs-string ordering.  Dots are dropped, dashes
    kept, and trailing zeros between alpha segments or dashes suppressed, so
    "2.4.0" equals "2.4".  Alphanumeric parts are lower-cased.

    A "-" or any alpha tag sorting after "final" denotes a patch level, so
    "2.4.1" > "2.4-1" > "2.4".  Tags sorting before "final" ("a", "b", "c",
    "alpha", "beta", "candidate", ...) are pre-releases, so "2.4" > "2.4a1".
    "pre", "preview" and "rc" are treated as "c" (release candidates), and
    "dev" becomes '@' so it sorts below every other pre-release tag.
    """
    key = []
    for piece in _parse_version_parts(s.lower()):
        if piece.startswith('*'):
            # a dash immediately before a pre-release tag is redundant
            if piece < '*final':
                while key and key[-1]=='*final-':
                    key.pop()
            # trailing zeros in a numeric run never affect ordering
            while key and key[-1]=='00000000':
                key.pop()
        key.append(piece)
    return tuple(key)
1928
class EntryPoint(object):
    """Object representing an advertised importable object

    An entry point advertises an importable object (``module_name`` plus an
    optional chain of ``attrs``) under a short ``name``, optionally tied to
    the ``extras`` of the distribution ``dist`` that advertises it.
    """

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # validate/normalize extras by round-tripping a dummy requirement
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        # render in the same "name = module:attrs [extras]" syntax parse() reads
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        """Import and return the advertised object (resolving deps if `require`)"""
        if require: self.require(env, installer)
        # the fromlist makes __import__ return the leaf module, not the root
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            try:
                entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))
        return entry

    def require(self, env=None, installer=None):
        """Add this entry point's distribution requirements to the working set"""
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        # resolve the requirements, then activate each resulting distribution
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    #@classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        try:
            attrs = extras = ()
            name,value = src.split('=',1)
            if '[' in value:
                value,extras = value.split('[',1)
                # reuse the requirement parser to validate the extras list
                req = Requirement.parse("x["+extras)
                if req.specs: raise ValueError
                extras = req.extras
            if ':' in value:
                value,attrs = value.split(':',1)
                if not MODULE(attrs.rstrip()):
                    raise ValueError
                attrs = attrs.rstrip().split('.')
        except ValueError:
            raise ValueError(
                "EntryPoint must be in 'name=module:attrs [extras]' format",
                src
            )
        else:
            return cls(name.strip(), value.strip(), attrs, extras, dist)

    parse = classmethod(parse)

    #@classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name]=ep
        return this

    parse_group = classmethod(parse_group)

    #@classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data,dict):
            data = data.items()
        else:
            # text input: split "[group]" sections into (group, lines) pairs
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

    parse_map = classmethod(parse_map)
2046
2047
2048
2049
2050
2051
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    def __init__(self,
        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
    ):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # cache explicitly-given version; otherwise the `version`
            # property lazily reads it from PKG-INFO
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # metadata provider; empty_provider answers "no metadata" queries
        self._provider = metadata or empty_provider

    #@classmethod
    def from_location(cls,location,basename,metadata=None,**kw):
        """Build a Distribution from a path entry and its base filename"""
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            # parse name/version/pyver/platform out of the egg filename
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name','ver','pyver','plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )
    from_location = classmethod(from_location)

    # comparison/sort key; parsed_version first so newer versions sort later
    hashcmp = property(
        lambda self: (
            getattr(self,'parsed_version',()), self.precedence, self.key,
            -len(self.location or ''), self.location, self.py_version,
            self.platform
        )
    )
    def __cmp__(self, other): return cmp(self.hashcmp, other)
    def __hash__(self): return hash(self.hashcmp)

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed.  (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    #@property
    def key(self):
        """Lower-cased project name, for case-insensitive lookups (cached)"""
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key
    key = property(key)

    #@property
    def parsed_version(self):
        """``parse_version()`` key for this distribution's version (cached)"""
        try:
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)
            return pv

    parsed_version = property(parsed_version)

    #@property
    def version(self):
        """Version string; falls back to the PKG-INFO 'Version:' header"""
        try:
            return self._version
        except AttributeError:
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            else:
                raise ValueError(
                    "Missing 'Version:' header and/or PKG-INFO file", self
                )
    version = property(version)

    #@property
    def _dep_map(self):
        # lazily-built map: extra name (or None) -> list of Requirements
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
            return dm
    _dep_map = property(_dep_map)

    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None,()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self,name):
        # yield the lines of metadata file `name`, or nothing if it's absent
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            map(declare_namespace, self._get_metadata('namespace_packages.txt'))

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-'+self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self,self.location)
        else:
            return str(self)

    def __str__(self):
        try: version = getattr(self,'version',None)
        except ValueError: version = None   # unreadable/missing PKG-INFO
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)

    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)

    #@classmethod
    def from_filename(cls,filename,metadata=None, **kw):
        """Build a Distribution from a filesystem path (normalized first)"""
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )
    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group,name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # parse entry_points.txt once and cache the full group map
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group,{})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # normalized shadow copy of `path`, used for comparisons only
        npath= [(p and _normalize_cached(p) or p) for p in path]

        bp = None
        for p, item in enumerate(npath):
            if item==nloc:
                break   # already present at position p
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # neither loc nor its parent dir was found: append at the end
            if path is sys.path:
                self.check_version_conflict()
            path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while 1:
            try:
                np = npath.index(nloc, p+1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                p = np  # ha!

        return

    def check_version_conflict(self):
        """Warn if already-imported top-level modules would shadow this dist"""
        if self.key=='setuptools':
            return      # ignore the inevitable setuptools self-conflicts  :(

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            ):
                continue

            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)):
                continue    # module was imported from within this distribution
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        """True if a version is determinable; warns and returns False if not"""
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for "+repr(self))
            return False
        return True

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        for attr in (
            'project_name', 'version', 'py_version', 'platform', 'location',
            'precedence'
        ):
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    #@property
    def extras(self):
        """Names of all declared extras (the non-None keys of _dep_map)"""
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
2343
2344
def issue_warning(*args,**kw):
    """Issue a warning attributed to the first caller outside this module."""
    from warnings import warn
    g = globals()
    level = 1
    try:
        # walk up until the frame's globals aren't this module's own
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        pass    # ran off the top of the stack; use the outermost frame
    warn(stacklevel=level+1, *args, **kw)
2357
2358
2359
2360
2361
2362
2363
2364
2365
2366
2367
2368
2369
2370
2371
2372
2373
2374
2375
2376
2377
2378
2379
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        # Scan a comma-separated list of ITEM matches starting at line[p:],
        # ending at TERMINATOR; returns the updated (line, p) plus the items.

        items = []

        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                # backslash continuation: resume scanning on the next line
                try:
                    line = lines.next(); p = 0
                except StopIteration:
                    raise ValueError(
                        "\\ must not appear on the last nonblank line"
                    )

            match = ITEM(line,p)
            if not match:
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])

            items.append(match.group(*groups))
            p = match.end()

            match = COMMA(line,p)
            if match:
                p = match.end() # skip the comma
            elif not TERMINATOR(line,p):
                raise ValueError(
                    "Expected ',' or end-of-list in",line,"at",line[p:]
                )

        match = TERMINATOR(line,p)
        if match: p = match.end()   # skip the terminator, if any
        return line, p, items

    for line in lines:
        match = DISTRO(line)
        if not match:
            raise ValueError("Missing distribution spec", line)
        project_name = match.group(1)
        p = match.end()
        extras = []

        # optional "[extra1,extra2]" list immediately after the name
        match = OBRACKET(line,p)
        if match:
            p = match.end()
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
            )

        # version specifiers (e.g. ">=1.0,<2.0") run through end of line
        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
        specs = [(op,safe_version(val)) for op,val in specs]
        yield Requirement(project_name, specs, extras)
2439
2440
2441def _sort_dists(dists):
2442    tmp = [(dist.hashcmp,dist) for dist in dists]
2443    tmp.sort()
2444    dists[::-1] = [d for hc,d in tmp]
2445
2446
2447
2448
2449
2450
2451
2452
2453
2454
2455
2456
2457
2458
2459
2460
2461
class Requirement:
    """A parsed requirement: project name plus version specs plus extras"""
    def __init__(self, project_name, specs, extras):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        self.unsafe_name, project_name = project_name, safe_name(project_name)
        self.project_name, self.key = project_name, project_name.lower()
        # index rows: (parsed version, state-machine row, operator, raw text)
        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
        index.sort()
        self.specs = [(op,ver) for parsed,trans,op,ver in index]
        self.index, self.extras = index, tuple(map(safe_extra,extras))
        # equality/hash key: comparison text is irrelevant, parsed form isn't
        self.hashCmp = (
            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
            frozenset(self.extras)
        )
        self.__hash = hash(self.hashCmp)

    def __str__(self):
        specs = ','.join([''.join(s) for s in self.specs])
        extras = ','.join(self.extras)
        if extras: extras = '[%s]' % extras
        return '%s%s%s' % (self.project_name, extras, specs)

    def __eq__(self,other):
        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp

    def __contains__(self,item):
        """True if Distribution or version-string `item` satisfies the specs"""
        if isinstance(item,Distribution):
            if item.key != self.key: return False
            if self.index: item = item.parsed_version  # only get if we need it
        elif isinstance(item,basestring):
            item = parse_version(item)
        last = None
        # run the candidate version through each spec's state-machine row;
        # see the `state_machine` table below for the F/T/+/- action codes
        for parsed,trans,op,ver in self.index:
            action = trans[cmp(item,parsed)]
            if action=='F':     return False
            elif action=='T':   return True
            elif action=='+':   last = True
            elif action=='-' or last is None:   last = False
        if last is None: last = True    # no rules encountered
        return last

    def __hash__(self):
        return self.__hash

    def __repr__(self): return "Requirement.parse(%r)" % str(self)

    #@staticmethod
    def parse(s):
        """Parse one requirement string; ValueError unless exactly one found"""
        reqs = list(parse_requirements(s))
        if reqs:
            if len(reqs)==1:
                return reqs[0]
            raise ValueError("Expected only one requirement", s)
        raise ValueError("No requirements found", s)

    parse = staticmethod(parse)
2518
# Outcome table used by Requirement.__contains__.  Each operator maps to a
# 3-char row indexed by cmp(candidate, spec_version): index 0 = equal,
# 1 = greater, -1 (the last char) = less.  Actions: 'T' = definitely
# satisfied, 'F' = definitely not, '+' = tentatively satisfied,
# '-' = tentatively not; '.' leaves the running verdict as-is.
state_machine = {
    #       =><
    '<' :  '--T',
    '<=':  'T-T',
    '>' :  'F+F',
    '>=':  'T+F',
    '==':  'T..',
    '!=':  'F++',
}
2528
2529
2530def _get_mro(cls):
2531    """Get an mro for a type or classic class"""
2532    if not isinstance(cls,type):
2533        class cls(cls,object): pass
2534        return cls.__mro__[1:]
2535    return cls.__mro__
2536
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    klass = getattr(ob, '__class__', type(ob))
    # most-specific class wins: scan the MRO outward from ob's own class
    for base in _get_mro(klass):
        if base in registry:
            return registry[base]
    # implicit None: nothing registered for any class in the MRO
2542
2543
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    if os.path.isdir(parent):
        return
    os.makedirs(parent)
2549
def split_sections(s):
    """Split a string or iterable thereof into (section,content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    header = None
    body = []
    for line in yield_lines(s):
        if not line.startswith("["):
            body.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # flush the previous segment, unless it's the empty leading one
        if header or body:
            yield header, body
        header = line[1:-1].strip()
        body = []
    # the final segment is always emitted, even if empty
    yield header, body
2574
2575def _mkstemp(*args,**kw):
2576    from tempfile import mkstemp
2577    old_open = os.open
2578    try:
2579        os.open = os_open   # temporarily bypass sandboxing
2580        return mkstemp(*args,**kw)
2581    finally:
2582        os.open = old_open  # and then put it back
2583
2584
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
def _initialize(g):
    # export every public ResourceManager method as a module-level function
    for name in dir(_manager):
        if not name.startswith('_'):
            g[name] = getattr(_manager, name)
_initialize(globals())
2592
# Prepare the master working set and make the ``require()`` API available
_declare_state('object', working_set = WorkingSet())
try:
    # Does the main program list any requirements?
    from __main__ import __requires__
except ImportError:
    pass # No: just use the default working set based on sys.path
else:
    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
    try:
        working_set.require(__requires__)
    except VersionConflict:     # try it without defaults already on sys.path
        working_set = WorkingSet([])    # by starting with an empty path
        for dist in working_set.resolve(
            parse_requirements(__requires__), Environment()
        ):
            working_set.add(dist)
        for entry in sys.path:  # add any missing entries from sys.path
            if entry not in working_set.entries:
                working_set.add_entry(entry)
        sys.path[:] = working_set.entries   # then copy back to sys.path

# module-level conveniences bound to the global working set
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script   # backward compatibility
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
2625
Note: リポジトリブラウザについてのヘルプは TracBrowser を参照してください。