usr/lib64/python3.6/distutils/command/build_py.py

"""distutils.command.build_py

Implements the Distutils 'build_py' command."""

import os
import importlib.util
import sys
from glob import glob

from distutils.core import Command
from distutils.errors import *
from distutils.util import convert_path, Mixin2to3
from distutils import log

class build_py (Command):

    description = "\"build\" pure Python modules (copy to build directory)"

    user_options = [
        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ]

    boolean_options = ['compile', 'force']
    negative_opt = {'no-compile' : 'compile'}

    def initialize_options(self):
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_data = None
        self.package_dir = None
        self.compile = 0
        self.optimize = 0
        self.force = None

    def finalize_options(self):
        self.set_undefined_options('build',
                                   ('build_lib', 'build_lib'),
                                   ('force', 'force'))

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = {}
        if self.distribution.package_dir:
            for name, path in self.distribution.package_dir.items():
                self.package_dir[name] = convert_path(path)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")

    def run(self):
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.
        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.byte_compile(self.get_outputs(include_bytecode=0))

    def get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        data = []
        if not self.packages:
            return data
        for package in self.packages:
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = 0
            if src_dir:
                plen = len(src_dir)+1

            # Strip directory from globbed filenames
            filenames = [
                file[plen:] for file in self.find_data_files(package, src_dir)
                ]
            data.append((package, src_dir, build_dir, filenames))
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        globs = (self.package_data.get('', [])
                 + self.package_data.get(package, []))
        files = []
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            filelist = glob(os.path.join(src_dir, convert_path(pattern)))
            # Files that match more than one pattern are only added once
            files.extend([fn for fn in filelist if fn not in files
                          and os.path.isfile(fn)])
        return files

    def build_package_data(self):
        """Copy data files into build directory"""
        lastdir = None
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                self.copy_file(os.path.join(src_dir, filename), target,
                               preserve_mode=False)

    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
           distribution, where package 'package' should be found
           (at least according to the 'package_dir' option, if any)."""
        path = package.split('.')

        if not self.package_dir:
            if path:
                return os.path.join(*path)
            else:
                return ''
        else:
            tail = []
            while path:
                try:
                    pdir = self.package_dir['.'.join(path)]
                except KeyError:
                    tail.insert(0, path[-1])
                    del path[-1]
                else:
                    tail.insert(0, pdir)
                    return os.path.join(*tail)
            else:
                # Oops, got all the way through 'path' without finding a
                # match in package_dir.  If package_dir defines a directory
                # for the root (nameless) package, then fallback on it;
                # otherwise, we might as well have not consulted
                # package_dir at all, as we just use the directory implied
                # by 'tail' (which should be the same as the original value
                # of 'path' at this point).
                pdir = self.package_dir.get('')
                if pdir is not None:
                    tail.insert(0, pdir)

                if tail:
                    return os.path.join(*tail)
                else:
                    return ''

    def check_package(self, package, package_dir):
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise DistutilsFileError(
                      "package directory '%s' does not exist" % package_dir)
            if not os.path.isdir(package_dir):
                raise DistutilsFileError(
                       "supposed package directory '%s' exists, "
                       "but is not a directory" % package_dir)

        # Require __init__.py for all but the "root package"
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py
            else:
                log.warn(("package init file '%s' not found " +
                          "(or not a regular file)"), init_py)

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        if not os.path.isfile(module_file):
            log.warn("file %s (for module %s) not found", module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        self.check_package(package, package_dir)
        module_files = glob(os.path.join(package_dir, "*.py"))
        modules = []
        setup_script = os.path.abspath(self.distribution.script_name)

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                self.debug_print("excluding %s" % setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                (package_dir, checked) = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = 0

            if not checked:
                init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, 1)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))

        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').
        Return a list of tuples (package, module, module_file), just like
        'find_modules()' and 'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        return [module[-1] for module in self.find_all_modules()]

    def get_module_outfile(self, build_dir, package, module):
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)

    def get_outputs(self, include_bytecode=1):
        modules = self.find_all_modules()
        outputs = []
        for (package, module, module_file) in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(importlib.util.cache_from_source(
                        filename, optimization=''))
                if self.optimize > 0:
                    outputs.append(importlib.util.cache_from_source(
                        filename, optimization=self.optimize))

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames
            ]

        return outputs

    def build_module(self, module, module_file, package):
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                  "'package' must be a string (dot-separated), list, or tuple")

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        dir = os.path.dirname(outfile)
        self.mkpath(dir)
        return self.copy_file(module_file, outfile, preserve_mode=0)

    def build_modules(self):
        modules = self.find_modules()
        for (package, module, module_file) in modules:
            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        for package in self.packages:
            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_tuples()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for (package_, module, module_file) in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from distutils.util import byte_compile
        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.
        if self.compile:
            byte_compile(files, optimize=0,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)

class build_py_2to3(build_py, Mixin2to3):
    def run(self):
        self.updated_files = []

        # Base class code
        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        # 2to3
        self.run_2to3(self.updated_files)

        # Remaining base class code
        self.byte_compile(self.get_outputs(include_bytecode=0))

    def build_module(self, module, module_file, package):
        res = build_py.build_module(self, module, module_file, package)
        if res[1]:
            # file was copied
            self.updated_files.append(res[0])
        return res
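
# --------------------------------------------------------------------------
# A minimal sketch, for reference, of the setup() options the command above
# consumes and how it is usually invoked.  The project name, package names,
# and glob patterns below are illustrative assumptions, not taken from this
# file; only the option names themselves come from the code above.
# --------------------------------------------------------------------------
# from distutils.core import setup
#
# setup(
#     name="example",
#     version="1.0",
#     packages=["mypkg"],                       # handled by build_packages()
#     py_modules=["standalone"],                # handled by build_modules()
#     package_data={"mypkg": ["data/*.json"]},  # copied by build_package_data()
# )
#
# Typical invocation, using the flags defined in user_options:
#     python setup.py build_py --build-lib build/lib --compile
# --------------------------------------------------------------------------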

usr/lib/python3.6/site-packages/setuptools/command/build_py.py

from glob import glob
from distutils.util import convert_path
import distutils.command.build_py as orig
import os
import fnmatch
import textwrap
import io
import distutils.errors
import itertools

from setuptools.extern import six
from setuptools.extern.six.moves import map, filter, filterfalse

try:
    from setuptools.lib2to3_ex import Mixin2to3
except ImportError:

    class Mixin2to3:
        def run_2to3(self, files, doctests=True):
            "do nothing"


class build_py(orig.build_py, Mixin2to3):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """

    def finalize_options(self):
        orig.build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = (self.distribution.exclude_package_data or
                                     {})
        if 'data_files' in self.__dict__:
            del self.__dict__['data_files']
        self.__updated_files = []
        self.__doctests_2to3 = []

    def run(self):
        """Build modules, packages, and copy data files to build directory"""
        if not self.py_modules and not self.packages:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.run_2to3(self.__updated_files, False)
        self.run_2to3(self.__updated_files, True)
        self.run_2to3(self.__doctests_2to3, True)

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))

    def __getattr__(self, attr):
        "lazily compute data files"
        if attr == 'data_files':
            self.data_files = self._get_data_files()
            return self.data_files
        return orig.build_py.__getattr__(self, attr)

    def build_module(self, module, module_file, package):
        if six.PY2 and isinstance(package, six.string_types):
            # avoid errors on Python 2 when unicode is passed (#190)
            package = package.split('.')
        outfile, copied = orig.build_py.build_module(self, module, module_file,
                                                     package)
        if copied:
            self.__updated_files.append(outfile)
        return outfile, copied

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def _get_pkg_data_files(self, package):
        # Locate package source directory
        src_dir = self.get_package_dir(package)

        # Compute package build directory
        build_dir = os.path.join(*([self.build_lib] + package.split('.')))

        # Strip directory from globbed filenames
        filenames = [
            os.path.relpath(file, src_dir)
            for file in self.find_data_files(package, src_dir)
        ]
        return package, src_dir, build_dir, filenames

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        patterns = self._get_platform_patterns(
            self.package_data,
            package,
            src_dir,
        )
        globs_expanded = map(glob, patterns)
        # flatten the expanded globs into an iterable of matches
        globs_matches = itertools.chain.from_iterable(globs_expanded)
        glob_files = filter(os.path.isfile, globs_matches)
        files = itertools.chain(
            self.manifest_files.get(package, []),
            glob_files,
        )
        return self.exclude_data_files(package, src_dir, files)

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                srcfile = os.path.join(src_dir, filename)
                outf, copied = self.copy_file(srcfile, target)
                srcfile = os.path.abspath(srcfile)
                if (copied and
                        srcfile in self.distribution.convert_2to3_doctests):
                    self.__doctests_2to3.append(outf)

    def analyze_manifest(self):
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        for path in ei_cmd.filelist.files:
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if path.endswith('.py') and f == oldf:
                    continue  # it's a module, not data
                mf.setdefault(src_dirs[d], []).append(path)

    def get_data_files(self):
        pass  # Lazily compute data files in _get_data_files() function.
    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = orig.build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        with io.open(init_py, 'rb') as f:
            contents = f.read()
        if b'declare_namespace' not in contents:
            raise distutils.errors.DistutilsError(
                "Namespace package problem: %s is a namespace package, but "
                "its\n__init__.py does not call declare_namespace()! Please "
                'fix it.\n(See the setuptools manual under '
                '"Namespace Packages" for details.)\n"' % (package,)
            )
        return init_py

    def initialize_options(self):
        self.packages_checked = {}
        orig.build_py.initialize_options(self)

    def get_package_dir(self, package):
        res = orig.build_py.get_package_dir(self, package)
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        files = list(files)
        patterns = self._get_platform_patterns(
            self.exclude_package_data,
            package,
            src_dir,
        )
        match_groups = (
            fnmatch.filter(files, pattern)
            for pattern in patterns
        )
        # flatten the groups of matches into an iterable of matches
        matches = itertools.chain.from_iterable(match_groups)
        bad = set(matches)
        keepers = (
            fn
            for fn in files
            if fn not in bad
        )
        # ditch dupes
        return list(_unique_everseen(keepers))

    @staticmethod
    def _get_platform_patterns(spec, package, src_dir):
        """
        yield platform-specific path patterns (suitable for glob
        or fn_match) from a glob-based spec (such as
        self.package_data or self.exclude_package_data)
        matching package in src_dir.
        """
        raw_patterns = itertools.chain(
            spec.get('', []),
            spec.get(package, []),
        )
        return (
            # Each pattern has to be converted to a platform-specific path
            os.path.join(src_dir, convert_path(pattern))
            for pattern in raw_patterns
        )


# from Python docs
def _unique_everseen(iterable, key=None):
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    seen = set()
    seen_add = seen.add
    if key is None:
        for element in filterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element
    else:
        for element in iterable:
            k = key(element)
            if k not in seen:
                seen_add(k)
                yield element


def assert_relative(path):
    if not os.path.isabs(path):
        return path
    from distutils.errors import DistutilsSetupError
    msg = textwrap.dedent("""
        Error: setup script specifies an absolute path:

            %s

        setup() arguments must *always* be /-separated paths relative to the
        setup.py directory, *never* absolute paths.
""").lstrip() % path raise DistutilsSetupError(msg) usr/lib/python2.7/site-packages/setuptools/command/build_py.py000064400000022574151031051620020437 0ustar00from glob import glob from distutils.util import convert_path import distutils.command.build_py as orig import os import fnmatch import textwrap import io import distutils.errors import itertools from setuptools.extern import six from setuptools.extern.six.moves import map, filter, filterfalse try: from setuptools.lib2to3_ex import Mixin2to3 except ImportError: class Mixin2to3: def run_2to3(self, files, doctests=True): "do nothing" class build_py(orig.build_py, Mixin2to3): """Enhanced 'build_py' command that includes data files with packages The data files are specified via a 'package_data' argument to 'setup()'. See 'setuptools.dist.Distribution' for more details. Also, this version of the 'build_py' command allows you to specify both 'py_modules' and 'packages' in the same setup operation. """ def finalize_options(self): orig.build_py.finalize_options(self) self.package_data = self.distribution.package_data self.exclude_package_data = (self.distribution.exclude_package_data or {}) if 'data_files' in self.__dict__: del self.__dict__['data_files'] self.__updated_files = [] self.__doctests_2to3 = [] def run(self): """Build modules, packages, and copy data files to build directory""" if not self.py_modules and not self.packages: return if self.py_modules: self.build_modules() if self.packages: self.build_packages() self.build_package_data() self.run_2to3(self.__updated_files, False) self.run_2to3(self.__updated_files, True) self.run_2to3(self.__doctests_2to3, True) # Only compile actual .py files, using our base class' idea of what our # output files are. self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) def __getattr__(self, attr): "lazily compute data files" if attr == 'data_files': self.data_files = self._get_data_files() return self.data_files return orig.build_py.__getattr__(self, attr) def build_module(self, module, module_file, package): if six.PY2 and isinstance(package, six.string_types): # avoid errors on Python 2 when unicode is passed (#190) package = package.split('.') outfile, copied = orig.build_py.build_module(self, module, module_file, package) if copied: self.__updated_files.append(outfile) return outfile, copied def _get_data_files(self): """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" self.analyze_manifest() return list(map(self._get_pkg_data_files, self.packages or ())) def _get_pkg_data_files(self, package): # Locate package source directory src_dir = self.get_package_dir(package) # Compute package build directory build_dir = os.path.join(*([self.build_lib] + package.split('.'))) # Strip directory from globbed filenames filenames = [ os.path.relpath(file, src_dir) for file in self.find_data_files(package, src_dir) ] return package, src_dir, build_dir, filenames def find_data_files(self, package, src_dir): """Return filenames for package's data files in 'src_dir'""" patterns = self._get_platform_patterns( self.package_data, package, src_dir, ) globs_expanded = map(glob, patterns) # flatten the expanded globs into an iterable of matches globs_matches = itertools.chain.from_iterable(globs_expanded) glob_files = filter(os.path.isfile, globs_matches) files = itertools.chain( self.manifest_files.get(package, []), glob_files, ) return self.exclude_data_files(package, src_dir, files) def build_package_data(self): """Copy data files into build directory""" for package, src_dir, 