1 # Copyright 2011 OpenStack Foundation
2 # Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
5 # Licensed under the Apache License, Version 2.0 (the "License"); you may
6 # not use this file except in compliance with the License. You may obtain
7 # a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14 # License for the specific language governing permissions and limitations
"""
Utilities with minimum-depends for use in setup.py
"""
from __future__ import unicode_literals

from distutils.command import install as du_install
from distutils import log
import email
import email.errors
import os
import re
import sys
import warnings

import pkg_resources
import setuptools
from setuptools.command import develop
from setuptools.command import easy_install
from setuptools.command import egg_info
from setuptools.command import install
from setuptools.command import install_scripts
from setuptools.command import sdist

from pbr import extra_files
from pbr import git
from pbr import options
import pbr.pbr_json
from pbr import testr_command
from pbr import version
# Candidate requirements files, consulted in order; the first that exists
# wins (see get_reqs_from_files).
REQUIREMENTS_FILES = ('requirements.txt', 'tools/pip-requires')
# Python-major-version-suffixed variants (e.g. requirements-py3.txt);
# deprecated in favour of environment markers.
PY_REQUIREMENTS_FILES = [x % sys.version_info[0] for x in (
    'requirements-py%d.txt', 'tools/pip-requires-py%d')]
TEST_REQUIREMENTS_FILES = ('test-requirements.txt', 'tools/test-requires')
def get_requirements_files():
    """Return the requirements files to consult.

    The PBR_REQUIREMENTS_FILES environment variable, when set, is a
    comma-separated list of file names that overrides the defaults.
    """
    files = os.environ.get("PBR_REQUIREMENTS_FILES")
    if files:
        return tuple(f.strip() for f in files.split(','))
    # Returns a list composed of:
    # - REQUIREMENTS_FILES with -py2 or -py3 in the name
    #   (e.g. requirements-py3.txt)
    # - REQUIREMENTS_FILES
    return PY_REQUIREMENTS_FILES + list(REQUIREMENTS_FILES)
def append_text_list(config, key, text_list):
    """Append a \n separated list to possibly existing value.

    :param config: mutable mapping updated in place.
    :param key: key whose value the entries are appended to.
    :param text_list: iterable of strings to append.
    """
    new_value = []
    current_value = config.get(key, "")
    # Keep the existing value (if any) as the first entry so nothing is lost.
    if current_value:
        new_value.append(current_value)
    new_value.extend(text_list)
    config[key] = '\n'.join(new_value)
76 def _any_existing(file_list):
77 return [f for f in file_list if os.path.exists(f)]
# Get requirements from the first file that exists
def get_reqs_from_files(requirements_files):
    """Return the raw requirement lines of the first existing file.

    :param requirements_files: candidate file names, in priority order.
    :return: list of lines from the first file found, or [] if none exist.
    """
    existing = _any_existing(requirements_files)

    # TODO(stephenfin): Remove this in pbr 6.0+
    deprecated = [f for f in existing if f in PY_REQUIREMENTS_FILES]
    if deprecated:
        warnings.warn('Support for \'-pyN\'-suffixed requirements files is '
                      'removed in pbr 5.0 and these files are now ignored. '
                      'Use environment markers instead. Conflicting files: '
                      '%r' % deprecated,
                      DeprecationWarning)

    # The -pyN files are ignored entirely; only the plain files count.
    existing = [f for f in existing if f not in PY_REQUIREMENTS_FILES]
    for requirements_file in existing:
        with open(requirements_file, 'r') as fil:
            return fil.read().split('\n')

    return []
def parse_requirements(requirements_files=None, strip_markers=False):
    """Parse requirements files into a list of requirement specifiers.

    :param requirements_files: file names to read; defaults to the result
        of get_requirements_files().
    :param strip_markers: when True, drop any environment marker (the part
        after ';') from each requirement.
    :return: list of requirement strings suitable for install_requires.
    """
    if requirements_files is None:
        requirements_files = get_requirements_files()

    def egg_fragment(match):
        # take a versioned egg fragment and return a
        # versioned package requirement e.g.
        # nova-1.2.3 becomes nova>=1.2.3
        return re.sub(r'([\w.]+)-([\w.-]+)',
                      r'\1>=\2',
                      match.groups()[-1])

    requirements = []
    for line in get_reqs_from_files(requirements_files):
        # Ignore comments and blank lines
        if (not line.strip()) or line.startswith('#'):
            continue

        # Ignore index URL lines
        if re.match(r'^\s*(-i|--index-url|--extra-index-url|--find-links).*',
                    line):
            continue

        # Handle nested requirements files such as:
        # -r other-requirements.txt
        if line.startswith('-r'):
            req_file = line.partition(' ')[2]
            requirements += parse_requirements(
                [req_file], strip_markers=strip_markers)
            continue

        try:
            project_name = pkg_resources.Requirement.parse(line).project_name
        except ValueError:
            project_name = None

        # For the requirements list, we need to inject only the portion
        # after egg= so that distutils knows the package it's looking for
        # such as:
        # -e git://github.com/openstack/nova/master#egg=nova
        # -e git://github.com/openstack/nova/master#egg=nova-1.2.3
        # -e git+https://foo.com/zipball#egg=bar&subdirectory=baz
        if re.match(r'\s*-e\s+', line):
            line = re.sub(r'\s*-e\s+.*#egg=([^&]+).*$', egg_fragment, line)
        # such as:
        # http://github.com/openstack/nova/zipball/master#egg=nova
        # http://github.com/openstack/nova/zipball/master#egg=nova-1.2.3
        # git+https://foo.com/zipball#egg=bar&subdirectory=baz
        elif re.match(r'\s*(https?|git(\+(https|ssh))?):', line):
            line = re.sub(r'\s*(https?|git(\+(https|ssh))?):.*#egg=([^&]+).*$',
                          egg_fragment, line)
        # -f lines are for index locations, and don't get used here
        elif re.match(r'\s*-f\s+', line):
            line = None
            reason = 'Index Location'

        if line is not None:
            # Strip any trailing comment from the requirement itself.
            line = re.sub('#.*$', '', line)
            if strip_markers:
                semi_pos = line.find(';')
                if semi_pos < 0:
                    semi_pos = None
                line = line[:semi_pos]
            requirements.append(line)
        else:
            log.info(
                '[pbr] Excluding %s: %s' % (project_name, reason))

    return requirements
def parse_dependency_links(requirements_files=None):
    """Extract dependency_links entries from requirements files.

    :param requirements_files: file names to read; defaults to the result
        of get_requirements_files().
    :return: list of alternate package location URLs.
    """
    if requirements_files is None:
        requirements_files = get_requirements_files()
    dependency_links = []
    # dependency_links inject alternate locations to find packages listed
    # in requirements
    for line in get_reqs_from_files(requirements_files):
        # skip comments and blank lines
        if re.match(r'(\s*#)|(\s*$)', line):
            continue
        # lines with -e or -f need the whole line, minus the flag
        if re.match(r'\s*-[ef]\s+', line):
            dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
        # lines that are only urls can go in unmolested
        elif re.match(r'\s*(https?|git(\+(https|ssh))?):', line):
            dependency_links.append(line)
    return dependency_links
class InstallWithGit(install.install):
    """Extracts ChangeLog and AUTHORS from git then installs.

    This is useful for e.g. readthedocs where the package is
    installed and then docs built.
    """

    command_name = 'install'

    def run(self):
        # Generate ChangeLog/AUTHORS from git metadata before installing.
        _from_git(self.distribution)
        return install.install.run(self)
class LocalInstall(install.install):
    """Runs python setup.py install in a sensible manner.

    Force a non-egg installed in the manner of
    single-version-externally-managed, which allows us to install manpages
    and config files.
    """

    command_name = 'install'

    def run(self):
        # Generate ChangeLog/AUTHORS from git metadata, then delegate to the
        # plain distutils install (bypassing setuptools' egg install).
        _from_git(self.distribution)
        return du_install.install.run(self)
class TestrTest(testr_command.Testr):
    """Make setup.py test do the right thing."""

    command_name = 'test'
    description = 'DEPRECATED: Run unit tests using testr'

    def run(self):
        warnings.warn('testr integration is deprecated in pbr 4.2 and will '
                      'be removed in a future release. Please call your test '
                      'runner directly.',
                      DeprecationWarning)

        # Can't use super - base class old-style class
        testr_command.Testr.run(self)
class LocalRPMVersion(setuptools.Command):
    __doc__ = """Output the rpm *compatible* version string of this package"""
    description = __doc__

    # No command-line options; initialize/finalize are therefore no-ops.
    user_options = []
    command_name = "rpm_version"

    def run(self):
        log.info("[pbr] Extracting rpm version")
        name = self.distribution.get_name()
        print(version.VersionInfo(name).semantic_version().rpm_string())

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass
class LocalDebVersion(setuptools.Command):
    __doc__ = """Output the deb *compatible* version string of this package"""
    description = __doc__

    # No command-line options; initialize/finalize are therefore no-ops.
    user_options = []
    command_name = "deb_version"

    def run(self):
        log.info("[pbr] Extracting deb version")
        name = self.distribution.get_name()
        print(version.VersionInfo(name).semantic_version().debian_string())

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass
def have_testr():
    """Report whether the testr test runner is available."""
    return testr_command.have_testr


try:
    from nose import commands

    class NoseTest(commands.nosetests):
        """Fallback test runner if testr is a no-go."""

        command_name = 'test'
        description = 'DEPRECATED: Run unit tests using nose'

        def run(self):
            warnings.warn('nose integration in pbr is deprecated. Please use '
                          'the native nose setuptools configuration or call '
                          'nose directly.',
                          DeprecationWarning)

            # Can't use super - base class old-style class
            commands.nosetests.run(self)

    _have_nose = True

except ImportError:
    _have_nose = False


def have_nose():
    """Report whether the (optional) nose test runner could be imported."""
    return _have_nose
306 _wsgi_text = """#PBR Generated from %(group)r
310 from %(module_name)s import %(import_target)s
312 if __name__ == "__main__":
316 import wsgiref.simple_server as wss
318 parser = argparse.ArgumentParser(
319 description=%(import_target)s.__doc__,
320 formatter_class=argparse.ArgumentDefaultsHelpFormatter,
321 usage='%%(prog)s [-h] [--port PORT] [--host IP] -- [passed options]')
322 parser.add_argument('--port', '-p', type=int, default=8000,
323 help='TCP port to listen on')
324 parser.add_argument('--host', '-b', default='',
325 help='IP to bind the server to')
326 parser.add_argument('args',
327 nargs=argparse.REMAINDER,
328 metavar='-- [passed options]',
329 help="'--' is the separator of the arguments used "
330 "to start the WSGI server and the arguments passed "
331 "to the WSGI application.")
332 args = parser.parse_args()
334 if args.args[0] == '--':
337 parser.error("unrecognized arguments: %%s" %% ' '.join(args.args))
338 sys.argv[1:] = args.args
339 server = wss.make_server(args.host, args.port, %(invoke_target)s())
342 print("STARTING test server %(module_name)s.%(invoke_target)s")
343 url = "http://%%s:%%d/" %% (server.server_name, server.server_port)
344 print("Available at %%s" %% url)
345 print("DANGER! For testing only, do not use in production")
349 server.serve_forever()
352 app_lock = threading.Lock()
355 if application is None:
356 application = %(invoke_target)s()
360 _script_text = """# PBR Generated from %(group)r
364 from %(module_name)s import %(import_target)s
367 if __name__ == "__main__":
368 sys.exit(%(invoke_target)s())
# the following allows us to specify different templates per entry
# point group when generating pbr scripts.
ENTRY_POINTS_MAP = {
    'console_scripts': _script_text,
    'gui_scripts': _script_text,
    'wsgi_scripts': _wsgi_text
}
def generate_script(group, entry_point, header, template):
    """Generate the script based on the template.

    :param str group:
        The entry-point group name, e.g., "console_scripts".
    :param entry_point:
        The entry point: must expose ``module_name`` and ``attrs`` (one or
        two attribute path components).
    :param str header:
        The first line of the script, e.g., "!#/usr/bin/env python".
    :param str template:
        The script template.
    :returns:
        The templated script content
    :rtype:
        str
    :raises ValueError: if the entry point target is not 'func' or
        'Class.class_method'.
    """
    if not entry_point.attrs or len(entry_point.attrs) > 2:
        raise ValueError("Script targets must be of the form "
                         "'func' or 'Class.class_method'.")
    script_text = template % dict(
        group=group,
        module_name=entry_point.module_name,
        import_target=entry_point.attrs[0],
        invoke_target='.'.join(entry_point.attrs),
    )
    return header + script_text
def override_get_script_args(
        dist, executable=os.path.normpath(sys.executable), is_wininst=False):
    """Override entrypoints console_script."""
    script_header = easy_install.get_script_header("", executable, is_wininst)
    for ep_group, ep_template in ENTRY_POINTS_MAP.items():
        entry_map = dist.get_entry_map(ep_group)
        for script_name, entry in entry_map.items():
            script = generate_script(
                ep_group, entry, script_header, ep_template)
            yield (script_name, script)
class LocalDevelop(develop.develop):

    command_name = 'develop'

    def install_wrapper_scripts(self, dist):
        # On Windows, defer entirely to the stock setuptools behaviour.
        if sys.platform == 'win32':
            return develop.develop.install_wrapper_scripts(self, dist)
        if self.exclude_scripts:
            return
        # Elsewhere, emit pbr-generated scripts instead of wrappers.
        for script_args in override_get_script_args(dist):
            self.write_script(*script_args)
class LocalInstallScripts(install_scripts.install_scripts):
    """Intercepts console scripts entry_points."""

    command_name = 'install_scripts'

    def _make_wsgi_scripts_only(self, dist, executable, is_wininst):
        # Generate only the wsgi_scripts entry points (used when building a
        # wheel, which otherwise skips entry-point script generation).
        header = easy_install.get_script_header("", executable, is_wininst)
        wsgi_script_template = ENTRY_POINTS_MAP['wsgi_scripts']
        for name, ep in dist.get_entry_map('wsgi_scripts').items():
            content = generate_script(
                'wsgi_scripts', ep, header, wsgi_script_template)
            self.write_script(name, content)

    def run(self):
        import distutils.command.install_scripts

        self.run_command("egg_info")
        if self.distribution.scripts:
            # run first to set up self.outfiles
            distutils.command.install_scripts.install_scripts.run(self)
        else:
            self.outfiles = []

        ei_cmd = self.get_finalized_command("egg_info")
        dist = pkg_resources.Distribution(
            ei_cmd.egg_base,
            pkg_resources.PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name, ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        executable = getattr(
            bs_cmd, 'executable', easy_install.sys_executable)
        is_wininst = getattr(
            self.get_finalized_command("bdist_wininst"), '_is_running', False
        )

        if 'bdist_wheel' in self.distribution.have_run:
            # We're building a wheel which has no way of generating mod_wsgi
            # scripts for us. Let's build them.
            # NOTE(sigmavirus24): This needs to happen here because, as the
            # comment below indicates, no_ep is True when building a wheel.
            self._make_wsgi_scripts_only(dist, executable, is_wininst)

        if self.no_ep:
            # no_ep is True if we're installing into an .egg file or building
            # a .whl file, in those cases, we do not want to build all of the
            # entry-points listed for this package.
            return

        if os.name != 'nt':
            get_script_args = override_get_script_args
        else:
            get_script_args = easy_install.get_script_args
            executable = '"%s"' % executable

        for args in get_script_args(dist, executable, is_wininst):
            self.write_script(*args)
class LocalManifestMaker(egg_info.manifest_maker):
    """Add any files that are in git and some standard sensible files."""

    def _add_pbr_defaults(self):
        # Template lines applied on top of the distutils defaults.
        for template_line in [
            'include AUTHORS',
            'include ChangeLog',
            'exclude .gitignore',
            'exclude .gitreview',
            'global-exclude *.pyc'
        ]:
            self.filelist.process_template_line(template_line)

    def add_defaults(self):
        """Add all the default files to self.filelist:

        Extends the functionality provided by distutils to also included
        additional sane defaults, such as the ``AUTHORS`` and ``ChangeLog``
        files generated by *pbr*.

        Warns if (``README`` or ``README.txt``) or ``setup.py`` are missing;
        everything else is optional.
        """
        option_dict = self.distribution.get_option_dict('pbr')

        sdist.sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        self.filelist.extend(extra_files.get_extra_files())
        should_skip = options.get_boolean_option(option_dict, 'skip_git_sdist',
                                                 'SKIP_GIT_SDIST')
        if not should_skip:
            rcfiles = git._find_git_files()
            if rcfiles:
                self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()
        ei_cmd = self.get_finalized_command('egg_info')
        self._add_pbr_defaults()
        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
class LocalEggInfo(egg_info.egg_info):
    """Override the egg_info command to regenerate SOURCES.txt sensibly."""

    command_name = 'egg_info'

    def find_sources(self):
        """Generate SOURCES.txt only if there isn't one already.

        If we are in an sdist command, then we always want to update
        SOURCES.txt. If we are not in an sdist command, then it doesn't
        matter one flip, and is actually destructive.
        However, if we're in a git context, it's always the right thing to do
        to recreate SOURCES.txt
        """
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        if (not os.path.exists(manifest_filename) or
                os.path.exists('.git') or
                'sdist' in sys.argv):
            log.info("[pbr] Processing SOURCES.txt")
            mm = LocalManifestMaker(self.distribution)
            mm.manifest = manifest_filename
            mm.run()
            self.filelist = mm.filelist
        else:
            log.info("[pbr] Reusing existing SOURCES.txt")
            self.filelist = egg_info.FileList()
            with open(manifest_filename, 'r') as fil:
                for entry in fil.read().split('\n'):
                    self.filelist.append(entry)
def _from_git(distribution):
    """Write ChangeLog and AUTHORS files from git metadata.

    Honours the pbr options in the distribution's setup.cfg.
    """
    option_dict = distribution.get_option_dict('pbr')
    changelog = git._iter_log_oneline()
    if changelog:
        changelog = git._iter_changelog(changelog)
    git.write_git_changelog(option_dict=option_dict, changelog=changelog)
    git.generate_authors(option_dict=option_dict)
class LocalSDist(sdist.sdist):
    """Builds the ChangeLog and Authors files from VC first."""

    command_name = 'sdist'

    def checking_reno(self):
        """Ensure reno is installed and configured.

        We can't run reno-based commands if reno isn't installed/available, and
        don't want to if the user isn't using it.
        """
        # Cache the answer across calls.
        if hasattr(self, '_has_reno'):
            return self._has_reno

        option_dict = self.distribution.get_option_dict('pbr')
        should_skip = options.get_boolean_option(option_dict, 'skip_reno',
                                                 'SKIP_GENERATE_RENO')
        if should_skip:
            self._has_reno = False
            return False

        try:
            # versions of reno without this module will not have the required
            # feature, hence the import
            from reno import setup_command  # noqa
        except ImportError:
            log.info('[pbr] reno was not found or is too old. Skipping '
                     'release notes')
            self._has_reno = False
            return False

        conf, output_file, cache_file = setup_command.load_config(
            self.distribution)

        if not os.path.exists(os.path.join(conf.reporoot, conf.notespath)):
            log.info('[pbr] reno does not appear to be configured. Skipping '
                     'release notes')
            self._has_reno = False
            return False

        self._files = [output_file, cache_file]

        log.info('[pbr] Generating release notes')
        self._has_reno = True

        return True

    sub_commands = [('build_reno', checking_reno)] + sdist.sdist.sub_commands

    def run(self):
        _from_git(self.distribution)
        # sdist.sdist is an old style class, can't use super()
        sdist.sdist.run(self)

    def make_distribution(self):
        # This is included in make_distribution because setuptools doesn't use
        # 'get_file_list'. As such, this is the only hook point that runs after
        # the commands in 'sub_commands'
        if self.checking_reno():
            self.filelist.extend(self._files)
        sdist.sdist.make_distribution(self)
631 from pbr import builddoc
633 # Import the symbols from their new home so the package API stays
635 LocalBuildDoc = builddoc.LocalBuildDoc
def _get_increment_kwargs(git_dir, tag):
    """Calculate the sort of semver increment needed from git history.

    Every commit from HEAD to tag is consider for Sem-Ver metadata lines.
    See the pbr docs for their syntax.

    :return: a dict of kwargs for passing into SemanticVersion.increment.
    """
    result = {}
    if tag:
        version_spec = tag + "..HEAD"
    else:
        version_spec = "HEAD"
    # Get the raw body of the commit messages so that we don't have to
    # parse out any formatting whitespace and to avoid user settings on
    # git log output affecting out ability to have working sem ver headers.
    changelog = git._run_git_command(['log', '--pretty=%B', version_spec],
                                     git_dir)
    header_len = len('sem-ver:')
    commands = [line[header_len:].strip() for line in changelog.split('\n')
                if line.lower().startswith('sem-ver:')]
    symbols = set()
    for command in commands:
        symbols.update([symbol.strip() for symbol in command.split(',')])

    def _handle_symbol(symbol, symbols, impact):
        # Translate a recognised symbol into an increment kwarg, consuming it.
        if symbol in symbols:
            result[impact] = True
            symbols.discard(symbol)
    _handle_symbol('bugfix', symbols, 'patch')
    _handle_symbol('feature', symbols, 'minor')
    _handle_symbol('deprecation', symbols, 'minor')
    _handle_symbol('api-break', symbols, 'major')
    for symbol in symbols:
        log.info('[pbr] Unknown Sem-Ver symbol %r' % symbol)
    # We don't want patch in the kwargs since it is not a keyword argument -
    # its the default minimum increment.
    result.pop('patch', None)
    return result
def _get_revno_and_last_tag(git_dir):
    """Return the commit data about the most recent tag.

    We use git-describe to find this out, but if there are no
    tags then we fall back to counting commits since the beginning
    of time.

    :return: (tag, distance) where tag is '' when no semver tag exists.
    """
    changelog = git._iter_log_oneline(git_dir=git_dir)
    row_count = 0
    for row_count, (ignored, tag_set, ignored) in enumerate(changelog):
        version_tags = set()
        semver_to_tag = dict()
        for tag in list(tag_set):
            try:
                semver = version.SemanticVersion.from_pip_string(tag)
                semver_to_tag[semver] = tag
                version_tags.add(semver)
            except Exception:
                # Ignore tags that are not valid pip/semver versions.
                pass
        if version_tags:
            return semver_to_tag[max(version_tags)], row_count
    return "", row_count
def _get_version_from_git_target(git_dir, target_version):
    """Calculate a version from a target version in git_dir.

    This is used for untagged versions only. A new version is calculated as
    necessary based on git metadata - distance to tags, current hash, contents
    of commit messages.

    :param git_dir: The git directory we're working from.
    :param target_version: If None, the last tagged version (or 0 if there are
        no tags yet) is incremented as needed to produce an appropriate target
        version following semver rules. Otherwise target_version is used as a
        constraint - if semver rules would result in a newer version then an
        exception is raised.
    :return: A semver version object.
    """
    tag, distance = _get_revno_and_last_tag(git_dir)
    last_semver = version.SemanticVersion.from_pip_string(tag or '0')
    if distance == 0:
        new_version = last_semver
    else:
        new_version = last_semver.increment(
            **_get_increment_kwargs(git_dir, tag))
    if target_version is not None and new_version > target_version:
        raise ValueError(
            "git history requires a target version of %(new)s, but target "
            "version is %(target)s" %
            dict(new=new_version, target=target_version))
    if distance == 0:
        return last_semver
    new_dev = new_version.to_dev(distance)
    if target_version is not None:
        target_dev = target_version.to_dev(distance)
        if target_dev > new_dev:
            return target_dev
    return new_dev
def _get_version_from_git(pre_version=None):
    """Calculate a version string from git.

    If the revision is tagged, return that. Otherwise calculate a semantic
    version description of the tree.

    The number of revisions since the last tag is included in the dev counter
    in the version for untagged versions.

    :param pre_version: If supplied use this as the target version rather than
        inferring one from the last tag + commit messages.
    """
    git_dir = git._run_git_functions()
    if git_dir:
        try:
            tagged = git._run_git_command(
                ['describe', '--exact-match'], git_dir,
                throw_on_error=True).replace('-', '.')
            target_version = version.SemanticVersion.from_pip_string(tagged)
        except Exception:
            if pre_version:
                # not released yet - use pre_version as the target
                target_version = version.SemanticVersion.from_pip_string(
                    pre_version)
            else:
                # not released yet - just calculate from git history
                target_version = None
        result = _get_version_from_git_target(git_dir, target_version)
        return result.release_string()
    # If we don't know the version, return an empty string so at least
    # the downstream users of the value always have the same type of
    # object to work with.
    try:
        return unicode()
    except NameError:
        return ''
785 def _get_version_from_pkg_metadata(package_name):
786 """Get the version from package metadata if present.
788 This looks for PKG-INFO if present (for sdists), and if not looks
789 for METADATA (for wheels) and failing that will return None.
791 pkg_metadata_filenames = ['PKG-INFO', 'METADATA']
793 for filename in pkg_metadata_filenames:
795 pkg_metadata_file = open(filename, 'r')
796 except (IOError, OSError):
799 pkg_metadata = email.message_from_file(pkg_metadata_file)
800 except email.errors.MessageError:
803 # Check to make sure we're in our own dir
804 if pkg_metadata.get('Name', None) != package_name:
806 return pkg_metadata.get('Version', None)
def get_version(package_name, pre_version=None):
    """Get the version of the project.

    First, try getting it from PKG-INFO or METADATA, if it exists. If it does,
    that means we're in a distribution tarball or that install has happened.
    Otherwise, if there is no PKG-INFO or METADATA file, pull the version
    from git.

    We do not support setup.py version sanity in git archive tarballs, nor do
    we support packagers directly sucking our git repo into theirs. We expect
    that a source tarball be made from our git repo - or that if someone wants
    to make a source tarball from a fork of our repo with additional tags in it
    that they understand and desire the results of doing that.

    :param pre_version: The version field from setup.cfg - if set then this
        version will be the next release.
    :raises Exception: if no version can be determined at all.
    """
    # Environment override wins over everything else.
    version = os.environ.get(
        "PBR_VERSION",
        os.environ.get("OSLO_PACKAGE_VERSION", None))
    if version:
        return version
    version = _get_version_from_pkg_metadata(package_name)
    if version:
        return version
    version = _get_version_from_git(pre_version)
    # Handle http://bugs.python.org/issue11638
    # version will either be an empty unicode string or a valid
    # unicode version string, but either way it's unicode and needs to
    # be encoded.
    if sys.version_info[0] == 2:
        version = version.encode('utf-8')
    if version:
        return version
    raise Exception("Versioning for this project requires either an sdist"
                    " tarball, or access to an upstream git repository."
                    " It's also possible that there is a mismatch between"
                    " the package name in setup.cfg and the argument given"
                    " to pbr.version.VersionInfo. Project name {name} was"
                    " given, but was not able to be found.".format(
                        name=package_name))
# This is added because pbr uses pbr to install itself. That means that
# any changes to the egg info writer entrypoints must be forward and
# backward compatible. This maintains the pbr.packaging.write_pbr_json
# API.
write_pbr_json = pbr.pbr_json.write_pbr_json