diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..e8427b8 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +dockermake/_version.py export-subst diff --git a/.gitignore b/.gitignore index 9bea4a8..067409f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,7 @@ -### Example user template template -### Example user template docker_makefiles Dockerfile.fail _docker_make_tmp +dockerfile.fail # IntelliJ project files .idea diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..66a30eb --- /dev/null +++ b/.travis.yml @@ -0,0 +1,25 @@ +language: python + +python: +- '2.7' + +sudo: required + +services: +- docker + +before_install: +- pip install pytest + +install: +- pip install . +script: cd test && py.test + +deploy: + provider: pypi + user: avirshup + password: + secure: TYbx42AboT7VtAy+YCbmRkEraJhkDYMX4uQeEjZe+JNqsdFQ+4YWkEjkTCiMg2GO944W75NqBe3Cq6+UN3GYscM6QSZ/N+A/ME0V4JnC/N6AVPo0M+ns0lvBKm34dsafjWozf0J5Jwt5pK8UBByR6fEru5SvZej2IAow2cZzJRATFpq5YbVBuS77zDOZKk3526xzqGq1U/Ay/e/ar/ZTVTkdY78PDC3pLSs6aie+LtJJee80czUkMTXK8CTQz3g1ni37tNVhYWKdZjAdnfSbJWP8BB/SDW+/TBaKAHBhmTRbFWhiPJKvhFmqSt7JG7Nzu0zWqqh+blgXiQjseWmJB7hbPvr4g7lIHtUYdjTPPvQXYRt+6TNAm6N0Lx/gMPLnfN9t1VKv9ZAtgM8jKLoY+jBmpmZyx5QeO+clsLOqCh+wkeJUULtHJWeSQ6mz5PlgDgVu3gx6/d21uR1I7bHbohZAQ707J1bDhzjsolbQ+NzyMYlVuEoJs0NVnGTVR7q5NzjGSgYODFdbf1QZXeB/KeZmjskAzmNsUe1sGQsPIA4KtLS+PAYDdCJiH1i54/yaXF7Uj4g+vpdrOMKaVyEdtaS8yeiAcH18tDIqPwkWCFxboLcSreV4FcTrBocwUwbJhznC5vq2YgOKUf8DVtv+YeC4KcTLUufm0ybd24rbvW0= + on: + tags: true + repo: avirshup/DockerMake diff --git a/LICENSE b/LICENSE index 1d80f04..85ef87f 100644 --- a/LICENSE +++ b/LICENSE @@ -1,22 +1,201 @@ -Copyright (c) 2015, Autodesk Inc. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. 
Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..9f70142 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +include versioneer.py +include dockermake/_version.py diff --git a/README.md b/README.md index af7046a..07a7dcb 100644 --- a/README.md +++ b/README.md @@ -1,29 +1,54 @@ # Docker-make -Build and manage stacks of docker images - a dependency graph for Dockerfiles +Build and manage stacks of docker images - a dependency graph for Docker images +[![Build Status](https://travis-ci.org/avirshup/DockerMake.svg?branch=master)](https://travis-ci.org/avirshup/DockerMake) + Table of Contents ================= + * [Install](#Install) + * [Run it](#Run-it) * [What you can do with it](#what-you-can-do-with-it) * [Example](#example) * [Writing DockerMake\.yaml](#writing-dockermakeyaml) * [Requirements](#requirements) * [Command line usage](#command-line-usage) + +### Install + +Requires [Docker](https://www.docker.com/products/docker), and Python (2.7 or 3.4+). + +``` +pip install git+https://github.com/autodesk/DockerMake +``` + +This will install the command line tool, `docker-make`, and its supporting python package, which you can import as `import dockermake`. 
+ + +### Run it + +To build some illustrative examples, try running: + +```bash +wget https://raw.githubusercontent.com/autodesk/DockerMake/master/example/DockerMake.yml + +docker-make --list +docker-make data_science --repo docker.io/myusername --tag testbuild +``` + + ### What you can do with it + * **New**: Build an artifact (such as an executable or library) in one image, then copy it into a smaller image for deployment + * **New**: easily invalidate the docker image cache at an arbitrary layer * Define small pieces of configuration or functionality, then mix them together into production docker images. - * "Inherit" from multiple image builds + * "Inherit" Dockerfile instructions from multiple sources * Easily manage images that pull files from multiple directories on your filesystem - * Rebuild an entire stack of images as needed with a single command + * Easily manage images that pull binaries from other _docker images_ that you've defined + * Build and push an entire stack of images with a single command -**How is this different from docker-compose?**
`docker-make` automates and manages the process of building docker images. `docker-compose` spins up containers and links them to make serivces. - -**How is this different from the FROM command in Dockerfiles?** - 1. Using the `requires` field, you can inherit from multiple images. - 2. You can create builds that reference multiple directories on your filesystem using the `build_directory` keyword. - 3. The builds are not tied to any image's tag or repository - when you build an image with `docker-make`, it will be up-to-date. ### Example -[Click here to see a production-level example.](https://github.com/Autodesk/molecular-design-toolkit/blob/master/DockerMakefiles/DockerMake.yml) +[Click here to see how we're using this in production.](https://github.com/Autodesk/molecular-design-toolkit/tree/master/DockerMakefiles) This example builds a single docker image called `data_science`. It does this by mixing together three components: `devbase` (the base image), `airline_data` (a big CSV file), and `python_image` (a python installation). `docker-make` will create an image that combines all of these components. @@ -62,7 +87,7 @@ data_science: To build an image called `alice/data_science`, you can run: ```bash -docker-make.py data_science --repository alice +docker-make data_science --repository alice ``` which will create an image with all the commands in `python_image` and `airline_data`. @@ -77,18 +102,29 @@ Here's the dependency graph and generated Dockerfiles: The idea is to write dockerfile commands for each specific piece of functionality in the `build` field, and "inherit" all other functionality from a list of other components that your image `requires`. If you need to add files with the ADD and COPY commands, specify the root directory for those files with `build_directory`. Your tree of "requires" must have _exactly one_ unique named base image in the `FROM` field. 
```yaml [image_name]: - build_directory: [relative path where the ADD and COPY commands will look for files] requires: - - [other image name] - - [yet another image name] + - [other image name] + - [yet another image name] + [...] FROM: [named_base_image] build: | - RUN [something] - ADD [something else] - [Dockerfile commands go here] - -[other image name]: ... -[yet another image name]: ... + RUN [something] + ADD [something else] + [Dockerfile commands go here] + build_directory: [path where the ADD and COPY commands will look for files] + # note that the "build_directory" path can be relative or absolute. + # if it's relative, it's interpreted relative to DockerMake.yml's directory + copy_from: # Note: the copy_from commands will always run AFTER any build commands + [source_image]: + [source path1]:[destination path1] + [source path2]:[destination path2] + [...] + [source_image_2]: + [...] + + +[other image name]: [...] +[...] ``` @@ -101,12 +137,12 @@ eval $(docker-machine env [machine-name]) ### Command line usage ``` -usage: docker-make.py [-h] [-f MAKEFILE] [-a] [-l] - [--requires [REQUIRES [REQUIRES ...]]] [--name NAME] - [-p] [-n] [--pull] [--no-cache] - [--repository REPOSITORY] [--tag TAG] - [--push-to-registry] [--help-yaml] - [TARGETS [TARGETS ...]] +usage: docker-make [-h] [-f MAKEFILE] [-a] [-l] + [--requires [REQUIRES [REQUIRES ...]]] [--name NAME] [-p] + [-n] [--pull] [--no-cache] [--bust-cache BUST_CACHE] + [--clear-copy-cache] [--repository REPOSITORY] [--tag TAG] + [--push-to-registry] [--version] [--help-yaml] + [TARGETS [TARGETS ...]] NOTE: Docker environmental variables must be set. 
For a docker-machine, run `eval $(docker-machine env [machine-name])` @@ -127,7 +163,7 @@ Choosing what to build: --name NAME Name for custom docker images (requires --requires) Dockerfiles: - -p, --print_dockerfiles + -p, --print-dockerfiles, --print_dockerfiles Print out the generated dockerfiles named `Dockerfile.[image]` -n, --no_build Only print Dockerfiles, don't build them. Implies @@ -136,6 +172,12 @@ Dockerfiles: Image caching: --pull Always try to pull updated FROM images --no-cache Rebuild every layer + --bust-cache BUST_CACHE + Force docker to rebuilt all layers in this image. You + can bust multiple image layers by passing --bust-cache + multiple times. + --clear-copy-cache, --clear-cache + Remove docker-make's cache of files for `copy_from`. Repositories and tags: --repository REPOSITORY, -r REPOSITORY, -u REPOSITORY @@ -157,10 +199,11 @@ Repositories and tags: to dockerhub.com, use index.docker.io as the registry) Help: + --version Print version and exit. --help-yaml Print summary of YAML file format and exit. ``` -Written by Aaron Virshup, Bio/Nano Research Group, Autodesk Research +Written by Aaron Virshup, BioNano Group at Autodesk -Copyright (c) 2016, Autodesk Inc. Released under the simplified BSD license. +Copyright (c) 2015-2017, Autodesk Inc. Released under the Apache 2.0 License. diff --git a/docker-make.py b/docker-make.py deleted file mode 100755 index 4a2123c..0000000 --- a/docker-make.py +++ /dev/null @@ -1,536 +0,0 @@ -#!/usr/bin/env python2.7 -# Copyright 2016 Autodesk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -""" -Multiple inheritance for your dockerfiles. -Requires: python 2.7, docker-py, pyyaml (RUN: easy_install pip; pip install docker-py pyyaml) -""" -import sys -import os -import textwrap -from collections import OrderedDict -from io import StringIO, BytesIO -import argparse -import pprint - -import docker -import docker.utils -import yaml - - -class DockerMaker(object): - def __init__(self, makefile, repository=None, - build_images=True, - print_dockerfiles=False, - no_cache=False, - tag=None, - pull=False, - buildargs=None): - - self._sources = set() - self.makefile_path = makefile - self.img_defs = self.parse_yaml(self.makefile_path) - self.all_targets = self.img_defs.pop('_ALL_', None) - - # Connect to docker daemon if necessary - if build_images: - connection = docker.utils.kwargs_from_env() - try: - connection['tls'].assert_hostname = False - except KeyError as e: - print('WARNING: Key error %s' % (e)) - finally: - self.client = docker.Client(**connection) - else: - self.client = None - - if repository and repository[-1] not in '/:': - self.repo = repository + '/' - elif repository is None: - self.repo = '' - else: - self.repo = repository - self.tag = tag - self.build_images = build_images - self.print_dockerfiles = print_dockerfiles - self.pull = pull - self.no_cache = no_cache - - self.buildargs = {} - fname = buildargs - print 'READING %s' % os.path.expanduser(fname) - with open(fname, 'r') as yaml_file: - self.buildargs = yaml.load(yaml_file) - # - print 'buildargs dict `%s`' % self.buildargs - - def parse_yaml(self, filename): - fname = os.path.expanduser(filename) - print 'READING %s' % os.path.expanduser(fname) - if fname in self._sources: - raise ValueError('Circular _SOURCE_') - self._sources.add(fname) - - with open(fname, 'r') as yaml_file: - yamldefs = yaml.load(yaml_file) - - sourcedefs = {} - for s in yamldefs.get('_SOURCES_', []): - 
sourcedefs.update(self.parse_yaml(s)) - - sourcedefs.update(yamldefs) - return sourcedefs - - def build(self, image): - """ - Drives the build of the final image - get the list of steps and execute them. - :param image: name of the image from the yaml file to build - :return: final tagged image name - """ - print 'docker-make starting build for %s' % image - build_steps = self.generate_build_order(image) - for istep, step in enumerate(build_steps): - print ' **** DockerMake Step %d/%d: %s ***' % (istep + 1, len(build_steps), ','.join(step.images)) - print ' * Build directory: %s' % step.build_dir - print ' * Target image name: %s' % step.tag - dockerfile = '\n'.join(step.dockerfile) - - # build the image - if self.build_images: - self.build_step(step, dockerfile) - - # Dump the dockerfile to a file - if self.print_dockerfiles: - if not os.path.exists('docker_makefiles'): - os.makedirs('docker_makefiles') - if '/' in step.tag: - filename = 'docker_makefiles/Dockerfile.%s' % image - else: - filename = 'docker_makefiles/Dockerfile.%s' % step.tag - with open(filename, 'w') as dfout: - print >> dfout, dockerfile - - return step.tag - - def build_step(self, step, dockerfile): - """ - Drives an individual build step. Build steps are separated by build_directory. - If a build has zero one or less build_directories, it will be built in a single - step. 
- """ - # set up the build context - build_args = dict(decode=True, tag=step.tag, pull=self.pull, - fileobj=None, path=None, dockerfile=None, - nocache=self.no_cache) - if step.build_dir is not None: - tempname = '_docker_make_tmp/' - tempdir = '%s/%s' % (step.build_dir, tempname) - temp_df = tempdir + 'Dockerfile' - if not os.path.isdir(tempdir): - os.makedirs(tempdir) - with open(temp_df, 'w') as df_out: - print >> df_out, dockerfile - - build_args['path'] = os.path.abspath(step.build_dir) - build_args['dockerfile'] = tempname + 'Dockerfile' - else: - build_args['fileobj'] = StringIO(unicode(dockerfile)) - - # - build_args['buildargs'] = self.buildargs - - # start the build - stream = self.client.build(**build_args) - - # monitor the output - for item in stream: - if item.keys() == ['stream']: - print item['stream'].strip() - elif 'errorDetail' in item or 'error' in item: - raise BuildError(dockerfile, item, build_args) - else: - print item - - # remove the temporary dockerfile - if step.build_dir is not None: - os.unlink(temp_df) - os.rmdir(tempdir) - - def generate_build_order(self, image): - """ - Separate the build into a series of one or more intermediate steps. 
- Each specified build directory gets its own step - """ - repo_name = self.repo + image - if self.tag: - if ':' in repo_name: - repo_name += '-' + self.tag - else: - repo_name += ':' + self.tag - dependencies = self.sort_dependencies(image) - base = self.get_external_base_image(image, dependencies) - - build_steps = [BuildStep(base)] - step = build_steps[0] - for d in dependencies: - dep_definition = self.img_defs[d] - mydir = dep_definition.get('build_directory', None) - if mydir is not None: - mydir = os.path.expanduser(mydir) # expands `~` to home directory - if step.build_dir is not None: - # Create a new build step if there's already a build directory - step.tag = '%dbuild_%s' % (len(build_steps), image) - build_steps.append(BuildStep(step.tag)) - step = build_steps[-1] - step.build_dir = mydir - - step.images.append(d) - from_dockerfile = dep_definition.get('import_dockerfile', 'Dockerfile') - from_dockerfile = os.path.join(step.build_dir, from_dockerfile) - # print('from_dockerfile: %s' % from_dockerfile) - if os.path.exists(from_dockerfile): - try: - with open(from_dockerfile, 'r') as import_dockerfile: - step.dockerfile.append('\n#Commands from %s' % from_dockerfile) - dockerfile = import_dockerfile.read() - # print('dockerfile: %s' % dockerfile) - step.dockerfile.append(dockerfile) - except IOError: - print("IOError -> Can't import `%s`" % from_dockerfile) - if 'build' in dep_definition: - # print("dep_definition['build']: %s" % dep_definition['build']) - step.dockerfile.append('\n#Commands for %s' % d) - step.dockerfile.append(dep_definition['build']) - else: - step.dockerfile.append('\n####end of requirements for %s\n' % d) - - # Sets the last step's name to the final build target - step.tag = repo_name - for step in build_steps: - step.dockerfile.insert(0, '#Build directory: %s\n#tag: %s' % - (step.build_dir, step.tag)) - return build_steps - - def sort_dependencies(self, com, dependencies=None): - """ - Topologically sort the docker commands by their 
requirements - TODO: sort using a "maximum common tree"? - :param com: process this docker image's dependencies - :param dependencies: running cache of sorted dependencies (ordered dict) - :return type: OrderedDict - """ - if dependencies is None: - dependencies = OrderedDict() - - if com in dependencies: - return - requires = self.img_defs[com].get('requires', []) - assert type(requires) == list, 'Requirements for %s are not a list' % com - - for dep in requires: - self.sort_dependencies(dep, dependencies) - if com in dependencies: - raise ValueError('Circular dependency found', dependencies) - dependencies[com] = None - return dependencies - - def get_external_base_image(self, image, dependencies): - """ - Makes sure that this image has exactly one external base image - """ - base = None - base_for = None - for d in dependencies: - this_base = self.img_defs[d].get('FROM', None) - if this_base is not None and base is not None and this_base != base: - error = ('Multiple external dependencies: image %s depends on:\n' % image + - ' %s (FROM: %s), and\n' % (base_for, base) + - ' %s (FROM: %s).' 
% (d, this_base)) - raise ValueError(error) - if this_base is not None: - base = this_base - base_for = d - if base is None: - raise ValueError("No base image found in %s's dependencies" % image) - return base - - -class BuildError(Exception): - def __init__(self, dockerfile, item, build_args): - with open('dockerfile.fail', 'w') as dff: - print>> dff, dockerfile - with BytesIO() as stream: - print >> stream, '\n -------- Docker daemon output --------' - pprint.pprint(item, stream, indent=4) - print >> stream, ' -------- Arguments to client.build --------' - pprint.pprint(build_args, stream, indent=4) - print >> stream, 'This dockerfile was written to dockerfile.fail' - stream.seek(0) - super(BuildError, self).__init__(stream.read()) - - -class BuildStep(object): - def __init__(self, baseimage): - self.dockerfile = ['FROM %s\n' % baseimage] - self.tag = None - self.build_dir = None - self.images = [] - - -def main(): - args = make_arg_parser().parse_args() - - # Help and exit - if args.help_yaml: - print_yaml_help() - return - - # Otherwise, parse the yaml file - # buildargs - maker = DockerMaker(args.makefile, repository=args.repository, - build_images=not (args.no_build or args.list), - print_dockerfiles=(args.print_dockerfiles or args.no_build), - pull=args.pull, no_cache=args.no_cache, tag=args.tag, - buildargs=args.buildargs) - - if args.list: - print 'TARGETS in `%s`' % args.makefile - for item in maker.img_defs.keys(): - print ' *', item - return - - # Assemble custom requirements target - if args.requires or args.name: - assert args.requires and args.name - assert args.name not in maker.img_defs - maker.img_defs[args.name] = {'requires': args.requires} - targets = [args.name] - elif args.all: - assert len(args.TARGETS) == 0, "Pass either a list of targets or `--all`, not both" - if maker.all_targets is not None: - targets = maker.all_targets - else: - targets = maker.img_defs.keys() - else: - targets = args.TARGETS - - if not targets: - print 'No build 
targets specified!' - print 'Targets in `%s`:' % args.makefile - for item in maker.img_defs.keys(): - print ' *', item - return - - # Actually build the images! (or Dockerfiles) - built, warnings = [], [] - for t in targets: - name = maker.build(t) - print ' docker-make built:', name - built.append(name) - if args.push_to_registry: - success, w = push(maker, name) - warnings.extend(w) - if not success: - built[-1] += ' -- PUSH FAILED' - else: - built[-1] += ' -- pushed to %s' % name.split('/')[0] - - # Summarize the build process - print '\ndocker-make finished.' - print 'Built: ' - for item in built: - print ' *', item - if warnings: - print 'Warnings:' - for item in warnings: - print ' *', item - - -def push(maker, name): - success = False - warnings = [] - if '/' not in name or name.split('/')[0].find('.') < 0: - warn = 'WARNING: could not push %s - ' \ - 'repository name does not contain a registry URL' % name - warnings.append(warn) - print warn - else: - print ' Pushing %s to %s:' % (name, name.split('/')[0]) - line = {'error': 'no push information received'} - _lastid = None - for line in maker.client.push(name, stream=True): - line = yaml.load(line) - if 'status' in line: - if line.get('id', None) == _lastid and line['status'] == 'Pushing': - print '\r', line['status'], line['id'], line.get('progress', ''), - sys.stdout.flush() - else: - print line['status'], line.get('id', '') - _lastid = line.get('id', None) - else: - print line - if 'error' in line: - warnings.append('WARNING: push failed for %s. 
Message: %s' % (name, line['error'])) - else: - success = True - return success, warnings - - -def print_yaml_help(): - print "A brief introduction to writing Dockerfile.yml files:\n" - - print 'SYNTAX:' - print printable_code("""[image_name]: - build_directory: [relative path where the ADD and COPY commands will look for files] - requires: - - [other image name] - - [yet another image name] - FROM: [named_base_image] - build: | - RUN [something] - ADD [something else] - [Dockerfile commands go here] - -[other image name]: ... -[yet another image name]: ...""") - - print - print textwrap.fill("The idea is to write dockerfile commands for each specific " - 'piece of functionality in the build field, and "inherit" all other' - ' functionality from a list of other components that your image requires. ' - 'If you need to add files with the ADD and COPY commands, specify the root' - ' directory for those files with build_directory. Your tree of ' - '"requires" must have exactly one unique named base image ' - 'in the FROM field.') - - print '\n\nAN EXAMPLE:' - print printable_code("""devbase: - FROM: phusion/baseimage - build: | - RUN apt-get -y update && apt-get -y install build-essential - -airline_data: - requires: - - devbase - build_directory: sample_data/airline_data - build: | - ADD AirlinePassengers.csv - -python_image: - requires: - - devbase - build: | - RUN apt-get -y update \ - && apt-get install -y python python-pip \ - && pip install pandas - -data_science: - requires: - - python_image - - airline_data""") - - -def printable_code(c): - output = [] - dedented = textwrap.dedent(c) - for line in dedented.split('\n'): - output.append(' >> ' + line) - return '\n'.join(output) - - -def make_arg_parser(): - parser = argparse.ArgumentParser(description="NOTE: Docker environmental variables must be set.\n" - "For a docker-machine, run " - "`eval $(docker-machine env [machine-name])`") - bo = parser.add_argument_group('Choosing what to build') - 
bo.add_argument('TARGETS', nargs="*", - help='Docker images to build as specified in the YAML file') - bo.add_argument('-f', '--makefile', - default='DockerMake.yml', - help='YAML file containing build instructions') - bo.add_argument('-a', '--all', action='store_true', - help="Print or build all images (or those specified by _ALL_)") - bo.add_argument('-l', '--list', action='store_true', - help='List all available targets in the file, then exit.') - bo.add_argument('--requires', nargs="*", - help='Build a special image from these requirements. Requires --name') - bo.add_argument('--name', type=str, - help="Name for custom docker images (requires --requires)") - - bo.add_argument('-b', '--buildargs', - default='buildargs.yml', - help="YAML file containing build args") - - df = parser.add_argument_group('Dockerfiles') - df.add_argument('-p', '--print_dockerfiles', action='store_true', - help="Print out the generated dockerfiles named `Dockerfile.[image]`") - df.add_argument('-n', '--no_build', action='store_true', - help='Only print Dockerfiles, don\'t build them. Implies --print.') - - ca = parser.add_argument_group('Image caching') - ca.add_argument('--pull', action='store_true', - help='Always try to pull updated FROM images') - ca.add_argument('--no-cache', action='store_true', - help="Rebuild every layer") - # TODO: add a way to invalidate a specific target - - rt = parser.add_argument_group('Repositories and tags') - rt.add_argument('--repository', '-r', '-u', - help="Prepend this repository to all built images, e.g.\n" - "`docker-make hello-world -u quay.io/elvis` will tag the image " - "as `quay.io/elvis/hello-world`. You can add a ':' to the end to " - "image names into tags:\n `docker-make -u quay.io/elvis/repo: hello-world` " - "will create the image in the elvis repository: quay.io/elvis/repo:hello-world") - rt.add_argument('--tag', '-t', type=str, - help='Tag all built images with this tag. 
If image names are ALREADY tags (i.e.,' - ' your repo name ends in a ":"), this will append the tag name with a dash. ' - 'For example: `docker-make hello-world -u elvis/repo: -t 1.0` will create ' - 'the image "elvis/repo:hello-world-1.0') - rt.add_argument('--push-to-registry', '-P', action='store_true', - help='Push all built images to the repository specified ' - '(only if image repository contains a URL) -- to push to dockerhub.com, ' - 'use index.docker.io as the registry)') - - hh = parser.add_argument_group('Help') - hh.add_argument('--help-yaml', action='store_true', - help="Print summary of YAML file format and exit.") - - return parser - - -__license__ = """Copyright (c) 2016, Autodesk Research -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.""" - -if __name__ == '__main__': - main() diff --git a/dockermake/__init__.py b/dockermake/__init__.py new file mode 100644 index 0000000..28e17dd --- /dev/null +++ b/dockermake/__init__.py @@ -0,0 +1,27 @@ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions + + +__license__ = """Copyright (c) 2016, Autodesk Research +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.""" diff --git a/dockermake/__main__.py b/dockermake/__main__.py new file mode 100755 index 0000000..87e6ac2 --- /dev/null +++ b/dockermake/__main__.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +# Copyright 2015-2017 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Multiple inheritance for your dockerfiles. +""" +from __future__ import print_function +import os + +from . import cli, utils, staging +from .imagedefs import ImageDefs + + +def main(): + + parser = cli.make_arg_parser() + args = parser.parse_args() + + # print version and exit + if args.version: + from . 
import __version__ + print('docker-make version %s' % __version__) + return + + # Print help and exit + if args.help_yaml: + cli.print_yaml_help() + return + + if args.clear_copy_cache: + staging.clear_copy_cache() + return + + if not os.path.exists(args.makefile): + if args.makefile == 'DockerMake.yml': + parser.print_help() + return + else: + raise IOError('No docker makefile found at path "%s"' % args.makefile) + + defs = ImageDefs(args.makefile) + + if args.list: + utils.list_image_defs(args, defs) + return + + targets = utils.get_build_targets(args, defs) + if not targets: + print('No build targets specified!') + utils.list_image_defs(args, defs) + return + + # Actually build the images! (or just Dockerfiles) + built, warnings = utils.build_targets(args, defs, targets) + + # Summarize the build process + print('\ndocker-make finished.') + print('Built: ') + for item in built: + print(' *', item) + if warnings: + print('Warnings:') + for item in warnings: + print(' *', item) + + +if __name__ == '__main__': + main() diff --git a/dockermake/_version.py b/dockermake/_version.py new file mode 100644 index 0000000..b437111 --- /dev/null +++ b/dockermake/_version.py @@ -0,0 +1,521 @@ + +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" +from __future__ import print_function + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. 
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
    return keywords


class VersioneerConfig:
    """Container for Versioneer configuration parameters."""


def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "pep440"
    cfg.tag_prefix = ""
    cfg.parentdir_prefix = "None"
    # BUGFIX: this file lives at dockermake/_version.py (see the repository
    # layout), not docker-make/_version.py. The old value had the same path
    # depth, so root-finding still worked, but the recorded source path was
    # wrong and inconsistent with the package directory.
    cfg.versionfile_source = "dockermake/_version.py"
    cfg.verbose = False
    return cfg


class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""


LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f
    return decorate


def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s), trying each name in `commands` until one
    launches. Returns (stdout, returncode); (None, None) if no command could
    be started."""
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
                                 stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = p.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, p.returncode
    return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    tried = []
    candidate = root
    for _ in range(3):
        leaf = os.path.basename(candidate)
        if leaf.startswith(parentdir_prefix):
            # directory name is "<prefix><version>" - the suffix is the version
            return {"version": leaf[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(candidate)
        candidate = os.path.dirname(candidate)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file."""
    # The checked-in _version.py contains lines like `git_refnames = "..."`
    # that git-archive expands. Scan for them textually so that setup.py
    # never has to import _version.py itself.
    keywords = {}
    try:
        f = open(versionfile_abs, "r")
        for line in f.readlines():
            stripped = line.strip()
            if stripped.startswith("git_refnames ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["refnames"] = mo.group(1)
            if stripped.startswith("git_full ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["full"] = mo.group(1)
            if stripped.startswith("git_date ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["date"] = mo.group(1)
        f.close()
    except EnvironmentError:
        pass
    return keywords


@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # Normalize git's "%ci" almost-ISO-8601 datestamp into a compliant
        # one ("%cI" would give this directly, but only exists in git >= 2.2.0
        # and detecting the git version is not worth the trouble).
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        # the archive was not produced by `git archive`, so the keyword was
        # never substituted
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set(r.strip() for r in refnames.strip("()").split(","))
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set(r[len(TAG):] for r in refs if r.startswith(TAG))
    if not tags:
        # Either git < 1.8.3, or there really are no tags. Heuristic: real
        # version tags contain a digit, which filters out common branch names
        # like "release", "stabilization", "HEAD" and "master".
        tags = set(r for r in refs if re.search(r'\d', r))
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}


@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]

    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {"long": full_out,
              "short": full_out[:7],  # maybe improved later
              "error": None}

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    if "+" in pieces.get("closest-tag", ""):
        return "."
    return "+"


def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    # (docstring typo "Eexceptions" fixed)
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    # dispatch table instead of an if/elif chain
    renderers = {"pep440": render_pep440,
                 "pep440-pre": render_pep440_pre,
                 "pep440-post": render_pep440_post,
                 "pep440-old": render_pep440_old,
                 "git-describe": render_git_describe,
                 "git-describe-long": render_git_describe_long}
    renderer = renderers.get(style)
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderer(pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


def get_versions():
    """Get version information or return default if unable to do so."""
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} diff --git a/dockermake/builds.py b/dockermake/builds.py new file mode 100644 index 0000000..fd4e999 --- /dev/null +++ b/dockermake/builds.py @@ -0,0 +1,269 @@ +# Copyright 2015-2017 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import print_function + +import os +import pprint +from io import BytesIO, StringIO + +from builtins import object +from builtins import str + +from . import staging, utils + +DOCKER_TMPDIR = '_docker_make_tmp/' + +_updated_staging_images = set() # stored per session so that we don't try to update them repeatedly +_rebuilt = set() # only rebuild a unique stack of images ONCE per session + + +class BuildStep(object): + """ Stores and runs the instructions to build a single image. 
+ + Args: + imagename (str): name of this image definition + baseimage (str): name of the image to inherit from (through "FROM") + img_def (dict): yaml definition of this image + buildname (str): what to call this image, once built + bust_cache(bool): never use docker cache for this build step + """ + + def __init__(self, imagename, baseimage, img_def, buildname, bust_cache=False): + self.imagename = imagename + self.baseimage = baseimage + self.dockerfile_lines = ['FROM %s\n' % baseimage, + img_def.get('build', '')] + self.buildname = buildname + self.build_dir = img_def.get('build_directory', None) + self.bust_cache = bust_cache + self.sourcefile = img_def['_sourcefile'] + + def build(self, client, pull=False, usecache=True): + """ + Drives an individual build step. Build steps are separated by build_directory. + If a build has zero one or less build_directories, it will be built in a single + step. + + Args: + client (docker.APIClient): docker client object that will build the image + pull (bool): whether to pull dependent layers from remote repositories + usecache (bool): whether to use cached layers or rebuild from scratch + """ + print(' Image definition "%s" from file %s' % (self.imagename, + self.sourcefile)) + + if self.bust_cache: + usecache = False + + if not usecache: + print(' INFO: Docker caching disabled - forcing rebuild') + + dockerfile = '\n'.join(self.dockerfile_lines) + + build_args = dict(tag=self.buildname, + pull=pull, + nocache=not usecache, + decode=True, rm=True) + + if self.build_dir is not None: + tempdir = self.write_dockerfile(dockerfile) + build_args.update(fileobj=None, + path=os.path.abspath(os.path.expanduser(self.build_dir)), + dockerfile=os.path.join(DOCKER_TMPDIR, 'Dockerfile')) + else: + build_args.update(fileobj=StringIO(str(dockerfile)), + path=None, + dockerfile=None) + tempdir = None + + # start the build + stream = client.build(**build_args) + try: + utils.stream_docker_logs(stream, self.buildname) + except ValueError as 
e: + raise BuildError(dockerfile, e.args[0], build_args) + + # remove the temporary dockerfile + if tempdir is not None: + os.unlink(os.path.join(tempdir, 'Dockerfile')) + os.rmdir(tempdir) + + def write_dockerfile(self, dockerfile): + tempdir = os.path.abspath(os.path.join(self.build_dir, DOCKER_TMPDIR)) + temp_df = os.path.join(tempdir, 'Dockerfile') + if not os.path.isdir(tempdir): + os.makedirs(tempdir) + with open(temp_df, 'w') as df_out: + print(dockerfile, file=df_out) + return tempdir + + def printfile(self): + if not os.path.exists('docker_makefiles'): + os.makedirs('docker_makefiles') + filename = 'docker_makefiles/Dockerfile.%s' % self.imagename + + with open(filename, 'w') as dfout: + print('\n'.join(self.dockerfile_lines), file=dfout) + + +class FileCopyStep(BuildStep): + """ + A specialized build step that copies files into an image from another image. + + Args: + sourceimage (str): name of image to copy file from + sourcepath (str): file path in source image + base_image (str): name of image to copy file into + destpath (str): directory to copy the file into + buildname (str): name of the built image + ymldef (Dict): yml definition of this build step + definitionname (str): name of this definition + """ + + bust_cache = False # can't bust this + + def __init__(self, sourceimage, sourcepath, base_image, destpath, buildname, + ymldef, definitionname): + self.sourceimage = sourceimage + self.sourcepath = sourcepath + self.base_image = base_image + self.destpath = destpath + self.buildname = buildname + self.definitionname = definitionname + self.sourcefile = ymldef['_sourcefile'] + + def build(self, client, pull=False, usecache=True): + """ + Note: + `pull` and `usecache` are for compatibility only. 
They're irrelevant because + hey were applied when BUILDING self.sourceimage + """ + print(' File copy from "%s", defined in file %s' % (self.definitionname, self.sourcefile)) + stage = staging.StagedFile(self.sourceimage, self.sourcepath, self.destpath) + stage.stage(self.base_image, self.buildname) + + + +class BuildTarget(object): + """ Represents a target docker image. + + Args: + imagename (str): name of the image definition + targetname (str): name to assign the final built image + steps (List[BuildStep]): list of steps required to build this image + stagedfiles (List[StagedFile]): list of files to stage into this image from other images + from_iamge (str): External base image name + """ + def __init__(self, imagename, targetname, steps, sourcebuilds, from_image): + self.imagename = imagename + self.steps = steps + self.sourcebuilds = sourcebuilds + self.targetname = targetname + self.from_image = from_image + + def build(self, client, + printdockerfiles=False, + nobuild=False, + keepbuildtags=False, + usecache=True, + pull=False): + """ + Drives the build of the final image - get the list of steps and execute them. + + Args: + client (docker.APIClient): docker client object that will build the image + printdockerfiles (bool): create the dockerfile for this build + nobuild (bool): just create dockerfiles, don't actually build the image + keepbuildtags (bool): keep tags on intermediate images + usecache (bool): use docker cache, or rebuild everything from scratch? + pull (bool): try to pull new versions of repository images? 
+ """ + if not nobuild: + self.update_source_images(client, + usecache=usecache, + pull=pull) + + print('\n' + '-'*utils.get_console_width()) + print(' STARTING BUILD for "%s" (image definition "%s" from %s)\n' % ( + self.targetname, self.imagename, self.steps[-1].sourcefile)) + + for istep, step in enumerate(self.steps): + print(' * Building %s, Step %d/%d:' % (self.imagename, + istep+1, + len(self.steps))) + if printdockerfiles: + step.printfile() + + if not nobuild: + if step.bust_cache: + stackkey = self._get_stack_key(istep) + if stackkey in _rebuilt: + step.bust_cache = False + + step.build(client, usecache=usecache) + print(" - Created intermediate image %s\n" % step.buildname) + + if step.bust_cache: + _rebuilt.add(stackkey) + + finalimage = step.buildname + + if not nobuild: + self.finalizenames(client, finalimage, keepbuildtags) + print(' *** Successfully built image %s\n' % self.targetname) + + def _get_stack_key(self, istep): + names = [self.from_image] + for i in xrange(istep+1): + step = self.steps[i] + if isinstance(step, FileCopyStep): + continue + names.append(step.imagename) + return tuple(names) + + def update_source_images(self, client, usecache, pull): + for build in self.sourcebuilds: + if build.targetname in _updated_staging_images: + continue + print('\nUpdating source image %s' % build.targetname) + build.build(client, + usecache=usecache, + pull=pull) + print(' *** Done with source image %s\n' % build.targetname) + + def finalizenames(self, client, finalimage, keepbuildtags): + """ Tag the built image with its final name and untag intermediate containers + """ + client.tag(finalimage, *self.targetname.split(':')) + print('Tagged final image as %s' % self.targetname) + if not keepbuildtags: + print('Untagging intermediate containers:', end='') + for step in self.steps: + client.remove_image(step.buildname, force=True) + print(step.buildname, end=',') + print() + + +class BuildError(Exception): + def __init__(self, dockerfile, item, 
build_args): + with open('dockerfile.fail', 'w') as dff: + print(dockerfile, file=dff) + with BytesIO() as stream: + print('\n -------- Docker daemon output --------', file=stream) + pprint.pprint(item, stream, indent=4) + print(' -------- Arguments to client.build --------', file=stream) + pprint.pprint(build_args, stream, indent=4) + print('This dockerfile was written to dockerfile.fail', file=stream) + stream.seek(0) + super(BuildError, self).__init__(stream.read()) diff --git a/dockermake/cli.py b/dockermake/cli.py new file mode 100644 index 0000000..066e98d --- /dev/null +++ b/dockermake/cli.py @@ -0,0 +1,142 @@ +from __future__ import print_function +# Copyright 2015-2017 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import argparse +import textwrap + + +def make_arg_parser(): + parser = argparse.ArgumentParser(description= + "NOTE: Docker environmental variables must be set.\n" + "For a docker-machine, run " + "`eval $(docker-machine env [machine-name])`") + bo = parser.add_argument_group('Choosing what to build') + bo.add_argument('TARGETS', nargs="*", + help='Docker images to build as specified in the YAML file') + bo.add_argument('-f', '--makefile', + default='DockerMake.yml', + help='YAML file containing build instructions') + bo.add_argument('-a', '--all', action='store_true', + help="Print or build all images (or those specified by _ALL_)") + bo.add_argument('-l', '--list', action='store_true', + help='List all available targets in the file, then exit.') + bo.add_argument('--requires', nargs="*", + help='Build a special image from these requirements. Requires --name') + bo.add_argument('--name', type=str, + help="Name for custom docker images (requires --requires)") + + df = parser.add_argument_group('Dockerfiles') + df.add_argument('-p', '--print-dockerfiles', '--print_dockerfiles', action='store_true', + help="Print out the generated dockerfiles named `Dockerfile.[image]`") + df.add_argument('-n', '--no_build', action='store_true', + help='Only print Dockerfiles, don\'t build them. Implies --print.') + + ca = parser.add_argument_group('Image caching') + ca.add_argument('--pull', action='store_true', + help='Always try to pull updated FROM images') + ca.add_argument('--no-cache', action='store_true', + help="Rebuild every layer") + ca.add_argument('--bust-cache', action='append', + help='Force docker to rebuilt all layers in this image. 
You can bust ' + 'multiple image layers by passing --bust-cache multiple times.') + ca.add_argument('--clear-copy-cache', '--clear-cache', action='store_true', + help="Remove docker-make's cache of files for `copy-from`.") + + rt = parser.add_argument_group('Repositories and tags') + rt.add_argument('--repository', '-r', '-u', + help="Prepend this repository to all built images, e.g.\n" + "`docker-make hello-world -u quay.io/elvis` will tag the image " + "as `quay.io/elvis/hello-world`. You can add a ':' to the end to " + "image names into tags:\n `docker-make -u quay.io/elvis/repo: hello-world` " + "will create the image in the elvis repository: quay.io/elvis/repo:hello-world") + rt.add_argument('--tag', '-t', type=str, + help='Tag all built images with this tag. If image names are ALREADY tags (i.e.,' + ' your repo name ends in a ":"), this will append the tag name with a dash. ' + 'For example: `docker-make hello-world -u elvis/repo: -t 1.0` will create ' + 'the image "elvis/repo:hello-world-1.0') + rt.add_argument('--push-to-registry', '-P', action='store_true', + help='Push all built images to the repository specified ' + '(only if image repository contains a URL) -- to push to dockerhub.com, ' + 'use index.docker.io as the registry)') + + hh = parser.add_argument_group('Help') + hh.add_argument('--version', action='store_true', + help="Print version and exit.") + hh.add_argument('--help-yaml', action='store_true', + help="Print summary of YAML file format and exit.") + + return parser + + +def print_yaml_help(): + print("A brief introduction to writing Dockerfile.yml files:\n") + + print('SYNTAX:') + print(printable_code("""[image_name]: + build_directory: [relative path where the ADD and COPY commands will look for files] + requires: + - [other image name] + - [yet another image name] + FROM: [named_base_image] + build: | + RUN [something] + ADD [something else] + [Dockerfile commands go here] + +[other image name]: ... 
+[yet another image name]: ...""")) + + print() + print(textwrap.fill("The idea is to write dockerfile commands for each specific " + 'piece of functionality in the build field, and "inherit" all other' + ' functionality from a list of other components that your image requires. ' + 'If you need to add files with the ADD and COPY commands, specify the root' + ' directory for those files with build_directory. Your tree of ' + '"requires" must have exactly one unique named base image ' + 'in the FROM field.')) + + print('\n\nAN EXAMPLE:') + print(printable_code("""devbase: + FROM: phusion/baseimage + build: | + RUN apt-get -y update && apt-get -y install build-essential + +airline_data: + requires: + - devbase + build_directory: sample_data/airline_data + build: | + ADD AirlinePassengers.csv + +python_image: + requires: + - devbase + build: | + RUN apt-get -y update \ + && apt-get install -y python python-pip \ + && pip install pandas + +data_science: + requires: + - python_image + - airline_data""")) + + +def printable_code(c): + output = [] + dedented = textwrap.dedent(c) + for line in dedented.split('\n'): + output.append(' >> ' + line) + return '\n'.join(output) + diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py new file mode 100644 index 0000000..bf908b7 --- /dev/null +++ b/dockermake/imagedefs.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python2.7 +# Copyright 2016 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import print_function +from builtins import object + +import os +from collections import OrderedDict +import yaml +from future.utils import iteritems + +from . import builds +from . import staging + +RECOGNIZED_KEYS = set('requires build_directory build copy_from FROM description _sourcefile' + .split()) +SPECIAL_FIELDS = set('_ALL_ _SOURCES_'.split()) + +class ImageDefs(object): + """ Stores and processes the image definitions + """ + def __init__(self, makefile_path): + self._sources = set() + self.makefile_path = makefile_path + print('Working directory: %s' % os.path.abspath(os.curdir)) + print('Copy cache directory: %s' % staging.TMPDIR) + self.ymldefs = self.parse_yaml(self.makefile_path) + self.all_targets = self.ymldefs.pop('_ALL_', None) + + def parse_yaml(self, filename): + fname = os.path.expanduser(filename) + print('READING %s' % os.path.expanduser(fname)) + if fname in self._sources: + raise ValueError('Circular _SOURCES_') + self._sources.add(fname) + + with open(fname, 'r') as yaml_file: + yamldefs = yaml.load(yaml_file) + + self._fix_file_paths(filename, yamldefs) + + sourcedefs = {} + for s in yamldefs.get('_SOURCES_', []): + src = self.parse_yaml(s) + sourcedefs.update(src) + + sourcedefs.update(yamldefs) + return sourcedefs + + @staticmethod + def _fix_file_paths(ymlfilepath, yamldefs): + """ Interpret all paths relative the the current yaml file + """ + pathroot = os.path.dirname(ymlfilepath) + + for field, item in iteritems(yamldefs): + if field == '_SOURCES_': + yamldefs['_SOURCES_'] = [os.path.relpath(_get_abspath(pathroot, p)) + for p in yamldefs['_SOURCES_']] + continue + elif field in SPECIAL_FIELDS: + continue + elif 'build_directory' in item: + item['build_directory'] = _get_abspath(pathroot, item['build_directory']) + + # save the file path for logging + f = os.path.relpath(ymlfilepath) + if '/' not in f: + f = './%s' % f + item['_sourcefile'] = f + + for key in item: + if key not in RECOGNIZED_KEYS: + raise 
KeyError('Field "%s" in image "%s" not recognized' % + (key, field)) + + def generate_build(self, image, targetname, rebuilds=None): + """ + Separate the build into a series of one or more intermediate steps. + Each specified build directory gets its own step + + Args: + image (str): name of the image as defined in the dockermake.py file + targetname (str): name to tag the final built image with + rebuilds (List[str]): list of image layers to rebuild (i.e., without docker's cache) + """ + from_image = self.get_external_base_image(image) + build_steps = [] + istep = 0 + sourceimages = set() + if rebuilds is None: + rebuilds = [] + else: + rebuilds = set(rebuilds) + + base_image = from_image + for base_name in self.sort_dependencies(image): + istep += 1 + buildname = 'dmkbuild_%s_%d' % (image, istep) + build_steps.append(builds.BuildStep(base_name, + base_image, + self.ymldefs[base_name], + buildname, + bust_cache=base_name in rebuilds)) + base_image = buildname + + for sourceimage, files in iteritems(self.ymldefs[base_name].get('copy_from', {})): + sourceimages.add(sourceimage) + for sourcepath, destpath in iteritems(files): + istep += 1 + buildname = 'dmkbuild_%s_%d' % (image, istep) + build_steps.append(builds.FileCopyStep(sourceimage, sourcepath, + base_image, destpath, + buildname, + self.ymldefs[base_name], + base_name)) + base_image = buildname + + sourcebuilds = [self.generate_build(img, img) for img in sourceimages] + + return builds.BuildTarget(imagename=image, + targetname=targetname, + steps=build_steps, + sourcebuilds=sourcebuilds, + from_image=from_image) + + def sort_dependencies(self, image, dependencies=None): + """ + Topologically sort the docker commands by their requirements + + Args: + image (str): process this docker image's dependencies + dependencies (OrderedDict): running cache of sorted dependencies (ordered dict) + + Returns: + List[str]: list of dependencies a topologically-sorted build order + """ + if dependencies is None: + dependencies 
= OrderedDict()  # using this as an ordered set - not storing any values
+
+        if image in dependencies:
+            return
+
+        requires = self.ymldefs[image].get('requires', [])
+        # isinstance instead of `type(...) ==` so list subclasses are accepted
+        assert isinstance(requires, list), 'Requirements for %s are not a list' % image
+
+        for dep in requires:
+            self.sort_dependencies(dep, dependencies)
+            if image in dependencies:
+                raise ValueError('Circular dependency found', dependencies)
+        dependencies[image] = None
+        return dependencies.keys()
+
+    def get_external_base_image(self, image):
+        """ Makes sure that this image has exactly one external base image
+        """
+        externalbase = self.ymldefs[image].get('FROM', None)
+
+        for base in self.ymldefs[image].get('requires', []):
+            try:
+                otherexternal = self.get_external_base_image(base)
+            except ValueError:
+                continue
+
+            if externalbase is None:
+                externalbase = otherexternal
+            elif otherexternal is None:
+                continue
+            elif externalbase != otherexternal:
+                # BUG FIX: the first format string had no %s placeholder, so
+                # applying `% image` raised TypeError instead of this ValueError.
+                error = ('Multiple external dependencies: "%s" depends on:\n' % image +
+                         '  %s (FROM: %s), and\n' % (image, externalbase) +
+                         '  %s (FROM: %s).' % (base, otherexternal))
+                raise ValueError(error)
+
+        if not externalbase:
+            raise ValueError("No base image found in %s's dependencies" % image)
+        return externalbase
+
+
+def _get_abspath(pathroot, relpath):
+    path = os.path.expanduser(pathroot)
+    buildpath = os.path.expanduser(relpath)
+
+    if not os.path.isabs(buildpath):
+        buildpath = os.path.join(os.path.abspath(path), buildpath)
+
+    return buildpath
+
+
diff --git a/dockermake/staging.py b/dockermake/staging.py
new file mode 100644
index 0000000..b381d2b
--- /dev/null
+++ b/dockermake/staging.py
@@ -0,0 +1,129 @@
+# Copyright 2015-2017 Autodesk Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import docker.errors +from builtins import object + +import os +import tempfile +import shutil + +from . import utils + +TMPDIR = tempfile.gettempdir() +BUILD_CACHEDIR = os.path.join(TMPDIR, 'dmk_cache') +BUILD_TEMPDIR = os.path.join(TMPDIR, 'dmk_download') + + +def clear_copy_cache(): + for path in (BUILD_CACHEDIR, BUILD_TEMPDIR): + if os.path.exists(path): + assert os.path.isdir(path), "'%s' is not a directory!" + print('Removing docker-make cache %s' % path) + shutil.rmtree(path) + else: + print('Cache directory %s does not exist.' % path) + + +class StagedFile(object): + """ Tracks a file or directory that will be built in one image, then copied into others + + Args: + sourceimage (str): name of the image to copy from + sourcepath (str): path in the source image + destpath (str): path in the target image + """ + def __init__(self, sourceimage, sourcepath, destpath): + self.sourceimage = sourceimage + self.sourcepath = sourcepath + self.destpath = destpath + self._sourceobj = None + self._cachedir = None + + def stage(self, startimage, newimage): + """ Copies the file from source to target + + Args: + startimage (str): name of the image to stage these files into + newimage (str): name of the created image + """ + from .builds import BuildError + + client = utils.get_client() + print(' * Copying FROM "%s:/%s" TO "%s://%s/"'%(self.sourceimage, self.sourcepath, + startimage, self.destpath)) + + # copy build artifacts from the container if necessary + cachedir = self._setcache(client) + cacherelpath = 
os.path.relpath(cachedir, TMPDIR)
+        if not os.path.exists(cachedir):
+            print(' * Creating cache at %s' % cacherelpath)
+            container = client.containers.create(self.sourceimage)
+            try:
+                tarfile_stream, tarfile_stats = container.get_archive(self.sourcepath)
+            except docker.errors.NotFound:
+                raise IOError('File "%s" does not exist in image "%s"!' %
+                              (self.sourcepath, self.sourceimage))
+
+            # write files to disk (would be nice to stream them, haven't gotten it to work)
+            tempdir = tempfile.mkdtemp(dir=BUILD_TEMPDIR)
+            with open(os.path.join(tempdir, 'content.tar'), 'wb') as localfile:
+                for chunk in tarfile_stream.stream():
+                    localfile.write(chunk)
+            os.mkdir(cachedir)
+            os.rename(tempdir, cachedir)
+        else:
+            print(' * Using cached files from %s' % cacherelpath)
+
+        # write Dockerfile for the new image and then build it
+        dockerfile = 'FROM %s\nADD content.tar %s' % (startimage, self.destpath)
+        with open(os.path.join(cachedir, 'Dockerfile'), 'w') as df:
+            df.write(dockerfile)
+
+        buildargs = dict(path=cachedir,
+                         tag=newimage,
+                         decode=True)
+
+        # Build and show logs
+        stream = client.api.build(**buildargs)
+        try:
+            utils.stream_docker_logs(stream, newimage)
+        except ValueError as e:
+            raise BuildError(dockerfile, e.args[0], build_args=buildargs)
+
+    def _setcache(self, client):
+        """ Resolve (and create, on first call) the on-disk cache directory for
+        this source image/path; returns the cache directory path.
+        """
+        if self._sourceobj is None:  # get image and set up cache if necessary
+
+            self._sourceobj = client.images.get(self.sourceimage)
+
+            if not os.path.exists(BUILD_CACHEDIR):
+                os.mkdir(BUILD_CACHEDIR)
+
+            if not os.path.exists(BUILD_TEMPDIR):
+                os.mkdir(BUILD_TEMPDIR)
+
+            image_cachedir = os.path.join(BUILD_CACHEDIR,
+                                          self._sourceobj.id.replace('sha256:', ''))
+            if not os.path.exists(image_cachedir):
+                os.mkdir(image_cachedir)
+
+            self._cachedir = os.path.join(image_cachedir,
+                                          self.sourcepath.replace('/', '_-'))
+            return self._cachedir
+
+        else:  # make sure image ID hasn't changed
+            # BUG FIX: compare image IDs, not an ID string against an Image
+            # object -- the original assertion could never pass on re-staging.
+            assert self._sourceobj.id == client.images.get(self.sourceimage).id
+            return self._cachedir
diff --git
a/dockermake/utils.py b/dockermake/utils.py new file mode 100644 index 0000000..6c7b377 --- /dev/null +++ b/dockermake/utils.py @@ -0,0 +1,211 @@ +# Copyright 2015-2017 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import print_function + +import collections +import os +import sys +import textwrap + +import yaml +import docker + +_dockerclient = None + + +def get_client_api(): + return get_client().api + + +def get_client(): + global _dockerclient + + if _dockerclient is None: + _dockerclient = docker.from_env(version='auto') + + return _dockerclient + + +def list_image_defs(args, defs): + from . 
import imagedefs + + print('TARGETS in `%s`' % args.makefile) + for item in sorted(defs.ymldefs.keys()): + if item in imagedefs.SPECIAL_FIELDS: + continue + print(' *', item) + return + + +def generate_name(image, args): + repo_base = args.repository + + if repo_base is not None: + if repo_base[-1] not in ':/': + repo_base += '/' + repo_name = repo_base + image + else: + repo_name = image + + if args.tag: + if ':' in repo_name: + repo_name += '-'+args.tag + else: + repo_name += ':'+args.tag + + return repo_name + + +def get_build_targets(args, defs): + if args.requires or args.name: + # Assemble a custom target from requirements + assert args.requires and args.name + assert args.name not in defs.ymldefs + defs.ymldefs[args.name] = {'requires': args.requires, + '_sourcefile': "command line arguments"} + targets = [args.name] + + elif args.all: + # build all targets in the file + assert len(args.TARGETS) == 0, "Pass either a list of targets or `--all`, not both" + if defs.all_targets is not None: + targets = defs.all_targets + else: + targets = list(defs.ymldefs.keys()) + else: + # build the user-specified targets + targets = args.TARGETS + + return targets + + +def build_targets(args, defs, targets): + if args.no_build: + client = None + else: + client = get_client_api() + built, warnings = [], [] + builders = [defs.generate_build(t, generate_name(t, args), rebuilds=args.bust_cache) + for t in targets] + for b in builders: + b.build(client, + printdockerfiles=args.print_dockerfiles, + nobuild=args.no_build, + usecache=not args.no_cache, + pull=args.pull) + print(' docker-make built:', b.targetname) + built.append(b.targetname) + if args.push_to_registry: + success, w = push(client, b.targetname) + warnings.extend(w) + if not success: + built[-1] += ' -- PUSH FAILED' + else: + built[-1] += ' -- pushed to %s' % b.targetname.split('/')[0] + + return built, warnings + + +def push(client, name): + success = False + warnings = [] + if '/' not in name or 
name.split('/')[0].find('.') < 0: + warn = 'WARNING: could not push %s - ' \ + 'repository name does not contain a registry URL' % name + warnings.append(warn) + print(warn) + else: + print(' Pushing %s to %s:' % (name, name.split('/')[0])) + stream = _linestream(client.push(name, stream=True)) + line = stream_docker_logs(stream, 'PUSH %s' % name) + + if 'error' in line: + warnings.append('WARNING: push failed for %s. Message: %s' % (name, line['error'])) + else: + success = True + return success, warnings + + +def _linestream(textstream): + for item in textstream: + for line in item.splitlines(): + yield yaml.load(line) + + +def human_readable_size(num, suffix='B'): + """ FROM http://stackoverflow.com/a/1094933/1958900 + """ + for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']: + if abs(num) < 1024.0: + return "%3.1f%s%s" % (num, unit, suffix) + num /= 1024.0 + return "%.1f%s%s" % (num, 'Yi', suffix) + + +def stream_docker_logs(stream, name): + textwidth = get_console_width() - 5 + wrapper = textwrap.TextWrapper(initial_indent=' ', + subsequent_indent=' ', + break_on_hyphens=False, + width=textwidth) + + logtitle = '%s: BUILD LOG' % name + numdash = (textwidth - len(logtitle) - 7) // 2 + header = ''.join([' ','-'*numdash, " %s " % logtitle, '-'*numdash]) + print(header) + + pullstats = collections.OrderedDict() + for item in stream: + if list(item.keys()) == ['stream']: + line = item['stream'].strip() + elif 'errorDetail' in item or 'error' in item: + raise ValueError(item) + elif 'status' in item and 'id' in item: # for pulling images + line = _show_xfer_state(pullstats, item) + if line is None: continue + else: + line = str(item) + + for s in wrapper.wrap(line): + print(s) + + print(' ', '-'*len(header)) + return line + + +def get_console_width(): + try: + _, consolewidth = map(int, os.popen('stty size', 'r').read().split()) + except: + consolewidth = 80 + return consolewidth + + +SHOWSIZE = set(('Pushing', 'Pulling', 'Pulled', 'Downloaded', 'Downloading')) + 
+def _show_xfer_state(pullstats, item): + imgid = item['id'] + stat = item['status'] + if stat != pullstats.get(imgid, None): + pullstats[imgid] = stat + + if stat in SHOWSIZE and item.get('progressDetail', {}).get('total', None): + toprint = '%s: %s (%s)' % (imgid, stat, + human_readable_size(item['progressDetail']['total'])) + else: + toprint = '%s: %s' % (imgid, stat) + + return toprint + else: + return None diff --git a/example/DockerMake.yml b/example/DockerMake.yml index 6f39477..94436fa 100644 --- a/example/DockerMake.yml +++ b/example/DockerMake.yml @@ -1,13 +1,12 @@ data_image: - FROM: phusion/baseimage + FROM: debian:jessie build: RUN mkdir /data devbase: - FROM: phusion/baseimage + FROM: debian:jessie build: | - RUN apt-get -y update && apt-get -y install \ - build-essential \ - && mkdir -p /opt + RUN apt-get -y update && apt-get -y install wget + RUN mkdir -p /opt airline_data: requires: @@ -23,21 +22,22 @@ plant_data: build: | ADD Puromycin.csv /data +blank_file_build: + description: | + This image isn't intended for deployment! 
Rather, files built in this environment + can be copied into other images (see data_science.copy_from below) + FROM: debian:jessie + build: | + RUN mkdir -p /data + RUN dd if=/dev/zero of=/data/file.txt count=1024 bs=1024 + python_image: requires: - devbase build: | RUN apt-get install -y \ python \ - python-dev \ - python-pip \ - python-numpy \ - python-scipy \ - python-pandas \ - liblapack-dev \ - libblas-dev \ - gfortran \ - libpng12-dev + python-pip RUN pip install Pint data_science: @@ -45,3 +45,12 @@ data_science: - python_image - airline_data - plant_data + copy_from: + blank_file_build: + /data/file.txt: /data + +final: + requires: + - data_science + build: | + ENV version=0.01 diff --git a/requirements.txt b/requirements.txt index 557ce0b..ae68759 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ -docker-py -PyYAML +docker +future +pyyaml diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..d79aacb --- /dev/null +++ b/setup.cfg @@ -0,0 +1,6 @@ +[versioneer] +VCS = git +style = pep440 +versionfile_source = dockermake/_version.py +versionfile_build = dockermake/_version.py +tag_prefix = '' diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..88d2123 --- /dev/null +++ b/setup.py @@ -0,0 +1,24 @@ +from distutils.core import setup +import versioneer + +with open('requirements.txt', 'r') as reqfile: + requirements = [x.strip() for x in reqfile if x.strip()] + +setup( + name='DockerMake', + version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), + packages=['dockermake'], + license='Apache 2.0', + author='Aaron Virshup', + author_email='avirshup@gmail.com', + description='Build manager for docker images', + url="https://github.com/avirshup/dockermake", + entry_points={ + 'console_scripts': [ + 'docker-make = dockermake.__main__:main' + ] + }, + install_requires=requirements, + +) diff --git a/test/test_example.py b/test/test_example.py new file mode 100644 index 0000000..66e96e8 --- /dev/null 
+++ b/test/test_example.py @@ -0,0 +1,43 @@ +import os +import subprocess +import pytest + +EXAMPLEDIR = os.path.join('../example') + + +def test_executable_in_path(): + subprocess.check_call('which docker-make'.split(), + cwd=EXAMPLEDIR) + + +def test_help_string(): + subprocess.check_call('docker-make --help'.split(), + cwd=EXAMPLEDIR) + + +def test_list(): + subprocess.check_call('docker-make --list'.split(), + cwd=EXAMPLEDIR) + + output = subprocess.check_output('docker-make --list'.split(), + cwd=EXAMPLEDIR) + + expected = set(('airline_data blank_file_build data_image data_science ' + 'devbase final plant_data python_image').split()) + + for line in list(output.splitlines())[4:]: + image = line[3:] + assert image in expected + expected.remove(image) + + assert len(expected) == 0 + + +def test_example_build(): + subprocess.check_call( + "docker-make final --repo myrepo --tag mytag".split(), + cwd=EXAMPLEDIR) + + subprocess.check_call( + "docker run myrepo/final:mytag ls data/AirPassengers.csv data/Puromycin.csv data/file.txt".split(), + cwd=EXAMPLEDIR) diff --git a/versioneer.py b/versioneer.py new file mode 100644 index 0000000..64fea1c --- /dev/null +++ b/versioneer.py @@ -0,0 +1,1822 @@ + +# Version: 0.18 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/warner/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy +* [![Latest Version] +(https://pypip.in/version/versioneer/badge.svg?style=flat) +](https://pypi.python.org/pypi/versioneer/) +* [![Build Status] +(https://travis-ci.org/warner/python-versioneer.png?branch=master) +](https://travis-ci.org/warner/python-versioneer) + +This is a tool for managing a recorded version number in distutils-based +python projects. 
The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. + + +## Quick Install + +* `pip install versioneer` to somewhere to your $PATH +* add a `[versioneer]` section to your setup.cfg (see below) +* run `versioneer install` in your source tree, commit the results + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. Some projects use tag names that include more than just the version +string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). 
Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes. + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. + +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. + +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. + +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. 
+ +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. 
+
+The default style, "pep440", produces a PEP440-compliant string, equal to the
+un-prefixed tag name for actual releases, and containing an additional "local
+version" section with more detail for in-between builds. For Git, this is
+TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
+--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
+tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
+that this commit is two revisions ("+2") beyond the "0.11" tag. For released
+software (exactly equal to a known tag), the identifier will only contain the
+stripped tag, e.g. "0.11".
+
+Other styles are available. See [details.md](details.md) in the Versioneer
+source tree for descriptions.
+
+## Debugging
+
+Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
+to return a version of "0+unknown". To investigate the problem, run `setup.py
+version`, which will run the version-lookup code in a verbose mode, and will
+display the full contents of `get_versions()` (including the `error` string,
+which may help identify what went wrong).
+
+## Known Limitations
+
+Some situations are known to cause problems for Versioneer. This details the
+most significant ones. More can be found on Github
+[issues page](https://github.com/warner/python-versioneer/issues).
+
+### Subprojects
+
+Versioneer has limited support for source trees in which `setup.py` is not in
+the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
+two common reasons why `setup.py` might not be in the root:
+
+* Source trees which contain multiple subprojects, such as
+  [Buildbot](https://github.com/buildbot/buildbot), which contains both
+  "master" and "slave" subprojects, each with their own `setup.py`,
+  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
+  distributions (and upload multiple independently-installable tarballs). 
+* Source trees whose main purpose is to contain a C library, but which also
+  provide bindings to Python (and perhaps other languages) in subdirectories.
+
+Versioneer will look for `.git` in parent directories, and most operations
+should get the right version string. However `pip` and `setuptools` have bugs
+and implementation details which frequently cause `pip install .` from a
+subproject directory to fail to find a correct version string (so it usually
+defaults to `0+unknown`).
+
+`pip install --editable .` should work correctly. `setup.py install` might
+work too.
+
+Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
+some later version.
+
+[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+this issue. The discussion in
+[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+issue from the Versioneer side in more detail.
+[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
+[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
+pip to let Versioneer work correctly.
+
+Versioneer-0.16 and earlier only looked for a `.git` directory next to the
+`setup.cfg`, so subprojects were completely unsupported with those releases.
+
+### Editable installs with setuptools <= 18.5
+
+`setup.py develop` and `pip install --editable .` allow you to install a
+project into a virtualenv once, then continue editing the source code (and
+test) without re-installing after every change.
+
+"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
+convenient way to specify executable scripts that should be installed along
+with the python package.
+
+These both work as expected when using modern setuptools. When using
+setuptools-18.5 or earlier, however, certain operations will cause
+`pkg_resources.DistributionNotFound` errors when running the entrypoint
+script, which must be resolved by re-installing the package. 
This happens
+when the install happens with one version, then the egg_info data is
+regenerated while a different version is checked out. Many setup.py commands
+cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
+a different virtualenv), so this can be surprising.
+
+[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+this one, but upgrading to a newer version of setuptools should probably
+resolve it.
+
+### Unicode version strings
+
+While Versioneer works (and is continually tested) with both Python 2 and
+Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
+Newer releases probably generate unicode version strings on py2. It's not
+clear that this is wrong, but it may be surprising for applications which then
+write these strings to a network connection or include them in bytes-oriented
+APIs like cryptographic checksums.
+
+[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
+this question.
+
+
+## Updating Versioneer
+
+To upgrade your project to a new release of Versioneer, do the following:
+
+* install the new Versioneer (`pip install -U versioneer` or equivalent)
+* edit `setup.cfg`, if necessary, to include any new configuration settings
+  indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
+* re-run `versioneer install` in your source tree, to replace
+  `SRC/_version.py`
+* commit any changed files
+
+## Future Directions
+
+This tool is designed to make it easily extended to other version-control
+systems: all VCS-specific components are in separate directories like
+src/git/ . The top-level `versioneer.py` script is assembled from these
+components by running make-versioneer.py . In the future, make-versioneer.py
+will take a VCS name as an argument, and will construct a version of
+`versioneer.py` that is specific to the given VCS. 
It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . + +""" + +from __future__ import print_function +try: + import configparser +except ImportError: + import ConfigParser as configparser +import errno +import json +import os +import re +import subprocess +import sys + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . + """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ("Versioneer was unable to run the project root directory. 
" + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND').") + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. + me = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(me)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py)) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise EnvironmentError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . 
+ setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.SafeConfigParser() + with open(setup_cfg, "r") as f: + parser.readfp(f) + VCS = parser.get("versioneer", "VCS") # mandatory + + def get(parser, name): + if parser.has_option("versioneer", name): + return parser.get("versioneer", name) + return None + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = get(parser, "style") or "" + cfg.versionfile_source = get(parser, "versionfile_source") + cfg.versionfile_build = get(parser, "versionfile_build") + cfg.tag_prefix = get(parser, "tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = get(parser, "parentdir_prefix") + cfg.verbose = get(parser, "verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if 
sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +LONG_VERSION_PY['git'] = ''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = 
stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. 
The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%%s*" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%%d" %% pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. 
However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse 
describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + me = __file__ + if me.endswith(".pyc") or me.endswith(".pyo"): + me = os.path.splitext(me)[0] + ".py" + versioneer_file = os.path.relpath(me) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + f = open(".gitattributes", "r") + for line in f.readlines(): + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + f.close() + except EnvironmentError: + pass + if not present: + f = open(".gitattributes", "a+") + f.write("%s export-subst\n" % versionfile_source) + f.close() + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.18) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. 
+ +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except EnvironmentError: + raise NotThisMethod("unable to read _version.py") + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps(versions, sort_keys=True, + indent=1, separators=(",", ": ")) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += ".post.dev%d" % pieces["distance"]
+    else:
+        # exception #1
+        rendered = "0.post.dev%d" % pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. 
+ """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert cfg.versionfile_source is not None, \ + "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. + + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + 
print("unable to compute version")
+
+    return {"version": "0+unknown", "full-revisionid": None,
+            "dirty": None, "error": "unable to compute version",
+            "date": None}
+
+
+def get_version():
+    """Get the short version string for this project."""
+    return get_versions()["version"]
+
+
+def get_cmdclass():
+    """Get the custom setuptools/distutils subclasses used by Versioneer."""
+    if "versioneer" in sys.modules:
+        del sys.modules["versioneer"]
+        # this fixes the "python setup.py develop" case (also 'install' and
+        # 'easy_install .'), in which subdependencies of the main project are
+        # built (using setup.py bdist_egg) in the same python process. Assume
+        # a main project A and a dependency B, which use different versions
+        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
+        # sys.modules by the time B's setup.py is executed, causing B to run
+        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
+        # sandbox that restores sys.modules to its pre-build state, so the
+        # parent is protected against the child's "import versioneer". By
+        # removing ourselves from sys.modules here, before the child build
+        # happens, we protect the child from the parent's versioneer too.
+ # Also see https://github.com/warner/python-versioneer/issues/52 + + cmds = {} + + # we add "version" to both distutils and setuptools + from distutils.core import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + cmds["version"] = cmd_version + + # we override "build_py" in both distutils and setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. + # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? + # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? 
+ + # we override different "build_py" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. + # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if 'py2exe' in sys.modules: # py2exe enabled? 
+ try: + from py2exe.distutils_buildexe import py2exe as _py2exe # py3 + except ImportError: + from py2exe.build_exe import py2exe as _py2exe # py2 + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. 
You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. + +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +INIT_PY_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + + +def do_setup(): + """Main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except (EnvironmentError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + 
except EnvironmentError: + old = "" + if INIT_PY_SNIPPET not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(INIT_PY_SNIPPET) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print(" appending versionfile_source ('%s') to MANIFEST.in" % + cfg.versionfile_source) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1)