From e9efd9c3e9c647940880100b904b4090ee13cce8 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Thu, 19 Jan 2017 18:36:42 -0800 Subject: [PATCH 01/27] port updates from MDT vendor --- docker-make.py | 42 ++++++++++++++++++++++++++++++++++++------ 1 file changed, 36 insertions(+), 6 deletions(-) diff --git a/docker-make.py b/docker-make.py index fd98900..9ff7fa9 100755 --- a/docker-make.py +++ b/docker-make.py @@ -16,6 +16,7 @@ Multiple inheritance for your dockerfiles. Requires: python 2.7, docker-py, pyyaml (RUN: easy_install pip; pip install docker-py pyyaml) """ +import json import sys import os import textwrap @@ -44,7 +45,8 @@ def __init__(self, makefile, repository=None, # Connect to docker daemon if necessary if build_images: connection = docker.utils.kwargs_from_env() - connection['tls'].assert_hostname = False + if 'tls' in connection: + connection['tls'].assert_hostname = False self.client = docker.Client(**connection) else: self.client = None @@ -72,7 +74,10 @@ def parse_yaml(self, filename): sourcedefs = {} for s in yamldefs.get('_SOURCES_', []): - sourcedefs.update(self.parse_yaml(s)) + src = self.parse_yaml(s) + for item in src.itervalues(): + _fix_build_path(item, os.path.dirname(s)) + sourcedefs.update(src) sourcedefs.update(yamldefs) return sourcedefs @@ -120,8 +125,8 @@ def build_step(self, step, dockerfile): nocache=self.no_cache) if step.build_dir is not None: tempname = '_docker_make_tmp/' - tempdir = '%s/%s' % (step.build_dir, tempname) - temp_df = tempdir + 'Dockerfile' + tempdir = os.path.abspath(os.path.join(step.build_dir, tempname)) + temp_df = os.path.join(tempdir, 'Dockerfile') if not os.path.isdir(tempdir): os.makedirs(tempdir) with open(temp_df, 'w') as df_out: @@ -132,17 +137,27 @@ def build_step(self, step, dockerfile): else: build_args['fileobj'] = StringIO(unicode(dockerfile)) + # TODO: remove this workaround for docker/docker-py#1134 -- AMV 7/19/16 + build_args['decode'] = False + # start the build stream = 
self.client.build(**build_args) # monitor the output for item in stream: + # TODO: this is more workaround for docker/docker-py#1134 + try: + item = json.loads(item) + except ValueError: + print item, + continue + #### end of workaround - this can be removed once resolved - AMV 7/19/16 if item.keys() == ['stream']: print item['stream'].strip() elif 'errorDetail' in item or 'error' in item: raise BuildError(dockerfile, item, build_args) else: - print item + print item, # remove the temporary dockerfile if step.build_dir is not None: @@ -408,6 +423,20 @@ def printable_code(c): return '\n'.join(output) +def _fix_build_path(item, filepath): + path = os.path.expanduser(filepath) + + if 'build_directory' not in item: + return + + elif os.path.isabs(item['build_directory']): + return + + else: + item['build_directory'] = os.path.join(os.path.abspath(path), + item['build_directory']) + + def make_arg_parser(): parser = argparse.ArgumentParser(description= "NOTE: Docker environmental variables must be set.\n" @@ -488,4 +517,5 @@ def make_arg_parser(): (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.""" -if __name__ == '__main__': main() +if __name__ == '__main__': + main() From ed8d3ba6f3f6fbb7900d6fb1d21292e645b65f28 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Thu, 19 Jan 2017 18:46:07 -0800 Subject: [PATCH 02/27] Set up as package --- .gitattributes | 1 + MANIFEST.in | 2 + docker-make/__init__.py | 4 + docker-make/_version.py | 520 +++++ docker-make.py => docker-make/docker-make.py | 0 setup.cfg | 6 + setup.py | 13 + versioneer.py | 1822 ++++++++++++++++++ 8 files changed, 2368 insertions(+) create mode 100644 .gitattributes create mode 100644 MANIFEST.in create mode 100644 docker-make/__init__.py create mode 100644 docker-make/_version.py rename docker-make.py => docker-make/docker-make.py (100%) create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 
versioneer.py diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..e8427b8 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +docker-make/_version.py export-subst diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..aae37dc --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +include versioneer.py +include docker-make/_version.py diff --git a/docker-make/__init__.py b/docker-make/__init__.py new file mode 100644 index 0000000..74f4e66 --- /dev/null +++ b/docker-make/__init__.py @@ -0,0 +1,4 @@ + +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions diff --git a/docker-make/_version.py b/docker-make/_version.py new file mode 100644 index 0000000..aa18204 --- /dev/null +++ b/docker-make/_version.py @@ -0,0 +1,520 @@ + +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "" + cfg.parentdir_prefix = "None" + cfg.versionfile_source = "docker-make/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + 
print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. 
The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} diff --git a/docker-make.py b/docker-make/docker-make.py similarity index 100% rename from docker-make.py rename to docker-make/docker-make.py diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..9bbc27f --- /dev/null +++ b/setup.cfg @@ -0,0 +1,6 @@ +[versioneer] +VCS = git +style = pep440 +versionfile_source = docker-make/_version.py +versionfile_build = docker-make/_version.py +tag_prefix = '' diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..18fda4e --- /dev/null +++ b/setup.py @@ -0,0 +1,13 @@ +from distutils.core import setup +import versioneer + +setup( + name='DockerMake', + version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), + packages=['docker-make'], + license='Apache 2.0', + author='Aaron Virshup', + author_email='avirshup@gmail.com', + description='Build manager for docker images' +) diff --git a/versioneer.py b/versioneer.py new file mode 100644 index 0000000..64fea1c --- /dev/null +++ b/versioneer.py @@ -0,0 +1,1822 @@ + +# Version: 0.18 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! 
+* https://github.com/warner/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy +* [![Latest Version] +(https://pypip.in/version/versioneer/badge.svg?style=flat) +](https://pypi.python.org/pypi/versioneer/) +* [![Build Status] +(https://travis-ci.org/warner/python-versioneer.png?branch=master) +](https://travis-ci.org/warner/python-versioneer) + +This is a tool for managing a recorded version number in distutils-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. + + +## Quick Install + +* `pip install versioneer` to somewhere to your $PATH +* add a `[versioneer]` section to your setup.cfg (see below) +* run `versioneer install` in your source tree, commit the results + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. Some projects use tag names that include more than just the version +string (e.g. 
"myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes. + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. + +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. + +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 
+ +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. + +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. 
+ +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". To investigate the problem, run `setup.py +version`, which will run the version-lookup code in a verbose mode, and will +display the full contents of `get_versions()` (including the `error` string, +which may help identify what went wrong). + +## Known Limitations + +Some situations are known to cause problems for Versioneer. This details the +most significant ones. More can be found on Github +[issues page](https://github.com/warner/python-versioneer/issues). + +### Subprojects + +Versioneer has limited support for source trees in which `setup.py` is not in +the root directory (e.g. `setup.py` and `.git/` are *not* siblings). 
The are +two common reasons why `setup.py` might not be in the root: + +* Source trees which contain multiple subprojects, such as + [Buildbot](https://github.com/buildbot/buildbot), which contains both + "master" and "slave" subprojects, each with their own `setup.py`, + `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI + distributions (and upload multiple independently-installable tarballs). +* Source trees whose main purpose is to contain a C library, but which also + provide bindings to Python (and perhaps other langauges) in subdirectories. + +Versioneer will look for `.git` in parent directories, and most operations +should get the right version string. However `pip` and `setuptools` have bugs +and implementation details which frequently cause `pip install .` from a +subproject directory to fail to find a correct version string (so it usually +defaults to `0+unknown`). + +`pip install --editable .` should work correctly. `setup.py install` might +work too. + +Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in +some later version. + +[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking +this issue. The discussion in +[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the +issue from the Versioneer side in more detail. +[pip PR#3176](https://github.com/pypa/pip/pull/3176) and +[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve +pip to let Versioneer work correctly. + +Versioneer-0.16 and earlier only looked for a `.git` directory next to the +`setup.cfg`, so subprojects were completely unsupported with those releases. + +### Editable installs with setuptools <= 18.5 + +`setup.py develop` and `pip install --editable .` allow you to install a +project into a virtualenv once, then continue editing the source code (and +test) without re-installing after every change. 
+ +"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a +convenient way to specify executable scripts that should be installed along +with the python package. + +These both work as expected when using modern setuptools. When using +setuptools-18.5 or earlier, however, certain operations will cause +`pkg_resources.DistributionNotFound` errors when running the entrypoint +script, which must be resolved by re-installing the package. This happens +when the install happens with one version, then the egg_info data is +regenerated while a different version is checked out. Many setup.py commands +cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into +a different virtualenv), so this can be surprising. + +[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes +this one, but upgrading to a newer version of setuptools should probably +resolve it. + +### Unicode version strings + +While Versioneer works (and is continually tested) with both Python 2 and +Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. +Newer releases probably generate unicode version strings on py2. It's not +clear that this is wrong, but it may be surprising for applications when then +write these strings to a network connection or include them in bytes-oriented +APIs like cryptographic checksums. + +[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates +this question. + + +## Updating Versioneer + +To upgrade your project to a new release of Versioneer, do the following: + +* install the new Versioneer (`pip install -U versioneer` or equivalent) +* edit `setup.cfg`, if necessary, to include any new configuration settings + indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. 
+* re-run `versioneer install` in your source tree, to replace + `SRC/_version.py` +* commit any changed files + +## Future Directions + +This tool is designed to make it easily extended to other version-control +systems: all VCS-specific components are in separate directories like +src/git/ . The top-level `versioneer.py` script is assembled from these +components by running make-versioneer.py . In the future, make-versioneer.py +will take a VCS name as an argument, and will construct a version of +`versioneer.py` that is specific to the given VCS. It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . + +""" + +from __future__ import print_function +try: + import configparser +except ImportError: + import ConfigParser as configparser +import errno +import json +import os +import re +import subprocess +import sys + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . 
+ """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ("Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND').") + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. + me = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(me)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py)) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise EnvironmentError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . 
+ setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.SafeConfigParser() + with open(setup_cfg, "r") as f: + parser.readfp(f) + VCS = parser.get("versioneer", "VCS") # mandatory + + def get(parser, name): + if parser.has_option("versioneer", name): + return parser.get("versioneer", name) + return None + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = get(parser, "style") or "" + cfg.versionfile_source = get(parser, "versionfile_source") + cfg.versionfile_build = get(parser, "versionfile_build") + cfg.tag_prefix = get(parser, "tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = get(parser, "parentdir_prefix") + cfg.verbose = get(parser, "verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if 
sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +LONG_VERSION_PY['git'] = ''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = 
stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. 
The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%%s*" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += plus_or_dot(pieces)
+            rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += ".post.dev%%d" %% pieces["distance"]
+    else:
+        # exception #1
+        rendered = "0.post.dev%%d" %% pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%%d" %% pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%%s" %% pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%%d" %% pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%%s" %% pieces["short"]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Exceptions:
+    1: no tags.
0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%%d" %% pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%%d" %% pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags.
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. 
However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse 
describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + me = __file__ + if me.endswith(".pyc") or me.endswith(".pyo"): + me = os.path.splitext(me)[0] + ".py" + versioneer_file = os.path.relpath(me) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + f = open(".gitattributes", "r") + for line in f.readlines(): + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + f.close() + except EnvironmentError: + pass + if not present: + f = open(".gitattributes", "a+") + f.write("%s export-subst\n" % versionfile_source) + f.close() + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.18) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. 
+ +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except EnvironmentError: + raise NotThisMethod("unable to read _version.py") + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps(versions, sort_keys=True, + indent=1, separators=(",", ": ")) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. 
+ """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert cfg.versionfile_source is not None, \ + "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. + + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + 
print("unable to compute version") + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, "error": "unable to compute version", + "date": None} + + +def get_version(): + """Get the short version string for this project.""" + return get_versions()["version"] + + +def get_cmdclass(): + """Get the custom setuptools/distutils subclasses used by Versioneer.""" + if "versioneer" in sys.modules: + del sys.modules["versioneer"] + # this fixes the "python setup.py develop" case (also 'install' and + # 'easy_install .'), in which subdependencies of the main project are + # built (using setup.py bdist_egg) in the same python process. Assume + # a main project A and a dependency B, which use different versions + # of Versioneer. A's setup.py imports A's Versioneer, leaving it in + # sys.modules by the time B's setup.py is executed, causing B to run + # with the wrong versioneer. Setuptools wraps the sub-dep builds in a + # sandbox that restores sys.modules to it's pre-build state, so the + # parent is protected against the child's "import versioneer". By + # removing ourselves from sys.modules here, before the child build + # happens, we protect the child from the parent's versioneer too. 
+ # Also see https://github.com/warner/python-versioneer/issues/52 + + cmds = {} + + # we add "version" to both distutils and setuptools + from distutils.core import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + cmds["version"] = cmd_version + + # we override "build_py" in both distutils and setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. + # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? + # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? 
+ + # we override different "build_py" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. + # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if 'py2exe' in sys.modules: # py2exe enabled? 
+ try: + from py2exe.distutils_buildexe import py2exe as _py2exe # py3 + except ImportError: + from py2exe.build_exe import py2exe as _py2exe # py2 + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. 
You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. + +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +INIT_PY_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + + +def do_setup(): + """Main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except (EnvironmentError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + 
except EnvironmentError: + old = "" + if INIT_PY_SNIPPET not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(INIT_PY_SNIPPET) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print(" appending versionfile_source ('%s') to MANIFEST.in" % + cfg.versionfile_source) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . 
This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1) From 1f6cfe5788373dbf7445501ec6ad4e4936f4c35a Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Thu, 19 Jan 2017 22:25:58 -0800 Subject: [PATCH 03/27] Begin refactor into more maintainable classes --- MANIFEST.in | 2 +- {docker-make => dockermake}/__init__.py | 0 {docker-make => dockermake}/_version.py | 0 dockermake/builds.py | 179 +++++++++++++++++++++ {docker-make => dockermake}/docker-make.py | 139 +--------------- dockermake/imagedefs.py | 135 ++++++++++++++++ example/DockerMake.yml | 4 +- requirements.txt | 2 + setup.cfg | 4 +- setup.py | 2 +- 10 files changed, 323 insertions(+), 144 deletions(-) rename {docker-make => dockermake}/__init__.py (100%) rename {docker-make => dockermake}/_version.py (100%) create mode 100644 dockermake/builds.py rename {docker-make => dockermake}/docker-make.py (73%) create mode 100644 dockermake/imagedefs.py create mode 100644 requirements.txt diff --git a/MANIFEST.in b/MANIFEST.in index aae37dc..9f70142 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,2 @@ include versioneer.py -include docker-make/_version.py +include dockermake/_version.py diff --git a/docker-make/__init__.py b/dockermake/__init__.py similarity index 100% rename from docker-make/__init__.py rename to dockermake/__init__.py diff --git a/docker-make/_version.py b/dockermake/_version.py similarity index 100% rename from docker-make/_version.py rename to dockermake/_version.py diff --git a/dockermake/builds.py b/dockermake/builds.py new file mode 100644 index 0000000..174fdfb --- /dev/null +++ b/dockermake/builds.py @@ -0,0 +1,179 @@ +# Copyright 2016-2017 Autodesk Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from io import BytesIO, StringIO +import pprint + +DOCKER_TMPDIR = '_docker_make_tmp/' + + +class StagedFile(object): + """ Tracks a file or directory that will be built in one container then copied into another + """ + def __init__(self, sourceimage, sourcepath, destpath): + self.sourceimage = sourceimage + self.sourcepath = sourcepath + self.destpath = destpath + + +class BuildStep(object): + """ Stores and runs the instructions to build a single image. + + Args: + imagename (str): name of this image definition + baseimage (str): name of the image to inherit from (through "FROM") + img_def (dict): yaml definition of this image + buildname (str): what to call this image, once built + """ + + def __init__(self, imagename, baseimage, img_def, buildname): + self.imagename = imagename + self.baseimage = baseimage + self.dockerfile_lines = ['FROM %s\n' % baseimage, + img_def['build']] + self.buildname = buildname + self.build_dir = img_def.get('build_directory', None) + + def build(self, client, pull=False, usecache=True): + """ + Drives an individual build step. Build steps are separated by build_directory. + If a build has zero one or less build_directories, it will be built in a single + step. 
+ + Args: + client (docker.Client): docker client object that will build the image + pull (bool): whether to pull dependent layers from remote repositories + usecache (bool): whether to use cached layers or rebuild from scratch + """ + print ' * Build directory: %s' % self.build_dir + print ' * Target image name: %s' % self.buildname + + dockerfile = '\n'.join(self.dockerfile_lines) + + build_args = dict(tag=self.buildname, pull=pull, nocache=not usecache, + decode=True, rm=True) + + if self.build_dir is not None: + tempdir = self.write_dockerfile(dockerfile) + build_args.update(fileobj=None, + path=os.path.abspath(os.path.expanduser(self.build_dir)), + dockerfile=os.path.join(DOCKER_TMPDIR, 'Dockerfile')) + else: + build_args.update(fileobj=StringIO(unicode(dockerfile)), + path=None, + dockerfile=None) + + # start the build + stream = client.build(**build_args) + + # monitor the output + for item in stream: + if item.keys() == ['stream']: + print item['stream'].strip() + elif 'errorDetail' in item or 'error' in item: + raise BuildError(dockerfile, item, build_args) + else: + print item, + + # remove the temporary dockerfile + if self.build_dir is not None: + os.unlink(os.path.join(tempdir, 'Dockerfile')) + os.rmdir(tempdir) + + def write_dockerfile(self, dockerfile): + tempdir = os.path.abspath(os.path.join(self.build_dir, DOCKER_TMPDIR)) + temp_df = os.path.join(tempdir, 'Dockerfile') + if not os.path.isdir(tempdir): + os.makedirs(tempdir) + with open(temp_df, 'w') as df_out: + print >> df_out, dockerfile + return tempdir + + def printfile(self): + if not os.path.exists('docker_makefiles'): + os.makedirs('docker_makefiles') + filename = 'docker_makefiles/Dockerfile.%s' % self.imagename + + with open(filename, 'w') as dfout: + print >> dfout, '\n'.join(self.dockerfile_lines) + + +class BuildTarget(object): + """ Represents a target docker image. 
+ + Args: + imagename (str): name of the image definition + targetname (str): name to assign the final built image + steps (List[BuildStep]): list of steps required to build this image + stagedfiles (List[StagedFile]): list of files to stage into this image from other images + + """ + def __init__(self, imagename, targetname, steps, stagedfiles): + self.imagename = imagename + self.steps = steps + self.stagedfiles = stagedfiles + self.targetname = targetname + + def build(self, client, + printdockerfiles=False, nobuild=False, keepbuildtags=False): + """ + Drives the build of the final image - get the list of steps and execute them. + + Args: + client (docker.Client): docker client object that will build the image + printdockerfiles (bool): create the dockerfile for this build + nobuild (bool): just create dockerfiles, don't actually build the image + keepbuildtags (bool): keep tags on intermediate images + """ + print 'docker-make starting build for "%s" (image definition "%s"' % ( + self.targetname, self.imagename) + for istep, step in enumerate(self.steps): + print ' **** DockerMake Step %d/%d: %s ***' % ( + istep + 1, len(self.steps), step.imagename) + + if printdockerfiles: + step.printfile() + + if not nobuild: + step.build(client) + + finalimage = step.buildname + + if not nobuild: + self.finalizenames(client, finalimage, keepbuildtags) + + def finalizenames(self, client, finalimage, keepbuildtags): + """ Tag the built image with its final name and untag intermediate containers + """ + client.tag(finalimage, self.targetname) + print 'Tagged final image as %s\n' % self.targetname + if not keepbuildtags: + for step in self.steps: + client.remove_image(step.buildname, force=True) + print 'Untagged intermediate container "%s"' % step.buildname + + +class BuildError(Exception): + def __init__(self, dockerfile, item, build_args): + with open('dockerfile.fail', 'w') as dff: + print>> dff, dockerfile + with BytesIO() as stream: + print >> stream, '\n -------- Docker 
daemon output --------' + pprint.pprint(item, stream, indent=4) + print >> stream, ' -------- Arguments to client.build --------' + pprint.pprint(build_args, stream, indent=4) + print >> stream, 'This dockerfile was written to dockerfile.fail' + stream.seek(0) + super(BuildError, self).__init__(stream.read()) diff --git a/docker-make/docker-make.py b/dockermake/docker-make.py similarity index 73% rename from docker-make/docker-make.py rename to dockermake/docker-make.py index 9ff7fa9..d7e4488 100755 --- a/docker-make/docker-make.py +++ b/dockermake/docker-make.py @@ -20,10 +20,8 @@ import sys import os import textwrap -from collections import OrderedDict -from io import StringIO, BytesIO +from io import StringIO import argparse -import pprint import docker, docker.utils import yaml @@ -39,7 +37,6 @@ def __init__(self, makefile, repository=None, self._sources = set() self.makefile_path = makefile - self.img_defs = self.parse_yaml(self.makefile_path) self.all_targets = self.img_defs.pop('_ALL_', None) # Connect to docker daemon if necessary @@ -63,25 +60,6 @@ def __init__(self, makefile, repository=None, self.pull = pull self.no_cache = no_cache - def parse_yaml(self, filename): - fname = os.path.expanduser(filename) - print 'READING %s' % os.path.expanduser(fname) - if fname in self._sources: raise ValueError('Circular _SOURCE_') - self._sources.add(fname) - - with open(fname, 'r') as yaml_file: - yamldefs = yaml.load(yaml_file) - - sourcedefs = {} - for s in yamldefs.get('_SOURCES_', []): - src = self.parse_yaml(s) - for item in src.itervalues(): - _fix_build_path(item, os.path.dirname(s)) - sourcedefs.update(src) - - sourcedefs.update(yamldefs) - return sourcedefs - def build(self, image): """ Drives the build of the final image - get the list of steps and execute them. 
@@ -164,110 +142,9 @@ def build_step(self, step, dockerfile): os.unlink(temp_df) os.rmdir(tempdir) - def generate_build_order(self, image): - """ - Separate the build into a series of one or more intermediate steps. - Each specified build directory gets its own step - """ - repo_name = self.repo + image - if self.tag: - if ':' in repo_name: - repo_name += '-' + self.tag - else: - repo_name += ':' + self.tag - dependencies = self.sort_dependencies(image) - base = self.get_external_base_image(image, dependencies) - - build_steps = [BuildStep(base)] - step = build_steps[0] - for d in dependencies: - dep_definition = self.img_defs[d] - mydir = dep_definition.get('build_directory', None) - if mydir is not None: - mydir = os.path.expanduser(mydir) # expands `~` to home directory - if step.build_dir is not None: - # Create a new build step if there's already a build directory - step.tag = '%dbuild_%s' % (len(build_steps), image) - build_steps.append(BuildStep(step.tag)) - step = build_steps[-1] - step.build_dir = mydir - - step.images.append(d) - if 'build' in dep_definition: - step.dockerfile.append('\n#Commands for %s' % d) - step.dockerfile.append(dep_definition['build']) - else: - step.dockerfile.append('\n####end of requirements for %s\n' % d) - - # Sets the last step's name to the final build target - step.tag = repo_name - for step in build_steps: - step.dockerfile.insert(0, '#Build directory: %s\n#tag: %s' % - (step.build_dir, step.tag)) - return build_steps - - def sort_dependencies(self, com, dependencies=None): - """ - Topologically sort the docker commands by their requirements - TODO: sort using a "maximum common tree"? 
- :param com: process this docker image's dependencies - :param dependencies: running cache of sorted dependencies (ordered dict) - :return type: OrderedDict - """ - if dependencies is None: dependencies = OrderedDict() - if com in dependencies: return - requires = self.img_defs[com].get('requires', []) - assert type(requires) == list, 'Requirements for %s are not a list' % com - for dep in requires: - self.sort_dependencies(dep, dependencies) - if com in dependencies: - raise ValueError('Circular dependency found', dependencies) - dependencies[com] = None - return dependencies - def get_external_base_image(self, image, dependencies): - """ - Makes sure that this image has exactly one external base image - """ - base = None - base_for = None - for d in dependencies: - this_base = self.img_defs[d].get('FROM', None) - if this_base is not None and base is not None and this_base != base: - error = ('Multiple external dependencies: image %s depends on:\n' % image + - ' %s (FROM: %s), and\n' % (base_for, base) + - ' %s (FROM: %s).' 
% (d, this_base)) - raise ValueError(error) - if this_base is not None: - base = this_base - base_for = d - if base is None: - raise ValueError("No base image found in %s's dependencies" % image) - return base - - -class BuildError(Exception): - def __init__(self, dockerfile, item, build_args): - with open('dockerfile.fail', 'w') as dff: - print>> dff, dockerfile - with BytesIO() as stream: - print >> stream, '\n -------- Docker daemon output --------' - pprint.pprint(item, stream, indent=4) - print >> stream, ' -------- Arguments to client.build --------' - pprint.pprint(build_args, stream, indent=4) - print >> stream, 'This dockerfile was written to dockerfile.fail' - stream.seek(0) - super(BuildError, self).__init__(stream.read()) - - -class BuildStep(object): - def __init__(self, baseimage): - self.dockerfile = ['FROM %s\n' % baseimage] - self.tag = None - self.build_dir = None - self.images = [] def main(): @@ -423,20 +300,6 @@ def printable_code(c): return '\n'.join(output) -def _fix_build_path(item, filepath): - path = os.path.expanduser(filepath) - - if 'build_directory' not in item: - return - - elif os.path.isabs(item['build_directory']): - return - - else: - item['build_directory'] = os.path.join(os.path.abspath(path), - item['build_directory']) - - def make_arg_parser(): parser = argparse.ArgumentParser(description= "NOTE: Docker environmental variables must be set.\n" diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py new file mode 100644 index 0000000..677ed65 --- /dev/null +++ b/dockermake/imagedefs.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python2.7 +# Copyright 2016 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from collections import OrderedDict +import yaml + +from . import builds + + +class ImageDefs(object): + """ Stores and processes the image definitions + """ + def __init__(self, makefile_path): + self._sources = set() + self.makefile_path = makefile_path + self.defs = self.parse_yaml(self.makefile_path) + + def parse_yaml(self, filename): + fname = os.path.expanduser(filename) + print 'READING %s' % os.path.expanduser(fname) + if fname in self._sources: + raise ValueError('Circular _SOURCE_') + self._sources.add(fname) + + with open(fname, 'r') as yaml_file: + yamldefs = yaml.load(yaml_file) + + # Interpret build directory paths relative to this DockerMake.yml file + for item in yamldefs.itervalues(): + _fix_build_path(item, os.path.dirname(fname)) + + sourcedefs = {} + for s in yamldefs.get('_SOURCES_', []): + src = self.parse_yaml(s) + sourcedefs.update(src) + + sourcedefs.update(yamldefs) + return sourcedefs + + def generate_build(self, image, targetname): + """ + Separate the build into a series of one or more intermediate steps. 
+ Each specified build directory gets its own step + + Args: + image (str): name of the image as defined in the dockermake.py file + targetname (str): name to tag the final built image with + """ + base_image = self.get_external_base_image(image) + build_steps = [] + for i, base_name in enumerate(self.sort_dependencies(image)): + buildname = 'dmkbuild_%s_%d' % (image, i+1) + build_steps.append(builds.BuildStep(base_name, + base_image, + self.defs[base_name], + buildname)) + base_image = buildname + + return builds.BuildTarget(imagename=image, + targetname=targetname, + steps=build_steps, + stagedfiles=[]) # TODO: this. + + def sort_dependencies(self, image, dependencies=None): + """ + Topologically sort the docker commands by their requirements + + Args: + image (str): process this docker image's dependencies + dependencies (OrderedDict): running cache of sorted dependencies (ordered dict) + + Returns: + OrderedDict: dictionary of this image's requirements + """ + if dependencies is None: + dependencies = OrderedDict() + + if image in dependencies: + return + + requires = self.defs[image].get('requires', []) + assert type(requires) == list, 'Requirements for %s are not a list' % image + + for dep in requires: + self.sort_dependencies(dep, dependencies) + if image in dependencies: + raise ValueError('Circular dependency found', dependencies) + dependencies[image] = None + return dependencies + + def get_external_base_image(self, image): + """ Makes sure that this image has exactly one external base image + """ + base = None + base_for = None + for d in self.defs[image]['requires']: + this_base = self.defs[d].get('FROM', None) + if this_base is not None and base is not None and this_base != base: + error = ('Multiple external dependencies: image %s depends on:\n' % image + + ' %s (FROM: %s), and\n' % (base_for, base) + + ' %s (FROM: %s).' 
% (d, this_base)) + raise ValueError(error) + if this_base is not None: + base = this_base + base_for = d + if not base: + raise ValueError("No base image found in %s's dependencies" % image) + return base + + +def _fix_build_path(item, filepath): + path = os.path.expanduser(filepath) + + if 'build_directory' not in item: + return + elif os.path.isabs(item['build_directory']): + return + else: + item['build_directory'] = os.path.join(os.path.abspath(path), + item['build_directory']) + + diff --git a/example/DockerMake.yml b/example/DockerMake.yml index 6f39477..3d5f1c0 100644 --- a/example/DockerMake.yml +++ b/example/DockerMake.yml @@ -1,9 +1,9 @@ data_image: - FROM: phusion/baseimage + FROM: debian:jessie build: RUN mkdir /data devbase: - FROM: phusion/baseimage + FROM: debian:jessie build: | RUN apt-get -y update && apt-get -y install \ build-essential \ diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..cddddd6 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,2 @@ +docker-py +pyyaml diff --git a/setup.cfg b/setup.cfg index 9bbc27f..d79aacb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [versioneer] VCS = git style = pep440 -versionfile_source = docker-make/_version.py -versionfile_build = docker-make/_version.py +versionfile_source = dockermake/_version.py +versionfile_build = dockermake/_version.py tag_prefix = '' diff --git a/setup.py b/setup.py index 18fda4e..d9fc69a 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ name='DockerMake', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), - packages=['docker-make'], + packages=['dockermake'], license='Apache 2.0', author='Aaron Virshup', author_email='avirshup@gmail.com', From 17b1cd04975eb5b4a925e58604e9b681984b7bfc Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Thu, 19 Jan 2017 22:58:18 -0800 Subject: [PATCH 04/27] Get CLI working again --- dockermake/__main__.py | 190 +++++++++++++++++++ dockermake/builds.py | 11 +- dockermake/cli.py | 133 
+++++++++++++ dockermake/docker-make.py | 384 -------------------------------------- dockermake/imagedefs.py | 11 +- 5 files changed, 336 insertions(+), 393 deletions(-) create mode 100755 dockermake/__main__.py create mode 100644 dockermake/cli.py delete mode 100755 dockermake/docker-make.py diff --git a/dockermake/__main__.py b/dockermake/__main__.py new file mode 100755 index 0000000..1bef275 --- /dev/null +++ b/dockermake/__main__.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python2.7 +# Copyright 2016 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Multiple inheritance for your dockerfiles. +""" +import sys + +import yaml + +from . import cli +from .imagedefs import ImageDefs + + +def main(): + args = cli.make_arg_parser().parse_args() + + # Print help and exit + if args.help_yaml: + cli.print_yaml_help() + return + + defs = ImageDefs(args.makefile) + + if args.list: + list_image_defs(args, defs) + return + + targets = get_build_targets(args, defs) + if not targets: + print 'No build targets specified!' + list_image_defs(args, defs) + return + + # Actually build the images! (or just Dockerfiles) + built, warnings = build_targets(args, defs, targets) + + # Summarize the build process + print '\ndocker-make finished.' 
+ print 'Built: ' + for item in built: + print ' *', item + if warnings: + print 'Warnings:' + for item in warnings: + print ' *', item + + +def get_client(): + import docker.utils + + connection = docker.utils.kwargs_from_env() + if 'tls' in connection: + connection['tls'].assert_hostname = False + return docker.Client(**connection) + + +def list_image_defs(args, defs): + print 'TARGETS in `%s`' % args.makefile + for item in defs.ymldefs.keys(): + print ' *', item + return + + +def generate_name(image, args): + repo_base = args.repository if args.repository is not None else '' + if repo_base[-1] not in ':/': + repo_base += '/' + repo_name = repo_base + image + if args.tag: + if ':' in repo_name: + repo_name += '-'+args.tag + else: + repo_name += ':'+args.tag + + return repo_name + + +def get_build_targets(args, defs): + if args.requires or args.name: + # Assemble a custom target from requirements + assert args.requires and args.name + assert args.name not in defs.ymldefs + defs.ymldefs[args.name] = {'requires': args.requires} + targets = [args.name] + elif args.all: + # build all targets in the file + assert len(args.TARGETS) == 0, "Pass either a list of targets or `--all`, not both" + if defs.all_targets is not None: + targets = defs.all_targets + else: + targets = defs.ymldefs.keys() + else: + # build the user-specified targets + targets = args.TARGETS + + return targets + + +def build_targets(args, defs, targets): + if args.no_build: + client = None + else: + client = get_client() + built, warnings = [], [] + builders = [defs.generate_build(t, generate_name(t, args)) for t in targets] + for b in builders: + b.build(client, + printdockerfiles=args.print_dockerfiles, + nobuild=args.no_build) + print ' docker-make built:', b.targetname + built.append(b.targetname) + if args.push_to_registry: + success, w = push(client, b.targetname) + warnings.extend(w) + if not success: + built[-1] += ' -- PUSH FAILED' + else: + built[-1] += ' -- pushed to %s' % 
b.targetname.split('/')[0] + + return built, warnings + + +def push(client, name): + success = False + warnings = [] + if '/' not in name or name.split('/')[0].find('.') < 0: + warn = 'WARNING: could not push %s - ' \ + 'repository name does not contain a registry URL' % name + warnings.append(warn) + print warn + else: + print ' Pushing %s to %s:' % (name, name.split('/')[0]) + line = {'error': 'no push information received'} + _lastid = None + for line in client.push(name, stream=True): + line = yaml.load(line) + if 'status' in line: + if line.get('id', None) == _lastid and line['status'] == 'Pushing': + print '\r', line['status'], line['id'], line.get('progress', ''), + sys.stdout.flush() + else: + print line['status'], line.get('id', '') + _lastid = line.get('id', None) + else: + print line + if 'error' in line: + warnings.append('WARNING: push failed for %s. Message: %s' % (name, line['error'])) + else: + success = True + return success, warnings + + + +__license__ = """Copyright (c) 2016, Autodesk Research +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.""" + +if __name__ == '__main__': + main() diff --git a/dockermake/builds.py b/dockermake/builds.py index 174fdfb..6a558e2 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -58,7 +58,7 @@ def build(self, client, pull=False, usecache=True): usecache (bool): whether to use cached layers or rebuild from scratch """ print ' * Build directory: %s' % self.build_dir - print ' * Target image name: %s' % self.buildname + print ' * Intermediate image: %s' % self.buildname dockerfile = '\n'.join(self.dockerfile_lines) @@ -140,8 +140,11 @@ def build(self, client, print 'docker-make starting build for "%s" (image definition "%s"' % ( self.targetname, self.imagename) for istep, step in enumerate(self.steps): - print ' **** DockerMake Step %d/%d: %s ***' % ( - istep + 1, len(self.steps), step.imagename) + print ' **** Building %s, Step %d/%d: "%s" requirement ***' % ( + self.imagename, + istep + 1, + len(self.steps), + step.imagename) if printdockerfiles: step.printfile() @@ -157,7 +160,7 @@ def build(self, client, def finalizenames(self, client, finalimage, keepbuildtags): """ Tag the built image with its final name and untag intermediate containers """ - client.tag(finalimage, self.targetname) + client.tag(finalimage, *self.targetname.split(':')) print 'Tagged final image as %s\n' % self.targetname if not keepbuildtags: for step in self.steps: diff --git a/dockermake/cli.py b/dockermake/cli.py new file mode 100644 index 0000000..782bb1d --- /dev/null +++ 
b/dockermake/cli.py @@ -0,0 +1,133 @@ +# Copyright 2016 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import argparse +import textwrap + +def print_yaml_help(): + print "A brief introduction to writing Dockerfile.yml files:\n" + + print 'SYNTAX:' + print printable_code("""[image_name]: + build_directory: [relative path where the ADD and COPY commands will look for files] + requires: + - [other image name] + - [yet another image name] + FROM: [named_base_image] + build: | + RUN [something] + ADD [something else] + [Dockerfile commands go here] + +[other image name]: ... +[yet another image name]: ...""") + + print + print textwrap.fill("The idea is to write dockerfile commands for each specific " + 'piece of functionality in the build field, and "inherit" all other' + ' functionality from a list of other components that your image requires. ' + 'If you need to add files with the ADD and COPY commands, specify the root' + ' directory for those files with build_directory. 
Your tree of ' + '"requires" must have exactly one unique named base image ' + 'in the FROM field.') + + print '\n\nAN EXAMPLE:' + print printable_code("""devbase: + FROM: phusion/baseimage + build: | + RUN apt-get -y update && apt-get -y install build-essential + +airline_data: + requires: + - devbase + build_directory: sample_data/airline_data + build: | + ADD AirlinePassengers.csv + +python_image: + requires: + - devbase + build: | + RUN apt-get -y update \ + && apt-get install -y python python-pip \ + && pip install pandas + +data_science: + requires: + - python_image + - airline_data""") + + +def printable_code(c): + output = [] + dedented = textwrap.dedent(c) + for line in dedented.split('\n'): + output.append(' >> ' + line) + return '\n'.join(output) + + +def make_arg_parser(): + parser = argparse.ArgumentParser(description= + "NOTE: Docker environmental variables must be set.\n" + "For a docker-machine, run " + "`eval $(docker-machine env [machine-name])`") + bo = parser.add_argument_group('Choosing what to build') + bo.add_argument('TARGETS', nargs="*", + help='Docker images to build as specified in the YAML file') + bo.add_argument('-f', '--makefile', + default='DockerMake.yml', + help='YAML file containing build instructions') + bo.add_argument('-a', '--all', action='store_true', + help="Print or build all images (or those specified by _ALL_)") + bo.add_argument('-l', '--list', action='store_true', + help='List all available targets in the file, then exit.') + bo.add_argument('--requires', nargs="*", + help='Build a special image from these requirements. 
Requires --name') + bo.add_argument('--name', type=str, + help="Name for custom docker images (requires --requires)") + + df = parser.add_argument_group('Dockerfiles') + df.add_argument('-p', '--print_dockerfiles', action='store_true', + help="Print out the generated dockerfiles named `Dockerfile.[image]`") + df.add_argument('-n', '--no_build', action='store_true', + help='Only print Dockerfiles, don\'t build them. Implies --print.') + + ca = parser.add_argument_group('Image caching') + ca.add_argument('--pull', action='store_true', + help='Always try to pull updated FROM images') + ca.add_argument('--no-cache', action='store_true', + help="Rebuild every layer") + # TODO: add a way to invalidate a specific target + + rt = parser.add_argument_group('Repositories and tags') + rt.add_argument('--repository', '-r', '-u', + help="Prepend this repository to all built images, e.g.\n" + "`docker-make hello-world -u quay.io/elvis` will tag the image " + "as `quay.io/elvis/hello-world`. You can add a ':' to the end to " + "image names into tags:\n `docker-make -u quay.io/elvis/repo: hello-world` " + "will create the image in the elvis repository: quay.io/elvis/repo:hello-world") + rt.add_argument('--tag', '-t', type=str, + help='Tag all built images with this tag. If image names are ALREADY tags (i.e.,' + ' your repo name ends in a ":"), this will append the tag name with a dash. 
' + 'For example: `docker-make hello-world -u elvis/repo: -t 1.0` will create ' + 'the image "elvis/repo:hello-world-1.0') + rt.add_argument('--push-to-registry', '-P', action='store_true', + help='Push all built images to the repository specified ' + '(only if image repository contains a URL) -- to push to dockerhub.com, ' + 'use index.docker.io as the registry)') + + hh = parser.add_argument_group('Help') + hh.add_argument('--help-yaml', action='store_true', + help="Print summary of YAML file format and exit.") + + return parser diff --git a/dockermake/docker-make.py b/dockermake/docker-make.py deleted file mode 100755 index d7e4488..0000000 --- a/dockermake/docker-make.py +++ /dev/null @@ -1,384 +0,0 @@ -#!/usr/bin/env python2.7 -# Copyright 2016 Autodesk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Multiple inheritance for your dockerfiles. 
-Requires: python 2.7, docker-py, pyyaml (RUN: easy_install pip; pip install docker-py pyyaml) -""" -import json -import sys -import os -import textwrap -from io import StringIO -import argparse - -import docker, docker.utils -import yaml - - -class DockerMaker(object): - def __init__(self, makefile, repository=None, - build_images=True, - print_dockerfiles=False, - no_cache=False, - tag=None, - pull=False): - - self._sources = set() - self.makefile_path = makefile - self.all_targets = self.img_defs.pop('_ALL_', None) - - # Connect to docker daemon if necessary - if build_images: - connection = docker.utils.kwargs_from_env() - if 'tls' in connection: - connection['tls'].assert_hostname = False - self.client = docker.Client(**connection) - else: - self.client = None - - if repository and repository[-1] not in '/:': - self.repo = repository + '/' - elif repository is None: - self.repo = '' - else: - self.repo = repository - self.tag = tag - self.build_images = build_images - self.print_dockerfiles = print_dockerfiles - self.pull = pull - self.no_cache = no_cache - - def build(self, image): - """ - Drives the build of the final image - get the list of steps and execute them. 
- :param image: name of the image from the yaml file to build - :return: final tagged image name - """ - print 'docker-make starting build for %s' % image - build_steps = self.generate_build_order(image) - for istep, step in enumerate(build_steps): - print ' **** DockerMake Step %d/%d: %s ***' % (istep + 1, len(build_steps), ','.join(step.images)) - print ' * Build directory: %s' % step.build_dir - print ' * Target image name: %s' % step.tag - dockerfile = '\n'.join(step.dockerfile) - - # build the image - if self.build_images: - self.build_step(step, dockerfile) - - # Dump the dockerfile to a file - if self.print_dockerfiles: - if not os.path.exists('docker_makefiles'): - os.makedirs('docker_makefiles') - if '/' in step.tag: - filename = 'docker_makefiles/Dockerfile.%s' % image - else: - filename = 'docker_makefiles/Dockerfile.%s' % step.tag - with open(filename, 'w') as dfout: - print >> dfout, dockerfile - - return step.tag - - def build_step(self, step, dockerfile): - """ - Drives an individual build step. Build steps are separated by build_directory. - If a build has zero one or less build_directories, it will be built in a single - step. 
- """ - # set up the build context - build_args = dict(decode=True, tag=step.tag, pull=self.pull, - fileobj=None, path=None, dockerfile=None, - nocache=self.no_cache) - if step.build_dir is not None: - tempname = '_docker_make_tmp/' - tempdir = os.path.abspath(os.path.join(step.build_dir, tempname)) - temp_df = os.path.join(tempdir, 'Dockerfile') - if not os.path.isdir(tempdir): - os.makedirs(tempdir) - with open(temp_df, 'w') as df_out: - print >> df_out, dockerfile - - build_args['path'] = os.path.abspath(step.build_dir) - build_args['dockerfile'] = tempname + 'Dockerfile' - else: - build_args['fileobj'] = StringIO(unicode(dockerfile)) - - # TODO: remove this workaround for docker/docker-py#1134 -- AMV 7/19/16 - build_args['decode'] = False - - # start the build - stream = self.client.build(**build_args) - - # monitor the output - for item in stream: - # TODO: this is more workaround for docker/docker-py#1134 - try: - item = json.loads(item) - except ValueError: - print item, - continue - #### end of workaround - this can be removed once resolved - AMV 7/19/16 - if item.keys() == ['stream']: - print item['stream'].strip() - elif 'errorDetail' in item or 'error' in item: - raise BuildError(dockerfile, item, build_args) - else: - print item, - - # remove the temporary dockerfile - if step.build_dir is not None: - os.unlink(temp_df) - os.rmdir(tempdir) - - - - - - -def main(): - args = make_arg_parser().parse_args() - - # Help and exit - if args.help_yaml: - print_yaml_help() - return - - # Otherwise, parse the yaml file - maker = DockerMaker(args.makefile, repository=args.repository, - build_images=not (args.no_build or args.list), - print_dockerfiles=(args.print_dockerfiles or args.no_build), - pull=args.pull, no_cache=args.no_cache, tag=args.tag) - - if args.list: - print 'TARGETS in `%s`' % args.makefile - for item in maker.img_defs.keys(): print ' *', item - return - - # Assemble custom requirements target - if args.requires or args.name: - assert args.requires 
and args.name - assert args.name not in maker.img_defs - maker.img_defs[args.name] = {'requires': args.requires} - targets = [args.name] - elif args.all: - assert len(args.TARGETS) == 0, "Pass either a list of targets or `--all`, not both" - if maker.all_targets is not None: - targets = maker.all_targets - else: - targets = maker.img_defs.keys() - else: - targets = args.TARGETS - - if not targets: - print 'No build targets specified!' - print 'Targets in `%s`:' % args.makefile - for item in maker.img_defs.keys(): print ' *', item - return - - # Actually build the images! (or Dockerfiles) - built, warnings = [], [] - for t in targets: - name = maker.build(t) - print ' docker-make built:', name - built.append(name) - if args.push_to_registry: - success, w = push(maker, name) - warnings.extend(w) - if not success: built[-1] += ' -- PUSH FAILED' - else: built[-1] += ' -- pushed to %s' % name.split('/')[0] - - # Summarize the build process - print '\ndocker-make finished.' - print 'Built: ' - for item in built: print ' *', item - if warnings: - print 'Warnings:' - for item in warnings: print ' *', item - - -def push(maker, name): - success = False - warnings = [] - if '/' not in name or name.split('/')[0].find('.') < 0: - warn = 'WARNING: could not push %s - ' \ - 'repository name does not contain a registry URL' % name - warnings.append(warn) - print warn - else: - print ' Pushing %s to %s:' % (name, name.split('/')[0]) - line = {'error': 'no push information received'} - _lastid = None - for line in maker.client.push(name, stream=True): - line = yaml.load(line) - if 'status' in line: - if line.get('id', None) == _lastid and line['status'] == 'Pushing': - print '\r', line['status'], line['id'], line.get('progress', ''), - sys.stdout.flush() - else: - print line['status'], line.get('id', '') - _lastid = line.get('id', None) - else: - print line - if 'error' in line: - warnings.append('WARNING: push failed for %s. 
Message: %s' % (name, line['error'])) - else: - success = True - return success, warnings - - -def print_yaml_help(): - print "A brief introduction to writing Dockerfile.yml files:\n" - - print 'SYNTAX:' - print printable_code("""[image_name]: - build_directory: [relative path where the ADD and COPY commands will look for files] - requires: - - [other image name] - - [yet another image name] - FROM: [named_base_image] - build: | - RUN [something] - ADD [something else] - [Dockerfile commands go here] - -[other image name]: ... -[yet another image name]: ...""") - - print - print textwrap.fill("The idea is to write dockerfile commands for each specific " - 'piece of functionality in the build field, and "inherit" all other' - ' functionality from a list of other components that your image requires. ' - 'If you need to add files with the ADD and COPY commands, specify the root' - ' directory for those files with build_directory. Your tree of ' - '"requires" must have exactly one unique named base image ' - 'in the FROM field.') - - print '\n\nAN EXAMPLE:' - print printable_code("""devbase: - FROM: phusion/baseimage - build: | - RUN apt-get -y update && apt-get -y install build-essential - -airline_data: - requires: - - devbase - build_directory: sample_data/airline_data - build: | - ADD AirlinePassengers.csv - -python_image: - requires: - - devbase - build: | - RUN apt-get -y update \ - && apt-get install -y python python-pip \ - && pip install pandas - -data_science: - requires: - - python_image - - airline_data""") - - -def printable_code(c): - output = [] - dedented = textwrap.dedent(c) - for line in dedented.split('\n'): - output.append(' >> ' + line) - return '\n'.join(output) - - -def make_arg_parser(): - parser = argparse.ArgumentParser(description= - "NOTE: Docker environmental variables must be set.\n" - "For a docker-machine, run " - "`eval $(docker-machine env [machine-name])`") - bo = parser.add_argument_group('Choosing what to build') - 
bo.add_argument('TARGETS', nargs="*", - help='Docker images to build as specified in the YAML file') - bo.add_argument('-f', '--makefile', - default='DockerMake.yml', - help='YAML file containing build instructions') - bo.add_argument('-a', '--all', action='store_true', - help="Print or build all images (or those specified by _ALL_)") - bo.add_argument('-l', '--list', action='store_true', - help='List all available targets in the file, then exit.') - bo.add_argument('--requires', nargs="*", - help='Build a special image from these requirements. Requires --name') - bo.add_argument('--name', type=str, - help="Name for custom docker images (requires --requires)") - - df = parser.add_argument_group('Dockerfiles') - df.add_argument('-p', '--print_dockerfiles', action='store_true', - help="Print out the generated dockerfiles named `Dockerfile.[image]`") - df.add_argument('-n', '--no_build', action='store_true', - help='Only print Dockerfiles, don\'t build them. Implies --print.') - - ca = parser.add_argument_group('Image caching') - ca.add_argument('--pull', action='store_true', - help='Always try to pull updated FROM images') - ca.add_argument('--no-cache', action='store_true', - help="Rebuild every layer") - # TODO: add a way to invalidate a specific target - - rt = parser.add_argument_group('Repositories and tags') - rt.add_argument('--repository', '-r', '-u', - help="Prepend this repository to all built images, e.g.\n" - "`docker-make hello-world -u quay.io/elvis` will tag the image " - "as `quay.io/elvis/hello-world`. You can add a ':' to the end to " - "image names into tags:\n `docker-make -u quay.io/elvis/repo: hello-world` " - "will create the image in the elvis repository: quay.io/elvis/repo:hello-world") - rt.add_argument('--tag', '-t', type=str, - help='Tag all built images with this tag. If image names are ALREADY tags (i.e.,' - ' your repo name ends in a ":"), this will append the tag name with a dash. 
' - 'For example: `docker-make hello-world -u elvis/repo: -t 1.0` will create ' - 'the image "elvis/repo:hello-world-1.0') - rt.add_argument('--push-to-registry', '-P', action='store_true', - help='Push all built images to the repository specified ' - '(only if image repository contains a URL) -- to push to dockerhub.com, ' - 'use index.docker.io as the registry)') - - hh = parser.add_argument_group('Help') - hh.add_argument('--help-yaml', action='store_true', - help="Print summary of YAML file format and exit.") - - return parser - - -__license__ = """Copyright (c) 2016, Autodesk Research -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.""" - -if __name__ == '__main__': - main() diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index 677ed65..7b61ef0 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -26,7 +26,8 @@ class ImageDefs(object): def __init__(self, makefile_path): self._sources = set() self.makefile_path = makefile_path - self.defs = self.parse_yaml(self.makefile_path) + self.ymldefs = self.parse_yaml(self.makefile_path) + self.all_targets = self.ymldefs.pop('_ALL_', None) def parse_yaml(self, filename): fname = os.path.expanduser(filename) @@ -65,7 +66,7 @@ def generate_build(self, image, targetname): buildname = 'dmkbuild_%s_%d' % (image, i+1) build_steps.append(builds.BuildStep(base_name, base_image, - self.defs[base_name], + self.ymldefs[base_name], buildname)) base_image = buildname @@ -91,7 +92,7 @@ def sort_dependencies(self, image, dependencies=None): if image in dependencies: return - requires = self.defs[image].get('requires', []) + requires = self.ymldefs[image].get('requires', []) assert type(requires) == list, 'Requirements for %s are not a list' % image for dep in requires: @@ -106,8 +107,8 @@ def get_external_base_image(self, image): """ base = None base_for = None - for d in self.defs[image]['requires']: - this_base = self.defs[d].get('FROM', None) + for d in self.ymldefs[image]['requires']: + this_base = self.ymldefs[d].get('FROM', None) if this_base is not None and base is not None and this_base != base: error = ('Multiple 
external dependencies: image %s depends on:\n' % image + ' %s (FROM: %s), and\n' % (base_for, base) + From f7f4e98fdabe873b4f3110029bf1baf1b0647b3b Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Thu, 19 Jan 2017 23:20:18 -0800 Subject: [PATCH 05/27] Add executable entry point --- dockermake/__init__.py | 25 ++++++- dockermake/__main__.py | 153 +++++------------------------------------ dockermake/cli.py | 114 +++++++++++++++--------------- dockermake/utils.py | 118 +++++++++++++++++++++++++++++++ setup.py | 7 +- 5 files changed, 222 insertions(+), 195 deletions(-) create mode 100644 dockermake/utils.py diff --git a/dockermake/__init__.py b/dockermake/__init__.py index 74f4e66..28e17dd 100644 --- a/dockermake/__init__.py +++ b/dockermake/__init__.py @@ -1,4 +1,27 @@ - from ._version import get_versions __version__ = get_versions()['version'] del get_versions + + +__license__ = """Copyright (c) 2016, Autodesk Research +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.""" diff --git a/dockermake/__main__.py b/dockermake/__main__.py index 1bef275..9877822 100755 --- a/dockermake/__main__.py +++ b/dockermake/__main__.py @@ -15,36 +15,44 @@ """ Multiple inheritance for your dockerfiles. """ -import sys - -import yaml +import os from . import cli from .imagedefs import ImageDefs +from . import utils def main(): - args = cli.make_arg_parser().parse_args() + + parser = cli.make_arg_parser() + args = parser.parse_args() # Print help and exit if args.help_yaml: cli.print_yaml_help() return + if not os.path.exists(args.makefile): + if args.makefile == 'DockerMake.yml': + parser.print_help() + return + else: + raise IOError('No docker makefile found at path "%s"' % args.makefile) + defs = ImageDefs(args.makefile) if args.list: - list_image_defs(args, defs) + utils.list_image_defs(args, defs) return - targets = get_build_targets(args, defs) + targets = utils.get_build_targets(args, defs) if not targets: print 'No build targets specified!' - list_image_defs(args, defs) + utils.list_image_defs(args, defs) return # Actually build the images! (or just Dockerfiles) - built, warnings = build_targets(args, defs, targets) + built, warnings = utils.build_targets(args, defs, targets) # Summarize the build process print '\ndocker-make finished.' 
@@ -57,134 +65,5 @@ def main(): print ' *', item -def get_client(): - import docker.utils - - connection = docker.utils.kwargs_from_env() - if 'tls' in connection: - connection['tls'].assert_hostname = False - return docker.Client(**connection) - - -def list_image_defs(args, defs): - print 'TARGETS in `%s`' % args.makefile - for item in defs.ymldefs.keys(): - print ' *', item - return - - -def generate_name(image, args): - repo_base = args.repository if args.repository is not None else '' - if repo_base[-1] not in ':/': - repo_base += '/' - repo_name = repo_base + image - if args.tag: - if ':' in repo_name: - repo_name += '-'+args.tag - else: - repo_name += ':'+args.tag - - return repo_name - - -def get_build_targets(args, defs): - if args.requires or args.name: - # Assemble a custom target from requirements - assert args.requires and args.name - assert args.name not in defs.ymldefs - defs.ymldefs[args.name] = {'requires': args.requires} - targets = [args.name] - elif args.all: - # build all targets in the file - assert len(args.TARGETS) == 0, "Pass either a list of targets or `--all`, not both" - if defs.all_targets is not None: - targets = defs.all_targets - else: - targets = defs.ymldefs.keys() - else: - # build the user-specified targets - targets = args.TARGETS - - return targets - - -def build_targets(args, defs, targets): - if args.no_build: - client = None - else: - client = get_client() - built, warnings = [], [] - builders = [defs.generate_build(t, generate_name(t, args)) for t in targets] - for b in builders: - b.build(client, - printdockerfiles=args.print_dockerfiles, - nobuild=args.no_build) - print ' docker-make built:', b.targetname - built.append(b.targetname) - if args.push_to_registry: - success, w = push(client, b.targetname) - warnings.extend(w) - if not success: - built[-1] += ' -- PUSH FAILED' - else: - built[-1] += ' -- pushed to %s' % b.targetname.split('/')[0] - - return built, warnings - - -def push(client, name): - success = False - 
warnings = [] - if '/' not in name or name.split('/')[0].find('.') < 0: - warn = 'WARNING: could not push %s - ' \ - 'repository name does not contain a registry URL' % name - warnings.append(warn) - print warn - else: - print ' Pushing %s to %s:' % (name, name.split('/')[0]) - line = {'error': 'no push information received'} - _lastid = None - for line in client.push(name, stream=True): - line = yaml.load(line) - if 'status' in line: - if line.get('id', None) == _lastid and line['status'] == 'Pushing': - print '\r', line['status'], line['id'], line.get('progress', ''), - sys.stdout.flush() - else: - print line['status'], line.get('id', '') - _lastid = line.get('id', None) - else: - print line - if 'error' in line: - warnings.append('WARNING: push failed for %s. Message: %s' % (name, line['error'])) - else: - success = True - return success, warnings - - - -__license__ = """Copyright (c) 2016, Autodesk Research -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.""" - if __name__ == '__main__': main() diff --git a/dockermake/cli.py b/dockermake/cli.py index 782bb1d..bf8460c 100644 --- a/dockermake/cli.py +++ b/dockermake/cli.py @@ -14,6 +14,64 @@ import argparse import textwrap + +def make_arg_parser(): + parser = argparse.ArgumentParser(description= + "NOTE: Docker environmental variables must be set.\n" + "For a docker-machine, run " + "`eval $(docker-machine env [machine-name])`") + bo = parser.add_argument_group('Choosing what to build') + bo.add_argument('TARGETS', nargs="*", + help='Docker images to build as specified in the YAML file') + bo.add_argument('-f', '--makefile', + default='DockerMake.yml', + help='YAML file containing build instructions') + bo.add_argument('-a', '--all', action='store_true', + help="Print or build all images (or those specified by _ALL_)") + bo.add_argument('-l', '--list', action='store_true', + help='List all available targets in the file, then exit.') + bo.add_argument('--requires', nargs="*", + help='Build a special image from these requirements. Requires --name') + bo.add_argument('--name', type=str, + help="Name for custom docker images (requires --requires)") + + df = parser.add_argument_group('Dockerfiles') + df.add_argument('-p', '--print_dockerfiles', action='store_true', + help="Print out the generated dockerfiles named `Dockerfile.[image]`") + df.add_argument('-n', '--no_build', action='store_true', + help='Only print Dockerfiles, don\'t build them. 
Implies --print.') + + ca = parser.add_argument_group('Image caching') + ca.add_argument('--pull', action='store_true', + help='Always try to pull updated FROM images') + ca.add_argument('--no-cache', action='store_true', + help="Rebuild every layer") + # TODO: add a way to invalidate a specific target + + rt = parser.add_argument_group('Repositories and tags') + rt.add_argument('--repository', '-r', '-u', + help="Prepend this repository to all built images, e.g.\n" + "`docker-make hello-world -u quay.io/elvis` will tag the image " + "as `quay.io/elvis/hello-world`. You can add a ':' to the end to " + "image names into tags:\n `docker-make -u quay.io/elvis/repo: hello-world` " + "will create the image in the elvis repository: quay.io/elvis/repo:hello-world") + rt.add_argument('--tag', '-t', type=str, + help='Tag all built images with this tag. If image names are ALREADY tags (i.e.,' + ' your repo name ends in a ":"), this will append the tag name with a dash. ' + 'For example: `docker-make hello-world -u elvis/repo: -t 1.0` will create ' + 'the image "elvis/repo:hello-world-1.0') + rt.add_argument('--push-to-registry', '-P', action='store_true', + help='Push all built images to the repository specified ' + '(only if image repository contains a URL) -- to push to dockerhub.com, ' + 'use index.docker.io as the registry)') + + hh = parser.add_argument_group('Help') + hh.add_argument('--help-yaml', action='store_true', + help="Print summary of YAML file format and exit.") + + return parser + + def print_yaml_help(): print "A brief introduction to writing Dockerfile.yml files:\n" @@ -75,59 +133,3 @@ def printable_code(c): output.append(' >> ' + line) return '\n'.join(output) - -def make_arg_parser(): - parser = argparse.ArgumentParser(description= - "NOTE: Docker environmental variables must be set.\n" - "For a docker-machine, run " - "`eval $(docker-machine env [machine-name])`") - bo = parser.add_argument_group('Choosing what to build') - bo.add_argument('TARGETS', 
nargs="*", - help='Docker images to build as specified in the YAML file') - bo.add_argument('-f', '--makefile', - default='DockerMake.yml', - help='YAML file containing build instructions') - bo.add_argument('-a', '--all', action='store_true', - help="Print or build all images (or those specified by _ALL_)") - bo.add_argument('-l', '--list', action='store_true', - help='List all available targets in the file, then exit.') - bo.add_argument('--requires', nargs="*", - help='Build a special image from these requirements. Requires --name') - bo.add_argument('--name', type=str, - help="Name for custom docker images (requires --requires)") - - df = parser.add_argument_group('Dockerfiles') - df.add_argument('-p', '--print_dockerfiles', action='store_true', - help="Print out the generated dockerfiles named `Dockerfile.[image]`") - df.add_argument('-n', '--no_build', action='store_true', - help='Only print Dockerfiles, don\'t build them. Implies --print.') - - ca = parser.add_argument_group('Image caching') - ca.add_argument('--pull', action='store_true', - help='Always try to pull updated FROM images') - ca.add_argument('--no-cache', action='store_true', - help="Rebuild every layer") - # TODO: add a way to invalidate a specific target - - rt = parser.add_argument_group('Repositories and tags') - rt.add_argument('--repository', '-r', '-u', - help="Prepend this repository to all built images, e.g.\n" - "`docker-make hello-world -u quay.io/elvis` will tag the image " - "as `quay.io/elvis/hello-world`. You can add a ':' to the end to " - "image names into tags:\n `docker-make -u quay.io/elvis/repo: hello-world` " - "will create the image in the elvis repository: quay.io/elvis/repo:hello-world") - rt.add_argument('--tag', '-t', type=str, - help='Tag all built images with this tag. If image names are ALREADY tags (i.e.,' - ' your repo name ends in a ":"), this will append the tag name with a dash. 
' - 'For example: `docker-make hello-world -u elvis/repo: -t 1.0` will create ' - 'the image "elvis/repo:hello-world-1.0') - rt.add_argument('--push-to-registry', '-P', action='store_true', - help='Push all built images to the repository specified ' - '(only if image repository contains a URL) -- to push to dockerhub.com, ' - 'use index.docker.io as the registry)') - - hh = parser.add_argument_group('Help') - hh.add_argument('--help-yaml', action='store_true', - help="Print summary of YAML file format and exit.") - - return parser diff --git a/dockermake/utils.py b/dockermake/utils.py new file mode 100644 index 0000000..895e868 --- /dev/null +++ b/dockermake/utils.py @@ -0,0 +1,118 @@ +# Copyright 2016 Autodesk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def get_client(): + import docker.utils + + connection = docker.utils.kwargs_from_env() + if 'tls' in connection: + connection['tls'].assert_hostname = False + return docker.Client(**connection) + + +def list_image_defs(args, defs): + print 'TARGETS in `%s`' % args.makefile + for item in defs.ymldefs.keys(): + print ' *', item + return + + +def generate_name(image, args): + repo_base = args.repository if args.repository is not None else '' + if repo_base[-1] not in ':/': + repo_base += '/' + repo_name = repo_base + image + if args.tag: + if ':' in repo_name: + repo_name += '-'+args.tag + else: + repo_name += ':'+args.tag + + return repo_name + + +def get_build_targets(args, defs): + if args.requires or args.name: + # Assemble a custom target from requirements + assert args.requires and args.name + assert args.name not in defs.ymldefs + defs.ymldefs[args.name] = {'requires': args.requires} + targets = [args.name] + elif args.all: + # build all targets in the file + assert len(args.TARGETS) == 0, "Pass either a list of targets or `--all`, not both" + if defs.all_targets is not None: + targets = defs.all_targets + else: + targets = defs.ymldefs.keys() + else: + # build the user-specified targets + targets = args.TARGETS + + return targets + + +def build_targets(args, defs, targets): + if args.no_build: + client = None + else: + client = get_client() + built, warnings = [], [] + builders = [defs.generate_build(t, generate_name(t, args)) for t in targets] + for b in builders: + b.build(client, + printdockerfiles=args.print_dockerfiles, + nobuild=args.no_build) + print ' docker-make built:', b.targetname + built.append(b.targetname) + if args.push_to_registry: + success, w = push(client, b.targetname) + warnings.extend(w) + if not success: + built[-1] += ' -- PUSH FAILED' + else: + built[-1] += ' -- pushed to %s' % b.targetname.split('/')[0] + + return built, warnings + + +def push(client, name): + success = False + warnings = [] + if '/' not in name or 
name.split('/')[0].find('.') < 0: + warn = 'WARNING: could not push %s - ' \ + 'repository name does not contain a registry URL' % name + warnings.append(warn) + print warn + else: + print ' Pushing %s to %s:' % (name, name.split('/')[0]) + line = {'error': 'no push information received'} + _lastid = None + for line in client.push(name, stream=True): + line = yaml.load(line) + if 'status' in line: + if line.get('id', None) == _lastid and line['status'] == 'Pushing': + print '\r', line['status'], line['id'], line.get('progress', ''), + sys.stdout.flush() + else: + print line['status'], line.get('id', '') + _lastid = line.get('id', None) + else: + print line + if 'error' in line: + warnings.append('WARNING: push failed for %s. Message: %s' % (name, line['error'])) + else: + success = True + return success, warnings diff --git a/setup.py b/setup.py index d9fc69a..f3d8d51 100644 --- a/setup.py +++ b/setup.py @@ -9,5 +9,10 @@ license='Apache 2.0', author='Aaron Virshup', author_email='avirshup@gmail.com', - description='Build manager for docker images' + description='Build manager for docker images', + entry_points={ + 'console_scripts': [ + 'docker-make = dockermake.__main__:main' + ] + } ) From dc504f2c799bde7d2b3abf980ef8d69ac37c2c9a Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Fri, 20 Jan 2017 09:02:12 -0800 Subject: [PATCH 06/27] Fix license and copyright notices --- .gitignore | 3 +- LICENSE | 223 +++++++++++++++++++++++++++++++++++++---- README.md | 68 +++++++++---- dockermake/__main__.py | 2 +- dockermake/builds.py | 2 +- dockermake/cli.py | 2 +- dockermake/utils.py | 2 +- 7 files changed, 255 insertions(+), 47 deletions(-) diff --git a/.gitignore b/.gitignore index 9bea4a8..067409f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,7 @@ -### Example user template template -### Example user template docker_makefiles Dockerfile.fail _docker_make_tmp +dockerfile.fail # IntelliJ project files .idea diff --git a/LICENSE b/LICENSE index 1d80f04..85ef87f 100644 
--- a/LICENSE +++ b/LICENSE @@ -1,22 +1,201 @@ -Copyright (c) 2015, Autodesk Inc. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/README.md b/README.md index 706f64e..1278809 100644 --- a/README.md +++ b/README.md @@ -1,29 +1,49 @@ # Docker-make -Build and manage stacks of docker images - a dependency graph for Dockerfiles +Build and manage stacks of docker images - a dependency graph for Docker images Table of Contents ================= + * [Install](#Install) + * [Run it](#Run-it) * [What you can do with it](#what-you-can-do-with-it) * [Example](#example) * [Writing DockerMake\.yaml](#writing-dockermakeyaml) * [Requirements](#requirements) * [Command line usage](#command-line-usage) + +### Install + +Requires Docker (obviously), and Python 2.7 or 3.4+ with pip. + +``` +pip install git+https://github.com/avirshup/DockerMake +``` + +This will install the command line tool, `docker-make`, and its supporting python package, which you can import as `import dockermake`. + + +### Run it + +To build some illustrative examples, try running: + +```bash +wget https://raw.githubusercontent.com/avirshup/DockerMake/master/example/DockerMake.yml +docker-make --list +docker-make data_science --repo docker.io/myusername --tag testbuild +``` + + ### What you can do with it * Define small pieces of configuration or functionality, then mix them together into production docker images. 
- * "Inherit" from multiple image builds + * "Inherit" Dockerfile instructions from multiple sources + * **New**: Build an artifact (such as an executable or library) in one image, then copy it into a smaller image for deployment * Easily manage images that pull files from multiple directories on your filesystem * Rebuild an entire stack of images as needed with a single command -**How is this different from docker-compose?**
`docker-make` automates and manages the process of building docker images. `docker-compose` spins up containers and links them to make serivces. - -**How is this different from the FROM command in Dockerfiles?** - 1. Using the `requires` field, you can inherit from multiple images. - 2. You can create builds that reference multiple directories on your filesystem using the `build_directory` keyword. - 3. The builds are not tied to any image's tag or repository - when you build an image with `docker-make`, it will be up-to-date. ### Example -[Click here to see a production-level example.](https://github.com/Autodesk/molecular-design-toolkit/blob/master/docker_images/DockerMake.yml) +[Click here to see how we're using this in production.](https://github.com/Autodesk/molecular-design-toolkit/blob/master/docker_images/DockerMake.yml) This example builds a single docker image called `data_science`. It does this by mixing together three components: `devbase` (the base image), `airline_data` (a big CSV file), and `python_image` (a python installation). `docker-make` will create an image that combines all of these components. @@ -77,18 +97,28 @@ Here's the dependency graph and generated Dockerfiles: The idea is to write dockerfile commands for each specific piece of functionality in the `build` field, and "inherit" all other functionality from a list of other components that your image `requires`. If you need to add files with the ADD and COPY commands, specify the root directory for those files with `build_directory`. Your tree of "requires" must have _exactly one_ unique named base image in the `FROM` field. ```yaml [image_name]: - build_directory: [relative path where the ADD and COPY commands will look for files] requires: - - [other image name] - - [yet another image name] + - [other image name] + - [yet another image name] + [...] FROM: [named_base_image] build: | - RUN [something] - ADD [something else] - [Dockerfile commands go here] - -[other image name]: ... 
-[yet another image name]: ... + RUN [something] + ADD [something else] + [Dockerfile commands go here] + build_directory: [path where the ADD and COPY commands will look for files] + # note that the "build_directory" path can be relative or absolute. + # if it's relative, it's interpreted relative to DockerMake.yml's directory + : + [source_image]: + [source path1]:[destination path1] + [source path2]:[destination path2] + [...] + [...] + + +[other image name]: [...] +[...] ``` @@ -163,4 +193,4 @@ Help: Written by Aaron Virshup, Bio/Nano Research Group, Autodesk Research -Copyright (c) 2016, Autodesk Inc. Released under the simplified BSD license. +Copyright (c) 2015-2017, Autodesk Inc. Released under the Apache 2.0 License. diff --git a/dockermake/__main__.py b/dockermake/__main__.py index 9877822..8d1c127 100755 --- a/dockermake/__main__.py +++ b/dockermake/__main__.py @@ -1,5 +1,5 @@ #!/usr/bin/env python2.7 -# Copyright 2016 Autodesk Inc. +# Copyright 2015-2017 Autodesk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/dockermake/builds.py b/dockermake/builds.py index 6a558e2..5bab1ff 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -1,4 +1,4 @@ -# Copyright 2016-2017 Autodesk Inc. +# Copyright 2015-2017 Autodesk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/dockermake/cli.py b/dockermake/cli.py index bf8460c..78cd3ca 100644 --- a/dockermake/cli.py +++ b/dockermake/cli.py @@ -1,4 +1,4 @@ -# Copyright 2016 Autodesk Inc. +# Copyright 2015-2017 Autodesk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/dockermake/utils.py b/dockermake/utils.py index 895e868..f1641dd 100644 --- a/dockermake/utils.py +++ b/dockermake/utils.py @@ -1,4 +1,4 @@ -# Copyright 2016 Autodesk Inc. +# Copyright 2015-2017 Autodesk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 0fc42d0908ba7ddcf4a42f6caefd4552087900b6 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Fri, 20 Jan 2017 10:48:34 -0800 Subject: [PATCH 07/27] Prep for py3 support --- dockermake/__main__.py | 13 +++++++------ dockermake/_version.py | 1 + dockermake/builds.py | 35 ++++++++++++++++++----------------- dockermake/cli.py | 21 +++++++++++---------- dockermake/imagedefs.py | 35 +++++++++++++++++++++-------------- dockermake/utils.py | 30 ++++++++++++++++++------------ 6 files changed, 76 insertions(+), 59 deletions(-) diff --git a/dockermake/__main__.py b/dockermake/__main__.py index 8d1c127..068ac40 100755 --- a/dockermake/__main__.py +++ b/dockermake/__main__.py @@ -15,6 +15,7 @@ """ Multiple inheritance for your dockerfiles. """ +from __future__ import print_function import os from . import cli @@ -47,7 +48,7 @@ def main(): targets = utils.get_build_targets(args, defs) if not targets: - print 'No build targets specified!' + print('No build targets specified!') utils.list_image_defs(args, defs) return @@ -55,14 +56,14 @@ def main(): built, warnings = utils.build_targets(args, defs, targets) # Summarize the build process - print '\ndocker-make finished.' 
- print 'Built: ' + print('\ndocker-make finished.') + print('Built: ') for item in built: - print ' *', item + print(' *', item) if warnings: - print 'Warnings:' + print('Warnings:') for item in warnings: - print ' *', item + print(' *', item) if __name__ == '__main__': diff --git a/dockermake/_version.py b/dockermake/_version.py index aa18204..b437111 100644 --- a/dockermake/_version.py +++ b/dockermake/_version.py @@ -9,6 +9,7 @@ # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" +from __future__ import print_function import errno import os diff --git a/dockermake/builds.py b/dockermake/builds.py index 5bab1ff..da6a152 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function import os from io import BytesIO, StringIO @@ -42,7 +43,7 @@ def __init__(self, imagename, baseimage, img_def, buildname): self.imagename = imagename self.baseimage = baseimage self.dockerfile_lines = ['FROM %s\n' % baseimage, - img_def['build']] + img_def.get('build', '')] self.buildname = buildname self.build_dir = img_def.get('build_directory', None) @@ -57,8 +58,8 @@ def build(self, client, pull=False, usecache=True): pull (bool): whether to pull dependent layers from remote repositories usecache (bool): whether to use cached layers or rebuild from scratch """ - print ' * Build directory: %s' % self.build_dir - print ' * Intermediate image: %s' % self.buildname + print(' * Build directory: %s' % self.build_dir) + print(' * Intermediate image: %s' % self.buildname) dockerfile = '\n'.join(self.dockerfile_lines) @@ -81,11 +82,11 @@ def build(self, client, pull=False, usecache=True): # monitor the output for item in stream: if item.keys() == ['stream']: - print item['stream'].strip() + 
print(item['stream'].strip()) elif 'errorDetail' in item or 'error' in item: raise BuildError(dockerfile, item, build_args) else: - print item, + print(item, end=' ') # remove the temporary dockerfile if self.build_dir is not None: @@ -98,7 +99,7 @@ def write_dockerfile(self, dockerfile): if not os.path.isdir(tempdir): os.makedirs(tempdir) with open(temp_df, 'w') as df_out: - print >> df_out, dockerfile + print(dockerfile, file=df_out) return tempdir def printfile(self): @@ -107,7 +108,7 @@ def printfile(self): filename = 'docker_makefiles/Dockerfile.%s' % self.imagename with open(filename, 'w') as dfout: - print >> dfout, '\n'.join(self.dockerfile_lines) + print('\n'.join(self.dockerfile_lines), file=dfout) class BuildTarget(object): @@ -137,14 +138,14 @@ def build(self, client, nobuild (bool): just create dockerfiles, don't actually build the image keepbuildtags (bool): keep tags on intermediate images """ - print 'docker-make starting build for "%s" (image definition "%s"' % ( - self.targetname, self.imagename) + print('docker-make starting build for "%s" (image definition "%s"'%( + self.targetname, self.imagename)) for istep, step in enumerate(self.steps): - print ' **** Building %s, Step %d/%d: "%s" requirement ***' % ( + print(' **** Building %s, Step %d/%d: "%s" requirement ***'%( self.imagename, istep + 1, len(self.steps), - step.imagename) + step.imagename)) if printdockerfiles: step.printfile() @@ -161,22 +162,22 @@ def finalizenames(self, client, finalimage, keepbuildtags): """ Tag the built image with its final name and untag intermediate containers """ client.tag(finalimage, *self.targetname.split(':')) - print 'Tagged final image as %s\n' % self.targetname + print('Tagged final image as %s\n' % self.targetname) if not keepbuildtags: for step in self.steps: client.remove_image(step.buildname, force=True) - print 'Untagged intermediate container "%s"' % step.buildname + print('Untagged intermediate container "%s"' % step.buildname) class 
BuildError(Exception): def __init__(self, dockerfile, item, build_args): with open('dockerfile.fail', 'w') as dff: - print>> dff, dockerfile + print(dockerfile, file=dff) with BytesIO() as stream: - print >> stream, '\n -------- Docker daemon output --------' + print('\n -------- Docker daemon output --------', file=stream) pprint.pprint(item, stream, indent=4) - print >> stream, ' -------- Arguments to client.build --------' + print(' -------- Arguments to client.build --------', file=stream) pprint.pprint(build_args, stream, indent=4) - print >> stream, 'This dockerfile was written to dockerfile.fail' + print('This dockerfile was written to dockerfile.fail', file=stream) stream.seek(0) super(BuildError, self).__init__(stream.read()) diff --git a/dockermake/cli.py b/dockermake/cli.py index 78cd3ca..38d9820 100644 --- a/dockermake/cli.py +++ b/dockermake/cli.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright 2015-2017 Autodesk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -73,10 +74,10 @@ def make_arg_parser(): def print_yaml_help(): - print "A brief introduction to writing Dockerfile.yml files:\n" + print("A brief introduction to writing Dockerfile.yml files:\n") - print 'SYNTAX:' - print printable_code("""[image_name]: + print('SYNTAX:') + print(printable_code("""[image_name]: build_directory: [relative path where the ADD and COPY commands will look for files] requires: - [other image name] @@ -88,19 +89,19 @@ def print_yaml_help(): [Dockerfile commands go here] [other image name]: ... -[yet another image name]: ...""") +[yet another image name]: ...""")) - print - print textwrap.fill("The idea is to write dockerfile commands for each specific " + print() + print(textwrap.fill("The idea is to write dockerfile commands for each specific " 'piece of functionality in the build field, and "inherit" all other' ' functionality from a list of other components that your image requires. 
' 'If you need to add files with the ADD and COPY commands, specify the root' ' directory for those files with build_directory. Your tree of ' '"requires" must have exactly one unique named base image ' - 'in the FROM field.') + 'in the FROM field.')) - print '\n\nAN EXAMPLE:' - print printable_code("""devbase: + print('\n\nAN EXAMPLE:') + print(printable_code("""devbase: FROM: phusion/baseimage build: | RUN apt-get -y update && apt-get -y install build-essential @@ -123,7 +124,7 @@ def print_yaml_help(): data_science: requires: - python_image - - airline_data""") + - airline_data""")) def printable_code(c): diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index 7b61ef0..d16d69f 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function import os from collections import OrderedDict import yaml @@ -31,7 +32,7 @@ def __init__(self, makefile_path): def parse_yaml(self, filename): fname = os.path.expanduser(filename) - print 'READING %s' % os.path.expanduser(fname) + print('READING %s' % os.path.expanduser(fname)) if fname in self._sources: raise ValueError('Circular _SOURCE_') self._sources.add(fname) @@ -105,21 +106,27 @@ def sort_dependencies(self, image, dependencies=None): def get_external_base_image(self, image): """ Makes sure that this image has exactly one external base image """ - base = None - base_for = None - for d in self.ymldefs[image]['requires']: - this_base = self.ymldefs[d].get('FROM', None) - if this_base is not None and base is not None and this_base != base: - error = ('Multiple external dependencies: image %s depends on:\n' % image + - ' %s (FROM: %s), and\n' % (base_for, base) + - ' %s (FROM: %s).' 
% (d, this_base)) + externalbase = self.ymldefs[image].get('FROM', None) + + for base in self.ymldefs[image].get('requires', []): + try: + otherexternal = self.get_external_base_image(base) + except ValueError: + continue + + if externalbase is None: + externalbase = otherexternal + elif otherexternal is None: + continue + elif externalbase != otherexternal: + error = ('Multiple external dependencies: depends on:\n' % image + + ' %s (FROM: %s), and\n' % (image, externalbase) + + ' %s (FROM: %s).' % (base, otherexternal)) raise ValueError(error) - if this_base is not None: - base = this_base - base_for = d - if not base: + + if not externalbase: raise ValueError("No base image found in %s's dependencies" % image) - return base + return externalbase def _fix_build_path(item, filepath): diff --git a/dockermake/utils.py b/dockermake/utils.py index f1641dd..91f68c5 100644 --- a/dockermake/utils.py +++ b/dockermake/utils.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright 2015-2017 Autodesk Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -23,17 +24,22 @@ def get_client(): def list_image_defs(args, defs): - print 'TARGETS in `%s`' % args.makefile + print('TARGETS in `%s`' % args.makefile) for item in defs.ymldefs.keys(): - print ' *', item + print(' *', item) return def generate_name(image, args): - repo_base = args.repository if args.repository is not None else '' - if repo_base[-1] not in ':/': - repo_base += '/' - repo_name = repo_base + image + repo_base = args.repository + + if repo_base is not None: + if repo_base[-1] not in ':/': + repo_base += '/' + repo_name = repo_base + image + else: + repo_name = image + if args.tag: if ':' in repo_name: repo_name += '-'+args.tag @@ -75,7 +81,7 @@ def build_targets(args, defs, targets): b.build(client, printdockerfiles=args.print_dockerfiles, nobuild=args.no_build) - print ' docker-make built:', b.targetname + print(' docker-make built:', b.targetname) built.append(b.targetname) if args.push_to_registry: success, w = push(client, b.targetname) @@ -95,22 +101,22 @@ def push(client, name): warn = 'WARNING: could not push %s - ' \ 'repository name does not contain a registry URL' % name warnings.append(warn) - print warn + print(warn) else: - print ' Pushing %s to %s:' % (name, name.split('/')[0]) + print(' Pushing %s to %s:' % (name, name.split('/')[0])) line = {'error': 'no push information received'} _lastid = None for line in client.push(name, stream=True): line = yaml.load(line) if 'status' in line: if line.get('id', None) == _lastid and line['status'] == 'Pushing': - print '\r', line['status'], line['id'], line.get('progress', ''), + print('\r', line['status'], line['id'], line.get('progress', ''), end=' ') sys.stdout.flush() else: - print line['status'], line.get('id', '') + print(line['status'], line.get('id', '')) _lastid = line.get('id', None) else: - print line + print(line) if 'error' in line: warnings.append('WARNING: push failed for %s. 
Message: %s' % (name, line['error'])) else: From ace255a2d1647e449056a1e94afc2679e4bb076c Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Fri, 20 Jan 2017 10:56:07 -0800 Subject: [PATCH 08/27] Add py2/py3 support --- dockermake/__main__.py | 8 +++++++- dockermake/builds.py | 6 ++++-- dockermake/cli.py | 2 ++ dockermake/imagedefs.py | 3 ++- dockermake/utils.py | 4 ++-- 5 files changed, 17 insertions(+), 6 deletions(-) diff --git a/dockermake/__main__.py b/dockermake/__main__.py index 068ac40..f5776c5 100755 --- a/dockermake/__main__.py +++ b/dockermake/__main__.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015-2017 Autodesk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -28,6 +28,12 @@ def main(): parser = cli.make_arg_parser() args = parser.parse_args() + # print version and exit + if args.version: + from . import __version__ + print('docker-make version %s' % __version__) + return + # Print help and exit if args.help_yaml: cli.print_yaml_help() diff --git a/dockermake/builds.py b/dockermake/builds.py index da6a152..0722a17 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from __future__ import print_function +from builtins import str +from builtins import object import os from io import BytesIO, StringIO @@ -72,7 +74,7 @@ def build(self, client, pull=False, usecache=True): path=os.path.abspath(os.path.expanduser(self.build_dir)), dockerfile=os.path.join(DOCKER_TMPDIR, 'Dockerfile')) else: - build_args.update(fileobj=StringIO(unicode(dockerfile)), + build_args.update(fileobj=StringIO(str(dockerfile)), path=None, dockerfile=None) @@ -81,7 +83,7 @@ def build(self, client, pull=False, usecache=True): # monitor the output for item in stream: - if item.keys() == ['stream']: + if list(item.keys()) == ['stream']: print(item['stream'].strip()) elif 'errorDetail' in item or 'error' in item: raise BuildError(dockerfile, item, build_args) diff --git a/dockermake/cli.py b/dockermake/cli.py index 38d9820..349ca7e 100644 --- a/dockermake/cli.py +++ b/dockermake/cli.py @@ -67,6 +67,8 @@ def make_arg_parser(): 'use index.docker.io as the registry)') hh = parser.add_argument_group('Help') + hh.add_argument('--version', action='store_true', + help="Print version and exit.") hh.add_argument('--help-yaml', action='store_true', help="Print summary of YAML file format and exit.") diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index d16d69f..8e44c39 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -14,6 +14,7 @@ # limitations under the License. 
from __future__ import print_function +from builtins import object import os from collections import OrderedDict import yaml @@ -41,7 +42,7 @@ def parse_yaml(self, filename): yamldefs = yaml.load(yaml_file) # Interpret build directory paths relative to this DockerMake.yml file - for item in yamldefs.itervalues(): + for item in yamldefs.values(): _fix_build_path(item, os.path.dirname(fname)) sourcedefs = {} diff --git a/dockermake/utils.py b/dockermake/utils.py index 91f68c5..f040612 100644 --- a/dockermake/utils.py +++ b/dockermake/utils.py @@ -25,7 +25,7 @@ def get_client(): def list_image_defs(args, defs): print('TARGETS in `%s`' % args.makefile) - for item in defs.ymldefs.keys(): + for item in list(defs.ymldefs.keys()): print(' *', item) return @@ -62,7 +62,7 @@ def get_build_targets(args, defs): if defs.all_targets is not None: targets = defs.all_targets else: - targets = defs.ymldefs.keys() + targets = list(defs.ymldefs.keys()) else: # build the user-specified targets targets = args.TARGETS From 4b37bb62a0aaf54de6efa4d1ad8caec2864ad754 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Fri, 20 Jan 2017 14:05:05 -0800 Subject: [PATCH 09/27] File staging works --- dockermake/builds.py | 20 +++---- dockermake/imagedefs.py | 1 + dockermake/staging.py | 117 ++++++++++++++++++++++++++++++++++++++++ dockermake/utils.py | 20 ++++--- requirements.txt | 2 +- 5 files changed, 139 insertions(+), 21 deletions(-) create mode 100644 dockermake/staging.py diff --git a/dockermake/builds.py b/dockermake/builds.py index 0722a17..df58690 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -12,23 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from __future__ import print_function -from builtins import str -from builtins import object import os -from io import BytesIO, StringIO import pprint +from io import BytesIO, StringIO -DOCKER_TMPDIR = '_docker_make_tmp/' - +from builtins import object +from builtins import str -class StagedFile(object): - """ Tracks a file or directory that will be built in one container then copied into another - """ - def __init__(self, sourceimage, sourcepath, destpath): - self.sourceimage = sourceimage - self.sourcepath = sourcepath - self.destpath = destpath +DOCKER_TMPDIR = '_docker_make_tmp/' class BuildStep(object): @@ -56,7 +48,7 @@ def build(self, client, pull=False, usecache=True): step. Args: - client (docker.Client): docker client object that will build the image + client (docker.APIClient): docker client object that will build the image pull (bool): whether to pull dependent layers from remote repositories usecache (bool): whether to use cached layers or rebuild from scratch """ @@ -135,7 +127,7 @@ def build(self, client, Drives the build of the final image - get the list of steps and execute them. Args: - client (docker.Client): docker client object that will build the image + client (docker.APIClient): docker client object that will build the image printdockerfiles (bool): create the dockerfile for this build nobuild (bool): just create dockerfiles, don't actually build the image keepbuildtags (bool): keep tags on intermediate images diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index 8e44c39..c558929 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -15,6 +15,7 @@ from __future__ import print_function from builtins import object + import os from collections import OrderedDict import yaml diff --git a/dockermake/staging.py b/dockermake/staging.py new file mode 100644 index 0000000..5e8e75f --- /dev/null +++ b/dockermake/staging.py @@ -0,0 +1,117 @@ +# Copyright 2015-2017 Autodesk Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function +from builtins import object + +import os +import tempfile + +from . import utils + +BUILD_CACHEDIR = os.path.join(tempfile.gettempdir(), 'dmk_cache') +BUILD_TEMPDIR = os.path.join(tempfile.gettempdir(), 'dmk_download') + + +class StagedFile(object): + """ Tracks a file or directory that will be built in one container then made available to + be copied into another + + Args: + sourceimage (str): name of the image to copy from + sourcepath (str): path in the source image + destpath (str): path in the target image + """ + def __init__(self, sourceimage, sourcepath, destpath): + self.sourceimage = sourceimage + self.sourcepath = sourcepath + self.destpath = destpath + self._sourceobj = None + self._cachedir = None + + def stage(self, startimage, newimage): + """ Copies the file from source to target + + Args: + startimage (str): name of the image to stage these files into + newimage (str): name of the created image + """ + client = utils.get_client() + print('\nCopying %s://%s -> %s://%s ...' 
% (self.sourceimage, self.sourcepath, + startimage, self.destpath)) + + # copy build artifacts from the container if necessary + cachedir = self._setcache(client) + if not os.path.exists(cachedir): + container = client.containers.create(self.sourceimage) + tarfile_stream, tarfile_stats = container.get_archive(self.sourcepath) + + # write files to disk (would be nice to stream them, not sure how) + tempdir = tempfile.mkdtemp(dir=BUILD_TEMPDIR) + with open(os.path.join(tempdir, 'content.tar'), 'wb') as localfile: + for chunk in tarfile_stream.stream(): + localfile.write(chunk) + os.mkdir(cachedir) + os.rename(tempdir, cachedir) + + # write Dockerfile for the new image and then build it + with open(os.path.join(cachedir, 'Dockerfile'), 'w') as df: + df.write('FROM %s\nADD content.tar %s' % (startimage, self.destpath)) + client.images.build(path=cachedir, + tag=newimage) + print('Done. Created image "%s"' % newimage) + + def _setcache(self, client): + if self._sourceobj is None: # get image and set up cache if necessary + + self._sourceobj = client.images.get(self.sourceimage) + + if not os.path.exists(BUILD_CACHEDIR): + os.mkdir(BUILD_CACHEDIR) + + if not os.path.exists(BUILD_TEMPDIR): + os.mkdir(BUILD_TEMPDIR) + + image_cachedir = os.path.join(BUILD_CACHEDIR, + self._sourceobj.id) + if not os.path.exists(image_cachedir): + os.mkdir(image_cachedir) + + self._cachedir = os.path.join(image_cachedir, + self.sourcepath.replace('/', '--')) + return self._cachedir + + else: # make sure image ID hasn't changed + assert self._sourceobj.id == client.images.get(self.sourceimage) + return self._cachedir + + + + + + + +### failed attempt to deal with these tarfiles as streams: +# dftext = bytes('FROM %s\nADD . 
%s' % (startimage, self.destpath)) +# dftarinfo = tarfile.TarInfo('./Dockerfile') +# dftarinfo.size = len(dftext) +# dfstream = io.BytesIO(dftext) +# tarfile_stream, tarfile_stats = container.get_archive(self.sourcepath) +# tarinfo = tarfile.TarInfo() +# tarinfo.size = tarfile_stats['size'] +# print(tarfile_stats) +# #tfread = tarfile.open(fileobj=tarfile_stream, mode='r|*') +# tfwrite = tarfile.open(fileobj=io.BytesIO, mode='w|') +# tfwrite.addfile(tarinfo, fileobj=tarfile_stream) +# tfwrite.addfile(dftarinfo, fileobj=dfstream) \ No newline at end of file diff --git a/dockermake/utils.py b/dockermake/utils.py index f040612..723a5da 100644 --- a/dockermake/utils.py +++ b/dockermake/utils.py @@ -13,14 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. +import docker + +_dockerclient = None + + +def get_client_api(): + return get_client().api + def get_client(): - import docker.utils + global _dockerclient + + if _dockerclient is None: + _dockerclient = docker.from_env() - connection = docker.utils.kwargs_from_env() - if 'tls' in connection: - connection['tls'].assert_hostname = False - return docker.Client(**connection) + return _dockerclient def list_image_defs(args, defs): @@ -74,7 +82,7 @@ def build_targets(args, defs, targets): if args.no_build: client = None else: - client = get_client() + client = get_client_api() built, warnings = [], [] builders = [defs.generate_build(t, generate_name(t, args)) for t in targets] for b in builders: diff --git a/requirements.txt b/requirements.txt index cddddd6..11b69ce 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -docker-py +docker pyyaml From 924f4f9b47931ddd8472cdc2e4c4b77e9d51d1cf Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Fri, 20 Jan 2017 14:33:04 -0800 Subject: [PATCH 10/27] CLI now does staging from DockerMake.yml *IF* prerequisites are built manually --- README.md | 2 +- dockermake/builds.py | 24 +++++++++++++++++++++--- 
dockermake/imagedefs.py | 15 +++++++++++++-- dockermake/staging.py | 28 ++++++---------------------- example/DockerMake.yml | 10 ++++++++++ 5 files changed, 51 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 1278809..80abd7e 100644 --- a/README.md +++ b/README.md @@ -109,7 +109,7 @@ The idea is to write dockerfile commands for each specific piece of functionalit build_directory: [path where the ADD and COPY commands will look for files] # note that the "build_directory" path can be relative or absolute. # if it's relative, it's interpreted relative to DockerMake.yml's directory - : + built_files: [source_image]: [source path1]:[destination path1] [source path2]:[destination path2] diff --git a/dockermake/builds.py b/dockermake/builds.py index df58690..d072b00 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -20,6 +20,8 @@ from builtins import object from builtins import str +from . import staging + DOCKER_TMPDIR = '_docker_make_tmp/' @@ -40,6 +42,7 @@ def __init__(self, imagename, baseimage, img_def, buildname): img_def.get('build', '')] self.buildname = buildname self.build_dir = img_def.get('build_directory', None) + self.requirement_name = '"%s" image layer' % self.imagename def build(self, client, pull=False, usecache=True): """ @@ -105,6 +108,21 @@ def printfile(self): print('\n'.join(self.dockerfile_lines), file=dfout) +class FileCopyStep(BuildStep): + def __init__(self, sourceimage, sourcepath, base_image, destpath, buildname): + self.sourceimage = sourceimage + self.sourcepath = sourcepath + self.base_image = base_image + self.destpath = destpath + self.buildname = buildname + + self.requirement_name = 'file copy from %s://%s' % (self.sourceimage, self.sourcepath) + + def build(self, client, pull=False, usecache=True): + stage = staging.StagedFile(self.sourceimage, self.sourcepath, self.destpath) + stage.stage(self.base_image, self.buildname) + + class BuildTarget(object): """ Represents a target docker image. 
@@ -135,11 +153,11 @@ def build(self, client, print('docker-make starting build for "%s" (image definition "%s"'%( self.targetname, self.imagename)) for istep, step in enumerate(self.steps): - print(' **** Building %s, Step %d/%d: "%s" requirement ***'%( + print(' **** Building %s, Step %d/%d: %s ***'%( self.imagename, - istep + 1, + istep+1, len(self.steps), - step.imagename)) + step.requirement_name)) if printdockerfiles: step.printfile() diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index c558929..48e7949 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -65,14 +65,25 @@ def generate_build(self, image, targetname): """ base_image = self.get_external_base_image(image) build_steps = [] - for i, base_name in enumerate(self.sort_dependencies(image)): - buildname = 'dmkbuild_%s_%d' % (image, i+1) + istep = 0 + for base_name in self.sort_dependencies(image): + istep += 1 + buildname = 'dmkbuild_%s_%d' % (image, istep) build_steps.append(builds.BuildStep(base_name, base_image, self.ymldefs[base_name], buildname)) base_image = buildname + for sourceimage, files in self.ymldefs[image].get('built_files', {}).iteritems(): + for sourcepath, destpath in files.iteritems(): + istep += 1 + buildname = 'dmkbuild_%s_%d' % (image, istep) + build_steps.append(builds.FileCopyStep(sourceimage, sourcepath, + base_image, destpath, + buildname)) + base_image = buildname + return builds.BuildTarget(imagename=image, targetname=targetname, steps=build_steps, diff --git a/dockermake/staging.py b/dockermake/staging.py index 5e8e75f..117007a 100644 --- a/dockermake/staging.py +++ b/dockermake/staging.py @@ -48,12 +48,13 @@ def stage(self, startimage, newimage): newimage (str): name of the created image """ client = utils.get_client() - print('\nCopying %s://%s -> %s://%s ...' 
% (self.sourceimage, self.sourcepath, - startimage, self.destpath)) + print('\nCopying %s://%s -> %s://%s/ ...'%(self.sourceimage, self.sourcepath, + startimage, self.destpath)) # copy build artifacts from the container if necessary cachedir = self._setcache(client) if not os.path.exists(cachedir): + print('Creating cache at %s' % cachedir) container = client.containers.create(self.sourceimage) tarfile_stream, tarfile_stats = container.get_archive(self.sourcepath) @@ -64,6 +65,9 @@ def stage(self, startimage, newimage): localfile.write(chunk) os.mkdir(cachedir) os.rename(tempdir, cachedir) + else: + print('Using cached files from %s' % cachedir) + # write Dockerfile for the new image and then build it with open(os.path.join(cachedir, 'Dockerfile'), 'w') as df: @@ -95,23 +99,3 @@ def _setcache(self, client): else: # make sure image ID hasn't changed assert self._sourceobj.id == client.images.get(self.sourceimage) return self._cachedir - - - - - - - -### failed attempt to deal with these tarfiles as streams: -# dftext = bytes('FROM %s\nADD . 
%s' % (startimage, self.destpath)) -# dftarinfo = tarfile.TarInfo('./Dockerfile') -# dftarinfo.size = len(dftext) -# dfstream = io.BytesIO(dftext) -# tarfile_stream, tarfile_stats = container.get_archive(self.sourcepath) -# tarinfo = tarfile.TarInfo() -# tarinfo.size = tarfile_stats['size'] -# print(tarfile_stats) -# #tfread = tarfile.open(fileobj=tarfile_stream, mode='r|*') -# tfwrite = tarfile.open(fileobj=io.BytesIO, mode='w|') -# tfwrite.addfile(tarinfo, fileobj=tarfile_stream) -# tfwrite.addfile(dftarinfo, fileobj=dfstream) \ No newline at end of file diff --git a/example/DockerMake.yml b/example/DockerMake.yml index 3d5f1c0..98ce532 100644 --- a/example/DockerMake.yml +++ b/example/DockerMake.yml @@ -23,6 +23,12 @@ plant_data: build: | ADD Puromycin.csv /data +blank_file: + FROM: debian:jessie + build: | + RUN mkdir -p /data + RUN dd if=/dev/zero of=/data/file.txt count=1024 bs=1024 + python_image: requires: - devbase @@ -45,3 +51,7 @@ data_science: - python_image - airline_data - plant_data + built_files: + blank_file: + /data/file.txt: /data/blankfile.txt + From 55a2a8e853acd6f661749b9601af9d1d7d9038b6 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Fri, 20 Jan 2017 14:50:17 -0800 Subject: [PATCH 11/27] Update file source images no more than once for each execution --- dockermake/builds.py | 26 ++++++++++++++++++-------- dockermake/imagedefs.py | 7 ++++++- dockermake/staging.py | 1 - 3 files changed, 24 insertions(+), 10 deletions(-) diff --git a/dockermake/builds.py b/dockermake/builds.py index d072b00..c812326 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -24,6 +24,8 @@ DOCKER_TMPDIR = '_docker_make_tmp/' +_updated_staging_images = set() + class BuildStep(object): """ Stores and runs the instructions to build a single image. 
@@ -133,10 +135,10 @@ class BuildTarget(object): stagedfiles (List[StagedFile]): list of files to stage into this image from other images """ - def __init__(self, imagename, targetname, steps, stagedfiles): + def __init__(self, imagename, targetname, steps, sourcebuilds): self.imagename = imagename self.steps = steps - self.stagedfiles = stagedfiles + self.sourcebuilds = sourcebuilds self.targetname = targetname def build(self, client, @@ -150,15 +152,16 @@ def build(self, client, nobuild (bool): just create dockerfiles, don't actually build the image keepbuildtags (bool): keep tags on intermediate images """ + if not nobuild: + self.update_source_images(client) + print('docker-make starting build for "%s" (image definition "%s"'%( self.targetname, self.imagename)) for istep, step in enumerate(self.steps): - print(' **** Building %s, Step %d/%d: %s ***'%( - self.imagename, - istep+1, - len(self.steps), - step.requirement_name)) - + print(' **** Building %s, Step %d/%d: %s ***' % (self.imagename, + istep+1, + len(self.steps), + step.requirement_name)) if printdockerfiles: step.printfile() @@ -170,6 +173,13 @@ def build(self, client, if not nobuild: self.finalizenames(client, finalimage, keepbuildtags) + def update_source_images(self, client): + for build in self.sourcebuilds: + if build.targetname in _updated_staging_images: + continue + print('\n\nUpdating source image %s' % build.targetname) + build.build(client) + def finalizenames(self, client, finalimage, keepbuildtags): """ Tag the built image with its final name and untag intermediate containers """ diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index 48e7949..f51c27b 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -75,7 +75,9 @@ def generate_build(self, image, targetname): buildname)) base_image = buildname + sourceimages = set() for sourceimage, files in self.ymldefs[image].get('built_files', {}).iteritems(): + sourceimages.add(sourceimage) for sourcepath, destpath in 
files.iteritems(): istep += 1 buildname = 'dmkbuild_%s_%d' % (image, istep) @@ -84,10 +86,13 @@ def generate_build(self, image, targetname): buildname)) base_image = buildname + sourcebuilds = [self.generate_build(image, image) for image in sourceimages] + + return builds.BuildTarget(imagename=image, targetname=targetname, steps=build_steps, - stagedfiles=[]) # TODO: this. + sourcebuilds=sourcebuilds) def sort_dependencies(self, image, dependencies=None): """ diff --git a/dockermake/staging.py b/dockermake/staging.py index 117007a..8e3011b 100644 --- a/dockermake/staging.py +++ b/dockermake/staging.py @@ -68,7 +68,6 @@ def stage(self, startimage, newimage): else: print('Using cached files from %s' % cachedir) - # write Dockerfile for the new image and then build it with open(os.path.join(cachedir, 'Dockerfile'), 'w') as df: df.write('FROM %s\nADD content.tar %s' % (startimage, self.destpath)) From 6dea5d788265790b59e1bf62f8be23fe0e165a76 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Fri, 20 Jan 2017 16:34:17 -0800 Subject: [PATCH 12/27] Fix stdout --- dockermake/builds.py | 3 ++- dockermake/imagedefs.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/dockermake/builds.py b/dockermake/builds.py index c812326..9c257f1 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -177,8 +177,9 @@ def update_source_images(self, client): for build in self.sourcebuilds: if build.targetname in _updated_staging_images: continue - print('\n\nUpdating source image %s' % build.targetname) + print('\nUpdating source image %s' % build.targetname) build.build(client) + print('Done with source image %s\n') def finalizenames(self, client, finalimage, keepbuildtags): """ Tag the built image with its final name and untag intermediate containers diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index f51c27b..a117b3d 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -86,7 +86,7 @@ def generate_build(self, image, 
targetname): buildname)) base_image = buildname - sourcebuilds = [self.generate_build(image, image) for image in sourceimages] + sourcebuilds = [self.generate_build(img, img) for img in sourceimages] return builds.BuildTarget(imagename=image, From b36335dc3574e56ab082c899a1e92d0d864d8fd4 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Fri, 20 Jan 2017 19:59:39 -0800 Subject: [PATCH 13/27] Copy files correctly at every level of the build stack --- dockermake/builds.py | 4 +++- dockermake/imagedefs.py | 22 +++++++++++----------- example/DockerMake.yml | 5 +++++ 3 files changed, 19 insertions(+), 12 deletions(-) diff --git a/dockermake/builds.py b/dockermake/builds.py index 9c257f1..abeeb5a 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -187,9 +187,11 @@ def finalizenames(self, client, finalimage, keepbuildtags): client.tag(finalimage, *self.targetname.split(':')) print('Tagged final image as %s\n' % self.targetname) if not keepbuildtags: + print('Untagging intermediate containers:', end='') for step in self.steps: client.remove_image(step.buildname, force=True) - print('Untagged intermediate container "%s"' % step.buildname) + print(step.buildname, end=',') + print() class BuildError(Exception): diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index a117b3d..1e0af85 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -66,6 +66,8 @@ def generate_build(self, image, targetname): base_image = self.get_external_base_image(image) build_steps = [] istep = 0 + sourceimages = set() + for base_name in self.sort_dependencies(image): istep += 1 buildname = 'dmkbuild_%s_%d' % (image, istep) @@ -75,20 +77,18 @@ def generate_build(self, image, targetname): buildname)) base_image = buildname - sourceimages = set() - for sourceimage, files in self.ymldefs[image].get('built_files', {}).iteritems(): - sourceimages.add(sourceimage) - for sourcepath, destpath in files.iteritems(): - istep += 1 - buildname = 'dmkbuild_%s_%d' % (image, 
istep) - build_steps.append(builds.FileCopyStep(sourceimage, sourcepath, - base_image, destpath, - buildname)) - base_image = buildname + for sourceimage, files in self.ymldefs[base_name].get('built_files', {}).iteritems(): + sourceimages.add(sourceimage) + for sourcepath, destpath in files.iteritems(): + istep += 1 + buildname = 'dmkbuild_%s_%d' % (image, istep) + build_steps.append(builds.FileCopyStep(sourceimage, sourcepath, + base_image, destpath, + buildname)) + base_image = buildname sourcebuilds = [self.generate_build(img, img) for img in sourceimages] - return builds.BuildTarget(imagename=image, targetname=targetname, steps=build_steps, diff --git a/example/DockerMake.yml b/example/DockerMake.yml index 98ce532..d3fb68c 100644 --- a/example/DockerMake.yml +++ b/example/DockerMake.yml @@ -55,3 +55,8 @@ data_science: blank_file: /data/file.txt: /data/blankfile.txt +final: + requires: + - data_science + build: | + ENV version=0.01 From fcf2daecbdcfcbd11baa318bf9b64b0efe0df8fa Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sun, 22 Jan 2017 13:33:17 -0800 Subject: [PATCH 14/27] Overhaul output. Get pull and nocache working again. Add cache-busting CLI option --- dockermake/builds.py | 126 +++++++++++++++++++++++++++++----------- dockermake/cli.py | 8 ++- dockermake/imagedefs.py | 81 +++++++++++++++++++------- dockermake/staging.py | 47 +++++++++++---- dockermake/utils.py | 71 +++++++++++++++++++++- example/DockerMake.yml | 2 +- 6 files changed, 261 insertions(+), 74 deletions(-) diff --git a/dockermake/builds.py b/dockermake/builds.py index abeeb5a..9beb3d7 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -20,11 +20,12 @@ from builtins import object from builtins import str -from . import staging +from . 
import staging, utils DOCKER_TMPDIR = '_docker_make_tmp/' -_updated_staging_images = set() +_updated_staging_images = set() # stored per session so that we don't try to update them repeatedly +_rebuilt = set() # only rebuild a unique stack of images ONCE per session class BuildStep(object): @@ -35,16 +36,18 @@ class BuildStep(object): baseimage (str): name of the image to inherit from (through "FROM") img_def (dict): yaml definition of this image buildname (str): what to call this image, once built + bust_cache(bool): never use docker cache for this build step """ - def __init__(self, imagename, baseimage, img_def, buildname): + def __init__(self, imagename, baseimage, img_def, buildname, bust_cache=False): self.imagename = imagename self.baseimage = baseimage self.dockerfile_lines = ['FROM %s\n' % baseimage, img_def.get('build', '')] self.buildname = buildname self.build_dir = img_def.get('build_directory', None) - self.requirement_name = '"%s" image layer' % self.imagename + self.bust_cache = bust_cache + self.sourcefile = img_def['_sourcefile'] def build(self, client, pull=False, usecache=True): """ @@ -57,12 +60,20 @@ def build(self, client, pull=False, usecache=True): pull (bool): whether to pull dependent layers from remote repositories usecache (bool): whether to use cached layers or rebuild from scratch """ - print(' * Build directory: %s' % self.build_dir) - print(' * Intermediate image: %s' % self.buildname) + print(' Image definition "%s" from file %s' % (self.imagename, + self.sourcefile)) + + if self.bust_cache: + usecache = False + + if not usecache: + print(' INFO: Docker caching disabled - forcing rebuild') dockerfile = '\n'.join(self.dockerfile_lines) - build_args = dict(tag=self.buildname, pull=pull, nocache=not usecache, + build_args = dict(tag=self.buildname, + pull=pull, + nocache=not usecache, decode=True, rm=True) if self.build_dir is not None: @@ -74,21 +85,17 @@ def build(self, client, pull=False, usecache=True): 
build_args.update(fileobj=StringIO(str(dockerfile)), path=None, dockerfile=None) + tempdir = None # start the build stream = client.build(**build_args) - - # monitor the output - for item in stream: - if list(item.keys()) == ['stream']: - print(item['stream'].strip()) - elif 'errorDetail' in item or 'error' in item: - raise BuildError(dockerfile, item, build_args) - else: - print(item, end=' ') + try: + utils.stream_build_log(stream, self.buildname) + except ValueError as e: + raise BuildError(dockerfile, e.args[0], build_args) # remove the temporary dockerfile - if self.build_dir is not None: + if tempdir is not None: os.unlink(os.path.join(tempdir, 'Dockerfile')) os.rmdir(tempdir) @@ -111,20 +118,43 @@ def printfile(self): class FileCopyStep(BuildStep): - def __init__(self, sourceimage, sourcepath, base_image, destpath, buildname): + """ + A specialized build step that copies files into an image from another image. + + Args: + sourceimage (str): name of image to copy file from + sourcepath (str): file path in source image + base_image (str): name of image to copy file into + destpath (str): directory to copy the file into + buildname (str): name of the built image + ymldef (Dict): yml definition of this build step + definitionname (str): name of this definition + """ + + bust_cache = False # can't bust this + + def __init__(self, sourceimage, sourcepath, base_image, destpath, buildname, + ymldef, definitionname): self.sourceimage = sourceimage self.sourcepath = sourcepath self.base_image = base_image self.destpath = destpath self.buildname = buildname - - self.requirement_name = 'file copy from %s://%s' % (self.sourceimage, self.sourcepath) + self.definitionname = definitionname + self.sourcefile = ymldef['_sourcefile'] def build(self, client, pull=False, usecache=True): + """ + Note: + `pull` and `usecache` are for compatibility only. 
They're irrelevant because + they were applied when BUILDING self.sourceimage + """ + print(' File copy from "%s", defined in file %s' % (self.definitionname, self.sourcefile)) stage = staging.StagedFile(self.sourceimage, self.sourcepath, self.destpath) stage.stage(self.base_image, self.buildname) + class BuildTarget(object): """ Represents a target docker image. @@ -133,16 +163,21 @@ class BuildTarget(object): targetname (str): name to assign the final built image steps (List[BuildStep]): list of steps required to build this image stagedfiles (List[StagedFile]): list of files to stage into this image from other images - + from_image (str): External base image name """ - def __init__(self, imagename, targetname, steps, sourcebuilds): + def __init__(self, imagename, targetname, steps, sourcebuilds, from_image): self.imagename = imagename self.steps = steps self.sourcebuilds = sourcebuilds self.targetname = targetname + self.from_image = from_image def build(self, client, - printdockerfiles=False, nobuild=False, keepbuildtags=False): + printdockerfiles=False, + nobuild=False, + keepbuildtags=False, + usecache=True, + pull=False): """ Drives the build of the final image - get the list of steps and execute them. @@ -151,41 +186,62 @@ def build(self, client, printdockerfiles (bool): create the dockerfile for this build nobuild (bool): just create dockerfiles, don't actually build the image keepbuildtags (bool): keep tags on intermediate images + usecache (bool): use docker cache, or rebuild everything from scratch? + pull (bool): try to pull new versions of repository images? 
""" if not nobuild: - self.update_source_images(client) + self.update_source_images(client, + usecache=usecache, + pull=pull) + + print('\n' + '-'*utils.get_console_width()) + print(' STARTING BUILD for "%s" (image definition "%s" from %s)\n' % ( + self.targetname, self.imagename, self.steps[-1].sourcefile)) - print('docker-make starting build for "%s" (image definition "%s"'%( - self.targetname, self.imagename)) for istep, step in enumerate(self.steps): - print(' **** Building %s, Step %d/%d: %s ***' % (self.imagename, - istep+1, - len(self.steps), - step.requirement_name)) + print(' * Building %s, Step %d/%d:' % (self.imagename, + istep+1, + len(self.steps))) if printdockerfiles: step.printfile() if not nobuild: - step.build(client) + if step.bust_cache: + stackkey = self._get_stack_key(istep) + if stackkey in _rebuilt: + step.bust_cache = False + + step.build(client, usecache=usecache) + print(" - Created intermediate image %s\n" % step.buildname) + + if step.bust_cache: + _rebuilt.add(stackkey) finalimage = step.buildname if not nobuild: self.finalizenames(client, finalimage, keepbuildtags) + print(' *** Successfully built image %s\n' % self.targetname) + + def _get_stack_key(self, istep): + names = [self.from_image] + [step.imagename for step in self.steps[:istep+1]] + return tuple(names) - def update_source_images(self, client): + def update_source_images(self, client, usecache, pull): for build in self.sourcebuilds: if build.targetname in _updated_staging_images: continue print('\nUpdating source image %s' % build.targetname) - build.build(client) - print('Done with source image %s\n') + build.build(client, + usecache=usecache, + pull=pull) + print(' *** Done with source image %s\n' % build.targetname) def finalizenames(self, client, finalimage, keepbuildtags): """ Tag the built image with its final name and untag intermediate containers """ client.tag(finalimage, *self.targetname.split(':')) - print('Tagged final image as %s\n' % self.targetname) + 
print('Tagged final image as %s' % self.targetname) if not keepbuildtags: print('Untagging intermediate containers:', end='') for step in self.steps: diff --git a/dockermake/cli.py b/dockermake/cli.py index 349ca7e..fcd9be0 100644 --- a/dockermake/cli.py +++ b/dockermake/cli.py @@ -37,7 +37,7 @@ def make_arg_parser(): help="Name for custom docker images (requires --requires)") df = parser.add_argument_group('Dockerfiles') - df.add_argument('-p', '--print_dockerfiles', action='store_true', + df.add_argument('-p', '--print-dockerfiles', '--print_dockerfiles', action='store_true', help="Print out the generated dockerfiles named `Dockerfile.[image]`") df.add_argument('-n', '--no_build', action='store_true', help='Only print Dockerfiles, don\'t build them. Implies --print.') @@ -47,7 +47,11 @@ def make_arg_parser(): help='Always try to pull updated FROM images') ca.add_argument('--no-cache', action='store_true', help="Rebuild every layer") - # TODO: add a way to invalidate a specific target + ca.add_argument('--bust-cache', action='append', + help='Force docker to rebuilt all layers in this image. You can bust ' + 'multiple image layers by passing --bust-cache multiple times.') + ca.add_argument('--clean-copycache', action='store_true', + help="Remove docker-make's cache of files for `copy-from`.") rt = parser.add_argument_group('Repositories and tags') rt.add_argument('--repository', '-r', '-u', diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index 1e0af85..3abe942 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -22,6 +22,9 @@ from . 
import builds +RECOGNIZED_KEYS = set('requires build_directory build copy_from FROM description _sourcefile' .split()) +SPECIAL_FIELDS = set('_ALL_ _SOURCES_'.split()) class ImageDefs(object): """ Stores and processes the image definitions @@ -29,6 +32,7 @@ def __init__(self, makefile_path): self._sources = set() self.makefile_path = makefile_path + print('Working directory: %s' % os.path.abspath(os.curdir)) self.ymldefs = self.parse_yaml(self.makefile_path) self.all_targets = self.ymldefs.pop('_ALL_', None) @@ -36,15 +40,13 @@ def parse_yaml(self, filename): fname = os.path.expanduser(filename) print('READING %s' % os.path.expanduser(fname)) if fname in self._sources: - raise ValueError('Circular _SOURCE_') + raise ValueError('Circular _SOURCES_') self._sources.add(fname) with open(fname, 'r') as yaml_file: yamldefs = yaml.load(yaml_file) - # Interpret build directory paths relative to this DockerMake.yml file - for item in yamldefs.values(): - _fix_build_path(item, os.path.dirname(fname)) + self._fix_file_paths(filename, yamldefs) sourcedefs = {} for s in yamldefs.get('_SOURCES_', []): @@ -54,7 +56,34 @@ def parse_yaml(self, filename): sourcedefs.update(yamldefs) return sourcedefs - def generate_build(self, image, targetname): + @staticmethod + def _fix_file_paths(ymlfilepath, yamldefs): + """ Interpret all paths relative to the current yaml file + """ + pathroot = os.path.dirname(ymlfilepath) + + for field, item in yamldefs.iteritems(): + if field == '_SOURCES_': + yamldefs['_SOURCES_'] = [os.path.relpath(_get_abspath(pathroot, p)) + for p in yamldefs['_SOURCES_']] + continue + elif field in SPECIAL_FIELDS: + continue + elif 'build_directory' in item: + item['build_directory'] = _get_abspath(pathroot, item['build_directory']) + + # save the file path for logging + f = os.path.relpath(ymlfilepath) + if '/' not in f: + f = './%s' % f + item['_sourcefile'] = f + + for key in item: + if key not in RECOGNIZED_KEYS: + raise KeyError('Field 
"%s" in image "%s" not recognized' % + (key, field)) + + def generate_build(self, image, targetname, rebuilds=None): """ Separate the build into a series of one or more intermediate steps. Each specified build directory gets its own step @@ -62,29 +91,38 @@ def generate_build(self, image, targetname): Args: image (str): name of the image as defined in the dockermake.py file targetname (str): name to tag the final built image with + rebuilds (List[str]): list of image layers to rebuild (i.e., without docker's cache) """ - base_image = self.get_external_base_image(image) + from_image = self.get_external_base_image(image) build_steps = [] istep = 0 sourceimages = set() + if rebuilds is None: + rebuilds = [] + else: + rebuilds = set(rebuilds) + base_image = from_image for base_name in self.sort_dependencies(image): istep += 1 buildname = 'dmkbuild_%s_%d' % (image, istep) build_steps.append(builds.BuildStep(base_name, base_image, self.ymldefs[base_name], - buildname)) + buildname, + bust_cache=base_name in rebuilds)) base_image = buildname - for sourceimage, files in self.ymldefs[base_name].get('built_files', {}).iteritems(): + for sourceimage, files in self.ymldefs[base_name].get('copy_from', {}).iteritems(): sourceimages.add(sourceimage) for sourcepath, destpath in files.iteritems(): istep += 1 buildname = 'dmkbuild_%s_%d' % (image, istep) build_steps.append(builds.FileCopyStep(sourceimage, sourcepath, base_image, destpath, - buildname)) + buildname, + self.ymldefs[base_name], + base_name)) base_image = buildname sourcebuilds = [self.generate_build(img, img) for img in sourceimages] @@ -92,7 +130,8 @@ def generate_build(self, image, targetname): return builds.BuildTarget(imagename=image, targetname=targetname, steps=build_steps, - sourcebuilds=sourcebuilds) + sourcebuilds=sourcebuilds, + from_image=from_image) def sort_dependencies(self, image, dependencies=None): """ @@ -103,10 +142,10 @@ def sort_dependencies(self, image, dependencies=None): dependencies 
(OrderedDict): running cache of sorted dependencies (ordered dict) Returns: - OrderedDict: dictionary of this image's requirements + List[str]: list of dependencies a topologically-sorted build order """ if dependencies is None: - dependencies = OrderedDict() + dependencies = OrderedDict() # using this as an ordered set - not storing any values if image in dependencies: return @@ -119,7 +158,7 @@ def sort_dependencies(self, image, dependencies=None): if image in dependencies: raise ValueError('Circular dependency found', dependencies) dependencies[image] = None - return dependencies + return dependencies.keys() def get_external_base_image(self, image): """ Makes sure that this image has exactly one external base image @@ -147,15 +186,13 @@ def get_external_base_image(self, image): return externalbase -def _fix_build_path(item, filepath): - path = os.path.expanduser(filepath) +def _get_abspath(pathroot, relpath): + path = os.path.expanduser(pathroot) + buildpath = os.path.expanduser(relpath) + + if not os.path.isabs(buildpath): + buildpath = os.path.join(os.path.abspath(path), buildpath) - if 'build_directory' not in item: - return - elif os.path.isabs(item['build_directory']): - return - else: - item['build_directory'] = os.path.join(os.path.abspath(path), - item['build_directory']) + return buildpath diff --git a/dockermake/staging.py b/dockermake/staging.py index 8e3011b..f7d5a90 100644 --- a/dockermake/staging.py +++ b/dockermake/staging.py @@ -13,10 +13,13 @@ # limitations under the License. from __future__ import print_function + +import docker.errors from builtins import object import os import tempfile +import shutil from . import utils @@ -24,9 +27,16 @@ BUILD_TEMPDIR = os.path.join(tempfile.gettempdir(), 'dmk_download') +def clear_copy_cache(): + for path in (BUILD_CACHEDIR, BUILD_TEMPDIR): + if os.path.exists(path): + assert os.path.isdir(path), "'%s' is not a directory!" 
+ print('Removing docker-make cache %s' % path) + shutil.rmtree(path) + + class StagedFile(object): - """ Tracks a file or directory that will be built in one container then made available to - be copied into another + """ Tracks a file or directory that will be built in one image, then copied into others Args: sourceimage (str): name of the image to copy from @@ -47,18 +57,24 @@ def stage(self, startimage, newimage): startimage (str): name of the image to stage these files into newimage (str): name of the created image """ + from .builds import BuildError + client = utils.get_client() - print('\nCopying %s://%s -> %s://%s/ ...'%(self.sourceimage, self.sourcepath, + print(' * Copying FROM "%s:/%s" TO "%s://%s/"'%(self.sourceimage, self.sourcepath, startimage, self.destpath)) # copy build artifacts from the container if necessary cachedir = self._setcache(client) if not os.path.exists(cachedir): - print('Creating cache at %s' % cachedir) + print(' * Creating cache at %s' % cachedir) container = client.containers.create(self.sourceimage) - tarfile_stream, tarfile_stats = container.get_archive(self.sourcepath) + try: + tarfile_stream, tarfile_stats = container.get_archive(self.sourcepath) + except docker.errors.NotFound: + raise IOError('File "%s" does not exist in image "%s"!' 
% + (self.sourcepath, self.sourceimage)) - # write files to disk (would be nice to stream them, not sure how) + # write files to disk (would be nice to stream them, haven't gotten it to work) tempdir = tempfile.mkdtemp(dir=BUILD_TEMPDIR) with open(os.path.join(tempdir, 'content.tar'), 'wb') as localfile: for chunk in tarfile_stream.stream(): @@ -66,14 +82,23 @@ def stage(self, startimage, newimage): os.mkdir(cachedir) os.rename(tempdir, cachedir) else: - print('Using cached files from %s' % cachedir) + print(' * Using cached files from %s' % cachedir) # write Dockerfile for the new image and then build it + dockerfile = 'FROM %s\nADD content.tar %s' % (startimage, self.destpath) with open(os.path.join(cachedir, 'Dockerfile'), 'w') as df: - df.write('FROM %s\nADD content.tar %s' % (startimage, self.destpath)) - client.images.build(path=cachedir, - tag=newimage) - print('Done. Created image "%s"' % newimage) + df.write(dockerfile) + + buildargs = dict(path=cachedir, + tag=newimage, + decode=True) + + # Build and show logs + stream = client.api.build(**buildargs) + try: + utils.stream_build_log(stream, newimage) + except ValueError as e: + raise BuildError(dockerfile, e.args[0], build_args=buildargs) def _setcache(self, client): if self._sourceobj is None: # get image and set up cache if necessary diff --git a/dockermake/utils.py b/dockermake/utils.py index 723a5da..000bc0e 100644 --- a/dockermake/utils.py +++ b/dockermake/utils.py @@ -12,7 +12,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import collections +import os +import sys +import textwrap +import yaml import docker _dockerclient = None @@ -62,8 +67,10 @@ def get_build_targets(args, defs): # Assemble a custom target from requirements assert args.requires and args.name assert args.name not in defs.ymldefs - defs.ymldefs[args.name] = {'requires': args.requires} + defs.ymldefs[args.name] = {'requires': args.requires, + '_sourcefile': "command line arguments"} targets = [args.name] + elif args.all: # build all targets in the file assert len(args.TARGETS) == 0, "Pass either a list of targets or `--all`, not both" @@ -84,11 +91,14 @@ def build_targets(args, defs, targets): else: client = get_client_api() built, warnings = [], [] - builders = [defs.generate_build(t, generate_name(t, args)) for t in targets] + builders = [defs.generate_build(t, generate_name(t, args), rebuilds=args.bust_cache) + for t in targets] for b in builders: b.build(client, printdockerfiles=args.print_dockerfiles, - nobuild=args.no_build) + nobuild=args.no_build, + usecache=not args.no_cache, + pull=args.pull) print(' docker-make built:', b.targetname) built.append(b.targetname) if args.push_to_registry: @@ -130,3 +140,58 @@ def push(client, name): else: success = True return success, warnings + + +def human_readable_size(num, suffix='B'): + """ FROM http://stackoverflow.com/a/1094933/1958900 + """ + for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']: + if abs(num) < 1024.0: + return "%3.1f%s%s" % (num, unit, suffix) + num /= 1024.0 + return "%.1f%s%s" % (num, 'Yi', suffix) + + +def stream_build_log(stream, name): + textwidth = get_console_width() - 10 + wrapper = textwrap.TextWrapper(initial_indent=u'|\u2022"', + subsequent_indent='| ', + break_on_hyphens=False, + width=textwidth) + + logtitle = '%s: BUILD LOG' % name + numdash = (textwidth - len(logtitle) - 7) // 2 + header = ''.join(['-'*numdash, " %s " % logtitle, '-'*numdash]) + print(header) + + pullstats = collections.OrderedDict() + for item in stream: + if 
list(item.keys()) == ['stream']: + line = item['stream'].strip() + elif 'errorDetail' in item or 'error' in item: + raise ValueError(item) + elif 'status' in item and 'id' in item: # for pulling images + line = _show_pull_status(pullstats, item) + else: + line = str(item) + + for s in wrapper.wrap(line): + print(s) + + print('-'*len(header)) + + +def get_console_width(): + try: + _, consolewidth = map(int, os.popen('stty size', 'r').read().split()) + except: + consolewidth = 80 + return consolewidth + + +def _show_pull_status(pullstats, item): + imgid = item['id'] + stat = item['status'] + if stat != pullstats.get(imgid, None): + pullstats[imgid] = stat + return '%s: %s' % (imgid, stat) diff --git a/example/DockerMake.yml b/example/DockerMake.yml index d3fb68c..6ace24e 100644 --- a/example/DockerMake.yml +++ b/example/DockerMake.yml @@ -51,7 +51,7 @@ data_science: - python_image - airline_data - plant_data - built_files: + copy_from: blank_file: /data/file.txt: /data/blankfile.txt From 241d4be1cf551a920e6ea693e7d7c7fabee5e425 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sun, 22 Jan 2017 13:48:33 -0800 Subject: [PATCH 15/27] Shorten cache directory paths and activate CLI cache clearing --- dockermake/__main__.py | 7 +++++-- dockermake/cli.py | 2 +- dockermake/imagedefs.py | 2 ++ dockermake/staging.py | 18 +++++++++++------- dockermake/utils.py | 13 +++++++------ 5 files changed, 26 insertions(+), 16 deletions(-) diff --git a/dockermake/__main__.py b/dockermake/__main__.py index f5776c5..87e6ac2 100755 --- a/dockermake/__main__.py +++ b/dockermake/__main__.py @@ -18,9 +18,8 @@ from __future__ import print_function import os -from . import cli +from . import cli, utils, staging from .imagedefs import ImageDefs -from . 
import utils def main(): @@ -39,6 +38,10 @@ def main(): cli.print_yaml_help() return + if args.clear_copy_cache: + staging.clear_copy_cache() + return + if not os.path.exists(args.makefile): if args.makefile == 'DockerMake.yml': parser.print_help() diff --git a/dockermake/cli.py b/dockermake/cli.py index fcd9be0..066e98d 100644 --- a/dockermake/cli.py +++ b/dockermake/cli.py @@ -50,7 +50,7 @@ def make_arg_parser(): ca.add_argument('--bust-cache', action='append', help='Force docker to rebuilt all layers in this image. You can bust ' 'multiple image layers by passing --bust-cache multiple times.') - ca.add_argument('--clean-copycache', action='store_true', + ca.add_argument('--clear-copy-cache', '--clear-cache', action='store_true', help="Remove docker-make's cache of files for `copy-from`.") rt = parser.add_argument_group('Repositories and tags') diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index 3abe942..c98210e 100644 --- a/dockermake/imagedefs.py +++ b/dockermake/imagedefs.py @@ -21,6 +21,7 @@ import yaml from . import builds +from . import staging RECOGNIZED_KEYS = set('requires build_directory build copy_from FROM description _sourcefile' .split()) @@ -33,6 +34,7 @@ def __init__(self, makefile_path): self._sources = set() self.makefile_path = makefile_path print('Working directory: %s' % os.path.abspath(os.curdir)) + print('Copy cache directory: %s' % staging.TMPDIR) self.ymldefs = self.parse_yaml(self.makefile_path) self.all_targets = self.ymldefs.pop('_ALL_', None) diff --git a/dockermake/staging.py b/dockermake/staging.py index f7d5a90..fbe9df6 100644 --- a/dockermake/staging.py +++ b/dockermake/staging.py @@ -23,8 +23,9 @@ from . 
import utils -BUILD_CACHEDIR = os.path.join(tempfile.gettempdir(), 'dmk_cache') -BUILD_TEMPDIR = os.path.join(tempfile.gettempdir(), 'dmk_download') +TMPDIR = tempfile.gettempdir() +BUILD_CACHEDIR = os.path.join(TMPDIR, 'dmk_cache') +BUILD_TEMPDIR = os.path.join(TMPDIR, 'dmk_download') def clear_copy_cache(): @@ -33,6 +34,8 @@ def clear_copy_cache(): assert os.path.isdir(path), "'%s' is not a directory!" print('Removing docker-make cache %s' % path) shutil.rmtree(path) + else: + print('Cache directory %s does not exist.' % path) class StagedFile(object): @@ -61,12 +64,13 @@ def stage(self, startimage, newimage): client = utils.get_client() print(' * Copying FROM "%s:/%s" TO "%s://%s/"'%(self.sourceimage, self.sourcepath, - startimage, self.destpath)) + startimage, self.destpath)) # copy build artifacts from the container if necessary cachedir = self._setcache(client) + cacherelpath = os.path.relpath(cachedir, TMPDIR) if not os.path.exists(cachedir): - print(' * Creating cache at %s' % cachedir) + print(' * Creating cache at %s' % cacherelpath) container = client.containers.create(self.sourceimage) try: tarfile_stream, tarfile_stats = container.get_archive(self.sourcepath) @@ -82,7 +86,7 @@ def stage(self, startimage, newimage): os.mkdir(cachedir) os.rename(tempdir, cachedir) else: - print(' * Using cached files from %s' % cachedir) + print(' * Using cached files from %s' % cacherelpath) # write Dockerfile for the new image and then build it dockerfile = 'FROM %s\nADD content.tar %s' % (startimage, self.destpath) @@ -112,12 +116,12 @@ def _setcache(self, client): os.mkdir(BUILD_TEMPDIR) image_cachedir = os.path.join(BUILD_CACHEDIR, - self._sourceobj.id) + self._sourceobj.id.replace('sha256:', '')) if not os.path.exists(image_cachedir): os.mkdir(image_cachedir) self._cachedir = os.path.join(image_cachedir, - self.sourcepath.replace('/', '--')) + self.sourcepath.replace('/', '_-')) return self._cachedir else: # make sure image ID hasn't changed diff --git 
a/dockermake/utils.py b/dockermake/utils.py index 000bc0e..70de052 100644 --- a/dockermake/utils.py +++ b/dockermake/utils.py @@ -1,4 +1,3 @@ -from __future__ import print_function # Copyright 2015-2017 Autodesk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,6 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function + import collections import os import sys @@ -153,15 +154,15 @@ def human_readable_size(num, suffix='B'): def stream_build_log(stream, name): - textwidth = get_console_width() - 10 - wrapper = textwrap.TextWrapper(initial_indent=u'|\u2022"', - subsequent_indent='| ', + textwidth = get_console_width() - 5 + wrapper = textwrap.TextWrapper(initial_indent=' ', + subsequent_indent=' ', break_on_hyphens=False, width=textwidth) logtitle = '%s: BUILD LOG' % name numdash = (textwidth - len(logtitle) - 7) // 2 - header = ''.join(['-'*numdash, " %s " % logtitle, '-'*numdash]) + header = ''.join([' ','-'*numdash, " %s " % logtitle, '-'*numdash]) print(header) pullstats = collections.OrderedDict() @@ -178,7 +179,7 @@ def stream_build_log(stream, name): for s in wrapper.wrap(line): print(s) - print('-'*len(header)) + print(' ', '-'*len(header)) def get_console_width(): From 3b75bf8c19cdd4e10671cdef828664cc76a90557 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Wed, 25 Jan 2017 12:16:46 -0800 Subject: [PATCH 16/27] Pushes work again --- README.md | 43 +++++++++++++++++++++++-------------- dockermake/builds.py | 2 +- dockermake/staging.py | 2 +- dockermake/utils.py | 49 +++++++++++++++++++++++++++---------------- 4 files changed, 60 insertions(+), 36 deletions(-) diff --git a/README.md b/README.md index 80abd7e..65d0eb3 100644 --- a/README.md +++ b/README.md @@ -14,10 +14,10 @@ Table of Contents ### Install -Requires Docker (obviously), and Python 2.7 or 3.4+ with 
pip. +Requires [Docker](https://www.docker.com/products/docker), and Python (2.7, or 3.4+). ``` -pip install git+https://github.com/avirshup/DockerMake +pip install git+https://github.com/autodesk/DockerMake ``` This will install the command line tool, `docker-make`, and its supporting python package, which you can import as `import dockermake`. @@ -28,22 +28,25 @@ This will install the command line tool, `docker-make`, and its supporting pytho To build some illustrative examples, try running: ```bash -wget https://raw.githubusercontent.com/avirshup/DockerMake/master/example/DockerMake.yml +wget https://raw.githubusercontent.com/autodesk/DockerMake/master/example/DockerMake.yml + docker-make --list docker-make data_science --repo docker.io/myusername --tag testbuild ``` ### What you can do with it + * **New**: Build an artifact (such as an executable or library) in one image, then copy it into a smaller image for deployment + * **New**: easily invalidate the docker image cache at an arbitrary layer * Define small pieces of configuration or functionality, then mix them together into production docker images. 
* "Inherit" Dockerfile instructions from multiple sources - * **New**: Build an artifact (such as an executable or library) in one image, then copy it into a smaller image for deployment * Easily manage images that pull files from multiple directories on your filesystem - * Rebuild an entire stack of images as needed with a single command + * Easily manage images that pull binaries from other _docker images_ that you've defined + * Build and push an entire stack of images with a single command ### Example -[Click here to see how we're using this in production.](https://github.com/Autodesk/molecular-design-toolkit/blob/master/docker_images/DockerMake.yml) +[Click here to see how we're using this in production.](https://github.com/Autodesk/molecular-design-toolkit/blob/workflow_fixes/DockerMakefiles/) This example builds a single docker image called `data_science`. It does this by mixing together three components: `devbase` (the base image), `airline_data` (a big CSV file), and `python_image` (a python installation). `docker-make` will create an image that combines all of these components. @@ -109,12 +112,13 @@ The idea is to write dockerfile commands for each specific piece of functionalit build_directory: [path where the ADD and COPY commands will look for files] # note that the "build_directory" path can be relative or absolute. # if it's relative, it's interpreted relative to DockerMake.yml's directory - built_files: + copy_from: # Note: the copy_from commands will always run AFTER any build commands [source_image]: [source path1]:[destination path1] [source path2]:[destination path2] [...] - [...] + [source_image_2]: + [...] [other image name]: [...] 
@@ -131,12 +135,12 @@ eval $(docker-machine env [machine-name]) ### Command line usage ``` -usage: docker-make.py [-h] [-f MAKEFILE] [-a] [-l] - [--requires [REQUIRES [REQUIRES ...]]] [--name NAME] - [-p] [-n] [--pull] [--no-cache] - [--repository REPOSITORY] [--tag TAG] - [--push-to-registry] [--help-yaml] - [TARGETS [TARGETS ...]] +usage: docker-make [-h] [-f MAKEFILE] [-a] [-l] + [--requires [REQUIRES [REQUIRES ...]]] [--name NAME] [-p] + [-n] [--pull] [--no-cache] [--bust-cache BUST_CACHE] + [--clear-copy-cache] [--repository REPOSITORY] [--tag TAG] + [--push-to-registry] [--version] [--help-yaml] + [TARGETS [TARGETS ...]] NOTE: Docker environmental variables must be set. For a docker-machine, run `eval $(docker-machine env [machine-name])` @@ -157,7 +161,7 @@ Choosing what to build: --name NAME Name for custom docker images (requires --requires) Dockerfiles: - -p, --print_dockerfiles + -p, --print-dockerfiles, --print_dockerfiles Print out the generated dockerfiles named `Dockerfile.[image]` -n, --no_build Only print Dockerfiles, don't build them. Implies @@ -166,6 +170,12 @@ Dockerfiles: Image caching: --pull Always try to pull updated FROM images --no-cache Rebuild every layer + --bust-cache BUST_CACHE + Force docker to rebuilt all layers in this image. You + can bust multiple image layers by passing --bust-cache + multiple times. + --clear-copy-cache, --clear-cache + Remove docker-make's cache of files for `copy_from`. Repositories and tags: --repository REPOSITORY, -r REPOSITORY, -u REPOSITORY @@ -187,10 +197,11 @@ Repositories and tags: to dockerhub.com, use index.docker.io as the registry) Help: + --version Print version and exit. --help-yaml Print summary of YAML file format and exit. ``` -Written by Aaron Virshup, Bio/Nano Research Group, Autodesk Research +Written by Aaron Virshup, BioNano Group at Autodesk Copyright (c) 2015-2017, Autodesk Inc. Released under the Apache 2.0 License. 
diff --git a/dockermake/builds.py b/dockermake/builds.py index 9beb3d7..2d15569 100644 --- a/dockermake/builds.py +++ b/dockermake/builds.py @@ -90,7 +90,7 @@ def build(self, client, pull=False, usecache=True): # start the build stream = client.build(**build_args) try: - utils.stream_build_log(stream, self.buildname) + utils.stream_docker_logs(stream, self.buildname) except ValueError as e: raise BuildError(dockerfile, e.args[0], build_args) diff --git a/dockermake/staging.py b/dockermake/staging.py index fbe9df6..b381d2b 100644 --- a/dockermake/staging.py +++ b/dockermake/staging.py @@ -100,7 +100,7 @@ def stage(self, startimage, newimage): # Build and show logs stream = client.api.build(**buildargs) try: - utils.stream_build_log(stream, newimage) + utils.stream_docker_logs(stream, newimage) except ValueError as e: raise BuildError(dockerfile, e.args[0], build_args=buildargs) diff --git a/dockermake/utils.py b/dockermake/utils.py index 70de052..247e34a 100644 --- a/dockermake/utils.py +++ b/dockermake/utils.py @@ -38,8 +38,12 @@ def get_client(): def list_image_defs(args, defs): + from . 
import imagedefs + print('TARGETS in `%s`' % args.makefile) - for item in list(defs.ymldefs.keys()): + for item in sorted(defs.ymldefs.keys()): + if item in imagedefs.SPECIAL_FIELDS: + continue print(' *', item) return @@ -123,19 +127,9 @@ def push(client, name): print(warn) else: print(' Pushing %s to %s:' % (name, name.split('/')[0])) - line = {'error': 'no push information received'} - _lastid = None - for line in client.push(name, stream=True): - line = yaml.load(line) - if 'status' in line: - if line.get('id', None) == _lastid and line['status'] == 'Pushing': - print('\r', line['status'], line['id'], line.get('progress', ''), end=' ') - sys.stdout.flush() - else: - print(line['status'], line.get('id', '')) - _lastid = line.get('id', None) - else: - print(line) + stream = _linestream(client.push(name, stream=True)) + line = stream_docker_logs(stream, 'PUSH %s' % name) + if 'error' in line: warnings.append('WARNING: push failed for %s. Message: %s' % (name, line['error'])) else: @@ -143,6 +137,12 @@ def push(client, name): return success, warnings +def _linestream(textstream): + for item in textstream: + for line in item.splitlines(): + yield yaml.load(line) + + def human_readable_size(num, suffix='B'): """ FROM http://stackoverflow.com/a/1094933/1958900 """ @@ -153,7 +153,7 @@ def human_readable_size(num, suffix='B'): return "%.1f%s%s" % (num, 'Yi', suffix) -def stream_build_log(stream, name): +def stream_docker_logs(stream, name): textwidth = get_console_width() - 5 wrapper = textwrap.TextWrapper(initial_indent=' ', subsequent_indent=' ', @@ -172,7 +172,8 @@ def stream_build_log(stream, name): elif 'errorDetail' in item or 'error' in item: raise ValueError(item) elif 'status' in item and 'id' in item: # for pulling images - line = _show_pull_status(pullstats, item) + line = _show_xfer_state(pullstats, item) + if line is None: continue else: line = str(item) @@ -180,6 +181,7 @@ def stream_build_log(stream, name): print(s) print(' ', '-'*len(header)) + return 
line def get_console_width(): @@ -190,9 +192,20 @@ def get_console_width(): return consolewidth -def _show_pull_status(pullstats, item): +SHOWSIZE = set(('Pushing', 'Pulling', 'Pulled', 'Downloaded', 'Downloading')) + +def _show_xfer_state(pullstats, item): imgid = item['id'] stat = item['status'] if stat != pullstats.get(imgid, None): pullstats[imgid] = stat - return '%s: %s' % (imgid, stat) + + if stat in SHOWSIZE and item.get('progressDetail', {}).get('total', None): + toprint = '%s: %s (%s)' % (imgid, stat, + human_readable_size(item['progressDetail']['total'])) + else: + toprint = '%s: %s' % (imgid, stat) + + return toprint + else: + return None From c193701ea127c580eac20e6549630a7d78939091 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Wed, 1 Feb 2017 12:15:04 -0800 Subject: [PATCH 17/27] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 65d0eb3..b9b6f46 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ Table of Contents ### Install -Requires [Docker](https://www.docker.com/products/docker), and Python (2.7, or 3.4+). +Requires [Docker](https://www.docker.com/products/docker), and Python (2.7 or 3.4+). ``` pip install git+https://github.com/autodesk/DockerMake @@ -85,7 +85,7 @@ data_science: To build an image called `alice/data_science`, you can run: ```bash -docker-make.py data_science --repository alice +docker-make data_science --repository alice ``` which will create an image with all the commands in `python_image` and `airline_data`. 
From 5b9c3be380e211c62084cac5cc188b3af69b07b8 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Tue, 28 Feb 2017 21:47:24 -0800 Subject: [PATCH 18/27] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b9b6f46..7969caf 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ docker-make data_science --repo docker.io/myusername --tag testbuild ### Example -[Click here to see how we're using this in production.](https://github.com/Autodesk/molecular-design-toolkit/blob/workflow_fixes/DockerMakefiles/) +[Click here to see how we're using this in production.](https://github.com/Autodesk/molecular-design-toolkit/tree/master/DockerMakefiles) This example builds a single docker image called `data_science`. It does this by mixing together three components: `devbase` (the base image), `airline_data` (a big CSV file), and `python_image` (a python installation). `docker-make` will create an image that combines all of these components. From 9dbec9137a775ce4651af5cce188548970d48b38 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 15:34:23 -0700 Subject: [PATCH 19/27] Add travis build --- .travis.yml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..59b4a14 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,19 @@ +language: python + +python: + - "2.7" + +addons: + apt: + packages: + - git + +sudo: required +services: + - docker + +install: + - pip install -e . 
+ + +#script: py.test -n 4 \ No newline at end of file From f833d9f970650c2d97b79e409d9f232a74ed81a1 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 15:42:00 -0700 Subject: [PATCH 20/27] More travis --- .travis.yml | 10 ++-------- README.md | 2 ++ 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index 59b4a14..8ebf33f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,17 +3,11 @@ language: python python: - "2.7" -addons: - apt: - packages: - - git - sudo: required services: - docker install: - - pip install -e . - + - pip install . -#script: py.test -n 4 \ No newline at end of file +script: docker-make --help \ No newline at end of file diff --git a/README.md b/README.md index 7969caf..07a7dcb 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ # Docker-make Build and manage stacks of docker images - a dependency graph for Docker images +[![Build Status](https://travis-ci.org/avirshup/DockerMake.svg?branch=master)](https://travis-ci.org/avirshup/DockerMake) + Table of Contents ================= * [Install](#Install) From 7131d70efc4d61e826e047223d19a5ff019a506b Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 15:47:26 -0700 Subject: [PATCH 21/27] Add requirements to setup.py --- setup.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f3d8d51..267ac70 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,9 @@ from distutils.core import setup import versioneer +with open('requirements.txt', 'r') as reqfile: + requirements = [x.strip() for x in reqfile if x.strip()] + setup( name='DockerMake', version=versioneer.get_version(), @@ -14,5 +17,7 @@ 'console_scripts': [ 'docker-make = dockermake.__main__:main' ] - } + }, + install_requires=requirements, + ) From f287b39af3f2168dfc62979698bf48c55645c60e Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 15:52:34 -0700 Subject: [PATCH 22/27] Add py3 support --- .travis.yml | 6 +++++- 
requirements.txt | 1 + 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8ebf33f..ec96970 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,12 +2,16 @@ language: python python: - "2.7" + - "3.5" sudo: required services: - docker +before_install: + - pip install pytest + install: - pip install . -script: docker-make --help \ No newline at end of file +script: docker-make --help # placeholder \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 11b69ce..ae68759 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ docker +future pyyaml From 17d6a9ccf1ad95b3406f53a058e72d75c12441f0 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 16:14:41 -0700 Subject: [PATCH 23/27] Test the example --- .travis.yml | 2 +- example/DockerMake.yml | 24 +++++++++--------------- test/test_example.py | 39 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 16 deletions(-) create mode 100644 test/test_example.py diff --git a/.travis.yml b/.travis.yml index ec96970..c4d6513 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,4 +14,4 @@ before_install: install: - pip install . -script: docker-make --help # placeholder \ No newline at end of file +script: cd test && py.test diff --git a/example/DockerMake.yml b/example/DockerMake.yml index 6ace24e..94436fa 100644 --- a/example/DockerMake.yml +++ b/example/DockerMake.yml @@ -5,9 +5,8 @@ data_image: devbase: FROM: debian:jessie build: | - RUN apt-get -y update && apt-get -y install \ - build-essential \ - && mkdir -p /opt + RUN apt-get -y update && apt-get -y install wget + RUN mkdir -p /opt airline_data: requires: @@ -23,7 +22,10 @@ plant_data: build: | ADD Puromycin.csv /data -blank_file: +blank_file_build: + description: | + This image isn't intended for deployment! 
Rather, files built in this environment + can be copied into other images (see data_science.copy_from below) FROM: debian:jessie build: | RUN mkdir -p /data @@ -35,15 +37,7 @@ python_image: build: | RUN apt-get install -y \ python \ - python-dev \ - python-pip \ - python-numpy \ - python-scipy \ - python-pandas \ - liblapack-dev \ - libblas-dev \ - gfortran \ - libpng12-dev + python-pip RUN pip install Pint data_science: @@ -52,8 +46,8 @@ data_science: - airline_data - plant_data copy_from: - blank_file: - /data/file.txt: /data/blankfile.txt + blank_file_build: + /data/file.txt: /data final: requires: diff --git a/test/test_example.py b/test/test_example.py new file mode 100644 index 0000000..094ba63 --- /dev/null +++ b/test/test_example.py @@ -0,0 +1,39 @@ +import os +import subprocess +import pytest + +EXAMPLEDIR = os.path.join('../example') + + +def test_executable_in_path(): + subprocess.check_call('which docker-make'.split(), + cwd=EXAMPLEDIR) + + +def test_help_string(): + subprocess.check_call('docker-make --help'.split(), + cwd=EXAMPLEDIR) + + +def test_list(): + subprocess.check_call('docker-make --list'.split(), + cwd=EXAMPLEDIR) + + output = subprocess.check_output('docker-make --list'.split(), + cwd=EXAMPLEDIR) + + expected = set(('airline_data blank_file data_image data_science ' + 'devbase final plant_data python_image').split()) + + for line in list(output.splitlines())[4:]: + image = line[3:] + assert image in expected + expected.remove(image) + + assert len(expected) == 0 + + +def test_example_build(): + subprocess.check_call( + "docker run final ls data/AirPassengers.csv data/Puromycin.csv data/file.txt".split(), + cwd=EXAMPLEDIR) From 22f268897a1418060d5d8af185a532a834435c85 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 16:20:56 -0700 Subject: [PATCH 24/27] Fix tests --- test/test_example.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/test_example.py b/test/test_example.py index 
094ba63..268c960 100644 --- a/test/test_example.py +++ b/test/test_example.py @@ -35,5 +35,9 @@ def test_list(): def test_example_build(): subprocess.check_call( - "docker run final ls data/AirPassengers.csv data/Puromycin.csv data/file.txt".split(), + "docker-make final --repo myrepo --tag mytag".split(), + cwd=EXAMPLEDIR) + + subprocess.check_call( + "docker run myrepo/final:mytag ls data/AirPassengers.csv data/Puromycin.csv data/file.txt".split(), cwd=EXAMPLEDIR) From ad3f7e779a7e882de2cf87964650cff151bc1944 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 16:31:22 -0700 Subject: [PATCH 25/27] Try to auto-detect docker version # --- dockermake/utils.py | 2 +- test/test_example.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dockermake/utils.py b/dockermake/utils.py index 247e34a..6c7b377 100644 --- a/dockermake/utils.py +++ b/dockermake/utils.py @@ -32,7 +32,7 @@ def get_client(): global _dockerclient if _dockerclient is None: - _dockerclient = docker.from_env() + _dockerclient = docker.from_env(version='auto') return _dockerclient diff --git a/test/test_example.py b/test/test_example.py index 268c960..66e96e8 100644 --- a/test/test_example.py +++ b/test/test_example.py @@ -22,7 +22,7 @@ def test_list(): output = subprocess.check_output('docker-make --list'.split(), cwd=EXAMPLEDIR) - expected = set(('airline_data blank_file data_image data_science ' + expected = set(('airline_data blank_file_build data_image data_science ' 'devbase final plant_data python_image').split()) for line in list(output.splitlines())[4:]: From eba63f8139562363a5769b0fff0f49bded3502ae Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 16:36:42 -0700 Subject: [PATCH 26/27] py3-compatible iteritems --- dockermake/imagedefs.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dockermake/imagedefs.py b/dockermake/imagedefs.py index c98210e..bf908b7 100644 --- a/dockermake/imagedefs.py +++ 
b/dockermake/imagedefs.py @@ -19,6 +19,7 @@ import os from collections import OrderedDict import yaml +from future.utils import iteritems from . import builds from . import staging @@ -64,7 +65,7 @@ def _fix_file_paths(ymlfilepath, yamldefs): """ pathroot = os.path.dirname(ymlfilepath) - for field, item in yamldefs.iteritems(): + for field, item in iteritems(yamldefs): if field == '_SOURCES_': yamldefs['_SOURCES_'] = [os.path.relpath(_get_abspath(pathroot, p)) for p in yamldefs['_SOURCES_']] @@ -115,9 +116,9 @@ def generate_build(self, image, targetname, rebuilds=None): bust_cache=base_name in rebuilds)) base_image = buildname - for sourceimage, files in self.ymldefs[base_name].get('copy_from', {}).iteritems(): + for sourceimage, files in iteritems(self.ymldefs[base_name].get('copy_from', {})): sourceimages.add(sourceimage) - for sourcepath, destpath in files.iteritems(): + for sourcepath, destpath in iteritems(files): istep += 1 buildname = 'dmkbuild_%s_%d' % (image, istep) build_steps.append(builds.FileCopyStep(sourceimage, sourcepath, From 70c7ea73f07f124ac9d588c3efc83854602fcaa1 Mon Sep 17 00:00:00 2001 From: Aaron Virshup Date: Sat, 1 Apr 2017 16:51:19 -0700 Subject: [PATCH 27/27] Add PyPI deployment, remove explicit py3 support (io.StringIO and io.BytesIO are killing me) --- .travis.yml | 20 ++++++++++++++------ setup.py | 1 + 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index c4d6513..66a30eb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,17 +1,25 @@ language: python python: - - "2.7" - - "3.5" +- '2.7' sudo: required + services: - - docker +- docker before_install: - - pip install pytest +- pip install pytest install: - - pip install . - +- pip install . 
script: cd test && py.test + +deploy: + provider: pypi + user: avirshup + password: + secure: TYbx42AboT7VtAy+YCbmRkEraJhkDYMX4uQeEjZe+JNqsdFQ+4YWkEjkTCiMg2GO944W75NqBe3Cq6+UN3GYscM6QSZ/N+A/ME0V4JnC/N6AVPo0M+ns0lvBKm34dsafjWozf0J5Jwt5pK8UBByR6fEru5SvZej2IAow2cZzJRATFpq5YbVBuS77zDOZKk3526xzqGq1U/Ay/e/ar/ZTVTkdY78PDC3pLSs6aie+LtJJee80czUkMTXK8CTQz3g1ni37tNVhYWKdZjAdnfSbJWP8BB/SDW+/TBaKAHBhmTRbFWhiPJKvhFmqSt7JG7Nzu0zWqqh+blgXiQjseWmJB7hbPvr4g7lIHtUYdjTPPvQXYRt+6TNAm6N0Lx/gMPLnfN9t1VKv9ZAtgM8jKLoY+jBmpmZyx5QeO+clsLOqCh+wkeJUULtHJWeSQ6mz5PlgDgVu3gx6/d21uR1I7bHbohZAQ707J1bDhzjsolbQ+NzyMYlVuEoJs0NVnGTVR7q5NzjGSgYODFdbf1QZXeB/KeZmjskAzmNsUe1sGQsPIA4KtLS+PAYDdCJiH1i54/yaXF7Uj4g+vpdrOMKaVyEdtaS8yeiAcH18tDIqPwkWCFxboLcSreV4FcTrBocwUwbJhznC5vq2YgOKUf8DVtv+YeC4KcTLUufm0ybd24rbvW0= + on: + tags: true + repo: avirshup/DockerMake diff --git a/setup.py b/setup.py index 267ac70..88d2123 100644 --- a/setup.py +++ b/setup.py @@ -13,6 +13,7 @@ author='Aaron Virshup', author_email='avirshup@gmail.com', description='Build manager for docker images', + url="https://github.com/avirshup/dockermake", entry_points={ 'console_scripts': [ 'docker-make = dockermake.__main__:main'