diff --git a/charmtools/build/__init__.py b/charmtools/build/__init__.py index 80294d29..0d1d32d4 100755 --- a/charmtools/build/__init__.py +++ b/charmtools/build/__init__.py @@ -12,4 +12,3 @@ Fetched, log, ) - diff --git a/charmtools/build/builder.py b/charmtools/build/builder.py index 4a984700..7f18569e 100755 --- a/charmtools/build/builder.py +++ b/charmtools/build/builder.py @@ -59,14 +59,20 @@ def configured(self): class Fetched(Configable): - def __init__(self, url, target_repo, name=None): + def __init__(self, url, target_repo, name=None, lock=None, + use_branches=False): super(Fetched, self).__init__() self.url = url self.target_repo = target_repo / self.NAMESPACE self.directory = None self._name = name self.fetched = False + self.fetched_url = None + self.vcs = None self.revision = None + self.branch = None + self.lock = lock + self.use_branches = use_branches @property def name(self): @@ -85,7 +91,15 @@ def __div__(self, other): def fetch(self): try: - fetcher = get_fetcher(self.url) + # In order to lock the fetcher we need to adjust the self.url + # to get the right thing. Curiously, self.url is actually + # "layer:something" here, and so we can match on that. + if self.lock: + url = make_url_from_lock_for_layer( + self.lock, self.use_branches) + else: + url = self.url + fetcher = get_fetcher(url) except FetchError: # We might be passing a local dir path directly # which fetchers don't currently support @@ -99,7 +113,11 @@ def fetch(self): self.target_repo.makedirs_p() self.directory = path(fetcher.fetch(self.target_repo)) self.fetched = True + self.fetched_url = getattr(fetcher, "fetched_url", None) + self.vcs = getattr(fetcher, "vcs", None) self.revision = fetcher.get_revision(self.directory) + self.branch = fetcher.get_branch_for_revision(self.directory, + self.revision) if not self.directory.exists(): raise BuildError( @@ -150,10 +168,13 @@ def __init__(self): self.wheelhouse_overrides = None self.wheelhouse_per_layer = False self._warned_home = False + self.lock_items = [] + self.with_locks = {} @property def top_layer(self): if not self._top_layer: + # NOTE: no lock for branches for lop layer. self._top_layer = Layer(self.charm, self.cache_dir).fetch() return self._top_layer @@ -216,6 +237,10 @@ def target_dir(self): def manifest(self): return self.target_dir / '.build.manifest' + @property + def lock_file(self): + return self.source_dir / 'build.lock' + def check_series(self): """Make sure this is a either a multi-series charm, or we have a build series defined. If not, fall back to a default series. 
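To make the lock redirection above concrete, here is a minimal sketch of how a lock entry changes the URL that ``Fetched.fetch()`` hands to ``get_fetcher()``. It assumes a charm-tools checkout with this change applied; the entry values are illustrative, and ``make_url_from_lock_for_layer`` is the module-level helper added later in this diff:

    # Sketch only: the entry shape mirrors what write_the_lock_file() produces.
    from charmtools.build.builder import make_url_from_lock_for_layer

    lock_entry = {
        "type": "layer",
        "item": "layer:basic",
        "url": "https://github.com/juju-solutions/layer-basic.git",
        "vcs": None,
        "branch": "refs/heads/master",
        "commit": "623e69c7b432456fd4364f6e1835424fd6b5425e",
    }

    # Default rebuild from a lock file: pin the layer to the recorded commit.
    print(make_url_from_lock_for_layer(lock_entry))
    # -> https://github.com/juju-solutions/layer-basic.git@623e69c7b432456fd4364f6e1835424fd6b5425e

    # With --use-lock-file-branches: track the recorded branch instead.
    print(make_url_from_lock_for_layer(lock_entry, use_branches=True))
    # -> https://github.com/juju-solutions/layer-basic.git@master

When no lock entry exists for an item, ``lock_for()`` returns an empty dict and ``Fetched.fetch()`` falls back to the plain ``layer:...``/``interface:...`` URL, so behaviour is unchanged from before.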
@@ -250,16 +275,17 @@ def find_or_create_target(self): @property def layers(self): layers = [] - for i in self._layers: - layers.append({ - 'url': i.url, - 'rev': i.revision, - }) - for i in self._interfaces: - layers.append({ - 'url': i.url, - 'rev': i.revision, - }) + for layer in (self._layers, self._interfaces): + for i in layer: + item = { + 'url': i.url, + 'rev': i.revision, + } + for opt in ('branch', 'fetched_url'): + attr = getattr(i, opt, None) + if attr: + item[opt] = attr + layers.append(item) return layers def fetch(self): @@ -268,7 +294,11 @@ def fetch(self): log.warn("The top level layer expects a " "valid layer.yaml file") # Manually create a layer object for the output - self.target = Layer(self.name, self.build_dir) + self.target = Layer(self.name, self.build_dir, + lock=self.lock_for(self.name), + use_branches=getattr(self, + 'use_lock_file_branches', + False)) self.target.directory = self.target_dir self.post_metrics('charm', self.name, False) return self.fetch_deps(self.top_layer) @@ -302,13 +332,19 @@ def fetch_dep(self, layer, results): # The order of these commands is important. We only want to # fetch something if we haven't already fetched it. if base.startswith("interface:"): - iface = Interface(base, self.cache_dir) + iface = Interface(base, self.cache_dir, + lock=self.lock_for(base), + use_branches=getattr( + self, 'use_lock_file_branches', False)) if iface.name in [i.name for i in results['interfaces']]: continue results["interfaces"].append(iface.fetch()) self.post_metrics('interface', iface.name, iface.fetched) else: - base_layer = Layer(base, self.cache_dir) + base_layer = Layer(base, self.cache_dir, + lock=self.lock_for(base), + use_branches=getattr( + self, 'use_lock_file_branches', False)) if base_layer.name in [i.name for i in results['layers']]: continue base_layer.fetch() @@ -316,6 +352,10 @@ def fetch_dep(self, layer, results): results["layers"].append(base_layer) self.post_metrics('layer', base_layer.name, base_layer.fetched) + def lock_for(self, base): + """Return a lock description for an item 'base' if it exists.""" + return self.with_locks.get(base, {}) + def build_tactics(self, entry, layer, next_config, current_config, output_files): relname = entry.relpath(layer.directory) @@ -356,6 +396,28 @@ def plan_layers(self, layers, output_files): next_config=next_config, current_config=current_config, output_files=output_files)) + # now we do update the wheelhouse.txt output file with the lock file if + # necessary. + if not getattr(self, 'ignore_lock_file', False): + lines = self.generate_python_modules_from_lock_file() + # override any existing lines with the python modules from the lock + # file. 
+ existing_tactic = output_files.get('wheelhouse.txt') + lock_layer = Layer('lockfile-wheelhouse', + layers["layers"][-1].target_repo.dirname()) + lock_layer.directory = layers["layers"][-1].directory + wh_tactic = WheelhouseTactic( + "", + self.target, + lock_layer, + next_config, + ) + wh_tactic.lines = lines + wh_tactic.purge_wheels = True + if existing_tactic is not None: + wh_tactic.combine(existing_tactic) + output_files["wheelhouse.txt"] = wh_tactic + if self.wheelhouse_overrides: existing_tactic = output_files.get('wheelhouse.txt') wh_over_layer = Layer('--wheelhouse-overrides', @@ -571,6 +633,8 @@ def exec_plan(self, plan=None, layers=None): tactic.read() elif phase == "call": tactic() + if hasattr(tactic, "lock_info"): + self.lock_items.extend(tactic.lock_info) elif phase == "sign": sig = tactic.sign() if sig: @@ -584,6 +648,8 @@ def exec_plan(self, plan=None, layers=None): # write out the sigs if "sign" in self.PHASES: self.write_signatures(signatures, layers) + if getattr(self, 'write_lock_file', False): + self.write_the_lock_file() if self.report: self.write_report(new_repo, added, changed, removed) @@ -594,6 +660,99 @@ def write_signatures(self, signatures, layers): layers=layers, ), indent=2, sort_keys=True)) + def write_the_lock_file(self): + """Using the info in self.layers, write a lock file. + + The lock file can be used to ensure that the same versions (or + branches) are used to recreate the charm. + """ + locks = [] + # look at the layers first for locks + for layer in self.layers: + branch = layer.get('branch', None) + if branch is None or branch == 'refs/heads/master': + tag = layer.get('rev', None) + else: + tag = branch + lock_item = { + 'type': 'layer', + 'item': layer['url'], + 'url': layer.get('fetched_url', None), + 'vcs': layer.get('vcs', None), + 'branch': branch, + 'commit': tag + } + locks.append(lock_item) + # now iterate through the other lock items and add them in + locks.extend(self.lock_items) + self.lock_file.write_text(json.dumps({'locks': locks}, indent=2)) + + def maybe_read_lock_file(self): + """Read the lock file if it exists. + + The lock file is a list of elements (either layer, interface or python + module). This is then used in the fetcher system to fix the versions + that are used when rebuilding the charm. + """ + self.with_locks = {} + try: + with open(self.lock_file) as f: + with_locks = json.load(f) + except FileNotFoundError: + log.info("The lockfile %s was not found; building using latest " + "versions.", self.lock_file) + return + except Exception as err: + log.error("Problem decoding the lock file: %s\n%s", + self.lock_file, str(err)) + raise + # re-format the lock-file as a dictionary so that the items can be + # looked up. A layer is 'layer:', an interface is + # 'interface:', and a python module is 'python_module:' + for item in with_locks['locks']: + if item['type'] == 'python_module': + self.with_locks["python_module:{}".format(item['package'])] = \ + item + elif item['type'] == 'layer': + self.with_locks[item['item']] = item + else: + log.warning("Not sure how to deal with lock item '%s'?", item) + log.info("Using lockfile %s for build.", self.lock_file) + + def generate_python_modules_from_lock_file(self): + """Using self.with_locks[], generate a list of python module lines. + + This takes the python modules recorded in the lock file, as read into + self.with_locks[] and generates a list of python module lines in the + style of a requirements.txt file suitable for pip. 
These are absolute + version locks (or Git commit/branch locks). + + :returns: list of python module lines + :rtype: List[str] + """ + lines = [] + for data in self.with_locks.values(): + if data['type'] != "python_module": + continue + vcs = data['vcs'] + if vcs == 'git': + if self.use_lock_file_branches: + branch = data.get('branch', '') + if branch.startswith("refs/heads/"): + branch = branch[len("refs/heads/"):] + line = "{}@{}#egg={}".format( + data['url'], branch, data['package']) + else: + line = "{}@{}#egg={}".format( + data['url'], data['version'], data['package']) + elif vcs is None: + line = "{}=={}".format(data['package'], data['version']) + else: + log.error("Unknown vcs type %s - aborting.", vcs) + sys.exit(1) + lines.append(line) + return lines + def generate(self): layers = self.fetch() self.formulate_plan(layers) @@ -762,6 +921,40 @@ def cleanup(self): self.cache_dir.rmtree_p() +def make_url_from_lock_for_layer(lock_spec, use_branches=False): + """Make a URL from a lock spec for a layer or interface. + + lock_spec is: + + "layer:basic": { + "branch": "refs/heads/master", + "commit": "623e69c7b432456fd4364f6e1835424fd6b5425e", + "item": "layer:basic", + "type": "layer", + "url": "https://github.com/juju-solutions/layer-basic.git", + "vcs": null + } + + It is assumed that the VCS is git. + + TODO: Add other VCS in addition to Git? + + :param lock_spec: the lock specification for the layer + :type lock_spec: Dict[str, Optional[str]] + :param use_branches: if True, use the branch, rather than the commit + :type use_branches: bool + :returns: the URL for fetching the layer from the repository + :rtype: str + """ + if use_branches: + branch = lock_spec["branch"] + if branch.startswith("refs/heads/"): + branch = branch[len("refs/heads/"):] + return "{}@{}".format(lock_spec["url"], branch) + else: + return "{}@{}".format(lock_spec["url"], lock_spec["commit"]) + + def configLogging(build): global log logging.captureWarnings(True) @@ -908,6 +1101,19 @@ def main(args=None): "from the interface service.") parser.add_argument('-n', '--name', help="Build a charm of 'name' from 'charm'") + parser.add_argument('--write-lock-file', action="store_true", + default=False, + help="Write a lock file for reproducible builds. The " + "file is 'build.lock' in the root of the layer " + "being built.") + parser.add_argument('--use-lock-file-branches', action="store_true", + default=False, + help="Use branch names if not master branch in " + "lockfile. 
This allows tracking of a stable " + "branch.") + parser.add_argument('--ignore-lock-file', action="store_true", + default=False, + help="Ignore the lock file even if it is present.") parser.add_argument('-r', '--report', action="store_true", default=True, help="Show post-build report of changes") parser.add_argument('-R', '--no-report', action="store_false", @@ -951,6 +1157,7 @@ def main(args=None): build.normalize_build_dir() build.normalize_cache_dir() build.check_paths() + build.maybe_read_lock_file() build() diff --git a/charmtools/build/fetchers.py b/charmtools/build/fetchers.py index 7444d871..36ed6cde 100644 --- a/charmtools/build/fetchers.py +++ b/charmtools/build/fetchers.py @@ -157,7 +157,6 @@ def _get_repo_fetcher_and_target(self, repo, dir_): def fetch(self, dir_): if hasattr(self, "path"): - log.debug('Using fetcher: {}'.format(super(LayerFetcher, self))) return super(LayerFetcher, self).fetch(dir_) elif hasattr(self, "repo"): f, target = self._get_repo_fetcher_and_target(self.repo, dir_) @@ -166,6 +165,10 @@ def fetch(self, dir_): log.debug('Adding branch: %s', self.BRANCH) f.revision = self.BRANCH orig_res = res = f.fetch(dir_) + log.debug("url fetched (for lockfile): %s", + getattr(f, 'fetched_url')) + self.fetched_url = getattr(f, 'fetched_url', None) + self.vcs = getattr(f, 'vcs', None) # make sure we save the revision of the actual repo, before we # start traversing subdirectories and moving contents around self.revision = self.get_revision(res) diff --git a/charmtools/build/tactics.py b/charmtools/build/tactics.py index 6a81fb31..676945fc 100644 --- a/charmtools/build/tactics.py +++ b/charmtools/build/tactics.py @@ -1,16 +1,20 @@ +from inspect import getargspec import errno import json -import jsonschema import logging import os +import tarfile import tempfile -from inspect import getargspec +import zipfile + +import jsonschema import requirements from path import Path as path from pkg_resources import safe_name from ruamel import yaml from charmtools import utils +from charmtools import fetchers from charmtools.build.errors import BuildError log = logging.getLogger(__name__) @@ -1048,6 +1052,8 @@ def __init__(self, *args, **kwargs): self._venv = None self.purge_wheels = False self._layer_refs = {} + self.modules = {} + self.lock_info = [] def __str__(self): directory = self.target.directory / 'wheelhouse' @@ -1064,6 +1070,7 @@ def combine(self, existing): try: req = next(requirements.parse(line)) new_pkgs.add(safe_name(req.name)) + self.modules[req.name] = req except (StopIteration, ValueError): pass # ignore comments, blank lines, etc existing_lines = [] @@ -1073,10 +1080,10 @@ def combine(self, existing): # new explicit reqs will override existing ones if safe_name(req.name) not in new_pkgs: existing_lines.append(line) + self.modules[req.name] = req else: existing_lines.append('# {} # overridden by {}' - ''.format(line, - self.layer.url)) + .format(line, self.layer.url)) except (StopIteration, ValueError): existing_lines.append(line) # ignore comments, blank lines, &c self.lines = existing_lines + self.lines @@ -1091,7 +1098,7 @@ def read(self): raise BuildError( 'Unable to determine package name for "{}"; ' 'did you forget "#egg=..."?' 
- ''.format(req.line.strip())) + .format(req.line.strip())) self._layer_refs[safe_name(req.name)] = self.layer.url self.lines = (['# ' + self.layer.url] + src.lines(retain=False) + @@ -1118,9 +1125,79 @@ def _add(self, wheelhouse, *reqs): self.removed.append(old_wheel) else: dest.remove_p() + # extract the version from the wheelhouse name + name = None + if wheel.name.endswith(".zip"): + name = wheel.name[:-4] + elif wheel.name.endswith(".tar.gz"): + name = wheel.name[:-7] + if name is not None: + ns = name.split('-') + version = ns[-1] + package = '-'.join(ns[:-1]) + log.debug("Version extracted is: %s version %s", + package, version) + # we also need to determine if it was a git repo and + # extract the branch/commit + if package in self.modules: + req = self.modules[package] + log.debug("module: %s - is vcs: %s", package, req.vcs) + if req.vcs: + (branch, version) = self._extract_pkg_vcs(wheel, + req) + log.debug("branch: %s, version=%s", + branch, version) + self.lock_info.append({ + "type": "python_module", + "package": package, + "url": req.uri, + "branch": branch, + "version": version, + "vcs": req.vcs + }) + else: + # keep the python module and version we build + log.debug("not a vcs, therefore %s==%s", + package, version) + self.lock_info.append({ + "type": "python_module", + "package": package, + "vcs": None, + "version": version, + }) + else: + # keep the python module and version we build + log.debug("Just keeping %s==%s", package, version) + self.lock_info.append({ + "type": "python_module", + "package": package, + "vcs": None, + "version": version, + }) wheel.move(wheelhouse) self.tracked.append(dest) + @staticmethod + def _extract_pkg_vcs(wheel, req): + with utils.tempdir(chdir=False) as temp_dir: + dst_file = temp_dir / wheel.name + dst_dir = temp_dir / 'unarchive' + wheel.copy(dst_file) + if dst_file.endswith('.zip'): + with zipfile.ZipFile(dst_file, 'r') as zip_ref: + zip_ref.extractall(dst_dir) + elif dst_file.endswith('.tar.gz'): + with tarfile.open(dst_file, 'r') as archive: + archive.extractall(path=dst_dir) + else: + raise RuntimeError("Can only handle zips or tar.gz?") + # now use Fetcher to extract the branch and revision + vcs_dir = dst_dir / req.name + fetcher = fetchers.Fetcher(vcs_dir) + revision = fetcher.get_revision(vcs_dir) + branch = fetcher.get_branch_for_revision(vcs_dir, revision) + return (branch, revision) + def _run_in_venv(self, *args): assert self._venv is not None # have to use bash to activate the venv properly first diff --git a/charmtools/fetchers.py b/charmtools/fetchers.py index 9c71d86e..413cf2ba 100644 --- a/charmtools/fetchers.py +++ b/charmtools/fetchers.py @@ -126,6 +126,21 @@ def get_revision(self, dir_): return rev_file.read_text().strip() return self.revision + def get_branch_for_revision(self, dir_, revision=None): + """Returns None if the revision doesn't match the actual branch name""" + if revision is None: + revision = self.revision + if not revision: + return None + for cmd in ("git branch --contains {} --format=\"%(refname)\"" + .format(revision), ): + try: + branch = check_output(cmd, cwd=dir_).decode('UTF-8').strip() + return branch + except FetchError: + continue + return None + class BzrFetcher(Fetcher): MATCH = re.compile(r""" @@ -141,6 +156,8 @@ def can_fetch(cls, url): def fetch(self, dir_): dir_ = tempfile.mkdtemp(dir=dir_) url = 'lp:' + self.repo + self.fetched_url = url + self.vcs = "bzr" cmd = 'branch --use-existing-dir {} {}'.format(url, dir_) if self.revision: log.debug('Switching to revision: 
{}'.format(self.revision)) @@ -159,6 +176,8 @@ def fetch(self, dir_): dir_ = tempfile.mkdtemp(dir=dir_) api_base = 'https://api.launchpad.net/devel/' url = api_base + self.repo + self.fetched_url = url + self.vcs = "bzr" merge_data = get(url).json() target = 'lp:' + merge_data['target_branch_link'][len(api_base):] source = 'lp:' + merge_data['source_branch_link'][len(api_base):] @@ -177,6 +196,8 @@ class LaunchpadGitFetcher(Fetcher): def fetch(self, dir_): dir_ = tempfile.mkdtemp(dir=dir_) url = 'https://git.launchpad.net/' + self.repo + self.fetched_url = url + self.vcs = "git" git('clone {} {}'.format(url, dir_)) if self.revision: log.debug('Switching to revision: {}'.format(self.revision)) @@ -193,6 +214,8 @@ class GithubFetcher(Fetcher): def fetch(self, dir_): dir_ = tempfile.mkdtemp(dir=dir_) url = 'https://github.com/' + self.repo + self.fetched_url = url + self.vcs = "git" git('clone {} {}'.format(url, dir_)) if self.revision: log.debug('Switching to revision: {}'.format(self.revision)) @@ -209,6 +232,8 @@ class OpendevFetcher(Fetcher): def fetch(self, dir_): dir_ = tempfile.mkdtemp(dir=dir_) url = 'https://opendev.org/' + self.repo + self.fetched_url = url + self.vcs = "git" git('clone {} {}'.format(url, dir_)) if self.revision: log.debug('Switching to revision: {}'.format(self.revision)) @@ -228,6 +253,8 @@ class GitFetcher(Fetcher): def fetch(self, dir_): dir_ = tempfile.mkdtemp(dir=dir_) + self.fetched_url = self.repo + self.vcs = "git" git('clone {} {}'.format(self.repo, dir_)) if self.revision: log.debug('Switching to revision: {}'.format(self.revision)) @@ -244,8 +271,11 @@ class BitbucketFetcher(Fetcher): def fetch(self, dir_): dir_ = tempfile.mkdtemp(dir=dir_) url = 'https://bitbucket.org/' + self.repo + self.fetched_url = url if url.endswith('.git'): + self.vcs = "git" return self._fetch_git(url, dir_) + self.vcs = "hg" return self._fetch_hg(url, dir_) def _fetch_git(self, url, dir_): @@ -296,6 +326,8 @@ def __init__(self, *args, **kw): def fetch(self, dir_): url = self.ARCHIVE_URL.format(self.entity) + self.fetched_url = url + self.vcs = "charmstore" archive = download_file(url, dir_) entity_dir = extract_archive(archive, dir_) return rename(entity_dir) diff --git a/docs/index.rst b/docs/index.rst index 079672d2..ce56f3ac 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -31,6 +31,7 @@ the command-line with: commands tactics + reproducible-charms .. toctree:: diff --git a/docs/reproducible-charms.rst b/docs/reproducible-charms.rst new file mode 100644 index 00000000..262e5403 --- /dev/null +++ b/docs/reproducible-charms.rst @@ -0,0 +1,102 @@ +Reproducible Charms +=================== + +When building charms, multiple layers are brought together in an ordered, +depth-first recursive fashion. The individual files of each layer are merged, +and then python modules are brought in according to ``wheelhouse.txt`` files +that may exist in each layer. + +Layers (and Interfaces) are typically Git repositories, and by default the +default branch (usually called ``master``) of the repository is fetched and +used. + +Also, although the top level Python modules can be pinned in the +``wheelhouse.txt`` files, any dependent modules are fetched as their latest +versions. This makes re-building a charm with the same layers and modules +tricky, which may be required for stable charms. 
It is possible to achieve this by populating the layer and interface +directories directly, and by pinning every Python module in a +``wheelhouse.txt`` override file passed to the ``charm build`` command using +the ``--wheelhouse-overrides`` option. + +An alternative strategy is to use a new feature of the ``charm build`` command +which can generate a lock file that contains all of the layers and Python +modules, and their versions. This can then, for subsequent builds, be used to +fetch the same layer versions and Python modules to re-create the charm. + +As the lock file is a ``JSON`` file, it can be manually edited to change a +layer version if a new version of a stable charm is needed, or a Python module +can be changed. + +Additionally, it is possible to track a branch in the repository for a layer so +that a stable (or feature) branch can be maintained and then charms rebuilt +from that branch. + +The new options for this feature are: + + * ``--write-lock-file`` + * ``--use-lock-file-branches`` + * ``--ignore-lock-file`` + + +Creating the lock file +---------------------- + +To create a lock file, the option ``--write-lock-file`` is passed to the +``charm build`` command. This option *automatically* ignores the existing lock +file, and rebuilds the charm using the latest versions of the layers and the +versions of the modules as determined in the various ``wheelhouse.txt`` files. + +Python module versions are also recorded. If a VCS repository is used for the +Python module, then any branch specified is also recorded, along with the +commit. + +At the end of the build, the lock file is written with all of the layer and +Python module information. + +The lock file is written *in* the top layer directory so that it can be +committed into the VCS and used for subsequent builds. + +The name of the lock file is ``build.lock``. + +Rebuilding the charm from the lock file +--------------------------------------- + +If a lock file (``build.lock``) is available in the top layer, then it will be +used to control the versions of the layers and modules *by default*; i.e. the +presence of the lock file controls the build. + +Three options are available which can influence the build when a lock file is +present: + + * ``--ignore-lock-file`` + * ``--use-lock-file-branches`` + * ``--wheelhouse-overrides`` + +If the ``--ignore-lock-file`` option is used, then the charm is built as though +there is no lock file. + +If ``--use-lock-file-branches`` is used then, for VCS items (layers, +interfaces, and Python modules specified using a VCS string), the branch (if +there was one) will be used rather than the commit. This can be used to track +a branch in a layer or Python module. + +Note: if ``--wheelhouse-overrides`` is used, that wheelhouse overrides the +lock file; i.e. the lock file overrides the layers' ``wheelhouse.txt`` files, +and ``--wheelhouse-overrides`` in turn overrides the lock file. This is +intentional, to allow the build to perform specific overrides as needed. + +Other useful information +------------------------ + +This is the first iteration of 'reproducible charms'. As such, only Git is +supported as the VCS for the layers, and Git and Bazaar for Python modules. A +future iteration may support more VCS systems. + +Only the top layer is inspected for a ``build.lock`` file. Any other layers +are considered inputs and their ``build.lock`` files are ignored (if they are +present). 
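For orientation, the following sketch shows roughly what a ``build.lock`` file contains. The shape follows ``write_the_lock_file`` and the ``lock_info`` entries added in this change; the layer commit is the one from the example earlier in the diff, while the Python package name and version are invented for illustration:

    import json

    example_build_lock = {
        "locks": [
            {
                "type": "layer",
                "item": "layer:basic",
                "url": "https://github.com/juju-solutions/layer-basic.git",
                "vcs": None,  # serialised as null in the JSON file
                "branch": "refs/heads/master",
                "commit": "623e69c7b432456fd4364f6e1835424fd6b5425e",
            },
            {
                "type": "python_module",
                "package": "charmhelpers",  # hypothetical pinned module
                "vcs": None,
                "version": "0.20.8",
            },
        ]
    }
    print(json.dumps(example_build_lock, indent=2))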
+ +Also, regardless of the layers' ``wheelhouse.txt`` files, the lock file will +override any changes that may be introduced in stable branches, if they are +being tracked using ``--use-lock-file-branches``. This may lead to unexpected +behaviour. diff --git a/requirements.txt b/requirements.txt index cfa6b78c..624754d7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +keyring<21.0 PyYAML>=3.11,<4.3 blessings==1.6 libcharmstore>=0.0.3 @@ -16,7 +17,6 @@ responses==0.3.0 ruamel.yaml<0.16.0 virtualenv>=1.11.4 jsonschema==2.5.1 -keyring<21 secretstorage<2.4 six>=1.11.0 dict2colander==0.2 diff --git a/tests/test_build.py b/tests/test_build.py index 2d16f136..61242fcc 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -82,6 +82,7 @@ def test_failed_proof(self, mBuilder, mproof, mparse_args): @mock.patch("charmtools.build.builder.Builder.plan_version") def test_tester_layer(self, pv): bu = build.Builder() + bu.ignore_lock_file = True bu.log_level = "WARNING" bu.build_dir = self.build_dir bu.cache_dir = bu.build_dir / "_cache" @@ -238,6 +239,7 @@ def test_remote_interface(self, pv): }''', content_type="application/json") bu = build.Builder() + bu.ignore_lock_file = True bu.log_level = "WARNING" bu.build_dir = self.build_dir bu.cache_dir = bu.build_dir / "_cache" @@ -279,6 +281,7 @@ def test_remote_layer(self, mcall, ph, pi, pv): }''', content_type="application/json") bu = build.Builder() + bu.ignore_lock_file = True bu.log_level = "WARNING" bu.build_dir = self.build_dir bu.cache_dir = bu.build_dir / "_cache" @@ -309,6 +312,7 @@ def test_remote_layer(self, mcall, ph, pi, pv): @mock.patch("charmtools.utils.Process") def test_pypi_installer(self, mcall, ph, pi, pv): bu = build.Builder() + bu.ignore_lock_file = True bu.log_level = "WARN" bu.build_dir = self.build_dir bu.cache_dir = bu.build_dir / "_cache" @@ -335,6 +339,7 @@ def test_pypi_installer(self, mcall, ph, pi, pv): def test_version_tactic_without_existing_version_file(self, mcall, ph, pi, get_sha): bu = build.Builder() + bu.ignore_lock_file = True bu.log_level = "WARN" bu.build_dir = self.build_dir bu.cache_dir = bu.build_dir / "_cache" @@ -364,6 +369,7 @@ def test_version_tactic_without_existing_version_file(self, mcall, ph, pi, @mock.patch("charmtools.build.builder.Builder.plan_hooks") def test_version_tactic_missing_cmd(self, ph, pi): bu = build.Builder() + bu.ignore_lock_file = True bu.log_level = "WARN" bu.build_dir = self.build_dir bu.cache_dir = bu.build_dir / "_cache" @@ -395,6 +401,7 @@ def test_version_tactic_missing_cmd(self, ph, pi): def test_version_tactic_with_existing_version_file(self, mcall, ph, pi, get_sha, read): bu = build.Builder() + bu.ignore_lock_file = True bu.log_level = "WARN" bu.build_dir = self.build_dir bu.cache_dir = bu.build_dir / "_cache" @@ -583,6 +590,7 @@ def _layer(tactics): url=tactics[0]) builder = build.builder.Builder() + builder.ignore_lock_file = True builder.build_dir = self.build_dir builder.cache_dir = builder.build_dir / "_cache" builder.charm = 'foo'
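To round out the picture, the following sketch (it is not one of the tests added above) shows how the ``python_module`` entries of a lock file are turned back into wheelhouse lines on a rebuild, using ``generate_python_modules_from_lock_file`` from this change. It assumes a charm-tools checkout with this change applied; the package names, URL, version and commit are invented for illustration:

    from charmtools.build.builder import Builder

    bu = Builder()
    bu.use_lock_file_branches = False
    bu.with_locks = {
        "python_module:charmhelpers": {
            "type": "python_module", "package": "charmhelpers",
            "vcs": None, "version": "0.20.8",
        },
        "python_module:charms.openstack": {
            "type": "python_module", "package": "charms.openstack",
            "vcs": "git",
            "url": "git+https://github.com/openstack/charms.openstack.git",
            "branch": "refs/heads/master",
            "version": "7e5a4b0b7c3a1d2e9f8c6b5a4d3e2f1a0b9c8d7e",
        },
    }
    print(bu.generate_python_modules_from_lock_file())
    # Output (wrapped here for readability):
    # ['charmhelpers==0.20.8',
    #  'git+https://github.com/openstack/charms.openstack.git@7e5a4b0b7c3a1d2e9f8c6b5a4d3e2f1a0b9c8d7e#egg=charms.openstack']

These are the lines that the lock-file wheelhouse tactic injects ahead of the layers' own ``wheelhouse.txt`` entries, and that ``--wheelhouse-overrides`` can still override.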