diff --git a/CHANGELOG.md b/CHANGELOG.md index 5f96cdbf5f..d3dc68aea3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,48 @@ # Changelog +## [2.14.0](https://www.github.com/bioconda/bioconda-utils/compare/v2.13.2...v2.14.0) (2024-04-01) + + +### Features + +* fetch artifacts from GitHub Actions ([#973](https://www.github.com/bioconda/bioconda-utils/issues/973)) ([858e1cf](https://www.github.com/bioconda/bioconda-utils/commit/858e1cfdd4435aca16c9978df5463db845ff9fe3)) +* update deployment target for osx-arm64 ([#967](https://www.github.com/bioconda/bioconda-utils/issues/967)) ([a7c591e](https://www.github.com/bioconda/bioconda-utils/commit/a7c591ea51fdf3308a0075951e8f5efee783c8d1)) + +### [2.13.2](https://www.github.com/bioconda/bioconda-utils/compare/v2.13.1...v2.13.2) (2024-03-27) + + +### Bug Fixes + +* update anaconda-client=1.12 to fix uploads ([#970](https://www.github.com/bioconda/bioconda-utils/issues/970)) ([06dcacc](https://www.github.com/bioconda/bioconda-utils/commit/06dcacca60c17c1a97770e14ae6348ac7acbe9a6)) + +### [2.13.1](https://www.github.com/bioconda/bioconda-utils/compare/v2.13.0...v2.13.1) (2024-03-27) + + +### Bug Fixes + +* detect errors when uploading artifacts ([#968](https://www.github.com/bioconda/bioconda-utils/issues/968)) ([d0a79cd](https://www.github.com/bioconda/bioconda-utils/commit/d0a79cdd5aeed30c4da88e2135329d66b336832f)) + +## [2.13.0](https://www.github.com/bioconda/bioconda-utils/compare/v2.12.0...v2.13.0) (2024-03-22) + + +### Features + +* add osx-arm64 to platform checks ([#965](https://www.github.com/bioconda/bioconda-utils/issues/965)) ([9f6df10](https://www.github.com/bioconda/bioconda-utils/commit/9f6df10bfecd048956acc80e7bb3d57952585529)) + +## [2.12.0](https://www.github.com/bioconda/bioconda-utils/compare/v2.11.1...v2.12.0) (2024-03-18) + + +### Features + +* add support for excluding otherwise-selected recipes ([#962](https://www.github.com/bioconda/bioconda-utils/issues/962)) ([3946732](https://www.github.com/bioconda/bioconda-utils/commit/3946732eb6129f6905e53b62d76287e09d4bef36)) +* bioconductor improvements ([#944](https://www.github.com/bioconda/bioconda-utils/issues/944)) ([b007d34](https://www.github.com/bioconda/bioconda-utils/commit/b007d34e6c723f7f9d6fcb5a6f58e072d4618cdf)) +* Bulk build failure wiki ([#948](https://www.github.com/bioconda/bioconda-utils/issues/948)) ([18f988d](https://www.github.com/bioconda/bioconda-utils/commit/18f988d70966f6f6296170d96cc1ced51ad10392)) + + +### Bug Fixes + +* Do not emit cython_needs_compiler if compiler("cxx") is set ([#927](https://www.github.com/bioconda/bioconda-utils/issues/927)) ([8255afd](https://www.github.com/bioconda/bioconda-utils/commit/8255afdd9e5c0fd3cb09cb11269f5ff3397c959e)) + ### [2.11.1](https://www.github.com/bioconda/bioconda-utils/compare/v2.11.0...v2.11.1) (2023-12-13) diff --git a/bioconda_utils/artifacts.py b/bioconda_utils/artifacts.py index eef49f5dfc..c77afb1fb4 100644 --- a/bioconda_utils/artifacts.py +++ b/bioconda_utils/artifacts.py @@ -1,5 +1,6 @@ +from enum import Enum import glob import os import re @@ -20,7 +21,14 @@ IMAGE_RE = re.compile(r"(.+)(?::|%3A)(.+)\.tar\.gz$") -def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_target=None, label=None, artifact_source="azure") -> bool: +class UploadResult(Enum): + SUCCESS = 1 + FAILURE = 2 + NO_ARTIFACTS = 3 + NO_PR = 4 + + +def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_target=None, label=None, artifact_source="azure") -> UploadResult: _config = 
utils.load_config(config) repodata = utils.RepoData() @@ -30,28 +38,35 @@ def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_targe commit = repo.get_commit(git_sha) prs = commit.get_pulls() - if not prs: + if not prs or prs.totalCount < 1: # no PR found for the commit - return True + return UploadResult.NO_PR pr = prs[0] - artifacts = set(fetch_artifacts(pr, artifact_source)) + artifacts = set(fetch_artifacts(pr, artifact_source, repo)) if not artifacts: # no artifacts found, fail and rebuild packages logger.info("No artifacts found.") - return False + return UploadResult.NO_ARTIFACTS else: + success = [] for artifact in artifacts: with tempfile.TemporaryDirectory() as tmpdir: # download the artifact if artifact_source == "azure": artifact_path = os.path.join(tmpdir, os.path.basename(artifact)) - download_artifact(artifact, artifact_path) + download_artifact(artifact, artifact_path, artifact_source) zipfile.ZipFile(artifact_path).extractall(tmpdir) elif artifact_source == "circleci": artifact_dir = os.path.join(tmpdir, *(artifact.split("/")[-4:-1])) artifact_path = os.path.join(tmpdir, artifact_dir, os.path.basename(artifact)) - Path(artifact_dir).mkdir(parents=True, exist_ok=True) - download_artifact(artifact, artifact_path) + Path(artifact_dir).mkdir(parents=True, exist_ok=True) + download_artifact(artifact, artifact_path, artifact_source) + elif artifact_source == "github-actions": + artifact_dir = os.path.join(tmpdir, "artifacts") + artifact_path = os.path.join(artifact_dir, os.path.basename(artifact)) + Path(artifact_dir).mkdir(parents=True, exist_ok=True) + download_artifact(artifact, artifact_path, artifact_source) + zipfile.ZipFile(artifact_path).extractall(artifact_dir) # get all the contained packages and images and upload them platform_patterns = [repodata.platform2subdir(repodata.native_platform())] @@ -67,7 +82,7 @@ def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_targe else: logger.info(f"Uploading {pkg} to anaconda.org.") # upload the package - anaconda_upload(pkg, label=label) + success.append(anaconda_upload(pkg, label=label)) if mulled_upload_target: quay_login = os.environ['QUAY_LOGIN'] @@ -90,17 +105,27 @@ def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_targe else: # upload the image logger.info(f"Uploading {img} to {target}.") - skopeo_upload(fixed_img_name, target, creds=quay_login) - return True + success.append(skopeo_upload(fixed_img_name, target, creds=quay_login)) + if all(success): + return UploadResult.SUCCESS + else: + return UploadResult.FAILURE @backoff.on_exception( backoff.expo, requests.exceptions.RequestException ) -def download_artifact(url, to_path): +def download_artifact(url, to_path, artifact_source): logger.info(f"Downloading artifact {url}.") - resp = requests.get(url, stream=True, allow_redirects=True) + headers = {} + if artifact_source == "github-actions": + token = os.environ.get("GITHUB_TOKEN") + if not token: + logger.critical("GITHUB_TOKEN required to download GitHub Actions artifacts") + exit(1) + headers = {"Authorization": f"token {token}"} + resp = requests.get(url, stream=True, allow_redirects=True, headers=headers) resp.raise_for_status() with open(to_path, "wb") as f: for chunk in resp.iter_content(chunk_size=1024): @@ -108,7 +133,7 @@ def download_artifact(url, to_path): f.write(chunk) -def fetch_artifacts(pr, artifact_source): +def fetch_artifacts(pr, artifact_source, repo): """ Fetch artifacts from a PR. 
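The new `github-actions` code path above hinges on authenticated, short-lived artifact downloads. A minimal standalone sketch of that flow, assuming a `GITHUB_TOKEN` environment variable and an archive URL like the `archive_download_url` PyGithub exposes (the helper name here is illustrative, not part of the patch):

```python
import os
import tempfile
import zipfile

import requests


def fetch_gha_artifact(archive_url: str, dest_dir: str) -> None:
    """Download a GitHub Actions artifact zip and unpack it into dest_dir."""
    # Artifact archive URLs expire quickly and require an authenticated request.
    token = os.environ["GITHUB_TOKEN"]
    resp = requests.get(
        archive_url,
        stream=True,
        allow_redirects=True,
        headers={"Authorization": f"token {token}"},
    )
    resp.raise_for_status()
    with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp:
        for chunk in resp.iter_content(chunk_size=1024):
            if chunk:  # skip keep-alive chunks
                tmp.write(chunk)
    zipfile.ZipFile(tmp.name).extractall(dest_dir)
```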
@@ -143,6 +168,13 @@ def fetch_artifacts(pr, artifact_source): # Circle CI builds artifact_url = get_circleci_artifacts(check_run, platform) yield from artifact_url + elif ( + artifact_source == "github-actions" and + check_run.app.slug == "github-actions" + ): + # GitHub Actions builds + artifact_url = get_gha_artifacts(check_run, platform, repo) + yield from artifact_url def get_azure_artifacts(check_run): @@ -185,3 +217,18 @@ def get_circleci_artifacts(check_run, platform): continue else: yield artifact_url + +def parse_gha_build_id(url: str) -> str: + # Get the workflow run id from the URL + return re.search(r"runs/(\d+)/", url).group(1) + +def get_gha_artifacts(check_run, platform, repo): + gha_workflow_id = parse_gha_build_id(check_run.details_url) + if gha_workflow_id: + # The workflow run is different from the check run + run = repo.get_workflow_run(int(gha_workflow_id)) + artifacts = run.get_artifacts() + for artifact in artifacts: + # This URL is valid for 1 min and requires a token + artifact_url = artifact.archive_download_url + yield artifact_url diff --git a/bioconda_utils/bioconda_utils-conda_build_config.yaml b/bioconda_utils/bioconda_utils-conda_build_config.yaml index d0af4b8ac7..fc19ffaba3 100644 --- a/bioconda_utils/bioconda_utils-conda_build_config.yaml +++ b/bioconda_utils/bioconda_utils-conda_build_config.yaml @@ -9,8 +9,9 @@ # clear hard-coded default value for CONDA_BUILD_SYSROOT CONDA_BUILD_SYSROOT: - "" -MACOSX_DEPLOYMENT_TARGET: - - "10.9" +MACOSX_DEPLOYMENT_TARGET: # [osx] + - 11.0 # [osx and arm64] + - 10.9 # [osx and x86_64] pin_run_as_build: htslib: diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index db098c3f69..8070b6a0e7 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -26,7 +26,7 @@ pyopenssl>=22.1 # Stay compatible with cryptography conda-forge-pinning=2023.05.06.13.08.41 # tools -anaconda-client=1.6.* # anaconda_upload +anaconda-client=1.12.* # anaconda_upload involucro=1.1.* # mulled test and container build skopeo=1.11.* # docker upload git=2.* # well - git @@ -41,7 +41,7 @@ aiohttp-session aiohttp-security aiofiles=0.8.* # async open aioftp=0.12.* # FTP lib -backoff=1.6.* # retrying of failed API calls +backoff=2.2.* # retrying of failed API calls cachetools=3.0.* # request caching (NEEDED?) # client API wrappers @@ -51,7 +51,7 @@ gidgethub=3.0.* # githubhandler pyjwt>=2.4.0 # githubhandler (JWT signing), needs >=2.4.0, CVE-2022-29217 # unknown -beautifulsoup4=4.8.* +beautifulsoup4=4.12.* galaxy-lib>=18.9.1 jinja2>=2.10.1,<3 markupsafe<2.1 # markupsafe 2.1 breaks jinja2 @@ -66,7 +66,7 @@ markdown graphviz # The bioconductor skeleton needs this -requests=2.22.* +requests=2.29.* # merge handling pygithub diff --git a/bioconda_utils/bioconductor_skeleton.py b/bioconda_utils/bioconductor_skeleton.py index 32dec40994..9a98fedca1 100755 --- a/bioconda_utils/bioconductor_skeleton.py +++ b/bioconda_utils/bioconductor_skeleton.py @@ -774,7 +774,7 @@ def dependencies(self): dependency_mapping[prefix + name.lower() + version] = name # Check SystemRequirements in the DESCRIPTION file to make sure - # packages with such reqquirements are provided correct recipes. + # packages with such requirements are provided correct recipes.
if (self.packages[self.package].get('SystemRequirements') is not None): logger.warning( "The 'SystemRequirements' {} are needed".format( @@ -940,7 +940,7 @@ def sub_placeholders(x): additional_run_deps = [] if self.is_data_package: additional_run_deps.append('curl') - additional_run_deps.append('bioconductor-data-packages>={}'.format(date.today().strftime('%Y%m%d'))) + additional_run_deps.append('bioconductor-data-packages >={}'.format(date.today().strftime('%Y%m%d'))) d = OrderedDict(( ( @@ -959,6 +959,7 @@ def sub_placeholders(x): 'build', OrderedDict(( ('number', self.build_number), ('rpaths', ['lib/R/lib/', 'lib/']), + ('run_exports', f'{{{{ pin_subpackage("bioconductor-{self.package_lower}", max_pin="x.x") }}}}'), )), ), ( @@ -1248,7 +1249,19 @@ def write_recipe(package, recipe_dir, config, bioc_data_packages=None, force=Fal (updated_version == current_version) and (updated_meta != current_meta) ): - proj.build_number = int(current_build_number) + 1 + # Sometimes when updating all packages, the updating process fails + # partway. Re-running the updating process should not bump the + # build number if no builds for this version exist yet in the repo. + existing_bldnos = utils.RepoData().get_package_data( + key="build_number", + name="bioconductor-" + proj.package.lower(), + version=updated_version + ) + if not existing_bldnos: + proj.build_number = 0 + else: + proj.build_number = sorted([int(i) for i in existing_bldnos])[-1] + 1 + if 'extra' in current_meta: exclude = set(['final', 'copy_test_source_files']) proj.extra = {x: y for x, y in current_meta['extra'].items() if x not in exclude} diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index bdf77e002f..f58bb6d891 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -294,10 +294,13 @@ def do_not_consider_for_additional_platform(recipe_folder: str, recipe: str, pla Return True if the current native platform is not included in the recipe's additional platforms (no need to build). """ recipe_obj = _recipe.Recipe.from_file(recipe_folder, recipe) - # On linux-aarch64 env, only build recipe with linux-aarch64 extra_additional_platforms + # On linux-aarch64 or osx-arm64 env, only build recipe with matching extra_additional_platforms if platform == "linux-aarch64": if "linux-aarch64" not in recipe_obj.extra_additional_platforms: return True + if platform == "osx-arm64": + if "osx-arm64" not in recipe_obj.extra_additional_platforms: + return True return False @@ -318,7 +321,9 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str], record_build_failures: bool = False, skiplist_leafs: bool = False, live_logs: bool = True, - subdag_depth: int = None): + exclude: List[str] = None, + subdag_depth: int = None + ): """ Build one or many bioconda packages. @@ -347,6 +352,8 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str], keep_old_work: Do not remove anything from environment, even after successful build and test. skiplist_leafs: If True, blacklist leaf packages that fail to build live_logs: If True, enable live logging during the build process + exclude: list of recipes to exclude. Typically used for + temporary exclusion; otherwise consider adding the recipe to the skiplist. subdag_depth: Number of levels of nodes to skip.
(Optional, only if using n_workers) """ if not recipes: @@ -377,6 +384,10 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str], failed = [] dag, name2recipes = graph.build(recipes, config=config_path, blacklist=blacklist) + if exclude: + for name in exclude: + dag.remove_node(name) + if not dag: logger.info("Nothing to be done.") return True diff --git a/bioconda_utils/build_failure.py b/bioconda_utils/build_failure.py index bec184754b..e9f722f001 100644 --- a/bioconda_utils/build_failure.py +++ b/bioconda_utils/build_failure.py @@ -1,4 +1,5 @@ import os +import sys import time from typing import Optional, Union import subprocess as sp @@ -13,6 +14,8 @@ import pandas as pd import networkx as nx +from .githandler import BiocondaRepo + from bioconda_utils.recipe import Recipe from bioconda_utils import graph, utils from bioconda_utils.githandler import GitHandler @@ -35,6 +38,8 @@ def __init__(self, recipe: Union[str, Recipe], platform: Optional[str]=None): self.platform = platform def load(path): + if os.path.getsize(path) == 0: + raise IOError(f"Unable to read build failure record {path}: empty file") with open(path, "r") as f: yaml=YAML() try: @@ -86,20 +91,26 @@ def write(self): with open(self.path, "w") as f: yaml=YAML() commented_map = CommentedMap() - commented_map.insert(0, "recipe_sha", self.recipe_sha, comment="The commit at which this recipe failed to build.") + commented_map.insert(0, "recipe_sha", self.recipe_sha, comment="The hash of the recipe's meta.yaml at which this recipe failed to build.") commented_map.insert(1, "skiplist", self.skiplist, comment="Set to true to skiplist this recipe so that it will be ignored as long as its latest commit is the one given above.") i = 2 - if self.log: + + _log = self.inner.get("log", "") + if _log: commented_map.insert( i, "log", # remove invalid chars and keep only the last 100 lines - LiteralScalarString("\n".join(utils.yaml_remove_invalid_chars(self.log).splitlines()[-100:])), + LiteralScalarString("\n".join(utils.yaml_remove_invalid_chars(_log).splitlines()[-100:])), comment="Last 100 lines of the build log."
) i += 1 if self.reason: commented_map.insert(i, "reason", LiteralScalarString(self.reason)) + i += 1 + if self.category: + commented_map.insert(i, "category", LiteralScalarString(self.category)) + i += 1 yaml.dump(commented_map, f) def remove(self): @@ -179,7 +190,7 @@ def category(self, value): self.inner["category"] = value -def collect_build_failure_dataframe(recipe_folder, config, channel, link_fmt="txt", link_prefix=""): +def collect_build_failure_dataframe(recipe_folder, config, channel, link_fmt="txt", link_prefix="", git_range=None): def get_build_failure_records(recipe): return filter( BuildFailureRecord.exists, @@ -190,6 +201,21 @@ def has_build_failure(recipe): return any(get_build_failure_records(recipe)) recipes = list(utils.get_recipes(recipe_folder)) + + if git_range: + if len(git_range) > 2: + sys.exit("--git-range may have only one or two arguments") + other = git_range[0] + ref = "HEAD" if len(git_range) == 1 else git_range[1] + repo = BiocondaRepo(recipe_folder) + changed_recipes = repo.get_recipes_to_build(ref, other) + logger.info("Constraining to %s git modified recipes%s.", len(changed_recipes), + utils.ellipsize_recipes(changed_recipes, recipe_folder)) + recipes = [recipe for recipe in recipes if recipe in set(changed_recipes)] + if len(recipes) != len(changed_recipes): + logger.info("Overlap was %s recipes%s.", len(recipes), + utils.ellipsize_recipes(recipes, recipe_folder)) + dag, _ = graph.build(recipes, config) def get_data(): @@ -214,10 +240,11 @@ def get_data(): recs = list(get_build_failure_records(recipe)) failures = ", ".join(utils.format_link(rec.path, link_fmt, prefix=link_prefix, label=rec.platform) for rec in recs) + categories = ", ".join(rec.category for rec in recs) skiplisted = any(rec.skiplist for rec in recs) prs = utils.format_link(f"https://github.com/bioconda/bioconda-recipes/pulls?q=is%3Apr+is%3Aopen+{package}", link_fmt, label="show") - yield (recipe, downloads, descendants, skiplisted, failures, prs) + yield (recipe, downloads, descendants, skiplisted, categories, failures, prs) - data = pd.DataFrame(get_data(), columns=["recipe", "downloads", "depending", "skiplisted", "build failures", "pull requests"]) + data = pd.DataFrame(get_data(), columns=["recipe", "downloads", "depending", "skiplisted", "category", "build failures", "pull requests"]) data.sort_values(by=["depending", "downloads"], ascending=False, inplace=True) return data diff --git a/bioconda_utils/bulk.py b/bioconda_utils/bulk.py index b55a1c826c..59aa919cc0 100644 --- a/bioconda_utils/bulk.py +++ b/bioconda_utils/bulk.py @@ -8,7 +8,7 @@ def check_branch(): branch = utils.run(["git", "rev-parse", "--abbrev-ref", "HEAD"], mask=False).stdout if branch != "bulk": - logger.error("bulk-run-ci has to be executed on a checkout of the bulk branch") + logger.error("bulk-trigger-ci has to be executed on a checkout of the bulk branch") exit(1) diff --git a/bioconda_utils/cli.py b/bioconda_utils/cli.py index 84f69a4ad1..24bca16bff 100644 --- a/bioconda_utils/cli.py +++ b/bioconda_utils/cli.py @@ -9,7 +9,7 @@ import warnings from bioconda_utils import bulk -from bioconda_utils.artifacts import upload_pr_artifacts +from bioconda_utils.artifacts import UploadResult, upload_pr_artifacts from bioconda_utils.skiplist import Skiplist from bioconda_utils.build_failure import BuildFailureRecord, collect_build_failure_dataframe warnings.filterwarnings("ignore", message="numpy.dtype size changed") @@ -435,6 +435,7 @@ def do_lint(recipe_folder, config, packages="*", cache=None,
list_checks=False, @arg("--record-build-failures", action="store_true", help="Record build failures in build_failure.yaml next to the recipe.") @arg("--skiplist-leafs", action="store_true", help="Skiplist leaf recipes (i.e. ones that are not depended on by any other recipes) that fail to build.") @arg('--disable-live-logs', action='store_true', help="Disable live logging during the build process") +@arg('--exclude', nargs='+', help='Packages to exclude during this run') @arg('--subdag-depth', type=int, help="Number of levels of root nodes to skip. (Optional, and only if using n_workers)") @enable_logging() def build(recipe_folder, config, packages="*", git_range=None, testonly=False, @@ -447,6 +448,7 @@ def build(recipe_folder, config, packages="*", git_range=None, testonly=False, record_build_failures=False, skiplist_leafs=False, disable_live_logs=False, + exclude=None, subdag_depth=None): cfg = utils.load_config(config) setup = cfg.get('setup', None) @@ -509,7 +511,9 @@ def build(recipe_folder, config, packages="*", git_range=None, testonly=False, record_build_failures=record_build_failures, skiplist_leafs=skiplist_leafs, live_logs=(not disable_live_logs), - subdag_depth=subdag_depth) + exclude=exclude, + subdag_depth=subdag_depth + ) exit(0 if success else 1) @@ -523,7 +527,7 @@ def build(recipe_folder, config, packages="*", git_range=None, testonly=False, @arg('--dryrun', action='store_true', help='''Do not actually upload anything.''') @arg('--fallback', choices=['build', 'ignore'], default='build', help="What to do if no artifacts are found in the PR.") @arg('--quay-upload-target', help="Provide a quay.io target to push docker images to.") -@arg('--artifact-source', choices=['azure', 'circleci'], default='azure', help="Application hosting build artifacts (e.g., Azure or Circle CI).") +@arg('--artifact-source', choices=['azure', 'circleci','github-actions'], default='azure', help="Application hosting build artifacts (e.g., Azure, Circle CI, or GitHub Actions).") @enable_logging() def handle_merged_pr( recipe_folder, @@ -537,10 +541,12 @@ def handle_merged_pr( ): label = os.getenv('BIOCONDA_LABEL', None) or None - success = upload_pr_artifacts( - config, repo, git_range[1], dryrun=dryrun, mulled_upload_target=quay_upload_target, label=label, artifact_source=artifact_source + res = upload_pr_artifacts( + config, repo, git_range[1], dryrun=dryrun, + mulled_upload_target=quay_upload_target, label=label, + artifact_source=artifact_source ) - if not success and fallback == 'build': + if res == UploadResult.NO_ARTIFACTS and fallback == 'build': success = build( recipe_folder, config, @@ -550,6 +556,8 @@ def handle_merged_pr( mulled_test=True, label=label, ) + else: + success = res != UploadResult.FAILURE exit(0 if success else 1) @recipe_folder_and_config() @@ -571,9 +579,7 @@ def dag(recipe_folder, config, packages="*", format='gml', hide_singletons=False """ Export the DAG of packages to a graph format file for visualization """ - dag, name2recipes = graph.build(utils.get_recipes(recipe_folder, "*"), config) - if packages != "*": - dag = graph.filter(dag, packages) + dag, name2recipes = graph.build(utils.get_recipes(recipe_folder, packages), config) if hide_singletons: for node in nx.nodes(dag): if dag.degree(node) == 0: @@ -1078,7 +1084,11 @@ def annotate_build_failures(recipes, skiplist=False, reason=None, category=None, @arg('--channel', help="Channel with packages to check", default="bioconda") @arg('--output-format', help="Output format", choices=['txt', 'markdown'], default="txt") 
@arg('--link-prefix', help="Prefix for links to build failures", default='') -def list_build_failures(recipe_folder, config, channel=None, output_format=None, link_prefix=None): +@arg('--git-range', nargs='+', + help='''Git range (e.g. commits or something like + "master HEAD" to check commits in HEAD vs master, or just "HEAD" to + include uncommitted changes).''') +def list_build_failures(recipe_folder, config, channel=None, output_format=None, link_prefix=None, git_range=None): """List recipes with build failure records""" df = collect_build_failure_dataframe( @@ -1087,6 +1097,7 @@ def list_build_failures(recipe_folder, config, channel=None, output_format=None, channel, link_fmt=output_format, link_prefix=link_prefix, + git_range=git_range ) if output_format == "markdown": fmt_writer = pandas.DataFrame.to_markdown @@ -1098,15 +1109,11 @@ def list_build_failures(recipe_folder, config, channel=None, output_format=None, fmt_writer(df, sys.stdout, index=False) -@arg( - 'message', - help="The commit message. Will be prepended with [ci skip] to avoid that commits accidentally trigger a rerun while bulk is already running" -) -def bulk_commit(message): - bulk.commit(message) - - def bulk_trigger_ci(): + """ + Create an empty commit with the string "[ci run]" and push, which + triggers a bulk CI run. Must be on the `bulk` branch. + """ bulk.trigger_ci() @@ -1118,5 +1125,5 @@ def main(): build, dag, dependent, do_lint, duplicates, update_pinning, bioconductor_skeleton, clean_cran_skeleton, autobump, handle_merged_pr, annotate_build_failures, list_build_failures, - bulk_commit, bulk_trigger_ci + bulk_trigger_ci ]) diff --git a/bioconda_utils/utils.py b/bioconda_utils/utils.py index 13855f1a77..cc6be6ec09 100644 --- a/bioconda_utils/utils.py +++ b/bioconda_utils/utils.py @@ -1520,6 +1520,8 @@ def native_platform(): return "linux-aarch64" if sys.platform.startswith("linux"): return "linux" + if sys.platform.startswith("darwin") and arch == "arm64": + return "osx-arm64" if sys.platform.startswith("darwin"): return "osx" raise ValueError("Running on unsupported platform") @@ -1532,11 +1534,13 @@ def platform2subdir(platform): return 'linux-aarch64' elif platform == 'osx': return 'osx-64' + elif platform == 'osx-arm64': + return 'osx-arm64' elif platform == 'noarch': return 'noarch' else: raise ValueError( - 'Unsupported platform: bioconda only supports linux, linux-aarch64, osx and noarch.') + 'Unsupported platform: bioconda only supports linux, linux-aarch64, osx, osx-arm64 and noarch.') def get_versions(self, name): @@ -1645,4 +1649,6 @@ def yaml_remove_invalid_chars(text: str, valid_chars_re=re.compile(r"[^ \t\n\w\d def get_package_downloads(channel, package): """Use anaconda API to obtain download counts.""" data = requests.get(f"https://api.anaconda.org/package/{channel}/{package}").json() - return sum(rec["ndownloads"] for rec in data["files"]) + if "files" in data: + return sum(rec["ndownloads"] for rec in data["files"]) + return 0 diff --git a/test/test_recipe.py b/test/test_recipe.py index 5e1b84559c..e3822ce7a9 100644 --- a/test/test_recipe.py +++ b/test/test_recipe.py @@ -260,6 +260,29 @@ def test_recipe_package_names(recipe): @with_recipes def test_recipe_extra_additional_platforms(recipe): + assert recipe.extra_additional_platforms == [] + recipe.meta_yaml += [ + 'extra:', + ' additional-platforms:', + ' - linux-aarch64', + ' - osx-arm64' + ] + recipe.render() + assert recipe.extra_additional_platforms == ["linux-aarch64", "osx-arm64"] + +@with_recipes +def 
test_recipe_extra_additional_platform_osx(recipe): + assert recipe.extra_additional_platforms == [] + recipe.meta_yaml += [ + 'extra:', + ' additional-platforms:', + ' - osx-arm64' + ] + recipe.render() + assert recipe.extra_additional_platforms == ["osx-arm64"] + +@with_recipes +def test_recipe_extra_additional_platform_linux(recipe): assert recipe.extra_additional_platforms == [] recipe.meta_yaml += [ 'extra:', diff --git a/test/test_utils.py b/test/test_utils.py index 916fb10894..12524ee3d8 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -156,6 +156,37 @@ def multi_build(request, recipes_fixture, config_fixture): ensure_missing(pkg) +@pytest.fixture(scope='module', params=PARAMS, ids=IDS) +def multi_build_exclude(request, recipes_fixture, config_fixture): + """ + Builds the "one" and "two" recipes; provides (but then excludes) the + "three" recipe. + """ + if request.param: + docker_builder = docker_utils.RecipeBuilder( + use_host_conda_bld=True, + docker_base_image=DOCKER_BASE_IMAGE) + mulled_test = True + else: + docker_builder = None + mulled_test = False + logger.error("Fixture: Building one/two (and not three) %s", + "within docker" if docker_builder else "locally") + build.build_recipes(recipes_fixture.basedir, config_fixture, + recipes_fixture.recipe_dirnames, + docker_builder=docker_builder, + mulled_test=mulled_test, + exclude=['three'], + ) + logger.error("Fixture: Building one/two (and not three) %s -- DONE", + "within docker" if docker_builder else "locally") + built_packages = recipes_fixture.pkgs + yield built_packages + for pkgs in built_packages.values(): + for pkg in pkgs: + ensure_missing(pkg) + + @pytest.fixture(scope='module') def single_upload(): """ @@ -216,6 +247,7 @@ def test_upload(single_upload): def test_single_build_only(single_build): for pkg in single_build: assert os.path.exists(pkg) + ensure_missing(pkg) @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') @@ -229,6 +261,18 @@ def test_multi_build(multi_build): for v in multi_build.values(): for pkg in v: assert os.path.exists(pkg) + ensure_missing(pkg) + + +@pytest.mark.long_running_1 +def test_multi_build_exclude(multi_build_exclude): + for (k, v) in multi_build_exclude.items(): + for pkg in v: + if k == 'three': + assert not os.path.exists(pkg) + else: + assert os.path.exists(pkg) + ensure_missing(pkg) @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') @@ -268,6 +312,7 @@ def test_docker_builder_build(recipes_fixture): build_args='', env={}) for pkg in pkgs: assert os.path.exists(pkg) + ensure_missing(pkg) @pytest.mark.skipif(SKIP_DOCKER_TESTS, reason='skipping on osx') @@ -894,10 +939,18 @@ def test_native_platform_skipping(): # Don't skip linux-x86 for any recipes ["one", "linux", False], ["two", "linux", False], - # Skip recipe without linux aarch64 enable on linux-aarch64 platform + ["three", "linux", False], + ["four", "linux", False], + # Skip recipes without linux aarch64 enable on linux-aarch64 platform ["one", "linux-aarch64", True], - # Don't skip recipe with linux aarch64 enable on linux-aarch64 platform + ["three", "linux-aarch64", True], + # Don't skip recipes with linux aarch64 enable on linux-aarch64 platform ["two", "linux-aarch64", False], + ["four", "linux-aarch64", False], + ["one", "osx-arm64", True], + ["two", "osx-arm64", True], + ["three", "osx-arm64", False], + ["four", "osx-arm64", False], ] r = Recipes( """ @@ -909,11 +962,28 @@ def test_native_platform_skipping(): two: meta.yaml: | package: - name: one + name: two + version: "0.1" + 
extra: + additional-platforms: + - linux-aarch64 + three: + meta.yaml: | + package: + name: three + version: "0.1" + extra: + additional-platforms: + - osx-arm64 + four: + meta.yaml: | + package: + name: four version: "0.1" extra: additional-platforms: - linux-aarch64 + - osx-arm64 """, from_string=True) r.write_recipes() # Make sure RepoData singleton init
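The platform-gating cases exercised by the new test matrix reduce to a small predicate. A condensed sketch of the rule (simplified from `do_not_consider_for_additional_platform` in build.py; the helper name is ours, and recipe metadata is passed as a plain list rather than a parsed recipe object):

```python
def skip_on_platform(platform: str, additional_platforms: list) -> bool:
    """On an opt-in platform, build only recipes that list it under
    extra: additional-platforms; default platforms always build."""
    if platform in ("linux-aarch64", "osx-arm64"):
        return platform not in additional_platforms
    return False


# Mirrors the expectations in test_native_platform_skipping:
assert skip_on_platform("linux", []) is False                         # "one" on linux
assert skip_on_platform("linux-aarch64", []) is True                  # "one" on aarch64
assert skip_on_platform("linux-aarch64", ["linux-aarch64"]) is False  # "two"
assert skip_on_platform("osx-arm64", ["linux-aarch64"]) is True       # "two" on osx-arm64
assert skip_on_platform("osx-arm64", ["osx-arm64"]) is False          # "three"
```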