Skip to content

Commit

Permalink
Merge branch 'master' into bulk-performance
Browse files Browse the repository at this point in the history
  • Loading branch information
aliciaaevans committed Apr 5, 2024
2 parents 7c4db37 + 869577a commit 323ac1e
Show file tree
Hide file tree
Showing 12 changed files with 304 additions and 56 deletions.
43 changes: 43 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,48 @@
# Changelog

## [2.14.0](https://www.github.com/bioconda/bioconda-utils/compare/v2.13.2...v2.14.0) (2024-04-01)


### Features

* fetch artifacts from GitHub Actions ([#973](https://www.github.com/bioconda/bioconda-utils/issues/973)) ([858e1cf](https://www.github.com/bioconda/bioconda-utils/commit/858e1cfdd4435aca16c9978df5463db845ff9fe3))
* update deployment target for osx-arm64 ([#967](https://www.github.com/bioconda/bioconda-utils/issues/967)) ([a7c591e](https://www.github.com/bioconda/bioconda-utils/commit/a7c591ea51fdf3308a0075951e8f5efee783c8d1))

### [2.13.2](https://www.github.com/bioconda/bioconda-utils/compare/v2.13.1...v2.13.2) (2024-03-27)


### Bug Fixes

* update anaconda-client=1.12 to fix uploads ([#970](https://www.github.com/bioconda/bioconda-utils/issues/970)) ([06dcacc](https://www.github.com/bioconda/bioconda-utils/commit/06dcacca60c17c1a97770e14ae6348ac7acbe9a6))

### [2.13.1](https://www.github.com/bioconda/bioconda-utils/compare/v2.13.0...v2.13.1) (2024-03-27)


### Bug Fixes

* detect errors when uploading artifacts ([#968](https://www.github.com/bioconda/bioconda-utils/issues/968)) ([d0a79cd](https://www.github.com/bioconda/bioconda-utils/commit/d0a79cdd5aeed30c4da88e2135329d66b336832f))

## [2.13.0](https://www.github.com/bioconda/bioconda-utils/compare/v2.12.0...v2.13.0) (2024-03-22)


### Features

* add osx-arm64 to platform checks ([#965](https://www.github.com/bioconda/bioconda-utils/issues/965)) ([9f6df10](https://www.github.com/bioconda/bioconda-utils/commit/9f6df10bfecd048956acc80e7bb3d57952585529))

## [2.12.0](https://www.github.com/bioconda/bioconda-utils/compare/v2.11.1...v2.12.0) (2024-03-18)


### Features

* add support for excluding otherwise-selected recipes ([#962](https://www.github.com/bioconda/bioconda-utils/issues/962)) ([3946732](https://www.github.com/bioconda/bioconda-utils/commit/3946732eb6129f6905e53b62d76287e09d4bef36))
* bioconductor improvements ([#944](https://www.github.com/bioconda/bioconda-utils/issues/944)) ([b007d34](https://www.github.com/bioconda/bioconda-utils/commit/b007d34e6c723f7f9d6fcb5a6f58e072d4618cdf))
* Bulk build failure wiki ([#948](https://www.github.com/bioconda/bioconda-utils/issues/948)) ([18f988d](https://www.github.com/bioconda/bioconda-utils/commit/18f988d70966f6f6296170d96cc1ced51ad10392))


### Bug Fixes

* Do not emit cython_needs_compiler if compiler("cxx") is set ([#927](https://www.github.com/bioconda/bioconda-utils/issues/927)) ([8255afd](https://www.github.com/bioconda/bioconda-utils/commit/8255afdd9e5c0fd3cb09cb11269f5ff3397c959e))

### [2.11.1](https://www.github.com/bioconda/bioconda-utils/compare/v2.11.0...v2.11.1) (2023-12-13)


Expand Down
75 changes: 61 additions & 14 deletions bioconda_utils/artifacts.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@


from enum import Enum
import glob
import os
import re
import sys
Expand All @@ -20,7 +21,14 @@
IMAGE_RE = re.compile(r"(.+)(?::|%3A)(.+)\.tar\.gz$")


def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_target=None, label=None, artifact_source="azure") -> bool:
class UploadResult(Enum):
    """Outcome of ``upload_pr_artifacts`` for a given commit."""
    SUCCESS = 1       # all package/image uploads succeeded
    FAILURE = 2       # at least one upload failed
    NO_ARTIFACTS = 3  # PR found, but no build artifacts to upload
    NO_PR = 4         # commit has no associated pull request


def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_target=None, label=None, artifact_source="azure") -> UploadResult:
_config = utils.load_config(config)
repodata = utils.RepoData()

Expand All @@ -30,28 +38,35 @@ def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_targe

commit = repo.get_commit(git_sha)
prs = commit.get_pulls()
if not prs:
if not prs or prs.totalCount < 1:
# no PR found for the commit
return True
return UploadResult.NO_PR
pr = prs[0]
artifacts = set(fetch_artifacts(pr, artifact_source))
artifacts = set(fetch_artifacts(pr, artifact_source, repo))
if not artifacts:
# no artifacts found, fail and rebuild packages
logger.info("No artifacts found.")
return False
return UploadResult.NO_ARTIFACTS
else:
success = []
for artifact in artifacts:
with tempfile.TemporaryDirectory() as tmpdir:
# download the artifact
if artifact_source == "azure":
artifact_path = os.path.join(tmpdir, os.path.basename(artifact))
download_artifact(artifact, artifact_path)
download_artifact(artifact, artifact_path, artifact_source)
zipfile.ZipFile(artifact_path).extractall(tmpdir)
elif artifact_source == "circleci":
artifact_dir = os.path.join(tmpdir, *(artifact.split("/")[-4:-1]))
artifact_path = os.path.join(tmpdir, artifact_dir, os.path.basename(artifact))
Path(artifact_dir).mkdir(parents=True, exist_ok=True)
download_artifact(artifact, artifact_path)
Path(artifact_dir).mkdir(parents=True, exist_ok=True)
download_artifact(artifact, artifact_path, artifact_source)
elif artifact_source == "github-actions":
artifact_dir = os.path.join(tmpdir, "artifacts")
artifact_path = os.path.join(artifact_dir, os.path.basename(artifact))
Path(artifact_dir).mkdir(parents=True, exist_ok=True)
download_artifact(artifact, artifact_path, artifact_source)
zipfile.ZipFile(artifact_path).extractall(artifact_dir)

# get all the contained packages and images and upload them
platform_patterns = [repodata.platform2subdir(repodata.native_platform())]
Expand All @@ -67,7 +82,7 @@ def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_targe
else:
logger.info(f"Uploading {pkg} to anaconda.org.")
# upload the package
anaconda_upload(pkg, label=label)
success.append(anaconda_upload(pkg, label=label))

if mulled_upload_target:
quay_login = os.environ['QUAY_LOGIN']
Expand All @@ -90,25 +105,35 @@ def upload_pr_artifacts(config, repo, git_sha, dryrun=False, mulled_upload_targe
else:
# upload the image
logger.info(f"Uploading {img} to {target}.")
skopeo_upload(fixed_img_name, target, creds=quay_login)
return True
success.append(skopeo_upload(fixed_img_name, target, creds=quay_login))
if all(success):
return UploadResult.SUCCESS
else:
return UploadResult.FAILURE


@backoff.on_exception(
    backoff.expo,
    requests.exceptions.RequestException
)
def download_artifact(url, to_path, artifact_source):
    """
    Stream a build artifact from ``url`` to the local file ``to_path``.

    Retries with exponential backoff on any ``requests`` exception
    (via the ``backoff`` decorator above).

    Parameters
    ----------
    url : str
        Direct download URL of the artifact.
    to_path : str
        Local filesystem path to write the artifact to.
    artifact_source : str
        CI provider the artifact comes from ("azure", "circleci", or
        "github-actions"). GitHub Actions archive URLs require an
        authenticated request.

    Exits the process with status 1 if a GitHub Actions artifact is
    requested but no GITHUB_TOKEN is available.
    """
    logger.info(f"Downloading artifact {url}.")
    headers = {}
    if artifact_source == "github-actions":
        # GHA archive_download_url endpoints reject unauthenticated requests.
        token = os.environ.get("GITHUB_TOKEN")
        if not token:
            logger.critical("GITHUB_TOKEN required to download GitHub Actions artifacts")
            # sys.exit instead of the site-module exit() helper, which is
            # not guaranteed to exist outside interactive sessions.
            sys.exit(1)
        headers = {"Authorization": f"token {token}"}
    resp = requests.get(url, stream=True, allow_redirects=True, headers=headers)
    resp.raise_for_status()
    with open(to_path, "wb") as f:
        for chunk in resp.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)


def fetch_artifacts(pr, artifact_source):
def fetch_artifacts(pr, artifact_source, repo):
"""
Fetch artifacts from a PR.
Expand Down Expand Up @@ -143,6 +168,13 @@ def fetch_artifacts(pr, artifact_source):
# Circle CI builds
artifact_url = get_circleci_artifacts(check_run, platform)
yield from artifact_url
elif (
artifact_source == "github-actions" and
check_run.app.slug == "github-actions"
):
# GitHubActions builds
artifact_url = get_gha_artifacts(check_run, platform, repo)
yield from artifact_url


def get_azure_artifacts(check_run):
Expand Down Expand Up @@ -185,3 +217,18 @@ def get_circleci_artifacts(check_run, platform):
continue
else:
yield artifact_url

def parse_gha_build_id(url: str) -> str:
    """
    Extract the GitHub Actions workflow run id from a check-run details URL.

    Example: ``.../actions/runs/8436158667/job/231039...`` -> ``"8436158667"``.

    Returns an empty string (falsy, matching how callers truth-test the
    result) when the URL does not contain a ``runs/<id>/`` segment, instead
    of raising ``AttributeError`` on the failed match.
    """
    # Raw string: "\d" in a plain literal is an invalid escape sequence.
    match = re.search(r"runs/(\d+)/", url)
    return match.group(1) if match else ""

def get_gha_artifacts(check_run, platform, repo):
    """Yield archive download URLs for all artifacts of the GitHub Actions
    workflow run behind *check_run*.

    ``platform`` is accepted for signature parity with the other
    ``get_*_artifacts`` helpers but is not used here.
    """
    run_id = parse_gha_build_id(check_run.details_url)
    if not run_id:
        return
    # The check run and the workflow run are distinct objects; resolve the
    # workflow run through the repository by its numeric id.
    workflow_run = repo.get_workflow_run(int(run_id))
    for artifact in workflow_run.get_artifacts():
        # Archive URLs are short-lived (~1 minute) and need a token to fetch.
        yield artifact.archive_download_url
5 changes: 3 additions & 2 deletions bioconda_utils/bioconda_utils-conda_build_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@
# clear hard-coded default value for CONDA_BUILD_SYSROOT
CONDA_BUILD_SYSROOT:
- ""
MACOSX_DEPLOYMENT_TARGET:
- "10.9"
MACOSX_DEPLOYMENT_TARGET: # [osx]
- 11.0 # [osx and arm64]
- 10.9 # [osx and x86_64]

pin_run_as_build:
htslib:
Expand Down
8 changes: 4 additions & 4 deletions bioconda_utils/bioconda_utils-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ pyopenssl>=22.1 # Stay compatible with cryptography
conda-forge-pinning=2023.05.06.13.08.41

# tools
anaconda-client=1.6.* # anaconda_upload
anaconda-client=1.12.* # anaconda_upload
involucro=1.1.* # mulled test and container build
skopeo=1.11.* # docker upload
git=2.* # well - git
Expand All @@ -41,7 +41,7 @@ aiohttp-session
aiohttp-security
aiofiles=0.8.* # async open
aioftp=0.12.* # FTP lib
backoff=1.6.* # retrying of failed API calls
backoff=2.2.* # retrying of failed API calls
cachetools=3.0.* # request caching (NEEDED?)

# client API wrappers
Expand All @@ -51,7 +51,7 @@ gidgethub=3.0.* # githubhandler
pyjwt>=2.4.0 # githubhandler (JWT signing), needs >=2.4.0, CVE-2022-29217

# unknown
beautifulsoup4=4.8.*
beautifulsoup4=4.12.*
galaxy-lib>=18.9.1
jinja2>=2.10.1,<3
markupsafe<2.1 # markupsafe 2.1 breaks jinja2
Expand All @@ -66,7 +66,7 @@ markdown
graphviz

# The bioconductor skeleton needs this
requests=2.22.*
requests=2.29.*

# merge handling
pygithub
Expand Down
19 changes: 16 additions & 3 deletions bioconda_utils/bioconductor_skeleton.py
Original file line number Diff line number Diff line change
Expand Up @@ -774,7 +774,7 @@ def dependencies(self):
dependency_mapping[prefix + name.lower() + version] = name

# Check SystemRequirements in the DESCRIPTION file to make sure
# packages with such reqquirements are provided correct recipes.
# packages with such requirements are provided correct recipes.
if (self.packages[self.package].get('SystemRequirements') is not None):
logger.warning(
"The 'SystemRequirements' {} are needed".format(
Expand Down Expand Up @@ -940,7 +940,7 @@ def sub_placeholders(x):
additional_run_deps = []
if self.is_data_package:
additional_run_deps.append('curl')
additional_run_deps.append('bioconductor-data-packages>={}'.format(date.today().strftime('%Y%m%d')))
additional_run_deps.append('bioconductor-data-packages >={}'.format(date.today().strftime('%Y%m%d')))

d = OrderedDict((
(
Expand All @@ -959,6 +959,7 @@ def sub_placeholders(x):
'build', OrderedDict((
('number', self.build_number),
('rpaths', ['lib/R/lib/', 'lib/']),
('run_exports', f'{{{{ pin_subpackage("bioconductor-{self.package_lower}", max_pin="x.x") }}}}'),
)),
),
(
Expand Down Expand Up @@ -1248,7 +1249,19 @@ def write_recipe(package, recipe_dir, config, bioc_data_packages=None, force=Fal
(updated_version == current_version) and
(updated_meta != current_meta)
):
proj.build_number = int(current_build_number) + 1
# Sometimes when updating all packages, the updating process fails
# partway. Re-running the updating process should not bump the
# build number if no builds for this version exist yet in the repo.
existing_bldnos = utils.RepoData().get_package_data(
key="build_number",
name="bioconductor-" + proj.package.lower(),
version=updated_version
)
if not existing_bldnos:
proj.build_number = 0
else:
proj.build_number = sorted([int(i) for i in existing_bldnos]) [-1] + 1

if 'extra' in current_meta:
exclude = set(['final', 'copy_test_source_files'])
proj.extra = {x: y for x, y in current_meta['extra'].items() if x not in exclude}
Expand Down
15 changes: 13 additions & 2 deletions bioconda_utils/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -294,10 +294,13 @@ def do_not_consider_for_additional_platform(recipe_folder: str, recipe: str, pla
Return True if current native platform are not included in recipe's additional platforms (no need to build).
"""
recipe_obj = _recipe.Recipe.from_file(recipe_folder, recipe)
# On linux-aarch64 env, only build recipe with linux-aarch64 extra_additional_platforms
# On linux-aarch64 or osx-arm64 env, only build recipe with matching extra_additional_platforms
if platform == "linux-aarch64":
if "linux-aarch64" not in recipe_obj.extra_additional_platforms:
return True
if platform == "osx-arm64":
if "osx-arm64" not in recipe_obj.extra_additional_platforms:
return True
return False


Expand All @@ -318,7 +321,9 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str],
record_build_failures: bool = False,
skiplist_leafs: bool = False,
live_logs: bool = True,
subdag_depth: int = None):
exclude: List[str] = None,
subdag_depth: int = None
):
"""
Build one or many bioconda packages.
Expand Down Expand Up @@ -347,6 +352,8 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str],
keep_old_work: Do not remove anything from environment, even after successful build and test.
skiplist_leafs: If True, blacklist leaf packages that fail to build
live_logs: If True, enable live logging during the build process
exclude: list of recipes to exclude. Typically used for
temporary exclusion; otherwise consider adding recipe to skiplist.
subdag_depth: Number of levels of nodes to skip. (Optional, only if using n_workers)
"""
if not recipes:
Expand Down Expand Up @@ -377,6 +384,10 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str],
failed = []

dag, name2recipes = graph.build(recipes, config=config_path, blacklist=blacklist)
if exclude:
for name in exclude:
dag.remove_node(name)

if not dag:
logger.info("Nothing to be done.")
return True
Expand Down
Loading

0 comments on commit 323ac1e

Please sign in to comment.