diff --git a/.authors.yml b/.authors.yml index d338b122a..db280f260 100644 --- a/.authors.yml +++ b/.authors.yml @@ -119,7 +119,7 @@ first_commit: 2019-02-17 13:35:45 - name: Min RK email: benjaminrk@gmail.com - num_commits: 29 + num_commits: 30 first_commit: 2016-09-15 09:07:34 github: minrk - name: Phil Elson @@ -236,7 +236,7 @@ email: uwe.korn@quantco.com alternate_emails: - xhochy@users.noreply.github.com - num_commits: 32 + num_commits: 39 first_commit: 2019-06-27 08:26:49 github: xhochy - name: C.A.M. Gerlach @@ -393,7 +393,7 @@ github: shadowwalkersb - name: Matthew R. Becker email: becker.mr@gmail.com - num_commits: 152 + num_commits: 154 first_commit: 2020-01-07 17:44:33 github: beckermr alternate_emails: @@ -476,7 +476,7 @@ github: jwillemsen - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com - num_commits: 99 + num_commits: 104 first_commit: 2020-11-01 14:18:08 github: jaimergp - name: Bastian Zimmermann diff --git a/.mailmap b/.mailmap index 479d1f8be..772f28be2 100644 --- a/.mailmap +++ b/.mailmap @@ -12,16 +12,16 @@ Isuru Fernando Isuru Fernando Matthew R. Becker Matthew R Becker Matthew R. Becker beckermr +Jaime Rodríguez-Guerra Christopher J. Wright christopher Christopher J. Wright Christopher J. 'CJ' Wright Anthony Scopatz -Jaime Rodríguez-Guerra Phil Elson pelson Filipe Fernandes Filipe Filipe Fernandes ocefpaf +Uwe L. Korn Uwe L. Korn Dougal J. Sutherland shadow_walker -Uwe L. Korn Uwe L. Korn Michael Sarahan Mike Sarahan Min RK Leo Fang Leo Fang diff --git a/AUTHORS.rst b/AUTHORS.rst index fa48961a9..06177c32f 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -3,14 +3,14 @@ Authors are sorted by number of commits. * Isuru Fernando * Matthew R. Becker +* Jaime Rodríguez-Guerra * Christopher J. Wright * Anthony Scopatz -* Jaime Rodríguez-Guerra * Phil Elson * Filipe Fernandes +* Uwe L. Korn * Dougal J. Sutherland * shadow_walker -* Uwe L. Korn * Michael Sarahan * Min RK * Leo Fang diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 8ee36975b..5e8de024a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,6 +4,43 @@ conda-smithy Change Log .. current developments +v3.27.1 ==================== **Fixed:** * Crash when XDG_CACHE_HOME is defined **Authors:** * Min RK v3.27.0 ==================== **Added:** * Cache the contents of ``conda-forge-pinning`` and only check every 15min for an updated version. The re-check interval can be configured via the ``CONDA_FORGE_PINNING_LIFETIME`` environment variable. **Changed:** * Do not strip version constraints for ``mamba update``. (#1773 via #1774) * If one supplies ``--no-check-uptodate`` on the commandline, we will no longer check and print a warning if conda-smithy is outdated. **Removed:** * Removed the ``update-cb3`` command. It is advised to do this update manually if you still encounter a recipe using the old compiler ``toolchain``. **Authors:** * Jaime Rodríguez-Guerra * Uwe L. Korn v3.26.3 ==================== diff --git a/README.md b/README.md index 837a85032..8d1925d19 100644 --- a/README.md +++ b/README.md @@ -96,10 +96,6 @@ out of by specifying `--without-anaconda-token`, as such execpted package upload ``` github: user_or_org: YOUR_GITHUB_USER_OR_ORG - channels: - targets: - - - - YOUR_ANACONDA_CHANNEL ``` 6.
**Re-render the feedstock:** ``conda smithy rerender --feedstock_directory ./foo-feedstock`` diff --git a/conda_smithy/cli.py b/conda_smithy/cli.py index cf5558933..477d0fbc2 100644 --- a/conda_smithy/cli.py +++ b/conda_smithy/cli.py @@ -141,7 +141,7 @@ def __call__(self, args): ) print( - "\nRepository created, please edit conda-forge.yml to configure the upload channels\n" + "\nRepository created, please edit recipe/conda_build_config.yaml to configure the upload channels\n" "and afterwards call 'conda smithy register-github'" ) @@ -597,50 +597,6 @@ def __call__(self, args): print(POST_SKELETON_MESSAGE.format(args=args).strip()) -class UpdateCB3(Subcommand): - subcommand = "update-cb3" - - def __init__(self, parser): - # conda-smithy update-cb3 ./ - super(UpdateCB3, self).__init__( - parser, "Update a feedstock for conda-build=3" - ) - scp = self.subcommand_parser - scp.add_argument( - "--recipe_directory", - default=os.path.join(os.getcwd(), "recipe"), - help="The path to the source recipe directory.", - ) - scp.add_argument( - "--output", - default=None, - help="Filename for the output. No output edits the recipe inplace", - ) - scp.add_argument( - "--cbc", - default=None, - help="Path to conda_build_config.yaml. No path will use conda-forge-pinning", - ) - - def __call__(self, args): - from conda_smithy.update_cb3 import update_cb3 - from conda_smithy.configure_feedstock import get_cfp_file_path - - recipe_file = os.path.join(args.recipe_directory, "meta.yaml") - output_file = args.output - if output_file is None: - output_file = recipe_file - if args.cbc is None: - cbc, _ = get_cfp_file_path() - else: - cbc = os.path.join(os.getcwd(), args.cbc) - output_content, messages = update_cb3(recipe_file, cbc) - with io.open(output_file, "w") as fh: - fh.write(output_content) - print("List of changes done to the recipe:") - print(messages) - - def main(): logging.basicConfig(level=logging.INFO) diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index 91a22986e..dd7191218 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -7,13 +7,14 @@ import sys import subprocess import textwrap +import time import yaml import warnings from collections import OrderedDict, namedtuple, Counter import copy import hashlib import requests -from pathlib import PurePath +from pathlib import Path, PurePath # The `requests` lib uses `simplejson` instead of `json` when available. 
# In consequence the same JSON library must be used or the `JSONDecodeError` @@ -66,6 +67,11 @@ "," ) +# Cache lifetime in seconds, default 15min +CONDA_FORGE_PINNING_LIFETIME = int( + os.environ.get("CONDA_FORGE_PINNING_LIFETIME", 15 * 60) +) + def package_key(config, used_loop_vars, subdir): # get the build string from whatever conda-build makes of the configuration @@ -727,6 +733,9 @@ def _render_ci_provider( if os.path.exists(_recipe_cbc): os.rename(_recipe_cbc, _recipe_cbc + ".conda.smithy.bak") + channel_sources = migrated_combined_variant_spec.get( + "channel_sources", [""] + )[0].split(",") metas = conda_build.api.render( os.path.join(forge_dir, forge_config["recipe_dir"]), platform=platform, @@ -736,9 +745,7 @@ def _render_ci_provider( permit_undefined_jinja=True, finalize=False, bypass_env_check=True, - channel_urls=forge_config.get("channels", {}).get( - "sources", [] - ), + channel_urls=channel_sources, ) finally: if os.path.exists(_recipe_cbc + ".conda.smithy.bak"): @@ -1606,11 +1613,23 @@ def render_README(jinja_env, forge_config, forge_dir, render_info=None): ci_support_path = os.path.join(forge_dir, ".ci_support") variants = [] + channel_targets = [] if os.path.exists(ci_support_path): for filename in os.listdir(ci_support_path): if filename.endswith(".yaml"): variant_name, _ = os.path.splitext(filename) variants.append(variant_name) + with open(os.path.join(ci_support_path, filename)) as fh: + data = yaml.safe_load(fh) + for channel in data.get("channel_targets", ()): + # channel_targets are in the form of "channel_name label" + channel_targets.append(channel.split(" ")) + if not channel_targets: + # default to conda-forge if no channel_targets are specified (shouldn't happen) + channel_targets = ["conda-forge main"] + else: + # de-duplicate in-order + channel_targets = list(dict.fromkeys(channel_targets)) subpackages_metas = OrderedDict((meta.name(), meta) for meta in metas) subpackages_about = [(package_name, package_about)] @@ -1642,6 +1661,7 @@ def render_README(jinja_env, forge_config, forge_dir, render_info=None): ) ) ) + forge_config["channel_targets"] = channel_targets if forge_config["azure"].get("build_id") is None: @@ -1818,10 +1838,6 @@ def _load_forge_config(forge_dir, exclusive_config_file, forge_yml=None): "max_py_ver": "37", "min_r_ver": "34", "max_r_ver": "34", - "channels": { - "sources": ["conda-forge"], - "targets": [["conda-forge", "main"]], - }, "github": { "user_or_org": "conda-forge", "repo_name": "", @@ -1898,6 +1914,11 @@ def _load_forge_config(forge_dir, exclusive_config_file, forge_yml=None): "Setting docker image in conda-forge.yml is removed now." " Use conda_build_config.yaml instead" ) + if file_config.get("channels"): + raise ValueError( + "Setting channels in conda-forge.yml is removed now." 
+ " Use conda_build_config.yaml instead" + ) for plat in ["linux", "osx", "win"]: if config["azure"]["timeout_minutes"] is not None: @@ -2034,17 +2055,20 @@ def _load_forge_config(forge_dir, exclusive_config_file, forge_yml=None): if config["provider"]["linux_s390x"] in {"default", "native"}: config["provider"]["linux_s390x"] = ["travis"] - - config["remote_ci_setup"] = _santize_remote_ci_setup( - config["remote_ci_setup"] - ) - config["remote_ci_setup_names"] = [ - MatchSpec(pkg.strip('"').strip("'")).name - for pkg in config["remote_ci_setup"] - ] + config["pinned_packages"] = conda_build.utils.ensure_list( config["pinned_packages"] ) + config["remote_ci_setup"] = _santize_remote_ci_setup( + config["remote_ci_setup"] + ) + if config["conda_install_tool"] == "conda": + config["remote_ci_setup_update"] = [ + MatchSpec(pkg.strip('"').strip("'")).name + for pkg in config["remote_ci_setup"] + ] + else: + config["remote_ci_setup_update"] = config["remote_ci_setup"] # Older conda-smithy versions supported this with only one # entry. To avoid breakage, we are converting single elements @@ -2187,6 +2211,52 @@ def get_cfp_file_path(temporary_directory): return cf_pinning_file, cf_pinning_ver +def get_cache_dir(): + if sys.platform.startswith("win"): + return Path(os.environ.get("TEMP")) + else: + return Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache")) + + +def get_cached_cfp_file_path(temporary_directory): + if cache_dir := get_cache_dir(): + smithy_cache = cache_dir / "conda-smithy" + smithy_cache.mkdir(parents=True, exist_ok=True) + pinning_version = None + # Do we already have the pinning cached? + if (smithy_cache / "conda-forge-pinng-version").exists(): + pinning_version = ( + smithy_cache / "conda-forge-pinng-version" + ).read_text() + + # Check whether we have recently already updated the cache + current_ts = int(time.time()) + if (smithy_cache / "conda-forge-pinng-version-ts").exists(): + last_ts = int( + (smithy_cache / "conda-forge-pinng-version-ts").read_text() + ) + else: + last_ts = 0 + + if current_ts - last_ts > CONDA_FORGE_PINNING_LIFETIME: + current_pinning_version = get_most_recent_version( + "conda-forge-pinning" + ).version + (smithy_cache / "conda-forge-pinng-version-ts").write_text( + str(current_ts) + ) + if current_pinning_version != pinning_version: + get_cfp_file_path(smithy_cache) + (smithy_cache / "conda-forge-pinng-version").write_text( + current_pinning_version + ) + pinning_version = current_pinning_version + + return str(smithy_cache / "conda_build_config.yaml"), pinning_version + else: + return get_cfp_file_path(temporary_directory) + + def clear_variants(forge_dir): "Remove all variant files placed in the .ci_support path" if os.path.isdir(os.path.join(forge_dir, ".ci_support")): @@ -2340,14 +2410,11 @@ def main( loglevel = os.environ.get("CONDA_SMITHY_LOGLEVEL", "INFO").upper() logger.setLevel(loglevel) - if check: + if check or not no_check_uptodate: # Check that conda-smithy is up-to-date check_version_uptodate("conda-smithy", __version__, True) - return True - - error_on_warn = False if no_check_uptodate else True - # Check that conda-smithy is up-to-date - check_version_uptodate("conda-smithy", __version__, error_on_warn) + if check: + return True forge_dir = os.path.abspath(forge_file_directory) if exclusive_config_file is not None: @@ -2356,7 +2423,7 @@ def main( raise RuntimeError("Given exclusive-config-file not found.") cf_pinning_ver = None else: - exclusive_config_file, cf_pinning_ver = get_cfp_file_path( + exclusive_config_file, 
cf_pinning_ver = get_cached_cfp_file_path( temporary_directory ) diff --git a/conda_smithy/templates/README.md.tmpl b/conda_smithy/templates/README.md.tmpl index f12c816be..c00ab4a7c 100644 --- a/conda_smithy/templates/README.md.tmpl +++ b/conda_smithy/templates/README.md.tmpl @@ -1,4 +1,4 @@ -{%- set channel_name = channels.targets[0][0] -%} +{%- set channel_name = channel_targets[0] -%} {#- # -*- mode: jinja -*- -#} diff --git a/conda_smithy/templates/build_steps.sh.tmpl b/conda_smithy/templates/build_steps.sh.tmpl index 8ed02bb4c..2f3aff339 100644 --- a/conda_smithy/templates/build_steps.sh.tmpl +++ b/conda_smithy/templates/build_steps.sh.tmpl @@ -55,12 +55,12 @@ PINS {{ conda_install_tool }} install --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \ pip {{ conda_install_tool_deps }} {{ conda_build_tool_deps }} {{ " ".join(remote_ci_setup) }} -{%- if conda_build_tool_deps != "" or conda_install_tool_deps != "" %} +{%- if conda_install_tool == "mamba" and (conda_build_tool_deps != "" or conda_install_tool_deps != "") %} {{ conda_install_tool }} update --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \ - pip {{ conda_install_tool_deps }} {{ conda_build_tool_deps }} {{ " ".join(remote_ci_setup_names) }} + pip {{ conda_install_tool_deps }} {{ conda_build_tool_deps }} {{ " ".join(remote_ci_setup_update) }} {%- endif %} {% if local_ci_setup %} -conda uninstall --quiet --yes --force {{ " ".join(remote_ci_setup_names)}} +conda uninstall --quiet --yes --force {{ " ".join(remote_ci_setup)}} pip install --no-deps ${RECIPE_ROOT}/. {%- endif %} # set up the condarc diff --git a/conda_smithy/templates/run_osx_build.sh.tmpl b/conda_smithy/templates/run_osx_build.sh.tmpl index d421c1496..632a8a49c 100644 --- a/conda_smithy/templates/run_osx_build.sh.tmpl +++ b/conda_smithy/templates/run_osx_build.sh.tmpl @@ -47,13 +47,13 @@ PINS {{ conda_install_tool }} install --update-specs --quiet --yes --channel conda-forge --strict-channel-priority \ pip {{ conda_install_tool_deps }} {{ conda_build_tool_deps }} {{ " ".join(remote_ci_setup) }} -{%- if conda_build_tool_deps != "" or conda_install_tool_deps != "" %} +{%- if conda_install_tool == "mamba" and (conda_build_tool_deps != "" or conda_install_tool_deps != "") %} {{ conda_install_tool }} update --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \ - pip {{ conda_install_tool_deps }} {{ conda_build_tool_deps }} {{ " ".join(remote_ci_setup_names) }} + pip {{ conda_install_tool_deps }} {{ conda_build_tool_deps }} {{ " ".join(remote_ci_setup_update) }} {%- endif %} {% if local_ci_setup %} -conda uninstall --quiet --yes --force {{ " ".join(remote_ci_setup_names) }} +conda uninstall --quiet --yes --force {{ " ".join(remote_ci_setup) }} pip install --no-deps {{ recipe_dir }}/. {%- endif %} diff --git a/conda_smithy/templates/run_win_build.bat.tmpl b/conda_smithy/templates/run_win_build.bat.tmpl index 0261bb86c..c00c08bf0 100644 --- a/conda_smithy/templates/run_win_build.bat.tmpl +++ b/conda_smithy/templates/run_win_build.bat.tmpl @@ -42,7 +42,7 @@ if !errorlevel! neq 0 exit /b !errorlevel! {%- if local_ci_setup %} echo Overriding conda-forge-ci-setup with local version -conda.exe uninstall --quiet --yes --force {{ " ".join(remote_ci_setup_names) }} +conda.exe uninstall --quiet --yes --force {{ " ".join(remote_ci_setup) }} if !errorlevel! neq 0 exit /b !errorlevel! pip install --no-deps ".\{{ recipe_dir }}\." if !errorlevel! neq 0 exit /b !errorlevel! 
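The `get_cached_cfp_file_path` helper added earlier in this diff is essentially a timestamp-gated cache: remember which `conda-forge-pinning` version was last downloaded and only look up the latest release again once `CONDA_FORGE_PINNING_LIFETIME` seconds have passed. Below is a minimal, self-contained sketch of that pattern; `fetch_latest_version` and `download_pinning_file` are hypothetical stand-ins for conda-smithy's `get_most_recent_version("conda-forge-pinning")` and `get_cfp_file_path`, and the cache file names are illustrative rather than the ones used in the diff.

```python
import os
import sys
import time
from pathlib import Path

# Re-check interval in seconds; mirrors CONDA_FORGE_PINNING_LIFETIME (default 15 min).
PINNING_LIFETIME = int(os.environ.get("CONDA_FORGE_PINNING_LIFETIME", 15 * 60))


def cache_dir() -> Path:
    # Windows uses TEMP; elsewhere honour XDG_CACHE_HOME, falling back to ~/.cache.
    if sys.platform.startswith("win"):
        return Path(os.environ.get("TEMP", "."))
    return Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache"))


def cached_pinning(fetch_latest_version, download_pinning_file) -> Path:
    """Return a cached conda_build_config.yaml, refreshing it at most once per lifetime."""
    smithy_cache = cache_dir() / "conda-smithy"
    smithy_cache.mkdir(parents=True, exist_ok=True)
    version_file = smithy_cache / "pinning-version"  # last version we downloaded
    stamp_file = smithy_cache / "pinning-version-ts"  # unix time of the last check

    cached_version = version_file.read_text() if version_file.exists() else None
    last_check = int(stamp_file.read_text()) if stamp_file.exists() else 0

    now = int(time.time())
    if now - last_check > PINNING_LIFETIME:
        latest = fetch_latest_version("conda-forge-pinning")
        stamp_file.write_text(str(now))
        if latest != cached_version:
            # Only fetch the full pinning file when the version actually moved.
            download_pinning_file(smithy_cache)  # writes conda_build_config.yaml
            version_file.write_text(latest)

    return smithy_cache / "conda_build_config.yaml"
```

Setting `CONDA_FORGE_PINNING_LIFETIME=0` makes the version check happen on essentially every invocation again, which can be handy when debugging pinning updates.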
diff --git a/conda_smithy/update_cb3.py b/conda_smithy/update_cb3.py deleted file mode 100644 index 88c46d469..000000000 --- a/conda_smithy/update_cb3.py +++ /dev/null @@ -1,578 +0,0 @@ -import io -import re -import jinja2 -import os -import ruamel.yaml -import collections -import requests -import tempfile -import tarfile -import zipfile -from .utils import tmp_directory, render_meta_yaml - - -class Str(ruamel.yaml.scalarstring.ScalarString): - __slots__ = "lc" - - style = "" - - def __new__(cls, value): - return ruamel.yaml.scalarstring.ScalarString.__new__(cls, value) - - -class MyPreservedScalarString(ruamel.yaml.scalarstring.PreservedScalarString): - __slots__ = "lc" - - -class MyDoubleQuotedScalarString( - ruamel.yaml.scalarstring.DoubleQuotedScalarString -): - __slots__ = "lc" - - -class MySingleQuotedScalarString( - ruamel.yaml.scalarstring.SingleQuotedScalarString -): - __slots__ = "lc" - - -class MyConstructor(ruamel.yaml.constructor.RoundTripConstructor): - def construct_scalar(self, node): - # type: (Any) -> Any - if not isinstance(node, ruamel.yaml.nodes.ScalarNode): - raise ruamel.yaml.constructor.ConstructorError( - None, - None, - "expected a scalar node, but found %s" % node.id, - node.start_mark, - ) - - if node.style == "|" and isinstance( - node.value, ruamel.yaml.compat.text_type - ): - ret_val = MyPreservedScalarString(node.value) - elif bool(self._preserve_quotes) and isinstance( - node.value, ruamel.yaml.compat.text_type - ): - if node.style == "'": - ret_val = MySingleQuotedScalarString(node.value) - elif node.style == '"': - ret_val = MyDoubleQuotedScalarString(node.value) - else: - ret_val = Str(node.value) - else: - ret_val = Str(node.value) - ret_val.lc = ruamel.yaml.comments.LineCol() - ret_val.lc.line = node.start_mark.line - ret_val.lc.col = node.start_mark.column - return ret_val - - -class Section: - def __init__(self, section, start, end): - self.section = section - self.start = start - self.end = end - - def __getitem__(self, item): - if item not in self.section: - return None - sect = self.section[item] - start = sect.lc.line - for other in self.section: - if other.lc.line > start: - end = other.lc.line - return Section(sect, start, end) - return Section(sect, start, self.end) - - -def iterate(tarzip): - if isinstance(tarzip, zipfile.ZipFile): - for f in iter(tarzip.infolist()): - yield f - elif isinstance(tarzip, tarfile.TarFile): - for f in tarzip: - yield f - - -def name(tzinfo): - if isinstance(tzinfo, zipfile.ZipInfo): - return tzinfo.filename - elif isinstance(tzinfo, tarfile.TarInfo): - return tzinfo.name - - -def get_compilers(url): - """ - Download the source and check for C/C++/Fortran - Also check if `np.get_include()` is present in the setup.py files - Return whether a C/C++/Fortran compiler is used and whether - numpy headers are used. - """ - if isinstance(url, list): - for u in url: - r = requests.get(u, allow_redirects=True) - if r.ok: - url = u - break - else: - r = requests.get(url, allow_redirects=True) - fname = os.path.basename(url) - ext = os.path.splitext(url)[1] - if ext == ".zip": - tarzip_open = zipfile.ZipFile - else: - tarzip_open = tarfile.open - - with tmp_directory() as tmp_dir: - with open(os.path.join(tmp_dir, fname), "wb") as f: - f.write(r.content) - need_numpy_pin = False - with tarzip_open(os.path.join(tmp_dir, fname)) as tf: - need_f = any( - [ - name(f).lower().endswith((".f", ".f90", ".f77")) - for f in iterate(tf) - ] - ) - # Fortran builds use CC to perform the link (they do not call the linker directly). 
- need_c = ( - True - if need_f - else any( - [ - name(f).lower().endswith((".c", ".pyx")) - for f in iterate(tf) - ] - ) - ) - need_cxx = any( - [ - name(f).lower().endswith((".cxx", ".cpp", ".cc", ".c++")) - for f in iterate(tf) - ] - ) - for f in iterate(tf): - if name(f).lower().endswith("setup.py"): - try: - content = tf.extractfile(f).read().decode("utf-8") - if ( - "numpy.get_include()" in content - or "np.get_include()" in content - ): - need_numpy_pin = True - except: - pass - return need_f, need_c, need_cxx, need_numpy_pin - - -def update_cb3(recipe_path, conda_build_config_path): - """ - Update the recipe in `recipe_path` to use conda-build=3 features according - to conda-build config yaml in `conda_build_config_path`. - Returns the updated recipe and a message about the changes made. - """ - yaml = ruamel.yaml.YAML() - yaml.Constructor = MyConstructor - yaml.allow_duplicate_keys = True - - with io.open(recipe_path, "rt") as fh: - lines = list(fh) - orig_content = "".join(lines) - content = orig_content - jinjas = re.findall("{%(?:.+?)%}", content, re.DOTALL) - for j in jinjas: - new_j = "" - for c in j: - if c == "\n": - new_j += "\n" - else: - new_j += " " - content = content.replace(j, new_j) - content = render_meta_yaml(content) - content2 = render_meta_yaml(orig_content) - meta_ = yaml.load(content) - orig_meta = yaml.load(content2) - content2 = content2.split("\n") - - change_lines = {} - - meta = Section(meta_, 0, len(content.split("\n"))) - build_section = meta["build"] - messages = collections.OrderedDict() - - requirements_section = meta["requirements"] - if not requirements_section: - return orig_content, "" - - reqbuild_section = requirements_section["build"] - if not reqbuild_section: - return orig_content, "" - - reqbuild_s = reqbuild_section.start - reqbuild_line = lines[reqbuild_s - 1] - - messages["Renamed build with host"] = True - change_lines[reqbuild_s - 1] = ( - reqbuild_line, - reqbuild_line.replace("build:", "host:"), - ) - - url = orig_meta["source"]["url"] - need_f, need_c, need_cxx, need_numpy_pin = get_compilers(url) - # need_f, need_c, need_cxx, need_numpy_pin = False, False, False, False - need_mingw_c = False - is_r_package = False - - with io.open(conda_build_config_path, "r") as fh: - config = "".join(fh) - ind = config.index("# Pinning packages") - config = config[ind:] - config = yaml.load(config) - - pinned_packages = list(config.keys()) - build_lines = [] - build_space = "" - need_boost_pin = False - python_win_matrix = False - python_dep = False - section = "build" - reqs = {"build": [], "run": []} - - # Setup requirements - for i in range(requirements_section.start, requirements_section.end + 1): - line = lines[i].strip() - if line == "run:": - section = "run" - if line.startswith("- "): - line = content2[i].strip()[2:].strip() - req = line.split(" ")[0] - reqs[section].append(req) - - section = "build" - - # Remove build stuff - for i in range(requirements_section.start, requirements_section.end + 1): - line = lines[i].strip() - if line == "run:": - section = "run" - if line.startswith("- "): - build_space = " " * (len(lines[i]) - len(lines[i].lstrip())) + "- " - line = lines[i].strip()[2:].strip() - req = line.replace("{{ ", "{{").replace(" }}", "}}").split(" ")[0] - req_rendered = ( - content2[i].strip()[2:].strip().split(" ")[0].strip() - ) - if len(req_rendered) == 0 or req_rendered not in req: - req_rendered = req - if req == "libgfortran": - need_f = True - if req == "r-base": - is_r_package = True - if req_rendered in [ - 
"toolchain", - "toolchain3", - "gcc", - "libgcc", - "libgfortran", - "vc", - "m2w64-toolchain", - "mingwpy", - "system", - "gcc-libs", - "m2w64-gcc-libs", - ]: - messages[ - "Removing {} in favour of compiler()".format(req) - ] = True - change_lines[i] = (lines[i], None) - need_c = True - if req in ["m2w64-toolchain", "mingwpy"] or ( - req != req_rendered - and req_rendered in ["toolchain", "toolchain3"] - ): - need_mingw_c = True - continue - if req_rendered == "cython" and not (need_c or need_cxx or need_f): - messages["Found cython requirement. Adding compiler"] = True - need_c = True - if ( - req - in [ - "ninja", - "jom", - "cmake", - "automake", - "autoconf", - "libtool", - "make", - "pkg-config", - "automake-wrapper", - "posix", - "m4", - ] - or req.startswith("{{p") - or req.startswith("m2-") - or ( - req_rendered - in [ - "perl", - "texlive-core", - "curl", - "openssl", - "tar", - "gzip", - "patch", - ] - and section == "build" - and req_rendered not in reqs["run"] - ) - ): - messages["Moving {} from host to build".format(req)] = True - build_lines.append(lines[i].rstrip()) - change_lines[i] = (lines[i], None) - continue - if req == "python" and "# [win]" in line: - messages[ - "Moving `python # [win]` which was used for vc matrix".format( - req - ) - ] = True - change_lines[i] = (lines[i], None) - python_win_matrix = True - continue - if req == "python": - python_dep = True - - if req.replace("-", "_") in pinned_packages or ( - req_rendered.replace("-", "_") in pinned_packages - ): - s = list(filter(None, lines[i].strip().split(" "))) - if ( - len(s) > 2 - and not s[2].startswith("#") - and i not in change_lines - ): - if ( - not req.replace("-", "_") in pinned_packages - and not ("m2w64-" + req_rendered.replace("-", "_")) - in pinned_packages - and ( - "# [not win]" not in line - and "# [unix]" not in line - ) - ): - msg = "Not sure how to remove pinnings for {}".format( - req - ) - else: - change_lines[i] = ( - lines[i], - lines[i].replace(s[2], " " * len(s[2])), - ) - msg = ( - "Removing pinnings for {} to use values from " - "conda_build_config.yaml. If you need the pin see " - "[here](https://conda-forge.org/docs/maintainer/pinning_deps.html) " - "for details.".format(req) - ) - if req == "numpy": - if s[2].startswith("1") or s[2].startswith("x.x"): - need_numpy_pin = True - if need_numpy_pin and i > reqbuild_section.end: - line = lines[i].replace(s[2], " " * len(s[2])) - msg = ( - "Pinning numpy using pin_compatible. If you need to pin numpy " - "to a specific version see " - "[here](https://conda-forge.org/docs/maintainer/" - "knowledge_base.html#linking-numpy)." 
- ) - change_lines[i] = ( - lines[i], - line.replace( - "numpy" + " " * len(s[2]), - "{{ pin_compatible('numpy') }}", - ), - ) - - messages[msg] = True - - skip_lines = [ - (i, line) - for i, line in enumerate(lines) - if i >= build_section.start - and i <= build_section.end - and line.strip().startswith("skip:") - ] - - if python_win_matrix and not python_dep: - for i, line in skip_lines: - skip_line = line.strip() - skip_line = skip_line[skip_line.find("#") :] - - if len(skip_lines) == 1 and skip_line in [ - "# [win and py36]", - "# [win and py35]", - "# [win and py>35]", - "# [win and py>=36]", - ]: - messages[ - "Removed skip for one of py35 or py36 as it's used for vc skipping" - ] = True - change_lines[i] = skip_line, None - - if len(skip_lines) == 1 and skip_line in [ - "# [win and py27]", - "# [win and py2k]", - "# [win and not py3k]", - "# [win and py<33]", - "# [win and py<34]", - "# [win and py<35]", - "# [win and not py35]", - "# [win and not py36]", - ]: - messages[ - "Removed skip for py2k and added skip for vc<14" - ] = True - change_lines[i] = ( - line, - line[: line.find("#")] + "# [win and vc<14]", - ) - - for i, line in enumerate(lines): - vc14 = "msvc_compiler: 14.0" - if line.strip().startswith(vc14): - need_c = True - messages["Removed {} and added a skip".format(vc14)] = True - change_lines[i] = ( - line, - line.replace(vc14, "skip: True # [win and vc<14]"), - ) - - features_section = build_section["features"] - remove_features_section = True - - # Remove vc features - if features_section is not None: - for i in range(features_section.start, features_section.end): - line = lines[i].strip() - if line.startswith("-"): - line = line[2:] - if line.startswith("vc"): - messages["Removing vc features"] = True - change_lines[i] = (lines[i], None) - need_c = True - elif len(line) > 0: - remove_features_section = False - - if remove_features_section: - messages["Removing features section as it is empty"] = True - change_lines[features_section.start - 1] = ( - lines[features_section.start - 1], - None, - ) - - def add_compiler(name, p_name): - if need_mingw_c: - build_lines.append( - build_space + "{{ compiler('" + name + "') }} # [unix]" - ) - build_lines.append( - build_space + "{{ compiler('m2w64_" + name + "') }} # [win]" - ) - messages[ - "Adding " + p_name + " compiler with mingw for windows" - ] = True - else: - build_lines.append(build_space + "{{ compiler('" + name + "') }}") - messages["Adding " + p_name + " compiler"] = True - - if need_f: - add_compiler("fortran", "Fortran") - if need_c: - add_compiler("c", "C") - if need_cxx: - add_compiler("cxx", "C++") - - if build_lines: - build_lines = [ - " " * (len(reqbuild_line) - len(reqbuild_line.lstrip())) + "build:" - ] + build_lines - pos = requirements_section.start - 1 - change_lines[pos] = lines[pos], lines[pos] + "\n".join(build_lines) - - if is_r_package: - messages["Adding merge_build_host: True # [win]"] = True - pos = build_section.start - 1 - change_lines[pos] = ( - lines[pos], - lines[pos] - + " " * (len(lines[pos + 1]) - len(lines[pos + 1].lstrip())) - + "merge_build_host: True # [win]", - ) - - new_lines = [] - - for i, line in enumerate(lines): - if i in change_lines: - if change_lines[i][1]: - new_lines.append(change_lines[i][1].rstrip()) - else: - new_lines.append(line.rstrip()) - - new_lines = ("\n".join(new_lines)).split("\n") - - if python_win_matrix and not python_dep: - for i, line in enumerate(new_lines): - l = line.strip() - ind = l.find("#") - if ind != -1: - select = l[ind:] - for x in [ - 
"py27", - "py<33", - "py<34", - "py<35", - "py2k", - "py<=27", - "py==27", - ]: - if x in select: - new_lines[i] = line.replace(x, "vc<14") - messages[ - "Changed {} in selector {} to vc<14".format( - x, select - ) - ] = True - for x in [ - "py3k", - "py>27", - "py>=35", - "py>34", - "py>=34", - "py>=33", - "py>33", - ]: - if x in select: - new_lines[i] = line.replace(x, "vc==14") - messages[ - "Changed {} in selector {} to vc==14".format( - x, select - ) - ] = True - - return "\n".join(new_lines) + "\n", "\n".join(messages.keys()) - - -if __name__ == "__main__": - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("recipe", help="Path to recipe meta.yaml") - parser.add_argument("output", help="Path where updated recipe is stored") - parser.add_argument("config", help="Path to conda_build_config.yaml file") - args = parser.parse_args() - new_meta, msg = update_cb3(args.recipe, args.config) - with io.open(args.output, "w") as fh: - fh.write(new_meta) - print(msg) diff --git a/news/1752-readme-channels.rst b/news/1752-readme-channels.rst new file mode 100644 index 000000000..921a60cf9 --- /dev/null +++ b/news/1752-readme-channels.rst @@ -0,0 +1,23 @@ +**Added:** + +* + +**Changed:** + +* Use the channels defined in `conda_build_config.yaml` (instead of those in `conda-forge.yml`) to render `README.md`. (#897 via #752) + +**Deprecated:** + +* + +**Removed:** + +* + +**Fixed:** + +* + +**Security:** + +* diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py index 7c95718e9..7c8f80ef4 100644 --- a/tests/test_configure_feedstock.py +++ b/tests/test_configure_feedstock.py @@ -750,9 +750,7 @@ def test_conda_forge_yaml_empty(config_yaml): ), ) - assert ["conda-forge", "main"] in load_forge_config()["channels"][ - "targets" - ] + assert load_forge_config()["recipe_dir"] == "recipe" os.unlink(os.path.join(config_yaml, "conda-forge.yml")) with pytest.raises(RuntimeError): @@ -760,9 +758,7 @@ def test_conda_forge_yaml_empty(config_yaml): with open(os.path.join(config_yaml, "conda-forge.yml"), "w"): pass - assert ["conda-forge", "main"] in load_forge_config()["channels"][ - "targets" - ] + assert load_forge_config()["recipe_dir"] == "recipe" def test_noarch_platforms_bad_yaml(config_yaml): @@ -804,9 +800,7 @@ def test_forge_yml_alt_path(config_yaml): with pytest.raises(RuntimeError): load_forge_config(None) - assert ["conda-forge", "main"] in load_forge_config(forge_yml_alt)[ - "channels" - ]["targets"] + assert load_forge_config(forge_yml_alt)["recipe_dir"] == "recipe" def test_cos7_env_render(py_recipe, jinja_env): @@ -917,12 +911,35 @@ def test_remote_ci_setup(config_yaml): ), ) cfg = load_forge_config() + with open(os.path.join(config_yaml, "conda-forge.yml")) as fp: + unmodified = fp.read() + with open(os.path.join(config_yaml, "conda-forge.yml"), "a+") as fp: - fp.write("remote_ci_setup: ['conda-forge-ci-setup=3', 'py-lief<0.12']") + fp.write( + "remote_ci_setup: ['conda-forge-ci-setup=3', 'py-lief<0.12']\n" + ) + fp.write("conda_install_tool: conda\n") cfg = load_forge_config() # pylief was quoted due to < assert cfg["remote_ci_setup"] == [ "conda-forge-ci-setup=3", '"py-lief<0.12"', ] - assert cfg["remote_ci_setup_names"] == ["conda-forge-ci-setup", "py-lief"] + assert cfg["remote_ci_setup_update"] == ["conda-forge-ci-setup", "py-lief"] + + with open(os.path.join(config_yaml, "conda-forge.yml"), "w") as fp: + fp.write(unmodified + "\n") + fp.write( + "remote_ci_setup: ['conda-forge-ci-setup=3', 'py-lief<0.12']\n" + ) + 
fp.write("conda_install_tool: mamba\n") + cfg = load_forge_config() + # with conda_install_tool = mamba, we don't strip constraints + assert ( + cfg["remote_ci_setup"] + == cfg["remote_ci_setup_update"] + == [ + "conda-forge-ci-setup=3", + '"py-lief<0.12"', + ] + )