From 0af2b5937342b1de1c723628ccfd18382a156ac5 Mon Sep 17 00:00:00 2001
From: Yannik Tausch
Date: Thu, 25 Jul 2024 17:51:34 +0200
Subject: [PATCH 1/6] add ruff config

---
 .pre-commit-config.yaml | 5 +++++
 README.md | 1 +
 news/1919-ruff.rst | 23 +++++++++++++++++++++++
 pyproject.toml | 23 +++++++++++++++++++++++
 4 files changed, 52 insertions(+)
 create mode 100644 news/1919-ruff.rst

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5e53566e6..6fc2751c9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -10,6 +10,11 @@ repos:
   rev: 24.4.2
   hooks:
   - id: black
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  rev: v0.5.5
+  hooks:
+  - id: ruff
+    args: [ --fix ]
 
 ci:
   autofix_commit_msg: |
diff --git a/README.md b/README.md
index 798d24161..c3ade1f72 100644
--- a/README.md
+++ b/README.md
@@ -12,6 +12,7 @@ Overview
 
 [![tests](https://github.com/conda-forge/conda-smithy/workflows/tests/badge.svg)](https://github.com/conda-forge/conda-smithy/actions?query=workflow%3Atests)
 [![Coverage Status](https://coveralls.io/repos/github/conda-forge/conda-smithy/badge.svg?branch=main)](https://coveralls.io/github/conda-forge/conda-smithy?branch=main)
+[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
 [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black)
 
 Installation
diff --git a/news/1919-ruff.rst b/news/1919-ruff.rst
new file mode 100644
index 000000000..491282f39
--- /dev/null
+++ b/news/1919-ruff.rst
@@ -0,0 +1,23 @@
+**Added:**
+
+*
+
+**Changed:**
+
+* Introduced ``ruff`` as a pre-commit linter. (#1919)
+
+**Deprecated:**
+
+*
+
+**Removed:**
+
+*
+
+**Fixed:**
+
+*
+
+**Security:**
+
+*
diff --git a/pyproject.toml b/pyproject.toml
index 682e11cbd..23cd8a77a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,6 +33,29 @@ write_to_template = "__version__ = '{version}'"
 [tool.black]
 line-length = 79
 
+[tool.ruff.lint]
+ignore = [
+    "E501",  # https://docs.astral.sh/ruff/faq/#is-the-ruff-linter-compatible-with-black
+]
+select = [
+    # pyflakes
+    "F",
+    # pycodestyle
+    "E", "W",
+    # isort
+    "I",
+    # pep8-naming
+    "N",
+    # pyupgrade
+    "UP",
+    # flake8-logging-format
+    "G",
+    # flake8-tidy-imports
+    "TID",
+]
+isort.known-first-party = ["conda_smithy"]
+flake8-tidy-imports.ban-relative-imports = "all"
+
 [tool.pytest.ini_options]
 norecursedirs = [
     "tests/recipes",
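The rule families selected above account for most of the mechanical churn in the next patch: pyupgrade ("UP") rewrites `.format()` calls as f-strings, drops redundant `open()` modes, and replaces aliases like `IOError` with `OSError`. As a rough sketch of the kind of rewrite `ruff check --fix` performs under this configuration (the function and messages below are illustrative, not taken from the codebase):

    # Before: flagged by UP015 (redundant "r" mode), UP024 (OSError alias),
    # and UP032 (f-string conversion)
    def read_token(path):
        try:
            with open(path, "r") as fh:
                token = fh.read().strip()
        except (IOError, ValueError):
            raise RuntimeError("No token in {}".format(path))
        return token

    # After `ruff check --fix` with the configuration above
    def read_token(path):
        try:
            with open(path) as fh:
                token = fh.read().strip()
        except (OSError, ValueError):
            raise RuntimeError(f"No token in {path}")
        return token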
From b05b906895b1ae82b31f3a3014e2c4603668cd96 Mon Sep 17 00:00:00 2001
From: Yannik Tausch
Date: Thu, 25 Jul 2024 17:53:21 +0200
Subject: [PATCH 2/6] apply safe ruff fixes

---
 bootstrap-obvious-ci-and-miniconda.py | 11 +-
 conda_smithy/anaconda_token_rotation.py | 21 +--
 conda_smithy/azure_ci_utils.py | 10 +-
 conda_smithy/ci_register.py | 126 ++++++--------
 conda_smithy/ci_skeleton.py | 2 +-
 conda_smithy/cirun_utils.py | 6 +-
 conda_smithy/cli.py | 67 ++++----
 conda_smithy/configure_feedstock.py | 149 ++++++-----------
 .../feedstock_content/build-locally.py | 4 +-
 conda_smithy/feedstock_io.py | 9 +-
 conda_smithy/feedstock_tokens.py | 38 ++---
 conda_smithy/feedstocks.py | 15 +-
 conda_smithy/github.py | 41 ++---
 conda_smithy/lint_recipe.py | 62 +++----
 conda_smithy/linter/hints.py | 13 +-
 conda_smithy/linter/lints.py | 84 ++++------
 conda_smithy/linter/utils.py | 8 +-
 conda_smithy/schema.py | 3 +-
 conda_smithy/utils.py | 19 +--
 conda_smithy/validate_schema.py | 5 +-
 conda_smithy/variant_algebra.py | 15 +-
 tests/conftest.py | 17 +-
 tests/test_anaconda_token_rotation.py | 1 -
 tests/test_ci_skeleton.py | 3 -
 tests/test_cli.py | 10 +-
 tests/test_condaforge_config_schema.py | 3 +-
 tests/test_configure_feedstock.py | 2 +-
 tests/test_feedstock_io.py | 23 +--
 tests/test_feedstock_tokens.py | 21 ++-
 tests/test_lint_recipe.py | 156 ++++++++---------
 tests/test_variant_algebra.py | 4 +-
 31 files changed, 393 insertions(+), 555 deletions(-)
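A large share of the hunks below are pure import re-grouping driven by the isort ("I") rules: one block each for standard-library, third-party, and first-party imports (`conda_smithy`, declared via `isort.known-first-party` above), each alphabetized. A minimal sketch, with the module mix chosen only for illustration:

    # Before: mixed grouping, flagged by I001
    import requests
    import os
    from conda_smithy.utils import get_yaml

    # After: stdlib, then third-party, then first-party, separated by blank lines
    import os

    import requests

    from conda_smithy.utils import get_yaml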
diff --git a/bootstrap-obvious-ci-and-miniconda.py b/bootstrap-obvious-ci-and-miniconda.py
index 617f5fc2a..752027104 100644
--- a/bootstrap-obvious-ci-and-miniconda.py
+++ b/bootstrap-obvious-ci-and-miniconda.py
@@ -10,7 +10,6 @@
 import os
 import platform
 import subprocess
-import sys
 
 try:
     from urllib.request import urlretrieve
@@ -41,7 +40,7 @@ def miniconda_url(
         "Windows": "Windows",
     }
     if target_system not in system_to_miniconda_os:
-        raise ValueError("Unexpected system {!r}.".format(target_system))
+        raise ValueError(f"Unexpected system {target_system!r}.")
     template_values["OS"] = system_to_miniconda_os[target_system]
 
     miniconda_os_ext = {"Linux": "sh", "MacOSX": "sh", "Windows": "exe"}
@@ -49,7 +48,7 @@ def miniconda_url(
 
     if major_py_version not in ["3"]:
         raise ValueError(
-            "Unexpected major Python version {!r}.".format(major_py_version)
+            f"Unexpected major Python version {major_py_version!r}."
         )
     template_values["major_py_version"] = major_py_version
 
@@ -86,14 +85,14 @@ def main(
         raise ValueError("Unsupported operating system.")
 
     if not os.path.exists(basename):
-        print("Downloading from {}".format(URL))
+        print(f"Downloading from {URL}")
         urlretrieve(URL, basename)
     else:
-        print("Using cached version of {}".format(URL))
+        print(f"Using cached version of {URL}")
 
     # Install with powershell.
     if os.path.exists(target_dir):
-        raise IOError("Installation directory already exists")
+        raise OSError("Installation directory already exists")
 
     subprocess.check_call(cmd)
     if not os.path.isdir(target_dir):
diff --git a/conda_smithy/anaconda_token_rotation.py b/conda_smithy/anaconda_token_rotation.py
index 980425cdf..4991a6d86 100644
--- a/conda_smithy/anaconda_token_rotation.py
+++ b/conda_smithy/anaconda_token_rotation.py
@@ -12,9 +12,9 @@
 import os
 import sys
 from contextlib import redirect_stderr, redirect_stdout
-from github import Github
 
 import requests
+from github import Github
 
 from .utils import update_conda_forge_config
 
@@ -285,8 +285,8 @@ def rotate_token_in_travis(
     """update the binstar token in travis."""
     from .ci_register import (
         travis_endpoint,
-        travis_headers,
         travis_get_repo_info,
+        travis_headers,
     )
 
     headers = travis_headers()
     repo_id = repo_info["id"]
 
     r = requests.get(
-        "{}/repo/{repo_id}/env_vars".format(travis_endpoint, repo_id=repo_id),
+        f"{travis_endpoint}/repo/{repo_id}/env_vars",
         headers=headers,
     )
     if r.status_code != 200:
@@ -316,20 +316,14 @@ def rotate_token_in_travis(
 
     if have_binstar_token:
         r = requests.patch(
-            "{}/repo/{repo_id}/env_var/{ev_id}".format(
-                travis_endpoint,
-                repo_id=repo_id,
-                ev_id=ev_id,
-            ),
+            f"{travis_endpoint}/repo/{repo_id}/env_var/{ev_id}",
             headers=headers,
             json=data,
         )
         r.raise_for_status()
     else:
         r = requests.post(
-            "{}/repo/{repo_id}/env_vars".format(
-                travis_endpoint, repo_id=repo_id
-            ),
+            f"{travis_endpoint}/repo/{repo_id}/env_vars",
             headers=headers,
             json=data,
         )
@@ -361,9 +355,10 @@ def rotate_token_in_travis(
 
 
 def rotate_token_in_azure(user, project, binstar_token, token_name):
+    from vsts.build.v4_1.models import BuildDefinitionVariable
+
     from .azure_ci_utils import build_client, get_default_build_definition
     from .azure_ci_utils import default_config as config
-    from vsts.build.v4_1.models import BuildDefinitionVariable
 
     bclient = build_client()
 
@@ -411,7 +406,7 @@ def rotate_token_in_azure(user, project, binstar_token, token_name):
 def rotate_token_in_appveyor(feedstock_config_path, binstar_token, token_name):
     from .ci_register import appveyor_token
 
-    headers = {"Authorization": "Bearer {}".format(appveyor_token)}
+    headers = {"Authorization": f"Bearer {appveyor_token}"}
     url = "https://ci.appveyor.com/api/account/encrypt"
     response = requests.post(
         url, headers=headers, data={"plainValue": binstar_token}
diff --git a/conda_smithy/azure_ci_utils.py b/conda_smithy/azure_ci_utils.py
index a10a3d6f5..469f0e4ec 100644
--- a/conda_smithy/azure_ci_utils.py
+++ b/conda_smithy/azure_ci_utils.py
@@ -17,6 +17,7 @@
 from vsts.task_agent.v4_0.models import TaskAgentQueue
 from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient
 from vsts.vss_connection import VssConnection
+
 from .azure_defaults import AZURE_DEFAULT_ORG, AZURE_DEFAULT_PROJECT_NAME
 
 
@@ -40,13 +41,11 @@ def __init__(
         )
 
         try:
-            with open(
-                os.path.expanduser("~/.conda-smithy/azure.token"), "r"
-            ) as fh:
+            with open(os.path.expanduser("~/.conda-smithy/azure.token")) as fh:
                 self.token = fh.read().strip()
             if not self.token:
                 raise ValueError()
-        except (IOError, ValueError):
+        except (OSError, ValueError):
             print(
                 "No azure token. Create a token and\n"
                 "put it in ~/.conda-smithy/azure.token"
@@ -133,12 +132,13 @@ def get_default_build_definition(
     config: AzureConfig = default_config,
     **kwargs,
 ):
+    import inspect
+
     from vsts.build.v4_1.models import (
         BuildDefinition,
         BuildRepository,
     )
     from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient
-    import inspect
 
     aclient = TaskAgentClient(config.instance_base_url, config.credentials)
diff --git a/conda_smithy/ci_register.py b/conda_smithy/ci_register.py
index b6a2117ed..5ea6a081b 100755
--- a/conda_smithy/ci_register.py
+++ b/conda_smithy/ci_register.py
@@ -1,46 +1,46 @@
 #!/usr/bin/env python
 import os
-import requests
-import time
 import sys
+import time
+
+import requests
 
 from . import github
 from .utils import update_conda_forge_config
 
-
 # https://circleci.com/docs/api#add-environment-variable
 # curl -X POST --header "Content-Type: application/json" -d '{"name":"foo", "value":"bar"}'
 # https://circleci.com/api/v1/project/:username/:project/envvar?circle-token=:token
 try:
-    with open(os.path.expanduser("~/.conda-smithy/circle.token"), "r") as fh:
+    with open(os.path.expanduser("~/.conda-smithy/circle.token")) as fh:
         circle_token = fh.read().strip()
     if not circle_token:
         raise ValueError()
-except (IOError, ValueError):
+except (OSError, ValueError):
     print(
         "No circle token. Create a token at https://circleci.com/account/api and\n"
         "put it in ~/.conda-smithy/circle.token"
     )
 
 try:
-    with open(os.path.expanduser("~/.conda-smithy/appveyor.token"), "r") as fh:
+    with open(os.path.expanduser("~/.conda-smithy/appveyor.token")) as fh:
         appveyor_token = fh.read().strip()
     if not appveyor_token:
         raise ValueError()
-except (IOError, ValueError):
+except (OSError, ValueError):
     print(
         "No appveyor token. Create a token at https://ci.appveyor.com/api-token and\n"
         "Put one in ~/.conda-smithy/appveyor.token"
     )
 
 try:
-    with open(os.path.expanduser("~/.conda-smithy/drone.token"), "r") as fh:
+    with open(os.path.expanduser("~/.conda-smithy/drone.token")) as fh:
         drone_token = fh.read().strip()
     if not drone_token:
         raise ValueError()
-except (IOError, ValueError):
+except (OSError, ValueError):
     print(
         "No drone token. Create a token at https://cloud.drone.io/account and\n"
         "Put one in ~/.conda-smithy/drone.token"
@@ -50,13 +50,11 @@
     anaconda_token = os.environ["BINSTAR_TOKEN"]
 except KeyError:
     try:
-        with open(
-            os.path.expanduser("~/.conda-smithy/anaconda.token"), "r"
-        ) as fh:
+        with open(os.path.expanduser("~/.conda-smithy/anaconda.token")) as fh:
             anaconda_token = fh.read().strip()
         if not anaconda_token:
             raise ValueError()
-    except (IOError, ValueError):
+    except (OSError, ValueError):
         print(
             "No anaconda token. Create a token via\n"
             '  anaconda auth --create --name conda-smithy --scopes "repos conda api"\n'
@@ -93,18 +91,18 @@ def travis_headers():
     }
     travis_token = os.path.expanduser("~/.conda-smithy/travis.token")
     try:
-        with open(travis_token, "r") as fh:
+        with open(travis_token) as fh:
             token = fh.read().strip()
         if not token:
             raise ValueError
-    except (IOError, ValueError):
+    except (OSError, ValueError):
         # We generally want the V3 API, but can currently only auth with V2:
         # https://github.com/travis-ci/travis-ci/issues/9273#issuecomment-370474214
         v2_headers = headers.copy()
         v2_headers["Accept"] = "application/vnd.travis-ci.2+json"
         del v2_headers["Travis-API-Version"]
 
-        url = "{}/auth/github".format(travis_endpoint)
+        url = f"{travis_endpoint}/auth/github"
         data = {"github_token": github.gh_token()}
         response = requests.post(url, json=data, headers=v2_headers)
         if response.status_code != 201:
@@ -114,7 +112,7 @@ def travis_headers():
             fh.write(token)
         # TODO: Set the permissions on the file.
 
-    headers["Authorization"] = "token {}".format(token)
+    headers["Authorization"] = f"token {token}"
     return headers
 
 
@@ -197,7 +195,7 @@ def add_project_to_circle(user, project):
     # timing out once we had too many repos, so now the approach is simply "add it always".
 
     url = url_template.format(
-        component="{}/{}/follow".format(user, project).lower(),
+        component=f"{user}/{project}/follow".lower(),
         token=circle_token,
     )
     response = requests.post(url, headers={})
@@ -208,7 +206,7 @@ def add_project_to_circle(user, project):
     # Note, here we are using a non-public part of the API and may change
     # Enable building PRs from forks
     url = url_template.format(
-        component="{}/{}/settings".format(user, project).lower(),
+        component=f"{user}/{project}/settings".lower(),
         token=circle_token,
     )
     # Disable CircleCI secrets in builds of forked PRs explicitly.
@@ -226,27 +224,21 @@ def add_project_to_circle(user, project):
     if response.status_code != 200:
         response.raise_for_status()
 
-    print(" * {}/{} enabled on CircleCI".format(user, project))
+    print(f" * {user}/{project} enabled on CircleCI")
 
 
 def add_project_to_azure(user, project):
     from . import azure_ci_utils
 
     if azure_ci_utils.repo_registered(user, project):
-        print(
-            " * {}/{} already enabled on azure pipelines".format(user, project)
-        )
+        print(f" * {user}/{project} already enabled on azure pipelines")
     else:
         azure_ci_utils.register_repo(user, project)
-        print(
-            " * {}/{} has been enabled on azure pipelines".format(
-                user, project
-            )
-        )
+        print(f" * {user}/{project} has been enabled on azure pipelines")
 
 
 def add_project_to_appveyor(user, project):
-    headers = {"Authorization": "Bearer {}".format(appveyor_token)}
+    headers = {"Authorization": f"Bearer {appveyor_token}"}
     url = "https://ci.appveyor.com/api/projects"
 
     response = requests.get(url, headers=headers)
@@ -254,22 +246,22 @@ def add_project_to_appveyor(user, project):
         response.raise_for_status()
 
     repos = [repo["repositoryName"].lower() for repo in response.json()]
-    if "{}/{}".format(user, project).lower() in repos:
-        print(" * {}/{} already enabled on appveyor".format(user, project))
+    if f"{user}/{project}".lower() in repos:
+        print(f" * {user}/{project} already enabled on appveyor")
     else:
         data = {
             "repositoryProvider": "gitHub",
-            "repositoryName": "{}/{}".format(user, project),
+            "repositoryName": f"{user}/{project}",
         }
         response = requests.post(url, headers=headers, data=data)
         if response.status_code != 201:
             response.raise_for_status()
-        print(" * {}/{} has been enabled on appveyor".format(user, project))
+        print(f" * {user}/{project} has been enabled on appveyor")
 
 
 def appveyor_encrypt_binstar_token(feedstock_config_path, user, project):
     anaconda_token = _get_anaconda_token()
-    headers = {"Authorization": "Bearer {}".format(appveyor_token)}
+    headers = {"Authorization": f"Bearer {appveyor_token}"}
     url = "https://ci.appveyor.com/api/account/encrypt"
     response = requests.post(
         url, headers=headers, data={"plainValue": anaconda_token}
@@ -285,14 +277,12 @@ def appveyor_configure(user, project):
     """Configure appveyor so that it skips building if there is no appveyor.yml present."""
-    headers = {"Authorization": "Bearer {}".format(appveyor_token)}
+    headers = {"Authorization": f"Bearer {appveyor_token}"}
     # I have reasons to believe this is all AppVeyor is doing to the API URL.
     if project.startswith("_"):
         project = project[1:]
     project = project.replace("_", "-").replace(".", "-")
-    url = "https://ci.appveyor.com/api/projects/{}/{}/settings".format(
-        user, project
-    )
+    url = f"https://ci.appveyor.com/api/projects/{user}/{project}/settings"
     response = requests.get(url, headers=headers)
     if response.status_code != 200:
         raise ValueError(response)
@@ -305,10 +295,7 @@ def appveyor_configure(user, project):
     ):
         if not settings[required_setting]:
             print(
-                "{: <30}: Current setting for {} = {}."
-                "".format(
-                    project, required_setting, settings[required_setting]
-                )
+                f"{project: <30}: Current setting for {required_setting} = {settings[required_setting]}."
             )
             settings[required_setting] = True
 
@@ -320,7 +307,7 @@ def appveyor_configure(user, project):
 
 def travis_wait_until_synced(ignore=False):
     headers = travis_headers()
-    is_sync_url = "{}/user".format(travis_endpoint)
+    is_sync_url = f"{travis_endpoint}/user"
     for _ in range(20):
         response = requests.get(is_sync_url, headers=headers)
         content = response.json()
@@ -349,9 +336,7 @@ def travis_repo_writable(repo_info):
 
 def travis_get_repo_info(user, project, show_error=False):
     headers = travis_headers()
-    url = "{}/repo/{user}%2F{project}".format(
-        travis_endpoint, user=user, project=project
-    )
+    url = f"{travis_endpoint}/repo/{user}%2F{project}"
     response = requests.get(url, headers=headers)
     try:
         response.raise_for_status()
@@ -414,21 +399,21 @@ def add_project_to_travis(user, project):
         raise RuntimeError(msg.format(user, project))
 
     if repo_info["active"] is True:
-        print(" * {}/{} already enabled on travis-ci".format(user, project))
+        print(f" * {user}/{project} already enabled on travis-ci")
     else:
         repo_id = repo_info["id"]
-        url = "{}/repo/{}/activate".format(travis_endpoint, repo_id)
+        url = f"{travis_endpoint}/repo/{repo_id}/activate"
         response = requests.post(url, headers=headers)
         response.raise_for_status()
-        print(" * {}/{} registered on travis-ci".format(user, project))
+        print(f" * {user}/{project} registered on travis-ci")
 
 
 def travis_token_update_conda_forge_config(
     feedstock_config_path, user, project
 ):
     anaconda_token = _get_anaconda_token()
-    item = 'BINSTAR_TOKEN="{}"'.format(anaconda_token)
-    slug = "{}%2F{}".format(user, project)
+    item = f'BINSTAR_TOKEN="{anaconda_token}"'
+    slug = f"{user}%2F{project}"
 
     with update_conda_forge_config(feedstock_config_path) as code:
         code.setdefault("travis", {}).setdefault("secure", {})[
@@ -451,13 +436,12 @@ def travis_encrypt_binstar_token(repo, string_to_encrypt):
     # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
     # License for the specific language governing permissions and limitations
     # under the License.
-    from Crypto.PublicKey import RSA
-    from Crypto.Cipher import PKCS1_v1_5
     import base64
 
-    keyurl = "https://api.travis-ci.com/repo/{0}/key_pair/generated".format(
-        repo
-    )
+    from Crypto.Cipher import PKCS1_v1_5
+    from Crypto.PublicKey import RSA
+
+    keyurl = f"https://api.travis-ci.com/repo/{repo}/key_pair/generated"
     r = requests.get(keyurl, headers=travis_headers())
     r.raise_for_status()
     public_key = r.json()["public_key"]
@@ -484,20 +468,14 @@ def travis_configure(user, project):
     repo_id = repo_info["id"]
 
     if repo_info["active"] is not True:
-        raise ValueError(
-            "Repo {user}/{project} is not active on Travis CI".format(
-                user=user, project=project
-            )
-        )
+        raise ValueError(f"Repo {user}/{project} is not active on Travis CI")
 
     settings = [
         ("builds_only_with_travis_yml", True),
         ("auto_cancel_pull_requests", True),
     ]
     for name, value in settings:
-        url = "{}/repo/{repo_id}/setting/{name}".format(
-            travis_endpoint, repo_id=repo_id, name=name
-        )
+        url = f"{travis_endpoint}/repo/{repo_id}/setting/{name}"
         data = {"setting.value": value}
         response = requests.patch(url, json=data, headers=headers)
         if response.status_code != 204:
@@ -523,7 +501,7 @@ def add_token_to_travis(user, project):
     repo_id = repo_info["id"]
 
     r = requests.get(
-        "{}/repo/{repo_id}/env_vars".format(travis_endpoint, repo_id=repo_id),
+        f"{travis_endpoint}/repo/{repo_id}/env_vars",
         headers=headers,
     )
     if r.status_code != 200:
@@ -544,20 +522,14 @@ def add_token_to_travis(user, project):
 
     if have_token:
         r = requests.patch(
-            "{}/repo/{repo_id}/env_var/{ev_id}".format(
-                travis_endpoint,
-                repo_id=repo_id,
-                ev_id=ev_id,
-            ),
+            f"{travis_endpoint}/repo/{repo_id}/env_var/{ev_id}",
             headers=headers,
             json=data,
         )
         r.raise_for_status()
     else:
         r = requests.post(
-            "{}/repo/{repo_id}/env_vars".format(
-                travis_endpoint, repo_id=repo_id
-            ),
+            f"{travis_endpoint}/repo/{repo_id}/env_vars",
            headers=headers,
            json=data,
        )
@@ -585,12 +557,12 @@ def get_conda_hook_info(hook_url, events):
 def add_conda_forge_webservice_hooks(user, repo):
     if user != "conda-forge":
         print(
-            "Unable to register {}/{} for conda-linting at this time as only "
-            "conda-forge repos are supported.".format(user, repo)
+            f"Unable to register {user}/{repo} for conda-linting at this time as only "
+            "conda-forge repos are supported."
         )
 
-    headers = {"Authorization": "token {}".format(github.gh_token())}
-    url = "https://api.github.com/repos/{}/{}/hooks".format(user, repo)
+    headers = {"Authorization": f"token {github.gh_token()}"}
+    url = f"https://api.github.com/repos/{user}/{repo}/hooks"
 
     # Get the current hooks to determine if anything needs doing.
     response = requests.get(url, headers=headers)
diff --git a/conda_smithy/ci_skeleton.py b/conda_smithy/ci_skeleton.py
index a51efd0a6..f90f33446 100644
--- a/conda_smithy/ci_skeleton.py
+++ b/conda_smithy/ci_skeleton.py
@@ -40,7 +40,7 @@ def _insert_into_gitignore(
     fname = os.path.join(feedstock_directory, ".gitignore")
     print("Updating " + fname)
     if os.path.isfile(fname):
-        with open(fname, "r") as f:
+        with open(fname) as f:
             s = f.read()
         before, _, s = s.partition(prefix)
         _, _, after = s.partition(suffix)
diff --git a/conda_smithy/cirun_utils.py b/conda_smithy/cirun_utils.py
index 8b5917a5c..4f9b030aa 100644
--- a/conda_smithy/cirun_utils.py
+++ b/conda_smithy/cirun_utils.py
@@ -2,12 +2,12 @@
 See http://py.cirun.io/api.html for cirun client docs
 """
 
-import os
 from functools import lru_cache
-from typing import List, Dict, Any, Optional
+from typing import Any, Dict, List, Optional
 
 from cirun import Cirun
-from .github import gh_token, Github
+
+from .github import Github, gh_token
 
 
 @lru_cache
diff --git a/conda_smithy/cli.py b/conda_smithy/cli.py
index 5957ac1c1..73eaed5df 100644
--- a/conda_smithy/cli.py
+++ b/conda_smithy/cli.py
@@ -1,32 +1,27 @@
-import os
+import argparse
 import logging
+import os
 import subprocess
 import sys
-import time
-import argparse
-import io
 import tempfile
-
+import time
 from textwrap import dedent
-from typing import Literal, Optional, Union
+from typing import Optional, Union
 
 import conda  # noqa
 import conda_build.api
 from conda_build.metadata import MetaData
 from rattler_build_conda_compat.render import MetaData as RattlerMetaData
 from rattler_build_conda_compat.utils import has_recipe as has_rattler_recipe
+from ruamel.yaml import YAML
 
 import conda_smithy.cirun_utils
 from conda_smithy.utils import get_feedstock_name_from_meta, merge_dict
-from ruamel.yaml import YAML
 
-from . import configure_feedstock
-from . import feedstock_io
+from . import __version__, configure_feedstock, feedstock_io
 from . import lint_recipe as linter
-from . import __version__
 from .utils import CONDA_BUILD, RATTLER_BUILD
 
-
 if sys.version_info[0] == 2:
     raise Exception("Conda-smithy does not support python 2!")
@@ -71,12 +66,12 @@ def generate_feedstock_content(
         os.path.join(target_recipe_dir, "conda-forge.yml")
     )
     try:
-        with open(forge_yml_recipe, "r") as fp:
+        with open(forge_yml_recipe) as fp:
             _cfg = yaml.load(fp.read())
     except Exception:
         _cfg = {}
 
-    with open(forge_yml, "r") as fp:
+    with open(forge_yml) as fp:
         _cfg_feedstock = yaml.load(fp.read())
     merge_dict(_cfg, _cfg_feedstock)
     with feedstock_io.write_file(forge_yml) as fp:
@@ -124,11 +119,9 @@ def __init__(self, parser):
     def __call__(self, args):
         # check some error conditions
         if args.recipe_directory and not os.path.isdir(args.recipe_directory):
-            raise IOError(
+            raise OSError(
                 "The source recipe directory should be the directory of the "
-                "conda-recipe you want to build a feedstock for. Got {}".format(
-                    args.recipe_directory
-                )
+                f"conda-recipe you want to build a feedstock for. Got {args.recipe_directory}"
             )
 
         # Get some information about the source recipe.
@@ -153,9 +146,7 @@ def __call__(self, args):
         feedstock_directory = args.feedstock_directory.format(
             package=argparse.Namespace(name=meta.name())
         )
-        msg = "Initial feedstock commit with conda-smithy {}.".format(
-            __version__
-        )
+        msg = f"Initial feedstock commit with conda-smithy {__version__}."
 
         os.makedirs(feedstock_directory)
         subprocess.check_call(["git", "init"], cwd=feedstock_directory)
@@ -268,18 +259,18 @@ def __init__(self, parser):
         )
         for ci in self.ci_names:
             scp.add_argument(
-                "--without-{}".format(ci.lower()),
+                f"--without-{ci.lower()}",
                 dest=ci.lower().replace("-", "_"),
                 action="store_const",
                 const=False,
-                help="If set, {} will be not registered".format(ci),
+                help=f"If set, {ci} will be not registered",
             )
             scp.add_argument(
-                "--with-{}".format(ci.lower()),
+                f"--with-{ci.lower()}",
                 dest=ci.lower().replace("-", "_"),
                 action="store_const",
                 const=True,
-                help="If set, {} will be registered".format(ci),
+                help=f"If set, {ci} will be registered",
             )
 
         scp.add_argument(
@@ -347,7 +338,7 @@ def __call__(self, args):
             trim_skip=False,
         )[0][0]
         feedstock_name = get_feedstock_name_from_meta(meta)
-        repo = "{}-feedstock".format(feedstock_name)
+        repo = f"{feedstock_name}-feedstock"
 
         if args.feedstock_config is None:
             args.feedstock_config = default_feedstock_config_path(
@@ -358,7 +349,7 @@ def __call__(self, args):
             if getattr(args, ci.lower().replace("-", "_")) is None:
                 setattr(args, ci.lower().replace("-", "_"), args.enable_ci)
 
-        print("CI Summary for {}/{} (can take ~30s):".format(owner, repo))
+        print(f"CI Summary for {owner}/{repo} (can take ~30s):")
         if args.remove and any(
             [
                 args.azure,
@@ -676,7 +667,7 @@ def __call__(self, args):
                 )
             )
         else:
-            print("{} is in fine form".format(recipe))
+            print(f"{recipe} is in fine form")
     # Exit code 1 for some lint, 0 for no lint.
     sys.exit(int(not all_good))
@@ -804,8 +795,8 @@ def __init__(self, parser):
 
     def __call__(self, args):
         from conda_smithy.feedstock_tokens import (
-            generate_and_write_feedstock_token,
             feedstock_token_local_path,
+            generate_and_write_feedstock_token,
         )
 
         owner = args.user or args.organization
@@ -887,18 +878,18 @@ def __init__(self, parser):
         )
         for ci in self.ci_names:
             scp.add_argument(
-                "--without-{}".format(ci.lower()),
+                f"--without-{ci.lower()}",
                 dest=ci.lower().replace("-", "_"),
                 action="store_const",
                 const=False,
-                help="If set, {} will be not registered".format(ci),
+                help=f"If set, {ci} will be not registered",
             )
             scp.add_argument(
-                "--with-{}".format(ci.lower()),
+                f"--with-{ci.lower()}",
                 dest=ci.lower().replace("-", "_"),
                 action="store_const",
                 const=True,
-                help="If set, {} will be registered".format(ci),
+                help=f"If set, {ci} will be registered",
             )
 
         scp.add_argument(
@@ -915,11 +906,11 @@ def __init__(self, parser):
         )
 
     def __call__(self, args):
+        from conda_smithy.ci_register import drone_default_endpoint
         from conda_smithy.feedstock_tokens import (
-            register_feedstock_token_with_providers,
             register_feedstock_token,
+            register_feedstock_token_with_providers,
         )
-        from conda_smithy.ci_register import drone_default_endpoint
 
         drone_endpoints = args.drone_endpoints
         if drone_endpoints is None:
@@ -1030,18 +1021,18 @@ def __init__(self, parser):
         )
         for ci in self.ci_names:
             scp.add_argument(
-                "--without-{}".format(ci.lower()),
+                f"--without-{ci.lower()}",
                 dest=ci.lower().replace("-", "_"),
                 action="store_const",
                 const=False,
-                help="If set, the token on {} will be not changed.".format(ci),
+                help=f"If set, the token on {ci} will be not changed.",
             )
             scp.add_argument(
-                "--with-{}".format(ci.lower()),
+                f"--with-{ci.lower()}",
                 dest=ci.lower().replace("-", "_"),
                 action="store_const",
                 const=True,
-                help="If set, the token on {} will be changed".format(ci),
+                help=f"If set, the token on {ci} will be changed",
             )
 
         scp.add_argument(
diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py
index da3bf9264..232aec088 100644
--- a/conda_smithy/configure_feedstock.py
+++ b/conda_smithy/configure_feedstock.py
@@ -3,13 +3,12 @@
 import hashlib
 import logging
 import os
+import pprint
 import re
 import subprocess
 import sys
-import pprint
 import textwrap
 import time
-import yaml
 import warnings
 from collections import Counter, OrderedDict, namedtuple
 from copy import deepcopy
@@ -19,11 +18,12 @@
 from pathlib import Path, PurePath
 
 import requests
+import yaml
 
 try:
     from builtins import ExceptionGroup
 except ImportError:
-    from exceptiongroup import ExceptionGroup
+    pass
 
 # The `requests` lib uses `simplejson` instead of `json` when available.
 # In consequence the same JSON library must be used or the `JSONDecodeError`
@@ -34,24 +34,20 @@
 except ImportError:
     import json
 
-from conda.models.match_spec import MatchSpec
-from conda.models.version import VersionOrder
-from conda.exceptions import InvalidVersionSpec
-
 import conda_build.api
+import conda_build.conda_interface
 import conda_build.render
 import conda_build.utils
 import conda_build.variants
-import conda_build.conda_interface
-import conda_build.render
+from conda.exceptions import InvalidVersionSpec
 from conda.models.match_spec import MatchSpec
-from conda_build.metadata import get_selectors
-
-from copy import deepcopy
-
+from conda.models.version import VersionOrder
 from conda_build import __version__ as conda_build_version
+from conda_build.metadata import get_selectors
 from jinja2 import FileSystemLoader
 from jinja2.sandbox import SandboxedEnvironment
+from rattler_build_conda_compat.loader import parse_recipe_config_file
+from rattler_build_conda_compat.render import render as rattler_render
 
 from conda_smithy.feedstock_io import (
     copy_file,
@@ -60,19 +56,17 @@
     set_exe_file,
     write_file,
 )
-from conda_smithy.validate_schema import (
-    validate_json_schema,
-    CONDA_FORGE_YAML_DEFAULTS_FILE,
-)
 from conda_smithy.utils import (
+    HashableDict,
     get_feedstock_about_from_meta,
     get_feedstock_name_from_meta,
-    HashableDict,
+)
+from conda_smithy.validate_schema import (
+    CONDA_FORGE_YAML_DEFAULTS_FILE,
+    validate_json_schema,
 )
 
 from . import __version__
-from rattler_build_conda_compat.render import render as rattler_render
-from rattler_build_conda_compat.loader import parse_recipe_config_file
 from .utils import RATTLER_BUILD
 
 conda_forge_content = os.path.abspath(os.path.dirname(__file__))
@@ -339,9 +333,7 @@ def _get_used_key_values_by_input_order(
         for key in all_used_vars
         if key in squished_input_variants
     }
-    logger.debug(
-        "initial used_key_values {}".format(pprint.pformat(used_key_values))
-    )
+    logger.debug(f"initial used_key_values {pprint.pformat(used_key_values)}")
 
     # we want remove any used key values not in used variants and make sure they follow the
    # input order
@@ -355,8 +347,8 @@ def _get_used_key_values_by_input_order(
             zip(*[squished_input_variants[k] for k in keyset])
         )
         zipped_keys |= set(keyset)
-    logger.debug("zipped_keys {}".format(pprint.pformat(zipped_keys)))
-    logger.debug("zipped_tuples {}".format(pprint.pformat(zipped_tuples)))
+    logger.debug(f"zipped_keys {pprint.pformat(zipped_keys)}")
+    logger.debug(f"zipped_tuples {pprint.pformat(zipped_tuples)}")
 
     for keyset, tuples in zipped_tuples.items():
         # for each set of zipped keys from squished_input_variants,
@@ -386,20 +378,16 @@ def _get_used_key_values_by_input_order(
                 for tup in tuples
             ]
         )
-        logger.debug("used_keyset {}".format(pprint.pformat(used_keyset)))
-        logger.debug(
-            "used_keyset_inds {}".format(pprint.pformat(used_keyset_inds))
-        )
-        logger.debug("used_tuples {}".format(pprint.pformat(used_tuples)))
+        logger.debug(f"used_keyset {pprint.pformat(used_keyset)}")
+        logger.debug(f"used_keyset_inds {pprint.pformat(used_keyset_inds)}")
+        logger.debug(f"used_tuples {pprint.pformat(used_tuples)}")
 
         # this is the set of tuples that we want to keep, but need to be reordered
         used_tuples_to_be_reordered = set(
             list(zip(*[squished_used_variants[k] for k in used_keyset]))
         )
         logger.debug(
-            "used_tuples_to_be_reordered {}".format(
-                pprint.pformat(used_tuples_to_be_reordered)
-            )
+            f"used_tuples_to_be_reordered {pprint.pformat(used_tuples_to_be_reordered)}"
         )
 
         # we double check the logic above by looking to ensure everything in
@@ -418,9 +406,7 @@ def _get_used_key_values_by_input_order(
         final_used_tuples = tuple(
             [tup for tup in used_tuples if tup in used_tuples_to_be_reordered]
         )
-        logger.debug(
-            "final_used_tuples {}".format(pprint.pformat(final_used_tuples))
-        )
+        logger.debug(f"final_used_tuples {pprint.pformat(final_used_tuples)}")
 
         # now we reconstruct the list of values per key and replace in used_key_values
         # we keep only keys in all_used_vars
@@ -434,9 +420,7 @@ def _get_used_key_values_by_input_order(
             used_key_values[k] = v
 
     logger.debug(
-        "post input reorder used_key_values {}".format(
-            pprint.pformat(used_key_values)
-        )
+        f"post input reorder used_key_values {pprint.pformat(used_key_values)}"
     )
 
     return used_key_values, zipped_keys
@@ -559,7 +543,7 @@ def _collapse_subpackage_variants(
     cbc_path = os.path.join(list_of_metas[0].path, "conda_build_config.yaml")
     has_macdt = False
     if os.path.exists(cbc_path):
-        with open(cbc_path, "r") as f:
+        with open(cbc_path) as f:
             lines = f.readlines()
         if any(re.match(r"^\s*MACOSX_DEPLOYMENT_TARGET:", x) for x in lines):
             has_macdt = True
@@ -574,9 +558,7 @@ def _collapse_subpackage_variants(
     if "target_platform" in all_used_vars:
         top_level_loop_vars.add("target_platform")
 
-    logger.debug(
-        "initial all_used_vars {}".format(pprint.pformat(all_used_vars))
-    )
+    logger.debug(f"initial all_used_vars {pprint.pformat(all_used_vars)}")
 
     # this is the initial collection of all variants before we discard any. "Squishing"
     # them is necessary because the input form is already broken out into one matrix
@@ -591,22 +573,16 @@ def _collapse_subpackage_variants(
         conda_build.variants.list_of_dicts_to_dict_of_lists(list(all_variants))
     )
     logger.debug(
-        "squished_input_variants {}".format(
-            pprint.pformat(squished_input_variants)
-        )
+        f"squished_input_variants {pprint.pformat(squished_input_variants)}"
     )
     logger.debug(
-        "squished_used_variants {}".format(
-            pprint.pformat(squished_used_variants)
-        )
+        f"squished_used_variants {pprint.pformat(squished_used_variants)}"
     )
 
     # these are variables that only occur in the top level, and thus won't show up as loops in the
     # above collection of all variants. We need to transfer them from the input_variants.
     preserve_top_level_loops = set(top_level_loop_vars) - set(all_used_vars)
-    logger.debug(
-        "preserve_top_level_loops {}".format(preserve_top_level_loops)
-    )
+    logger.debug(f"preserve_top_level_loops {preserve_top_level_loops}")
 
     # Add in some variables that should always be preserved
     always_keep_keys = {
@@ -636,13 +612,9 @@ def _collapse_subpackage_variants(
     all_used_vars.update(always_keep_keys)
     all_used_vars.update(top_level_vars)
 
-    logger.debug(
-        "final all_used_vars {}".format(pprint.pformat(all_used_vars))
-    )
-    logger.debug("top_level_vars {}".format(pprint.pformat(top_level_vars)))
-    logger.debug(
-        "top_level_loop_vars {}".format(pprint.pformat(top_level_loop_vars))
-    )
+    logger.debug(f"final all_used_vars {pprint.pformat(all_used_vars)}")
+    logger.debug(f"top_level_vars {pprint.pformat(top_level_vars)}")
+    logger.debug(f"top_level_loop_vars {pprint.pformat(top_level_loop_vars)}")
 
     used_key_values, used_zipped_vars = _get_used_key_values_by_input_order(
         squished_input_variants,
@@ -672,9 +644,7 @@ def _collapse_subpackage_variants(
     _trim_unused_zip_keys(used_key_values)
     _trim_unused_pin_run_as_build(used_key_values)
 
-    logger.debug(
-        "final used_key_values {}".format(pprint.pformat(used_key_values))
-    )
+    logger.debug(f"final used_key_values {pprint.pformat(used_key_values)}")
 
     return (
         break_up_top_level_values(top_level_loop_vars, used_key_values),
@@ -741,9 +711,7 @@ def dump_subspace_config_files(
         arch,
         forge_config,
     )
-    logger.debug(
-        "collapsed subspace config files: {}".format(pprint.pformat(configs))
-    )
+    logger.debug(f"collapsed subspace config files: {pprint.pformat(configs)}")
 
     # get rid of the special object notation in the yaml file for objects that we dump
     yaml.add_representer(set, yaml.representer.SafeRepresenter.represent_list)
@@ -752,7 +720,7 @@ def dump_subspace_config_files(
     )
     yaml.add_representer(OrderedDict, _yaml_represent_ordereddict)
 
-    platform_arch = "{}-{}".format(platform, arch)
+    platform_arch = f"{platform}-{arch}"
 
     result = []
     for config in configs:
@@ -774,9 +742,7 @@ def dump_subspace_config_files(
             os.makedirs(out_folder)
 
         config = finalize_config(config, platform, arch, forge_config)
-        logger.debug(
-            "finalized config file: {}".format(pprint.pformat(config))
-        )
+        logger.debug(f"finalized config file: {pprint.pformat(config)}")
 
         with write_file(out_path) as f:
             yaml.dump(config, f, default_flow_style=False)
@@ -868,7 +834,7 @@ def migrate_combined_spec(combined_spec, forge_dir, config, forge_config):
     from .variant_algebra import parse_variant, variant_add
 
     migration_variants = [
-        (fn, parse_variant(open(fn, "r").read(), config=config))
+        (fn, parse_variant(open(fn).read(), config=config))
         for fn in migrations
     ]
 
@@ -912,10 +878,9 @@ def _conda_build_api_render_for_smithy(
     """
 
     from conda.exceptions import NoPackagesFoundError
-
+    from conda_build.config import get_or_merge_config
     from conda_build.exceptions import DependencyNeedsBuildingError
     from conda_build.render import finalize_metadata, render_recipe
-    from conda_build.config import get_or_merge_config
 
     config = get_or_merge_config(config, **kwargs)
@@ -1442,7 +1407,7 @@ def _get_platforms_of_provider(provider, forge_config):
             # Allow config to disable package uploads on a per provider basis,
             # default to True if not set explicitly set to False by config entry.
             upload_packages.append(
-                (forge_config.get(provider, {}).get("upload_packages", True))
+                forge_config.get(provider, {}).get("upload_packages", True)
             )
         elif (
             provider == "azure"
@@ -1537,7 +1502,7 @@ def _render_template_exe_files(
         if target_fname in get_common_scripts(forge_dir) and os.path.exists(
             target_fname
         ):
-            with open(target_fname, "r") as fh:
+            with open(target_fname) as fh:
                 old_file_contents = fh.read()
             if old_file_contents != new_file_contents:
                 import difflib
@@ -1556,9 +1521,7 @@ def _render_template_exe_files(
                     )
                 )
                 raise RuntimeError(
-                    "Same file {} is rendered twice with different contents".format(
-                        target_fname
-                    )
+                    f"Same file {target_fname} is rendered twice with different contents"
                 )
         with write_file(target_fname) as fh:
             fh.write(new_file_contents)
@@ -2139,12 +2102,10 @@ def render_README(jinja_env, forge_config, forge_dir, render_info=None):
         # Works if the Azure CI is public
         try:
             azure_build_id_from_public(forge_config)
-        except (IndexError, IOError) as err:
+        except (OSError, IndexError) as err:
             # We don't want to command to fail if requesting the build_id fails.
             logger.warning(
-                "Azure build_id can't be retrieved using the Azure token. Exception: {}".format(
-                    err
-                )
+                f"Azure build_id can't be retrieved using the Azure token. Exception: {err}"
             )
         except json.decoder.JSONDecodeError:
             azure_build_id_from_token(forge_config)
@@ -2211,7 +2172,7 @@ def _update_dict_within_dict(items, config):
 
 def _read_forge_config(forge_dir, forge_yml=None):
     # Load default values from the conda-forge.yml file
-    with open(CONDA_FORGE_YAML_DEFAULTS_FILE, "r") as fh:
+    with open(CONDA_FORGE_YAML_DEFAULTS_FILE) as fh:
         default_config = yaml.safe_load(fh.read())
 
     if forge_yml is None:
@@ -2226,7 +2187,7 @@ def _read_forge_config(forge_dir, forge_yml=None):
             " feedstock root if it's the latter."
         )
 
-    with open(forge_yml, "r") as fh:
+    with open(forge_yml) as fh:
         documents = list(yaml.safe_load_all(fh))
         file_config = (documents or [None])[0] or {}
 
@@ -2462,27 +2423,23 @@ def get_most_recent_version(name, include_broken=False):
 def check_version_uptodate(name, installed_version, error_on_warn):
     most_recent_version = get_most_recent_version(name).version
     if installed_version is None:
-        msg = "{} is not installed in conda-smithy's environment.".format(name)
+        msg = f"{name} is not installed in conda-smithy's environment."
     elif VersionOrder(installed_version) < VersionOrder(most_recent_version):
-        msg = "{} version ({}) is out-of-date ({}) in conda-smithy's environment.".format(
-            name, installed_version, most_recent_version
-        )
+        msg = f"{name} version ({installed_version}) is out-of-date ({most_recent_version}) in conda-smithy's environment."
     else:
         return
     if error_on_warn:
-        raise RuntimeError("{} Exiting.".format(msg))
+        raise RuntimeError(f"{msg} Exiting.")
     else:
         logger.info(msg)
 
 
 def commit_changes(forge_file_directory, commit, cs_ver, cfp_ver, cb_ver):
     if cfp_ver:
-        msg = "Re-rendered with conda-build {}, conda-smithy {}, and conda-forge-pinning {}".format(
-            cb_ver, cs_ver, cfp_ver
-        )
+        msg = f"Re-rendered with conda-build {cb_ver}, conda-smithy {cs_ver}, and conda-forge-pinning {cfp_ver}"
     else:
-        msg = "Re-rendered with conda-build {} and conda-smithy {}".format(
-            cb_ver, cs_ver
+        msg = (
+            f"Re-rendered with conda-build {cb_ver} and conda-smithy {cs_ver}"
         )
     logger.info(msg)
@@ -2494,7 +2451,7 @@ def commit_changes(forge_file_directory, commit, cs_ver, cfp_ver, cb_ver):
     )
     if has_staged_changes:
         if commit:
-            git_args = ["git", "commit", "-m", "MNT: {}".format(msg)]
+            git_args = ["git", "commit", "-m", f"MNT: {msg}"]
             if commit == "edit":
                 git_args += ["--edit", "--status", "--verbose"]
             subprocess.check_call(git_args, cwd=forge_file_directory)
@@ -2502,7 +2459,7 @@ def commit_changes(forge_file_directory, commit, cs_ver, cfp_ver, cb_ver):
         else:
             logger.info(
                 "You can commit the changes with:\n\n"
-                '    git commit -m "MNT: {}"\n'.format(msg)
+                f'    git commit -m "MNT: {msg}"\n'
             )
             logger.info("These changes need to be pushed to github!\n")
     else:
@@ -2656,7 +2613,7 @@ def get_migrations_in_dir(migrations_root):
     """
     res = {}
     for fn in glob.glob(os.path.join(migrations_root, "*.yaml")):
-        with open(fn, "r") as f:
+        with open(fn) as f:
             contents = f.read()
         migration_yaml = (
             yaml.load(contents, Loader=yaml.loader.BaseLoader) or {}
diff --git a/conda_smithy/feedstock_content/build-locally.py b/conda_smithy/feedstock_content/build-locally.py
index e0d408d07..d78427b5b 100755
--- a/conda_smithy/feedstock_content/build-locally.py
+++ b/conda_smithy/feedstock_content/build-locally.py
@@ -3,11 +3,11 @@
 # This file has been generated by conda-smithy in order to build the recipe
 # locally.
 #
-import os
 import glob
+import os
+import platform
 import subprocess
 from argparse import ArgumentParser
-import platform
 
 
 def setup_environment(ns):
diff --git a/conda_smithy/feedstock_io.py b/conda_smithy/feedstock_io.py
index f7ad6f90a..e3fbecf68 100644
--- a/conda_smithy/feedstock_io.py
+++ b/conda_smithy/feedstock_io.py
@@ -1,8 +1,7 @@
-from contextlib import contextmanager
-import io
 import os
 import shutil
 import stat
+from contextlib import contextmanager
 
 
 def get_repo(path, search_parent_directories=True):
@@ -52,7 +51,7 @@ def write_file(filename):
     if dirname and not os.path.exists(dirname):
         os.makedirs(dirname)
 
-    with io.open(filename, "w", encoding="utf-8", newline="\n") as fh:
+    with open(filename, "w", encoding="utf-8", newline="\n") as fh:
         yield fh
 
     repo = get_repo(filename)
@@ -97,8 +96,8 @@ def copy_file(src, dst):
     If the file fails to be decoded with utf-8, we revert to a regular copy.
     """
     try:
-        with io.open(src, "r", encoding="utf-8") as fh_src:
-            with io.open(dst, "w", encoding="utf-8", newline="\n") as fh_dst:
+        with open(src, encoding="utf-8") as fh_src:
+            with open(dst, "w", encoding="utf-8", newline="\n") as fh_dst:
                 for line in fh_src:
                     fh_dst.write(line)
     except UnicodeDecodeError:
diff --git a/conda_smithy/feedstock_tokens.py b/conda_smithy/feedstock_tokens.py
index 66e1c0042..201a4a5d3 100644
--- a/conda_smithy/feedstock_tokens.py
+++ b/conda_smithy/feedstock_tokens.py
@@ -23,13 +23,13 @@
 then uploaded to the token registry (a repo on GitHub).
 """
 
-import tempfile
-import os
+import hmac
 import json
-import time
+import os
 import secrets
-import hmac
-from contextlib import redirect_stderr, redirect_stdout, contextmanager
+import tempfile
+import time
+from contextlib import contextmanager, redirect_stderr, redirect_stdout
 
 import git
 import requests
@@ -143,7 +143,7 @@ def read_feedstock_token(user, project, provider=None):
     if not os.path.exists(user_token_pth):
         err_msg = "No token found in '%s'" % user_token_pth
     else:
-        with open(user_token_pth, "r") as fp:
+        with open(user_token_pth) as fp:
             feedstock_token = fp.read().strip()
         if not feedstock_token:
             err_msg = "Empty token found in '%s'" % user_token_pth
@@ -185,7 +185,7 @@ def feedstock_token_exists(user, project, token_repo, provider=None):
             )
 
             if os.path.exists(token_file):
-                with open(token_file, "r") as fp:
+                with open(token_file) as fp:
                     token_data = json.load(fp)
 
                 if "tokens" not in token_data:
@@ -259,7 +259,7 @@ def is_valid_feedstock_token(
             )
 
             if os.path.exists(token_file):
-                with open(token_file, "r") as fp:
+                with open(token_file) as fp:
                     token_data = json.load(fp)
 
                 if "tokens" not in token_data:
@@ -351,7 +351,7 @@ def register_feedstock_token(user, project, token_repo, provider=None):
 
             # append the token if needed
             if os.path.exists(token_file):
-                with open(token_file, "r") as fp:
+                with open(token_file) as fp:
                     token_data = json.load(fp)
                 if "tokens" not in token_data:
                     token_data = {"tokens": [token_data]}
@@ -616,8 +616,8 @@ def add_feedstock_token_to_travis(user, project, feedstock_token, clobber):
     """Add the FEEDSTOCK_TOKEN to travis."""
     from .ci_register import (
         travis_endpoint,
-        travis_headers,
         travis_get_repo_info,
+        travis_headers,
     )
 
     headers = travis_headers()
     repo_id = repo_info["id"]
 
     r = requests.get(
-        "{}/repo/{repo_id}/env_vars".format(travis_endpoint, repo_id=repo_id),
+        f"{travis_endpoint}/repo/{repo_id}/env_vars",
         headers=headers,
     )
     if r.status_code != 200:
@@ -647,20 +647,14 @@ def add_feedstock_token_to_travis(user, project, feedstock_token, clobber):
 
     if have_feedstock_token and clobber:
         r = requests.patch(
-            "{}/repo/{repo_id}/env_var/{ev_id}".format(
-                travis_endpoint,
-                repo_id=repo_id,
-                ev_id=ev_id,
-            ),
+            f"{travis_endpoint}/repo/{repo_id}/env_var/{ev_id}",
             headers=headers,
             json=data,
         )
         r.raise_for_status()
     elif not have_feedstock_token:
         r = requests.post(
-            "{}/repo/{repo_id}/env_vars".format(
-                travis_endpoint, repo_id=repo_id
-            ),
+            f"{travis_endpoint}/repo/{repo_id}/env_vars",
             headers=headers,
             json=data,
         )
@@ -669,9 +663,10 @@ def add_feedstock_token_to_travis(user, project, feedstock_token, clobber):
 
 
 def add_feedstock_token_to_azure(user, project, feedstock_token, clobber):
+    from vsts.build.v4_1.models import BuildDefinitionVariable
+
     from .azure_ci_utils import build_client, get_default_build_definition
     from .azure_ci_utils import default_config as config
-    from vsts.build.v4_1.models import BuildDefinitionVariable
 
     bclient = build_client()
@@ -724,9 +719,10 @@ def add_feedstock_token_to_azure(user, project, feedstock_token, clobber):
 def add_feedstock_token_to_github_actions(
     user, project, feedstock_token, clobber
 ):
-    from .github import gh_token
     from github import Github
 
+    from .github import gh_token
+
     gh = Github(gh_token())
     repo = gh.get_repo(f"{user}/{project}")
diff --git a/conda_smithy/feedstocks.py b/conda_smithy/feedstocks.py
index 45e71ff75..2f095923a 100644
--- a/conda_smithy/feedstocks.py
+++ b/conda_smithy/feedstocks.py
@@ -4,11 +4,11 @@
 import os
 
 import git
-from git import Repo, GitCommandError
+from git import GitCommandError, Repo
 from github import Github
 
 from . import github as smithy_github
-from .utils import render_meta_yaml, get_yaml
+from .utils import get_yaml, render_meta_yaml
 
 
 def feedstock_repos(gh_organization="conda-forge"):
@@ -55,9 +55,7 @@ def fetch_feedstock(repo_dir):
         try:
             remote.fetch()
         except GitCommandError:
-            print(
-                "Failed to fetch {} from {}.".format(remote.name, remote.url)
-            )
+            print(f"Failed to fetch {remote.name} from {remote.url}.")
 
 
 def fetch_feedstocks(feedstock_directory):
@@ -88,7 +86,7 @@ def clone_feedstock(feedstock_gh_repo, feedstocks_dir):
 
     clone_directory = os.path.join(feedstocks_dir, repo.name)
     if not os.path.exists(clone_directory):
-        print("Cloning {}".format(repo.name))
+        print(f"Cloning {repo.name}")
         clone = Repo.clone_from(repo.clone_url, clone_directory)
         clone.delete_remote("origin")
     clone = Repo(clone_directory)
@@ -217,7 +215,7 @@ def feedstocks_yaml(
     organization,
     feedstocks_directory,
     use_local=False,
-    **feedstocks_repo_kwargs
+    **feedstocks_repo_kwargs,
 ):
     """
     Generator of (feedstock, ref, content, yaml) for each upstream git ref of each feedstock.
@@ -258,7 +256,6 @@ def feedstocks_yaml(
                         os.path.join(
                             feedstock.directory, "recipe", "meta.yaml"
                         ),
-                        "r",
                     ) as fh:
                         content = "".join(fh.readlines())
                 else:
@@ -268,7 +265,7 @@ def feedstocks_yaml(
                 yaml = yaml_meta(content)
             except:
                 # Add a helpful comment so we know what we are working with and reraise.
-                print("Failed on {}".format(feedstock.package))
+                print(f"Failed on {feedstock.package}")
                 raise
 
             yield (feedstock, ref, content, yaml)
diff --git a/conda_smithy/github.py b/conda_smithy/github.py
index c8c381208..a1d3398bf 100644
--- a/conda_smithy/github.py
+++ b/conda_smithy/github.py
@@ -1,27 +1,24 @@
 import os
 from random import choice
 
+import conda_build.api
+import github
 from git import Repo
-
 from github import Github
 from github.GithubException import GithubException
 from github.Organization import Organization
 from github.Team import Team
-import github
-import conda_build.api
 
 from conda_smithy.utils import get_feedstock_name_from_meta
 
 
 def gh_token():
     try:
-        with open(
-            os.path.expanduser("~/.conda-smithy/github.token"), "r"
-        ) as fh:
+        with open(os.path.expanduser("~/.conda-smithy/github.token")) as fh:
             token = fh.read().strip()
         if not token:
             raise ValueError()
-    except (IOError, ValueError):
+    except (OSError, ValueError):
         msg = (
             "No github token. Go to https://github.com/settings/tokens/new and generate\n"
             "a token with repo access. Put it in ~/.conda-smithy/github.token"
@@ -68,17 +65,17 @@ def has_in_members(team, member):
 def get_cached_team(org, team_name, description=""):
     cached_file = os.path.expanduser(
-        "~/.conda-smithy/{}-{}-team".format(org.login, team_name)
+        f"~/.conda-smithy/{org.login}-{team_name}-team"
     )
     try:
-        with open(cached_file, "r") as fh:
+        with open(cached_file) as fh:
             team_id = int(fh.read().strip())
             return org.get_team(team_id)
-    except IOError:
+    except OSError:
         pass
 
     try:
-        repo = org.get_repo("{}-feedstock".format(team_name))
+        repo = org.get_repo(f"{team_name}-feedstock")
         team = next(
             (team for team in repo.get_teams() if team.name == team_name), None
         )
@@ -94,7 +91,7 @@ def get_cached_team(org, team_name, description=""):
         if description:
             team = create_team(org, team_name, description, [])
         else:
-            raise RuntimeError("Couldn't find team {}".format(team_name))
+            raise RuntimeError(f"Couldn't find team {team_name}")
 
     with open(cached_file, "w") as fh:
         fh.write(str(team.id))
@@ -124,17 +121,15 @@ def create_github_repo(args):
         # Use the organization provided.
         user_or_org = gh.get_organization(args.organization)
 
-    repo_name = "{}-feedstock".format(feedstock_name)
+    repo_name = f"{feedstock_name}-feedstock"
     try:
         gh_repo = user_or_org.create_repo(
             repo_name,
             has_wiki=False,
             private=args.private,
-            description="A conda-smithy repository for {}.".format(
-                feedstock_name
-            ),
+            description=f"A conda-smithy repository for {feedstock_name}.",
         )
-        print("Created {} on github".format(gh_repo.full_name))
+        print(f"Created {gh_repo.full_name} on github")
     except GithubException as gh_except:
         if (
             gh_except.data.get("errors", [{}])[0].get("message", "")
@@ -152,10 +147,8 @@ def create_github_repo(args):
         existing_remote = repo.remotes[remote_name]
         if existing_remote.url != gh_repo.ssh_url:
             print(
-                "Remote {} already exists, and doesn't point to {} "
-                "(it points to {}).".format(
-                    remote_name, gh_repo.ssh_url, existing_remote.url
-                )
+                f"Remote {remote_name} already exists, and doesn't point to {gh_repo.ssh_url} "
+                f"(it points to {existing_remote.url})."
             )
     else:
         repo.create_remote(remote_name, gh_repo.ssh_url)
@@ -183,7 +176,7 @@ def accept_all_repository_invitations(gh):
 
 def remove_from_project(gh, org, project):
     user = gh.get_user()
-    repo = gh.get_repo("{}/{}".format(org, project))
+    repo = gh.get_repo(f"{org}/{project}")
     repo.remove_from_collaborators(user.login)
 
 
@@ -228,14 +221,14 @@ def configure_github_team(meta, gh_repo, org, feedstock_name, remove=True):
         fs_team = create_team(
             org,
             team_name,
-            "The {} {} contributors!".format(choice(superlative), team_name),
+            f"The {choice(superlative)} {team_name} contributors!",
         )
         fs_team.add_to_repos(gh_repo)
 
     current_maintainers = set([e.login.lower() for e in fs_team.get_members()])
 
     # Get the all-members team
-    description = "All of the awesome {} contributors!".format(org.login)
+    description = f"All of the awesome {org.login} contributors!"
     all_members_team = get_cached_team(org, "all-members", description)
     new_org_members = set()
diff --git a/conda_smithy/lint_recipe.py b/conda_smithy/lint_recipe.py
index bccfcd1bb..c2e68ebce 100644
--- a/conda_smithy/lint_recipe.py
+++ b/conda_smithy/lint_recipe.py
@@ -1,7 +1,15 @@
-# -*- coding: utf-8 -*-
-
+import json
+import os
+import sys
 from collections.abc import Mapping
+from glob import glob
+from inspect import cleandoc
+from textwrap import indent
 
+import github
+import requests
+
+from conda_smithy.linter import rattler_linter
 from conda_smithy.linter.hints import (
     hint_check_spdx,
     hint_pip_usage,
@@ -40,26 +48,11 @@
     lint_usage_of_legacy_patterns,
 )
 from conda_smithy.linter.utils import (
-    CONDA_BUILD_TOOL,
     EXPECTED_SECTION_ORDER,
-    RATTLER_BUILD_TOOL,
     find_local_config_file,
     get_section,
 )
-from conda_smithy.linter import rattler_linter
-
-import io
-import json
-import os
-import requests
-import sys
-from glob import glob
-from inspect import cleandoc
-from textwrap import indent
-
-import github
-
 if sys.version_info[:2] < (3, 11):
     import tomli as tomllib
 else:
@@ -68,9 +61,9 @@
 from conda_build.metadata import (
     ensure_valid_license_family,
 )
-from conda_smithy.validate_schema import validate_json_schema
-from conda_smithy.utils import render_meta_yaml, get_yaml
 
+from conda_smithy.utils import get_yaml, render_meta_yaml
+from conda_smithy.validate_schema import validate_json_schema
 
 NEEDED_FAMILIES = ["gpl", "bsd", "mit", "apache", "psf"]
@@ -87,7 +80,7 @@ def lintify_forge_yaml(recipe_dir=None) -> (list, list):
         )
     )
     if forge_yaml_filename:
-        with open(forge_yaml_filename[0], "r") as fh:
+        with open(forge_yaml_filename[0]) as fh:
             forge_yaml = get_yaml().load(fh)
     else:
         forge_yaml = {}
@@ -144,9 +137,7 @@ def lintify_meta_yaml(
 
     for section in major_sections:
         if section not in expected_keys:
-            lints.append(
-                "The top level meta key {} is unexpected".format(section)
-            )
+            lints.append(f"The top level meta key {section} is unexpected")
             unexpected_sections.append(section)
 
     for section in unexpected_sections:
@@ -224,7 +215,7 @@ def lintify_meta_yaml(
     )
 
     if conda_build_config_filename:
-        with open(conda_build_config_filename, "r") as fh:
+        with open(conda_build_config_filename) as fh:
             conda_build_config_keys = set(get_yaml().load(fh).keys())
     else:
         conda_build_config_keys = set()
@@ -234,7 +225,7 @@ def lintify_meta_yaml(
     )
 
     if forge_yaml_filename:
-        with open(forge_yaml_filename, "r") as fh:
+        with open(forge_yaml_filename) as fh:
             forge_yaml = get_yaml().load(fh)
     else:
         forge_yaml = {}
@@ -343,7 +334,7 @@ def run_conda_forge_specific(meta, recipe_dir, lints, hints):
         ]
     ):
         try:
-            if cf.get_repo("{}-feedstock".format(name)):
+            if cf.get_repo(f"{name}-feedstock"):
                 existing_recipe_name = name
                 feedstock_exists = True
                 break
@@ -356,15 +347,12 @@ def run_conda_forge_specific(meta, recipe_dir, lints, hints):
         lints.append("Feedstock with the same name exists in conda-forge.")
     elif feedstock_exists:
         hints.append(
-            "Feedstock with the name {} exists in conda-forge. Is it the same as this package ({})?".format(
-                existing_recipe_name,
-                recipe_name,
-            )
+            f"Feedstock with the name {existing_recipe_name} exists in conda-forge. Is it the same as this package ({recipe_name})?"
        )

    bio = gh.get_user("bioconda").get_repo("bioconda-recipes")
    try:
-        bio.get_dir_contents("recipes/{}".format(recipe_name))
+        bio.get_dir_contents(f"recipes/{recipe_name}")
    except github.UnknownObjectException:
        pass
    else:
@@ -403,9 +391,7 @@ def run_conda_forge_specific(meta, recipe_dir, lints, hints):
         try:
             gh.get_user(maintainer)
         except github.UnknownObjectException:
-            lints.append(
-                'Recipe maintainer "{}" does not exist'.format(maintainer)
-            )
+            lints.append(f'Recipe maintainer "{maintainer}" does not exist')
 
     # 3: if the recipe dir is inside the example dir
     if recipe_dir is not None and "recipes/example/" in recipe_dir:
@@ -540,9 +526,9 @@ def main(recipe_dir, conda_forge=False, return_hints=False):
     recipe_dir = os.path.abspath(recipe_dir)
     recipe_meta = os.path.join(recipe_dir, "meta.yaml")
     if not os.path.exists(recipe_dir):
-        raise IOError("Feedstock has no recipe/meta.yaml.")
+        raise OSError("Feedstock has no recipe/meta.yaml.")
 
-    with io.open(recipe_meta, "rt") as fh:
+    with open(recipe_meta) as fh:
         content = render_meta_yaml("".join(fh))
         meta = get_yaml().load(content)
 
@@ -571,13 +557,13 @@ def main(recipe_dir, conda_forge=False, return_hints=False):
             all_pass = False
             messages.append(
                 "\nFor **{}**:\n\n{}".format(
-                    rel_path, "\n".join("* {}".format(lint) for lint in lints)
+                    rel_path, "\n".join(f"* {lint}" for lint in lints)
                 )
             )
         if hints:
             messages.append(
                 "\nFor **{}**:\n\n{}".format(
-                    rel_path, "\n".join("* {}".format(hint) for hint in hints)
+                    rel_path, "\n".join(f"* {hint}" for hint in hints)
                 )
             )
diff --git a/conda_smithy/linter/hints.py b/conda_smithy/linter/hints.py
index 54be017e0..54ca4f5c3 100644
--- a/conda_smithy/linter/hints.py
+++ b/conda_smithy/linter/hints.py
@@ -1,10 +1,9 @@
-from glob import glob
-import io
 import os
 import re
 import shutil
 import subprocess
 import sys
+from glob import glob
 
 from conda_smithy.linter.utils import find_local_config_file, is_selector_line
 from conda_smithy.utils import get_yaml
@@ -33,7 +32,7 @@ def hint_suggest_noarch(
         and ("pip" in build_reqs)
         and (is_staged_recipes or not conda_forge)
     ):
-        with io.open(meta_fname, "rt") as fh:
+        with open(meta_fname) as fh:
             in_runreqs = False
             no_arch_possible = True
             for line in fh:
@@ -66,7 +65,7 @@ def hint_shellcheck_usage(recipe_dir, hints):
     shell_scripts = glob(os.path.join(recipe_dir, "*.sh"))
     forge_yaml = find_local_config_file(recipe_dir, "conda-forge.yml")
     if shell_scripts and forge_yaml:
-        with open(forge_yaml, "r") as fh:
+        with open(forge_yaml) as fh:
             code = get_yaml().load(fh)
             shellcheck_enabled = code.get("shellcheck", {}).get(
                 "enabled", shellcheck_enabled
@@ -144,13 +143,11 @@ def hint_check_spdx(about_section, hints):
         if not licenseref_regex.match(license):
             filtered_licenses.append(license)
 
-    with open(
-        os.path.join(os.path.dirname(__file__), "licenses.txt"), "r"
-    ) as f:
+    with open(os.path.join(os.path.dirname(__file__), "licenses.txt")) as f:
         expected_licenses = f.readlines()
         expected_licenses = set([l.strip() for l in expected_licenses])
     with open(
-        os.path.join(os.path.dirname(__file__), "license_exceptions.txt"), "r"
+        os.path.join(os.path.dirname(__file__), "license_exceptions.txt")
     ) as f:
         expected_exceptions = f.readlines()
         expected_exceptions = set([l.strip() for l in expected_exceptions])
diff --git a/conda_smithy/linter/lints.py b/conda_smithy/linter/lints.py
index e5c413739..05c36d751 100644
--- a/conda_smithy/linter/lints.py
+++ b/conda_smithy/linter/lints.py
@@ -1,33 +1,27 @@
-from collections.abc import Sequence
-import re
-
 import fnmatch
-import io
 import itertools
 import os
+import re
+from collections.abc import Sequence
+from typing import List, Optional
 
-
-from typing import Optional, List
-
+from conda.models.version import VersionOrder
 from ruamel.yaml import CommentedSeq
 
-
+from conda_smithy.linter import rattler_linter
 from conda_smithy.linter.utils import (
+    EXPECTED_SECTION_ORDER,
     FIELDS,
     JINJA_VAR_PAT,
     REQUIREMENTS_ORDER,
     TEST_FILES,
     TEST_KEYS,
+    get_section,
     is_selector_line,
     jinja_lines,
-    EXPECTED_SECTION_ORDER,
-    get_section,
     selector_lines,
 )
 from conda_smithy.utils import get_yaml
-from conda_smithy.linter import rattler_linter
-
-from conda.models.version import VersionOrder
 
 
 def lint_section_order(
@@ -50,7 +44,7 @@ def lint_section_order(
     section_order_sorted_str = "[" + section_order_sorted_str + "]"
     lints.append(
         "The top level meta keys are in an unexpected order. "
-        "Expecting {}.".format(section_order_sorted_str)
+        f"Expecting {section_order_sorted_str}."
     )
@@ -126,7 +120,7 @@ def lint_selectors_should_be_in_tidy_form(recipe_fname, lints, hints):
     # Look out for py27, py35 selectors; we prefer py==35
     pyXY_selectors_pat = re.compile(r".+#\s*\[.*?(py\d{2,3}).*\]")
     if os.path.exists(recipe_fname):
-        with io.open(recipe_fname, "rt") as fh:
+        with open(recipe_fname) as fh:
             for selector_line, line_number in selector_lines(fh):
                 if not good_selectors_pat.match(selector_line):
                     bad_selectors.append(selector_line)
@@ -145,20 +139,20 @@ def lint_selectors_should_be_in_tidy_form(recipe_fname, lints, hints):
         lints.append(
             "Selectors are suggested to take a "
             "``#[]`` form."
-            " See lines {}".format(bad_lines)
+            f" See lines {bad_lines}"
         )
     if pyXY_selectors_hint:
         hints.append(
             "Old-style Python selectors (py27, py34, py35, py36) are "
             "deprecated. Instead, consider using the int ``py``. For "
-            "example: ``# [py>=36]``. See lines {}".format(pyXY_lines_hint)
+            f"example: ``# [py>=36]``. See lines {pyXY_lines_hint}"
         )
     if pyXY_selectors_lint:
         lints.append(
             "Old-style Python selectors (py27, py35, etc) are only available "
             "for Python 2.7, 3.4, 3.5, and 3.6. Please use explicit comparisons "
             "with the integer ``py``, e.g. ``# [py==37]`` or ``# [py>=37]``. "
-            "See lines {}".format(pyXY_lines_lint)
+            f"See lines {pyXY_lines_lint}"
         )
@@ -211,16 +205,16 @@ def lint_license_should_not_have_license(about_section, lints):
 
 def lint_should_be_empty_line(meta_fname, lints):
     if os.path.exists(meta_fname):
-        with io.open(meta_fname, "r") as f:
+        with open(meta_fname) as f:
             lines = f.read().split("\n")
         # Count the number of empty lines from the end of the file
         empty_lines = itertools.takewhile(lambda x: x == "", reversed(lines))
         end_empty_lines_count = len(list(empty_lines))
         if end_empty_lines_count > 1:
             lints.append(
-                "There are {} too many lines. "
+                f"There are {end_empty_lines_count - 1} too many lines. "
                 "There should be one empty line at the end of the "
-                "file.".format(end_empty_lines_count - 1)
+                "file."
             )
         elif end_empty_lines_count < 1:
             lints.append(
@@ -271,17 +265,17 @@ def lint_subheaders(major_sections, meta, lints):
                 and subsection not in expected_subsections
             ):
                 lints.append(
-                    "The {} section contained an unexpected "
-                    "subsection name. {} is not a valid subsection"
-                    " name.".format(section, subsection)
+                    f"The {section} section contained an unexpected "
+                    f"subsection name. {subsection} is not a valid subsection"
+                    " name."
                 )
             elif section == "source" or section == "outputs":
                 for source_subsection in subsection:
                     if source_subsection not in expected_subsections:
                         lints.append(
-                            "The {} section contained an unexpected "
-                            "subsection name. {} is not a valid subsection"
-                            " name.".format(section, source_subsection)
+                            f"The {section} section contained an unexpected "
+                            f"subsection name. {source_subsection} is not a valid subsection"
+                            " name."
                         )
@@ -291,9 +285,7 @@ def lint_noarch(noarch_value: Optional[str], lints):
     if noarch_value not in valid_noarch_values:
         valid_noarch_str = "`, `".join(valid_noarch_values)
         lints.append(
-            "Invalid `noarch` value `{}`. Should be one of `{}`.".format(
-                noarch_value, valid_noarch_str
-            )
+            f"Invalid `noarch` value `{noarch_value}`. Should be one of `{valid_noarch_str}`."
         )
@@ -302,7 +294,7 @@ def lint_noarch_and_runtime_dependencies(
 ):
     if noarch_value is not None and os.path.exists(meta_fname):
         noarch_platforms = len(forge_yaml.get("noarch_platforms", [])) > 1
-        with io.open(meta_fname, "rt") as fh:
+        with open(meta_fname) as fh:
             in_runreqs = False
             for line in fh:
                 line_s = line.strip()
@@ -314,7 +306,7 @@ def lint_noarch_and_runtime_dependencies(
                     lints.append(
                         "`noarch` packages can't have skips with selectors. If "
                         "the selectors are necessary, please remove "
-                        "`noarch: {}`.".format(noarch_value)
+                        f"`noarch: {noarch_value}`."
                     )
                     break
                 if in_runreqs:
@@ -329,7 +321,7 @@ def lint_noarch_and_runtime_dependencies(
                         lints.append(
                             "`noarch` packages can't have selectors. If "
                             "the selectors are necessary, please remove "
-                            "`noarch: {}`.".format(noarch_value)
+                            f"`noarch: {noarch_value}`."
                         )
                         break
@@ -340,9 +332,7 @@ def lint_package_version(package_section, lints):
     try:
         VersionOrder(ver)
     except:
-        lints.append(
-            "Package version {} doesn't match conda spec".format(ver)
-        )
+        lints.append(f"Package version {ver} doesn't match conda spec")
@@ -351,7 +341,7 @@ def lint_jinja_variables_definitions(meta_fname, lints):
     # Good Jinja2 variable definitions look like "{% set .+ = .+ %}"
     good_jinja_pat = re.compile(r"\s*\{%\s(set)\s[^\s]+\s=\s[^\s]+\s%\}")
     if os.path.exists(meta_fname):
-        with io.open(meta_fname, "rt") as fh:
+        with open(meta_fname) as fh:
             for jinja_line, line_number in jinja_lines(fh):
                 if not good_jinja_pat.match(jinja_line):
                     bad_jinja.append(jinja_line)
@@ -359,10 +349,10 @@ def lint_jinja_variables_definitions(meta_fname, lints):
     if bad_jinja:
         lints.append(
             "Jinja2 variable definitions are suggested to "
-            "take a ``{{%set"
+            "take a ``{%set"
             "="
-            "%}}`` form. See lines "
-            "{}".format(bad_lines)
+            "%}`` form. See lines "
+            f"{bad_lines}"
         )
@@ -446,9 +436,7 @@ def lint_non_noarch_builds(
             ]
             if filtered_host_reqs and not filtered_run_reqs:
                 lints.append(
-                    "If {0} is a host requirement, it should be a run requirement.".format(
-                        str(language)
-                    )
+                    f"If {str(language)} is a host requirement, it should be a run requirement."
                 )
             for reqs in [filtered_host_reqs, filtered_run_reqs]:
                 if str(language) in reqs:
@@ -459,9 +447,7 @@ def lint_non_noarch_builds(
                         "<"
                     ):
                         lints.append(
-                            "Non noarch packages should have {0} requirement without any version constraints.".format(
-                                str(language)
-                            )
+                            f"Non noarch packages should have {str(language)} requirement without any version constraints."
) @@ -469,7 +455,7 @@ def lint_jinja_var_references(meta_fname, hints): bad_vars = [] bad_lines = [] if os.path.exists(meta_fname): - with io.open(meta_fname, "rt") as fh: + with open(meta_fname) as fh: for i, line in enumerate(fh.readlines()): for m in JINJA_VAR_PAT.finditer(line): if m.group(1) is not None: @@ -550,7 +536,7 @@ def lint_check_usage_of_whls(meta_fname, noarch_value, lints, hints): pure_python_wheel_re = re.compile(r".*[:-]\s+(http.*-none-any\.whl)\s+.*") wheel_re = re.compile(r".*[:-]\s+(http.*\.whl)\s+.*") if os.path.exists(meta_fname): - with open(meta_fname, "rt") as f: + with open(meta_fname) as f: for line in f: if match := pure_python_wheel_re.search(line): pure_python_wheel_urls.append(match.group(1)) @@ -675,7 +661,7 @@ def lint_stdlib( # stdlib issues in CBC cbc_lines = [] if conda_build_config_filename: - with open(conda_build_config_filename, "r") as fh: + with open(conda_build_config_filename) as fh: cbc_lines = fh.readlines() # filter on osx-relevant lines diff --git a/conda_smithy/linter/utils.py b/conda_smithy/linter/utils.py index ac5ebbd62..723481388 100644 --- a/conda_smithy/linter/utils.py +++ b/conda_smithy/linter/utils.py @@ -1,8 +1,8 @@ -from collections.abc import Sequence import copy -from glob import glob import os import re +from collections.abc import Sequence +from glob import glob from typing import Mapping from conda_build.metadata import ( @@ -61,8 +61,8 @@ def get_meta_section(parent, name, lints): section = parent.get(name, {}) if not isinstance(section, Mapping): lints.append( - 'The "{}" section was expected to be a dictionary, but ' - "got a {}.".format(name, type(section).__name__) + f'The "{name}" section was expected to be a dictionary, but ' + f"got a {type(section).__name__}." ) section = {} return section diff --git a/conda_smithy/schema.py b/conda_smithy/schema.py index 66c54c289..e092b83ac 100644 --- a/conda_smithy/schema.py +++ b/conda_smithy/schema.py @@ -7,9 +7,8 @@ from typing import Any, Dict, List, Literal, Optional, Union import yaml -from pydantic import BaseModel, Field, create_model, ConfigDict - from conda.base.constants import KNOWN_SUBDIRS +from pydantic import BaseModel, ConfigDict, Field, create_model try: from enum import StrEnum diff --git a/conda_smithy/utils.py b/conda_smithy/utils.py index 9faf3bdc1..f0c48f815 100644 --- a/conda_smithy/utils.py +++ b/conda_smithy/utils.py @@ -1,16 +1,15 @@ +import datetime import json +import os import shutil import tempfile -import io -import jinja2 -import jinja2.sandbox -import datetime import time -import os -from pathlib import Path from collections import defaultdict from contextlib import contextmanager +from pathlib import Path +import jinja2 +import jinja2.sandbox import ruamel.yaml RATTLER_BUILD = "rattler-build" @@ -37,7 +36,7 @@ def get_feedstock_about_from_meta(meta) -> dict: recipe_meta = os.path.join( meta.meta["extra"]["parent_recipe"]["path"], "meta.yaml" ) - with io.open(recipe_meta, "rt") as fh: + with open(recipe_meta) as fh: content = render_meta_yaml("".join(fh)) meta = get_yaml().load(content) return dict(meta["about"]) @@ -69,10 +68,10 @@ def __str__(self): return self._undefined_name def __getattr__(self, name): - return "{}.{}".format(self, name) + return f"{self}.{name}" def __getitem__(self, name): - return '{}["{}"]'.format(self, name) + return f'{self}["{name}"]' class MockOS(dict): @@ -128,7 +127,7 @@ def update_conda_forge_config(forge_yaml): ... 
cfg['foo'] = 'bar' """ if os.path.exists(forge_yaml): - with open(forge_yaml, "r") as fh: + with open(forge_yaml) as fh: code = get_yaml().load(fh) else: code = {} diff --git a/conda_smithy/validate_schema.py b/conda_smithy/validate_schema.py index a7c8fbcb8..6507f5142 100644 --- a/conda_smithy/validate_schema.py +++ b/conda_smithy/validate_schema.py @@ -1,6 +1,7 @@ import json from pathlib import Path -from typing import Tuple, List +from typing import List, Tuple + from jsonschema import Draft202012Validator, validators from jsonschema.exceptions import ValidationError @@ -43,7 +44,7 @@ def validate_json_schema( if not schema_file: schema_file = CONDA_FORGE_YAML_SCHEMA_FILE - with open(schema_file, "r") as fh: + with open(schema_file) as fh: _json_schema = json.loads(fh.read()) validator = _VALIDATOR_CLASS(_json_schema) diff --git a/conda_smithy/variant_algebra.py b/conda_smithy/variant_algebra.py index 3273f1371..7306460a9 100644 --- a/conda_smithy/variant_algebra.py +++ b/conda_smithy/variant_algebra.py @@ -13,16 +13,15 @@ """ -import yaml -import toolz -from conda_build.utils import ensure_list +from functools import partial +from typing import Any, Dict, List, Optional, Union + import conda_build.variants as variants +import toolz +import yaml from conda.exports import VersionOrder from conda_build.config import Config -from functools import partial - - -from typing import Any, Dict, List, Optional, Union +from conda_build.utils import ensure_list def parse_variant( @@ -47,7 +46,7 @@ def parse_variant( from conda_build.config import Config config = Config() - from conda_build.metadata import select_lines, ns_cfg + from conda_build.metadata import ns_cfg, select_lines contents = select_lines( variant_file_content, ns_cfg(config), variants_in_place=False diff --git a/tests/conftest.py b/tests/conftest.py index ceaa9560c..4c3677ecc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,29 +1,28 @@ import collections import os -from pathlib import Path -from textwrap import dedent import typing +from pathlib import Path import pytest import yaml - +from conda_build.utils import copy_into from jinja2 import FileSystemLoader from jinja2.sandbox import SandboxedEnvironment -from conda_build.utils import copy_into from conda_smithy.configure_feedstock import ( - conda_forge_content, _load_forge_config, + conda_forge_content, ) - RecipeConfigPair = collections.namedtuple( "RecipeConfigPair", ("recipe", "config") ) -ConfigYAML = typing.NamedTuple( - "ConfigYAML", [("workdir", Path), ("recipe_name", str), ("type", str)] -) + +class ConfigYAML(typing.NamedTuple): + workdir: Path + recipe_name: str + type: str @pytest.fixture(scope="function") diff --git a/tests/test_anaconda_token_rotation.py b/tests/test_anaconda_token_rotation.py index b860f6ecc..69098b066 100644 --- a/tests/test_anaconda_token_rotation.py +++ b/tests/test_anaconda_token_rotation.py @@ -3,7 +3,6 @@ import pytest from conda_smithy.anaconda_token_rotation import rotate_anaconda_token - from conda_smithy.ci_register import drone_default_endpoint diff --git a/tests/test_ci_skeleton.py b/tests/test_ci_skeleton.py index de14bc463..9cc957223 100644 --- a/tests/test_ci_skeleton.py +++ b/tests/test_ci_skeleton.py @@ -1,8 +1,5 @@ -import pytest - from conda_smithy.ci_skeleton import generate - CONDA_FORGE_YML = """clone_depth: 0 recipe_dir: myrecipe skip_render: diff --git a/tests/test_cli.py b/tests/test_cli.py index a4d09d3e0..f8ec7c0ee 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,12 +1,12 @@ import argparse 
import collections import os +import shutil import subprocess from textwrap import dedent -import yaml import pytest -import shutil +import yaml from conda_smithy import cli @@ -80,7 +80,7 @@ def test_init_with_custom_config(py_recipe): destination = os.path.join(recipe, "py-test-feedstock") assert os.path.isdir(destination) data = yaml.safe_load( - open(os.path.join(destination, "conda-forge.yml"), "r").read() + open(os.path.join(destination, "conda-forge.yml")).read() ) assert data.get("bot") != None assert data["bot"]["automerge"] == True @@ -173,7 +173,7 @@ def test_render_readme_with_multiple_outputs(testing_workdir, dirname): regen_obj(args) readme_path = os.path.join(feedstock_dir, "README.md") assert os.path.exists(readme_path) - with open(readme_path, "r") as readme_file: + with open(readme_path) as readme_file: readme = readme_file.read() if dirname == "multiple_outputs": # case 1: implicit subpackage, no individual subpackage about @@ -367,6 +367,6 @@ def test_render_variant_mismatches(testing_workdir): if _cfg == "README": continue cfg = os.path.join(matrix_dir, _cfg) - with open(cfg, "r") as f: + with open(cfg) as f: data = yaml.safe_load(f) assert data["a"] == data["b"] diff --git a/tests/test_condaforge_config_schema.py b/tests/test_condaforge_config_schema.py index c987b6979..d865a5fe4 100644 --- a/tests/test_condaforge_config_schema.py +++ b/tests/test_condaforge_config_schema.py @@ -1,8 +1,7 @@ import pytest from pydantic import ValidationError -import yaml -from conda_smithy.schema import ConfigModel +from conda_smithy.schema import ConfigModel # Sample config files SAMPLE_CONFIGS = [ diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py index 80bdf72b0..a2c125dea 100644 --- a/tests/test_configure_feedstock.py +++ b/tests/test_configure_feedstock.py @@ -8,8 +8,8 @@ import pytest import yaml - from conftest import ConfigYAML + from conda_smithy import configure_feedstock diff --git a/tests/test_feedstock_io.py b/tests/test_feedstock_io.py index 4665fbc1e..b576b1af1 100644 --- a/tests/test_feedstock_io.py +++ b/tests/test_feedstock_io.py @@ -1,11 +1,10 @@ import functools -import io import operator as op import os import random +import shutil import stat import string -import shutil import tempfile import unittest @@ -17,7 +16,7 @@ def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") - with io.open(keep_filename, "w", encoding="utf-8", newline="\n") as fh: + with open(keep_filename, "w", encoding="utf-8", newline="\n") as fh: fh.write("") @@ -54,7 +53,7 @@ def setUp(self): self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) - with io.open( + with open( os.path.abspath(".keep"), "w", encoding="utf-8", newline="\n" ) as fh: fh.write("") @@ -87,9 +86,7 @@ def test_set_exe_file(self): for tmp_dir, repo, pathfunc in parameterize(): filename = "test.txt" filename = os.path.join(tmp_dir, filename) - with io.open( - filename, "w", encoding="utf-8", newline="\n" - ) as fh: + with open(filename, "w", encoding="utf-8", newline="\n") as fh: fh.write("") if repo is not None: repo.index.add([filename]) @@ -117,7 +114,7 @@ def test_write_file(self): repo.index.add([filename]) read_text = "" - with io.open(filename, "r", encoding="utf-8") as fh: + with open(filename, encoding="utf-8") as fh: read_text = fh.read() self.assertEqual(write_text, read_text) @@ -136,7 +133,7 @@ def test_touch_file(self): fio.touch_file(pathfunc(filename)) read_text = "" - with io.open(filename, "r", encoding="utf-8") as fh: + with open(filename, 
encoding="utf-8") as fh: read_text = fh.read() self.assertEqual("", read_text) @@ -156,9 +153,7 @@ def test_remove_file(self): filename = os.path.join(tmp_dir, filename) - with io.open( - filename, "w", encoding="utf-8", newline="\n" - ) as fh: + with open(filename, "w", encoding="utf-8", newline="\n") as fh: fh.write("") if repo is not None: repo.index.add([filename]) @@ -192,7 +187,7 @@ def test_copy_file(self): filename2 = os.path.join(tmp_dir, filename2) write_text = "text" - with io.open(filename1, "w", encoding="utf-8", newline="\n") as fh: + with open(filename1, "w", encoding="utf-8", newline="\n") as fh: fh.write(write_text) self.assertTrue(os.path.exists(filename1)) @@ -212,7 +207,7 @@ def test_copy_file(self): ) read_text = "" - with io.open(filename2, "r", encoding="utf-8") as fh: + with open(filename2, encoding="utf-8") as fh: read_text = fh.read() self.assertEqual(write_text, read_text) diff --git a/tests/test_feedstock_tokens.py b/tests/test_feedstock_tokens.py index 645cacf47..ee3c933b2 100644 --- a/tests/test_feedstock_tokens.py +++ b/tests/test_feedstock_tokens.py @@ -1,24 +1,23 @@ -import os import json -from unittest import mock +import os import time +from unittest import mock import pytest import scrypt +from conda_smithy.ci_register import drone_default_endpoint from conda_smithy.feedstock_tokens import ( + FeedstockTokenError, + feedstock_token_exists, + feedstock_token_local_path, generate_and_write_feedstock_token, + is_valid_feedstock_token, read_feedstock_token, - feedstock_token_exists, register_feedstock_token, register_feedstock_token_with_providers, - is_valid_feedstock_token, - FeedstockTokenError, - feedstock_token_local_path, ) -from conda_smithy.ci_register import drone_default_endpoint - @pytest.mark.parametrize( "provider,ci,retval_ci", @@ -95,7 +94,7 @@ def test_feedstock_tokens_roundtrip( with open(token_json_pth, "w") as fp: fp.write(json.dumps(token_data)) - with open(pth, "r") as fp: + with open(pth) as fp: feedstock_token = fp.read().strip() retval = is_valid_feedstock_token( @@ -449,7 +448,7 @@ def test_register_feedstock_token_works( if ci is not None: data["provider"] = ci - with open(token_json_pth, "r") as fp: + with open(token_json_pth) as fp: assert json.load(fp) == {"tokens": [data]} @@ -577,7 +576,7 @@ def test_register_feedstock_token_append( if ci is not None: data["provider"] = ci - with open(token_json_pth, "r") as fp: + with open(token_json_pth) as fp: assert json.load(fp) == {"tokens": [1, data]} diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index 6d9489378..4d74fdb7b 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -1,8 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- -from collections import OrderedDict -from contextlib import contextmanager -import io import os import shutil import subprocess @@ -10,6 +6,8 @@ import textwrap import unittest import warnings +from collections import OrderedDict +from contextlib import contextmanager import github import pytest @@ -38,7 +36,7 @@ def test_stdlib_lint(comp_lang): expected_message = "This recipe is using a compiler" with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( f""" package: @@ -58,7 +56,7 @@ def test_sysroot_lint(): expected_message = "You're setting a requirement on sysroot" with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with 
open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ package: @@ -78,7 +76,7 @@ def test_osx_lint(where): expected_message = "You're setting a constraint on the `__osx` virtual" with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( f""" package: @@ -98,7 +96,7 @@ def test_stdlib_lints_multi_output(): expected_message = "You're setting a requirement on sysroot" with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ package: @@ -147,7 +145,7 @@ def test_osx_noarch_hint(where): avoid_message = "You're setting a constraint on the `__osx` virtual" with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( f""" package: @@ -256,7 +254,7 @@ def test_cbc_osx_lints( # run the linter lints, _ = linter.main(rdir, return_hints=True) # show CBC/hints for debugging - with open(os.path.join(rdir, "conda_build_config.yaml"), "r") as fh: + with open(os.path.join(rdir, "conda_build_config.yaml")) as fh: print("".join(fh.readlines())) print(lints) # validate against expectations @@ -493,7 +491,7 @@ def test_test_section_with_recipe(self): lints, hints = linter.lintify_meta_yaml({}, recipe_dir) self.assertIn(expected_message, lints) - with io.open(os.path.join(recipe_dir, "run_test.py"), "w") as fh: + with open(os.path.join(recipe_dir, "run_test.py"), "w") as fh: fh.write("# foo") lints, hints = linter.lintify_meta_yaml({}, recipe_dir) self.assertNotIn(expected_message, lints) @@ -506,7 +504,7 @@ def test_jinja2_vars(self): ) with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ package: @@ -531,33 +529,28 @@ def test_selectors(self): expected_message = ( "Selectors are suggested to take a " "``#[]`` form." - " See lines {}".format([3]) + f" See lines {[3]}" ) with tmp_directory() as recipe_dir: def assert_selector(selector, is_good=True): - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( - """ + f""" package: name: foo_py2 # [py2k] - {} - """.format( - selector - ) + {selector} + """ ) lints, hints = linter.lintify_meta_yaml({}, recipe_dir) if is_good: message = ( "Found lints when there shouldn't have been a " - "lint for '{}'.".format(selector) + f"lint for '{selector}'." ) else: - message = ( - "Expecting lints for '{}', but didn't get any." - "".format(selector) - ) + message = f"Expecting lints for '{selector}', but didn't get any." self.assertEqual( not is_good, any(lint.startswith(expected_message) for lint in lints), @@ -578,18 +571,16 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): expected_start = "Old-style Python selectors (py27, py34, py35, py36) are deprecated" else: expected_start = "Old-style Python selectors (py27, py35, etc) are only available" - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write(meta_string) lints, hints = linter.main(recipe_dir, return_hints=True) if is_good: message = ( "Found lints or hints when there shouldn't have " - "been for '{}'." 
- ).format(meta_string) + f"been for '{meta_string}'." + ) else: - message = ( - "Expected lints or hints for '{}', but didn't get any." - ).format(meta_string) + message = f"Expected lints or hints for '{meta_string}', but didn't get any." problems = lints if kind == "lint" else hints self.assertEqual( not is_good, @@ -688,18 +679,19 @@ def test_noarch_selectors(self): with tmp_directory() as recipe_dir: def assert_noarch_selector(meta_string, is_good=False): - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write(meta_string) lints = linter.main(recipe_dir) if is_good: message = ( "Found lints when there shouldn't have " - "been a lint for '{}'." - ).format(meta_string) + f"been a lint for '{meta_string}'." + ) else: message = ( - "Expected lints for '{}', but didn't " "get any." - ).format(meta_string) + f"Expected lints for '{meta_string}', but didn't " + "get any." + ) self.assertEqual( not is_good, any(lint.startswith(expected_start) for lint in lints), @@ -836,18 +828,19 @@ def test_suggest_noarch(self): with tmp_directory() as recipe_dir: def assert_noarch_hint(meta_string, is_good=False): - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write(meta_string) lints, hints = linter.main(recipe_dir, return_hints=True) if is_good: message = ( "Found hints when there shouldn't have " - "been a lint for '{}'." - ).format(meta_string) + f"been a lint for '{meta_string}'." + ) else: message = ( - "Expected hints for '{}', but didn't " "get any." - ).format(meta_string) + f"Expected hints for '{meta_string}', but didn't " + "get any." + ) self.assertEqual( not is_good, any(lint.startswith(expected_start) for lint in hints), @@ -907,7 +900,7 @@ def test_jinja_os_environ(self): # Test that we can use os.environ in a recipe. We don't care about # the results here. with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ {% set version = os.environ.get('WIBBLE') %} @@ -922,14 +915,14 @@ def test_jinja_load_file_regex(self): # Test that we can use load_file_regex in a recipe. We don't care about # the results here. with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "sha256"), "w") as fh: + with open(os.path.join(recipe_dir, "sha256"), "w") as fh: fh.write( """ d0e46ea5fca7d4c077245fe0b4195a828d9d4d69be8a0bd46233b2c12abd2098 iwftc_osx.zip 8ce4dc535b21484f65027be56263d8b0d9f58e57532614e1a8f6881f3b8fe260 iwftc_win.zip """ ) - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ {% set sha256_osx = load_file_regex(load_file="sha256", @@ -948,7 +941,7 @@ def test_jinja_load_file_data(self): # renders conda-build functions to just function stubs to pass the linting. # TODO: add *args and **kwargs for functions used to parse the file. with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ {% set data = load_file_data("IDONTNEED", from_recipe_dir=True, recipe_dir=".") %} @@ -965,7 +958,7 @@ def test_jinja_load_setup_py_data(self): # renders conda-build functions to just function stubs to pass the linting. # TODO: add *args and **kwargs for functions used to parse the file. 
with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ {% set data = load_setup_py_data("IDONTNEED", from_recipe_dir=True, recipe_dir=".") %} @@ -982,7 +975,7 @@ def test_jinja_load_str_data(self): # renders conda-build functions to just function stubs to pass the linting. # TODO: add *args and **kwargs for functions used to parse the data. with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ {% set data = load_str_data("IDONTNEED", "json") %} @@ -996,7 +989,7 @@ def test_jinja_load_str_data(self): def test_jinja_os_sep(self): # Test that we can use os.sep in a recipe. with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ package: @@ -1012,7 +1005,7 @@ def test_target_platform(self): # Test that we can use target_platform in a recipe. We don't care about # the results here. with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ package: @@ -1252,17 +1245,17 @@ def test_end_empty_line(self): bad_contents + [valid_content], [0, 0, 0, 2, 2, 2, 3, 3, 3, 1] ): with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as f: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as f: f.write(content) lints, hints = linter.lintify_meta_yaml( {}, recipe_dir=recipe_dir ) if lines > 1: expected_message = ( - "There are {} too many lines. " + f"There are {lines - 1} too many lines. " "There should be one empty line " "at the end of the " - "file.".format(lines - 1) + "file." 
) else: expected_message = ( @@ -1331,7 +1324,7 @@ def test_maintainer_exists(self): try: cf.get_repo("python1-feedstock") feedstock_exists = True - except github.UnknownObjectException as e: + except github.UnknownObjectException: feedstock_exists = False if feedstock_exists: @@ -1354,12 +1347,10 @@ def test_maintainer_exists(self): bio = gh.get_user("bioconda").get_repo("bioconda-recipes") r = "samtools" try: - bio.get_dir_contents("recipe/{}".format(r)) - except github.UnknownObjectException as e: + bio.get_dir_contents(f"recipe/{r}") + except github.UnknownObjectException: warnings.warn( - "There's no bioconda recipe named {}, but tests assume that there is".format( - r - ) + f"There's no bioconda recipe named {r}, but tests assume that there is" ) else: # Check that feedstock exists if staged_recipes @@ -1390,17 +1381,15 @@ def test_maintainer_exists(self): r = "this-will-never-exist" try: - bio.get_dir_contents("recipes/{}".format(r)) - except github.UnknownObjectException as e: + bio.get_dir_contents(f"recipes/{r}") + except github.UnknownObjectException: lints, _ = linter.lintify_meta_yaml( {"package": {"name": r}}, recipe_dir=r, conda_forge=True ) self.assertNotIn(expected_message, lints) else: warnings.warn( - "There's a bioconda recipe named {}, but tests assume that there isn't".format( - r - ) + f"There's a bioconda recipe named {r}, but tests assume that there isn't" ) expected_message = ( @@ -1554,8 +1543,8 @@ def test_string_source(self): lints, hints = linter.lintify_meta_yaml({"source": url}) msg = ( 'The "source" section was expected to be a dictionary or a ' - "list, but got a {}.{}." - ).format(type(url).__module__, type(url).__name__) + f"list, but got a {type(url).__module__}.{type(url).__name__}." + ) self.assertIn(msg, lints) def test_single_space_pins(self): @@ -1745,7 +1734,7 @@ def test_go_license_bundling(self): class TestCLI_recipe_lint(unittest.TestCase): def test_cli_fail(self): with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( textwrap.dedent( """ @@ -1765,7 +1754,7 @@ def test_cli_fail(self): def test_cli_success(self): with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( textwrap.dedent( """ @@ -1796,7 +1785,7 @@ def test_cli_success(self): def test_cli_environ(self): with tmp_directory() as recipe_dir: - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( textwrap.dedent( """ @@ -1832,8 +1821,8 @@ def test_unicode(self): Tests that unicode does not confuse the linter. """ with tmp_directory() as recipe_dir: - with io.open( - os.path.join(recipe_dir, "meta.yaml"), "wt", encoding="utf-8" + with open( + os.path.join(recipe_dir, "meta.yaml"), "w", encoding="utf-8" ) as fh: fh.write( """ @@ -1854,35 +1843,30 @@ def test_unicode(self): def test_jinja_variable_def(self): expected_message = ( "Jinja2 variable definitions are suggested to " - "take a ``{{%set" + "take a ``{%set" "=" - "%}}`` form. See lines " - "{}".format([2]) + "%}`` form. 
See lines " + f"{[2]}" ) with tmp_directory() as recipe_dir: def assert_jinja(jinja_var, is_good=True): - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( - """ + f""" {{% set name = "conda-smithy" %}} - {} - """.format( - jinja_var - ) + {jinja_var} + """ ) lints, hints = linter.lintify_meta_yaml({}, recipe_dir) if is_good: message = ( "Found lints when there shouldn't have been a " - "lint for '{}'.".format(jinja_var) + f"lint for '{jinja_var}'." ) else: - message = ( - "Expecting lints for '{}', but didn't get any." - "".format(jinja_var) - ) + message = f"Expecting lints for '{jinja_var}', but didn't get any." self.assertEqual( not is_good, any(lint.startswith(expected_message) for lint in lints), @@ -1903,11 +1887,11 @@ def test_lint_no_builds(): with tmp_directory() as feedstock_dir: ci_support_dir = os.path.join(feedstock_dir, ".ci_support") os.makedirs(ci_support_dir, exist_ok=True) - with io.open(os.path.join(ci_support_dir, "README"), "w") as fh: + with open(os.path.join(ci_support_dir, "README"), "w") as fh: fh.write("blah") recipe_dir = os.path.join(feedstock_dir, "recipe") os.makedirs(recipe_dir, exist_ok=True) - with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: + with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: fh.write( """ package: @@ -1918,7 +1902,7 @@ def test_lint_no_builds(): lints = linter.main(recipe_dir, conda_forge=True) assert any(lint.startswith(expected_message) for lint in lints) - with io.open(os.path.join(ci_support_dir, "blah.yaml"), "w") as fh: + with open(os.path.join(ci_support_dir, "blah.yaml"), "w") as fh: fh.write("blah") lints = linter.main(recipe_dir, conda_forge=True) diff --git a/tests/test_variant_algebra.py b/tests/test_variant_algebra.py index 45f43336c..352ae5fa2 100644 --- a/tests/test_variant_algebra.py +++ b/tests/test_variant_algebra.py @@ -1,8 +1,8 @@ -import pytest from textwrap import dedent -from conda_smithy.variant_algebra import parse_variant, variant_add +import pytest +from conda_smithy.variant_algebra import parse_variant, variant_add tv1 = parse_variant( """\ From 15060eea2bc943c0b10d3658ff178a2f5fd04d34 Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Thu, 25 Jul 2024 18:05:20 +0200 Subject: [PATCH 3/6] apply unsafe ruff fixes manually reviewed --- conda_smithy/__init__.py | 2 +- conda_smithy/anaconda_token_rotation.py | 62 +++++++++---------- conda_smithy/azure_ci_utils.py | 5 +- conda_smithy/ci_register.py | 14 ++--- conda_smithy/ci_skeleton.py | 2 +- conda_smithy/cirun_utils.py | 2 +- conda_smithy/cli.py | 57 ++++++++---------- conda_smithy/configure_feedstock.py | 15 ++--- conda_smithy/feedstock_io.py | 4 +- conda_smithy/feedstock_tokens.py | 80 ++++++++++++------------- conda_smithy/feedstocks.py | 4 +- conda_smithy/linter/lints.py | 6 +- tests/test_anaconda_token_rotation.py | 2 +- tests/test_cli.py | 6 +- tests/test_condaforge_config_schema.py | 2 +- tests/test_configure_feedstock.py | 5 +- tests/test_feedstock_io.py | 5 +- tests/test_feedstock_tokens.py | 26 ++++---- tests/test_lint_recipe.py | 15 +++-- 19 files changed, 152 insertions(+), 162 deletions(-) diff --git a/conda_smithy/__init__.py b/conda_smithy/__init__.py index 654639fab..a6a90a864 100644 --- a/conda_smithy/__init__.py +++ b/conda_smithy/__init__.py @@ -1,4 +1,4 @@ try: - from ._version import __version__ + from conda_smithy._version import __version__ except ImportError: __version__ = "0.0.0" diff --git 
a/conda_smithy/anaconda_token_rotation.py b/conda_smithy/anaconda_token_rotation.py index 4991a6d86..749cf326b 100644 --- a/conda_smithy/anaconda_token_rotation.py +++ b/conda_smithy/anaconda_token_rotation.py @@ -16,13 +16,13 @@ import requests from github import Github -from .utils import update_conda_forge_config +from conda_smithy.utils import update_conda_forge_config def _get_anaconda_token(): """use this helper to enable easier patching for tests""" try: - from .ci_register import anaconda_token + from conda_smithy.ci_register import anaconda_token return anaconda_token except ImportError: @@ -58,7 +58,7 @@ def rotate_anaconda_token( # note that these imports cover all providers from .ci_register import travis_endpoint # noqa from .azure_ci_utils import default_config # noqa - from .github import gh_token + from conda_smithy.github import gh_token anaconda_token = _get_anaconda_token() @@ -89,9 +89,9 @@ def rotate_anaconda_token( raise e else: err_msg = ( - "Failed to rotate token for %s/%s" + f"Failed to rotate token for {user}/{project}" " on circle!" - ) % (user, project) + ) failed = True raise RuntimeError(err_msg) @@ -110,9 +110,9 @@ def rotate_anaconda_token( raise e else: err_msg = ( - "Failed to rotate token for %s/%s" - " on drone endpoint %s!" - ) % (user, project, drone_endpoint) + f"Failed to rotate token for {user}/{project}" + f" on drone endpoint {drone_endpoint}!" + ) failed = True raise RuntimeError(err_msg) @@ -130,9 +130,9 @@ def rotate_anaconda_token( raise e else: err_msg = ( - "Failed to rotate token for %s/%s" + f"Failed to rotate token for {user}/{project}" " on travis!" - ) % (user, project) + ) failed = True raise RuntimeError(err_msg) @@ -146,8 +146,9 @@ def rotate_anaconda_token( raise e else: err_msg = ( - "Failed to rotate token for %s/%s" " on azure!" - ) % (user, project) + f"Failed to rotate token for {user}/{project}" + " on azure!" + ) failed = True raise RuntimeError(err_msg) @@ -161,9 +162,9 @@ def rotate_anaconda_token( raise e else: err_msg = ( - "Failed to rotate token for %s/%s" + f"Failed to rotate token for {user}/{project}" " on appveyor!" - ) % (user, project) + ) failed = True raise RuntimeError(err_msg) @@ -177,9 +178,9 @@ def rotate_anaconda_token( raise e else: err_msg = ( - "Failed to rotate token for %s/%s" + f"Failed to rotate token for {user}/{project}" " on github actions!" - ) % (user, project) + ) failed = True raise RuntimeError(err_msg) @@ -193,17 +194,14 @@ def rotate_anaconda_token( raise RuntimeError(err_msg) else: raise RuntimeError( - ( - "Rotating the feedstock token in providers for %s/%s failed!" - " Try the command locally with DEBUG_ANACONDA_TOKENS" - " defined in the environment to investigate!" - ) - % (user, project) + f"Rotating the feedstock token in providers for {user}/{project} failed!" + " Try the command locally with DEBUG_ANACONDA_TOKENS" + " defined in the environment to investigate!" ) def rotate_token_in_circle(user, project, binstar_token, token_name): - from .ci_register import circle_token + from conda_smithy.ci_register import circle_token url_template = ( "https://circleci.com/api/v1.1/project/github/{user}/{project}/envvar{extra}?" 
@@ -229,7 +227,7 @@ def rotate_token_in_circle(user, project, binstar_token, token_name):
                 token=circle_token,
                 user=user,
                 project=project,
-                extra="/%s" % token_name,
+                extra=f"/{token_name}",
             )
         )
         if r.status_code != 200:
@@ -249,7 +247,7 @@ def rotate_token_in_circle(user, project, binstar_token, token_name):
 def rotate_token_in_drone(
     user, project, binstar_token, token_name, drone_endpoint
 ):
-    from .ci_register import drone_session
+    from conda_smithy.ci_register import drone_session
 
     session = drone_session(drone_endpoint)
 
@@ -283,7 +281,7 @@ def rotate_token_in_travis(
     user, project, feedstock_config_path, binstar_token, token_name
 ):
     """update the binstar token in travis."""
-    from .ci_register import (
+    from conda_smithy.ci_register import (
         travis_endpoint,
         travis_get_repo_info,
         travis_headers,
@@ -357,8 +355,11 @@ def rotate_token_in_travis(
 def rotate_token_in_azure(user, project, binstar_token, token_name):
     from vsts.build.v4_1.models import BuildDefinitionVariable
 
-    from .azure_ci_utils import build_client, get_default_build_definition
-    from .azure_ci_utils import default_config as config
+    from conda_smithy.azure_ci_utils import (
+        build_client,
+        get_default_build_definition,
+    )
+    from conda_smithy.azure_ci_utils import default_config as config
 
     bclient = build_client()
 
@@ -370,8 +371,7 @@ def rotate_token_in_azure(user, project, binstar_token, token_name):
         ed = existing_definitions[0]
     else:
         raise RuntimeError(
-            "Cannot add %s to a repo that is not already registerd on azure CI!"
-            % token_name
+            f"Cannot add {token_name} to a repo that is not already registered on azure CI!"
         )
 
     ed = bclient.get_definition(ed.id, project=config.project_name)
@@ -404,7 +404,7 @@ def rotate_token_in_azure(user, project, binstar_token, token_name):
 
 
 def rotate_token_in_appveyor(feedstock_config_path, binstar_token, token_name):
-    from .ci_register import appveyor_token
+    from conda_smithy.ci_register import appveyor_token
 
     headers = {"Authorization": f"Bearer {appveyor_token}"}
     url = "https://ci.appveyor.com/api/account/encrypt"
diff --git a/conda_smithy/azure_ci_utils.py b/conda_smithy/azure_ci_utils.py
index 469f0e4ec..6758c198f 100644
--- a/conda_smithy/azure_ci_utils.py
+++ b/conda_smithy/azure_ci_utils.py
@@ -18,7 +18,10 @@ from vsts.task_agent.v4_0.task_agent_client import TaskAgentClient
 from vsts.vss_connection import VssConnection
 
-from .azure_defaults import AZURE_DEFAULT_ORG, AZURE_DEFAULT_PROJECT_NAME
+from conda_smithy.azure_defaults import (
+    AZURE_DEFAULT_ORG,
+    AZURE_DEFAULT_PROJECT_NAME,
+)
 
 
 class AzureConfig:
diff --git a/conda_smithy/ci_register.py b/conda_smithy/ci_register.py
index 5ea6a081b..cda2fecda 100755
--- a/conda_smithy/ci_register.py
+++ b/conda_smithy/ci_register.py
@@ -5,8 +5,8 @@
 
 import requests
 
-from . 
import github -from .utils import update_conda_forge_config +from conda_smithy import github +from conda_smithy.utils import update_conda_forge_config # https://circleci.com/docs/api#add-environment-variable @@ -69,16 +69,14 @@ class LiveServerSession(requests.Session): """Utility class to avoid typing out urls all the time""" def __init__(self, prefix_url=None, *args, **kwargs): - super(LiveServerSession, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.prefix_url = prefix_url def request(self, method, url, *args, **kwargs): from urllib.parse import urljoin url = urljoin(self.prefix_url, url) - return super(LiveServerSession, self).request( - method, url, *args, **kwargs - ) + return super().request(method, url, *args, **kwargs) def travis_headers(): @@ -228,7 +226,7 @@ def add_project_to_circle(user, project): def add_project_to_azure(user, project): - from . import azure_ci_utils + from conda_smithy import azure_ci_utils if azure_ci_utils.repo_registered(user, project): print(f" * {user}/{project} already enabled on azure pipelines") @@ -313,7 +311,7 @@ def travis_wait_until_synced(ignore=False): content = response.json() print(".", end="") sys.stdout.flush() - if "is_syncing" in content and content["is_syncing"] == False: + if "is_syncing" in content and content["is_syncing"] is False: break time.sleep(6) else: diff --git a/conda_smithy/ci_skeleton.py b/conda_smithy/ci_skeleton.py index f90f33446..41f4f1af7 100644 --- a/conda_smithy/ci_skeleton.py +++ b/conda_smithy/ci_skeleton.py @@ -9,7 +9,7 @@ import os import sys -from .configure_feedstock import make_jinja_env +from conda_smithy.configure_feedstock import make_jinja_env def _render_template(template_file, env, forge_dir, config): diff --git a/conda_smithy/cirun_utils.py b/conda_smithy/cirun_utils.py index 4f9b030aa..27b6192c4 100644 --- a/conda_smithy/cirun_utils.py +++ b/conda_smithy/cirun_utils.py @@ -7,7 +7,7 @@ from cirun import Cirun -from .github import Github, gh_token +from conda_smithy.github import Github, gh_token @lru_cache diff --git a/conda_smithy/cli.py b/conda_smithy/cli.py index 73eaed5df..4131713c8 100644 --- a/conda_smithy/cli.py +++ b/conda_smithy/cli.py @@ -16,14 +16,14 @@ from ruamel.yaml import YAML import conda_smithy.cirun_utils -from conda_smithy.utils import get_feedstock_name_from_meta, merge_dict - -from . import __version__, configure_feedstock, feedstock_io -from . 
import lint_recipe as linter -from .utils import CONDA_BUILD, RATTLER_BUILD - -if sys.version_info[0] == 2: - raise Exception("Conda-smithy does not support python 2!") +from conda_smithy import __version__, configure_feedstock, feedstock_io +from conda_smithy import lint_recipe as linter +from conda_smithy.utils import ( + CONDA_BUILD, + RATTLER_BUILD, + get_feedstock_name_from_meta, + merge_dict, +) def default_feedstock_config_path(feedstock_directory): @@ -47,7 +47,7 @@ def generate_feedstock_content( import sys raise type(e)( - str(e) + " while copying file %s" % source_recipe_dir + str(e) + f" while copying file {source_recipe_dir}" ).with_traceback(sys.exc_info()[2]) forge_yml = default_feedstock_config_path(target_directory) @@ -100,7 +100,7 @@ class Init(Subcommand): def __init__(self, parser): # conda-smithy init /path/to/udunits-recipe ./ - super(Init, self).__init__( + super().__init__( parser, "Create a feedstock git repository, which can contain " "one conda recipes.", @@ -168,9 +168,7 @@ class RegisterGithub(Subcommand): def __init__(self, parser): # conda-smithy register-github ./ --organization=conda-forge - super(RegisterGithub, self).__init__( - parser, "Register a repo for a feedstock at github." - ) + super().__init__(parser, "Register a repo for a feedstock at github.") scp = self.subcommand_parser scp.add_argument( "--add-teams", @@ -210,7 +208,7 @@ def __init__(self, parser): ) def __call__(self, args): - from . import github + from conda_smithy import github github.create_github_repo(args) print( @@ -232,7 +230,7 @@ class RegisterCI(Subcommand): def __init__(self, parser): # conda-smithy register-ci ./ - super(RegisterCI, self).__init__( + super().__init__( parser, "Register a feedstock at the CI services which do the builds.", ) @@ -479,7 +477,7 @@ def __init__(self, parser): AZURE_DEFAULT_PROJECT_NAME, ) - super(AddAzureBuildId, self).__init__( + super().__init__( parser, dedent( "Update the azure configuration stored in the config file." @@ -530,7 +528,7 @@ def __call__(self, args): args.feedstock_directory ) - from .utils import update_conda_forge_config + from conda_smithy.utils import update_conda_forge_config with update_conda_forge_config(args.feedstock_config) as config: config.setdefault("azure", {}) @@ -545,7 +543,7 @@ class Regenerate(Subcommand): aliases = ["rerender"] def __init__(self, parser): - super(Regenerate, self).__init__( + super().__init__( parser, "Regenerate / update the CI support files of the " "feedstock.", ) @@ -617,7 +615,7 @@ class RecipeLint(Subcommand): aliases = ["lint"] def __init__(self, parser): - super(RecipeLint, self).__init__( + super().__init__( parser, "Lint a single conda recipe and its configuration.", ) @@ -696,7 +694,7 @@ class CISkeleton(Subcommand): subcommand = "ci-skeleton" def __init__(self, parser): - super(CISkeleton, self).__init__( + super().__init__( parser, "Generate skeleton for using CI outside of a feedstock" ) scp = self.subcommand_parser @@ -768,7 +766,7 @@ class GenerateFeedstockToken(Subcommand): ) def __init__(self, parser): - super(GenerateFeedstockToken, self).__init__( + super().__init__( parser, "Generate a feedstock token.", ) @@ -805,9 +803,8 @@ def __call__(self, args): if not args.unique_token_per_provider: generate_and_write_feedstock_token(owner, repo) print( - "Your feedstock token has been generated at %s\n" + f"Your feedstock token has been generated at {feedstock_token_local_path(owner, repo)}\n" "This token is stored in plaintext so be careful!" 
- % feedstock_token_local_path(owner, repo) ) else: for ci in self.ci_names: @@ -816,9 +813,8 @@ def __call__(self, args): owner, repo, provider=provider ) print( - "Your feedstock token has been generated at %s\n" - "This token is stored in plaintext so be careful!" - % ( + "Your feedstock token has been generated at {}\n" + "This token is stored in plaintext so be careful!".format( feedstock_token_local_path( owner, repo, provider=provider ) @@ -838,7 +834,7 @@ class RegisterFeedstockToken(Subcommand): def __init__(self, parser): # conda-smithy register-feedstock-token ./ - super(RegisterFeedstockToken, self).__init__( + super().__init__( parser, "Register the feedstock token w/ the CI services for builds and " "with the token registry. \n\n" @@ -920,10 +916,7 @@ def __call__(self, args): repo = os.path.basename(os.path.abspath(args.feedstock_directory)) if args.token_repo is None: - token_repo = ( - "https://${GITHUB_TOKEN}@github.com/%s/feedstock-tokens" - % owner - ) + token_repo = f"https://${{GITHUB_TOKEN}}@github.com/{owner}/feedstock-tokens" else: token_repo = args.token_repo @@ -986,7 +979,7 @@ class UpdateAnacondaToken(Subcommand): ) def __init__(self, parser): - super(UpdateAnacondaToken, self).__init__( + super().__init__( parser, "Update the anaconda/binstar token used for package uploads.\n\n" "All exceptions are swallowed and stdout/stderr from this function is" diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index 232aec088..2156974c3 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -49,6 +49,7 @@ from rattler_build_conda_compat.loader import parse_recipe_config_file from rattler_build_conda_compat.render import render as rattler_render +from conda_smithy import __version__ from conda_smithy.feedstock_io import ( copy_file, remove_file, @@ -57,6 +58,7 @@ write_file, ) from conda_smithy.utils import ( + RATTLER_BUILD, HashableDict, get_feedstock_about_from_meta, get_feedstock_name_from_meta, @@ -66,9 +68,6 @@ validate_json_schema, ) -from . import __version__ -from .utils import RATTLER_BUILD - conda_forge_content = os.path.abspath(os.path.dirname(__file__)) logger = logging.getLogger(__name__) @@ -831,7 +830,7 @@ def migrate_combined_spec(combined_spec, forge_dir, config, forge_config): migrations = set_migration_fns(forge_dir, forge_config) migrations = forge_config["migration_fns"] - from .variant_algebra import parse_variant, variant_add + from conda_smithy.variant_algebra import parse_variant, variant_add migration_variants = [ (fn, parse_variant(open(fn).read(), config=config)) @@ -1050,8 +1049,7 @@ def _render_ci_provider( raise RuntimeError( "Travis CI can only be used for 'linux_aarch64', " "'linux_ppc64le' or 'linux_s390x' native builds" - ", not '%s_%s', to avoid using open-source build minutes!" - % (platform, arch) + f", not '{platform}_{arch}', to avoid using open-source build minutes!" ) # AFAIK there is no way to get conda build to ignore the CBC yaml @@ -1508,8 +1506,7 @@ def _render_template_exe_files( import difflib logger.debug( - "diff:\n%s" - % ( + "diff:\n{}".format( "\n".join( difflib.unified_diff( old_file_contents.splitlines(), @@ -2475,7 +2472,7 @@ def get_cfp_file_path(temporary_directory): else: raise RuntimeError( "Could not determine proper conda package extension for " - "pinning package '%s'!" % pkg.url + f"pinning package '{pkg.url}'!" 
) dest = os.path.join( temporary_directory, f"conda-forge-pinning-{ pkg.version }{ext}" diff --git a/conda_smithy/feedstock_io.py b/conda_smithy/feedstock_io.py index e3fbecf68..cdb2c4fb9 100644 --- a/conda_smithy/feedstock_io.py +++ b/conda_smithy/feedstock_io.py @@ -33,9 +33,7 @@ def set_exe_file(filename, set_exe=True): repo = get_repo(filename) if repo: mode = "+x" if set_exe else "-x" - repo.git.execute( - ["git", "update-index", "--chmod=%s" % mode, filename] - ) + repo.git.execute(["git", "update-index", f"--chmod={mode}", filename]) mode = os.stat(filename).st_mode if set_exe: diff --git a/conda_smithy/feedstock_tokens.py b/conda_smithy/feedstock_tokens.py index 201a4a5d3..1bb189cef 100644 --- a/conda_smithy/feedstock_tokens.py +++ b/conda_smithy/feedstock_tokens.py @@ -62,13 +62,13 @@ def feedstock_token_local_path(user, project, provider=None): pth = os.path.join( "~", ".conda-smithy", - "%s_%s.token" % (user, project), + f"{user}_{project}.token", ) else: pth = os.path.join( "~", ".conda-smithy", - "%s_%s_%s.token" % (user, project, provider), + f"{user}_{project}_{provider}.token", ) return os.path.expanduser(pth) @@ -89,13 +89,10 @@ def generate_and_write_feedstock_token(user, project, provider=None): pth = feedstock_token_local_path(user, project, provider=provider) if os.path.exists(pth): failed = True - err_msg = ( - "Token for %s/%s on provider%s is already written locally!" - % ( - user, - project, - "" if provider is None else " " + provider, - ) + err_msg = "Token for {}/{} on provider{} is already written locally!".format( + user, + project, + "" if provider is None else " " + provider, ) raise FeedstockTokenError(err_msg) @@ -114,11 +111,12 @@ def generate_and_write_feedstock_token(user, project, provider=None): else: raise FeedstockTokenError( ( - "Generating the feedstock token for %s/%s on provider%s failed!" + "Generating the feedstock token for {}/{} on provider{} failed!" " Try the command locally with DEBUG_FEEDSTOCK_TOKENS" " defined in the environment to investigate!" + ).format( + user, project, "" if provider is None else " " + provider ) - % (user, project, "" if provider is None else " " + provider) ) return failed @@ -141,12 +139,12 @@ def read_feedstock_token(user, project, provider=None): ) if not os.path.exists(user_token_pth): - err_msg = "No token found in '%s'" % user_token_pth + err_msg = f"No token found in '{user_token_pth}'" else: with open(user_token_pth) as fp: feedstock_token = fp.read().strip() if not feedstock_token: - err_msg = "Empty token found in '%s'" % user_token_pth + err_msg = f"Empty token found in '{user_token_pth}'" feedstock_token = None return feedstock_token, err_msg @@ -161,7 +159,7 @@ def feedstock_token_exists(user, project, token_repo, provider=None): If you need to debug this function, define `DEBUG_FEEDSTOCK_TOKENS` in your environment before calling this function. """ - from .github import gh_token + from conda_smithy.github import gh_token github_token = gh_token() @@ -210,11 +208,12 @@ def feedstock_token_exists(user, project, token_repo, provider=None): else: raise FeedstockTokenError( ( - "Testing for the feedstock token for %s/%s on provider%s failed!" + "Testing for the feedstock token for {}/{} on provider{} failed!" " Try the command locally with DEBUG_FEEDSTOCK_TOKENS" " defined in the environment to investigate!" 
+ ).format( + user, project, "" if provider is None else " " + provider ) - % (user, project, "" if provider is None else " " + provider) ) return exists @@ -232,7 +231,7 @@ def is_valid_feedstock_token( If you need to debug this function, define `DEBUG_FEEDSTOCK_TOKENS` in your environment before calling this function. """ - from .github import gh_token + from conda_smithy.github import gh_token github_token = gh_token() @@ -293,11 +292,12 @@ def is_valid_feedstock_token( else: raise FeedstockTokenError( ( - "Validating the feedstock token for %s/%s on provider%s failed!" + "Validating the feedstock token for {}/{} on provider{} failed!" " Try the command locally with DEBUG_FEEDSTOCK_TOKENS" " defined in the environment to investigate!" + ).format( + user, project, "" if provider is None else " " + provider ) - % (user, project, "" if provider is None else " " + provider) ) return valid @@ -317,7 +317,7 @@ def register_feedstock_token(user, project, token_repo, provider=None): If you need to debug this function, define `DEBUG_FEEDSTOCK_TOKENS` in your environment before calling this function. """ - from .github import gh_token + from conda_smithy.github import gh_token github_token = gh_token() @@ -375,8 +375,7 @@ def register_feedstock_token(user, project, token_repo, provider=None): repo.index.add(token_file) repo.index.commit( "[ci skip] [skip ci] [cf admin skip] ***NO_CI*** " - "added token for %s/%s on provider%s" - % ( + "added token for {}/{} on provider{}".format( user, project, "" if provider is None else " " + provider, @@ -395,11 +394,12 @@ def register_feedstock_token(user, project, token_repo, provider=None): else: raise FeedstockTokenError( ( - "Registering the feedstock token for %s/%s on provider%s failed!" + "Registering the feedstock token for {}/{} on provider{} failed!" " Try the command locally with DEBUG_FEEDSTOCK_TOKENS" " defined in the environment to investigate!" + ).format( + user, project, "" if provider is None else " " + provider ) - % (user, project, "" if provider is None else " " + provider) ) return failed @@ -434,7 +434,7 @@ def register_feedstock_token_with_providers( # to generate the proper errors for missing tokens from .ci_register import travis_endpoint # noqa from .azure_ci_utils import default_config # noqa - from .ci_register import drone_default_endpoint + from conda_smithy.ci_register import drone_default_endpoint def _register_token(user, project, clobber, provider, func, args=None): args = args or tuple() @@ -458,9 +458,9 @@ def _register_token(user, project, clobber, provider, func, args=None): raise e else: err_msg = ( - "Failed to register feedstock token for %s/%s" - " on %s for args %r!" - ) % (user, project, provider, args) + f"Failed to register feedstock token for {user}/{project}" + f" on {provider} for args {args!r}!" + ) raise FeedstockTokenError(err_msg) # capture stdout, stderr and suppress all exceptions so we don't @@ -525,17 +525,14 @@ def _register_token(user, project, clobber, provider, func, args=None): if failed: raise FeedstockTokenError( - ( - "Registering the feedstock token with providers for %s/%s failed!" - " Try the command locally with DEBUG_FEEDSTOCK_TOKENS" - " defined in the environment to investigate!" - ) - % (user, project) + f"Registering the feedstock token with providers for {user}/{project} failed!" + " Try the command locally with DEBUG_FEEDSTOCK_TOKENS" + " defined in the environment to investigate!" 
) def add_feedstock_token_to_circle(user, project, feedstock_token, clobber): - from .ci_register import circle_token + from conda_smithy.ci_register import circle_token url_template = ( "https://circleci.com/api/v1.1/project/github/{user}/{project}/envvar{extra}?" @@ -582,7 +579,7 @@ def add_feedstock_token_to_circle(user, project, feedstock_token, clobber): def add_feedstock_token_to_drone( user, project, feedstock_token, clobber, drone_endpoint ): - from .ci_register import drone_session + from conda_smithy.ci_register import drone_session session = drone_session(drone_endpoint) @@ -614,7 +611,7 @@ def add_feedstock_token_to_drone( def add_feedstock_token_to_travis(user, project, feedstock_token, clobber): """Add the FEEDSTOCK_TOKEN to travis.""" - from .ci_register import ( + from conda_smithy.ci_register import ( travis_endpoint, travis_get_repo_info, travis_headers, @@ -665,8 +662,11 @@ def add_feedstock_token_to_travis(user, project, feedstock_token, clobber): def add_feedstock_token_to_azure(user, project, feedstock_token, clobber): from vsts.build.v4_1.models import BuildDefinitionVariable - from .azure_ci_utils import build_client, get_default_build_definition - from .azure_ci_utils import default_config as config + from conda_smithy.azure_ci_utils import ( + build_client, + get_default_build_definition, + ) + from conda_smithy.azure_ci_utils import default_config as config bclient = build_client() @@ -721,7 +721,7 @@ def add_feedstock_token_to_github_actions( ): from github import Github - from .github import gh_token + from conda_smithy.github import gh_token gh = Github(gh_token()) repo = gh.get_repo(f"{user}/{project}") diff --git a/conda_smithy/feedstocks.py b/conda_smithy/feedstocks.py index 2f095923a..89d8ba9ba 100644 --- a/conda_smithy/feedstocks.py +++ b/conda_smithy/feedstocks.py @@ -7,8 +7,8 @@ from git import GitCommandError, Repo from github import Github -from . import github as smithy_github -from .utils import get_yaml, render_meta_yaml +from conda_smithy import github as smithy_github +from conda_smithy.utils import get_yaml, render_meta_yaml def feedstock_repos(gh_organization="conda-forge"): diff --git a/conda_smithy/linter/lints.py b/conda_smithy/linter/lints.py index 05c36d751..3f1618f5a 100644 --- a/conda_smithy/linter/lints.py +++ b/conda_smithy/linter/lints.py @@ -38,7 +38,7 @@ def lint_section_order( if major_sections != section_order_sorted: section_order_sorted_str = map( - lambda s: "'%s'" % s, section_order_sorted + lambda s: f"'{s}'", section_order_sorted ) section_order_sorted_str = ", ".join(section_order_sorted_str) section_order_sorted_str = "[" + section_order_sorted_str + "]" @@ -460,14 +460,14 @@ def lint_jinja_var_references(meta_fname, hints): for m in JINJA_VAR_PAT.finditer(line): if m.group(1) is not None: var = m.group(1) - if var != " %s " % var.strip(): + if var != f" {var.strip()} ": bad_vars.append(m.group(1).strip()) bad_lines.append(i + 1) if bad_vars: hints.append( "Jinja2 variable references are suggested to " "take a ``{{}}``" - " form. See lines %s." % (bad_lines,) + f" form. See lines {bad_lines}." 
) diff --git a/tests/test_anaconda_token_rotation.py b/tests/test_anaconda_token_rotation.py index 69098b066..817d5caa1 100644 --- a/tests/test_anaconda_token_rotation.py +++ b/tests/test_anaconda_token_rotation.py @@ -225,4 +225,4 @@ def test_rotate_anaconda_token_provider_error( user, project, None, drone_endpoints=[drone_default_endpoint] ) - assert "on %s" % provider.replace("_", " ") in str(e.value) + assert "on {}".format(provider.replace("_", " ")) in str(e.value) diff --git a/tests/test_cli.py b/tests/test_cli.py index f8ec7c0ee..e51027a4d 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -82,9 +82,9 @@ def test_init_with_custom_config(py_recipe): data = yaml.safe_load( open(os.path.join(destination, "conda-forge.yml")).read() ) - assert data.get("bot") != None - assert data["bot"]["automerge"] == True - assert data["bot"]["run_deps_from_wheel"] == True + assert data.get("bot") is not None + assert data["bot"]["automerge"] is True + assert data["bot"]["run_deps_from_wheel"] is True def test_init_multiple_output_matrix(testing_workdir): diff --git a/tests/test_condaforge_config_schema.py b/tests/test_condaforge_config_schema.py index d865a5fe4..b4e08b586 100644 --- a/tests/test_condaforge_config_schema.py +++ b/tests/test_condaforge_config_schema.py @@ -76,4 +76,4 @@ def test_extra_fields(): }, } with pytest.raises(ValidationError): - config = ConfigModel(**config_dict) + ConfigModel(**config_dict) diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py index a2c125dea..6cbd0750c 100644 --- a/tests/test_configure_feedstock.py +++ b/tests/test_configure_feedstock.py @@ -865,15 +865,14 @@ def test_noarch_platforms_bad_yaml(config_yaml: ConfigYAML, caplog): def test_forge_yml_alt_path(config_yaml: ConfigYAML): - load_forge_config = ( - lambda forge_yml: configure_feedstock._load_forge_config( # noqa + def load_forge_config(forge_yml): + return configure_feedstock._load_forge_config( config_yaml.workdir, exclusive_config_file=os.path.join( config_yaml.workdir, "recipe", "default_config.yaml" ), forge_yml=forge_yml, ) - ) forge_yml = os.path.join(config_yaml.workdir, "conda-forge.yml") forge_yml_alt = os.path.join( diff --git a/tests/test_feedstock_io.py b/tests/test_feedstock_io.py index b576b1af1..f471b6929 100644 --- a/tests/test_feedstock_io.py +++ b/tests/test_feedstock_io.py @@ -69,8 +69,9 @@ def test_repo(self): possible_repo_subdir = os.path.join( tmp_dir, "".join( - "%s%s" - % (x, os.path.sep if random.random() > 0.5 else "") + "{}{}".format( + x, os.path.sep if random.random() > 0.5 else "" + ) for x in string.ascii_lowercase ), ) diff --git a/tests/test_feedstock_tokens.py b/tests/test_feedstock_tokens.py index ee3c933b2..888c1e95b 100644 --- a/tests/test_feedstock_tokens.py +++ b/tests/test_feedstock_tokens.py @@ -75,7 +75,7 @@ def test_feedstock_tokens_roundtrip( project, provider=ci, ) - token_json_pth = os.path.join(tmpdir, "tokens", "%s.json" % project) + token_json_pth = os.path.join(tmpdir, "tokens", f"{project}.json") os.makedirs(os.path.join(tmpdir, "tokens"), exist_ok=True) try: @@ -179,7 +179,7 @@ def test_is_valid_feedstock_token_badtoken( user = "conda-forge" feedstock_token = "akdjhfl" - token_pth = os.path.join(tmpdir, "tokens", "%s.json" % project) + token_pth = os.path.join(tmpdir, "tokens", f"{project}.json") os.makedirs(os.path.dirname(token_pth), exist_ok=True) with open(token_pth, "w") as fp: td = {"salt": b"adf".hex(), "hashed_token": b"fgh".hex()} @@ -201,7 +201,7 @@ def test_generate_and_write_feedstock_token(ci): repo 
= "foo" if ci: - pth = os.path.expanduser("~/.conda-smithy/bar_foo_%s.token" % ci) + pth = os.path.expanduser(f"~/.conda-smithy/bar_foo_{ci}.token") opth = os.path.expanduser("~/.conda-smithy/bar_foo.token") else: pth = os.path.expanduser("~/.conda-smithy/bar_foo.token") @@ -233,7 +233,7 @@ def test_read_feedstock_token(ci): user = "bar" repo = "foo" if ci: - pth = os.path.expanduser("~/.conda-smithy/bar_foo_%s.token" % ci) + pth = os.path.expanduser(f"~/.conda-smithy/bar_foo_{ci}.token") else: pth = os.path.expanduser("~/.conda-smithy/bar_foo.token") @@ -326,7 +326,7 @@ def test_feedstock_token_exists( os.makedirs(os.path.join(tmpdir, "tokens"), exist_ok=True) if file_exists: with open( - os.path.join(tmpdir, "tokens", "%s.json" % project), "w" + os.path.join(tmpdir, "tokens", f"{project}.json"), "w" ) as fp: data = {"tokens": [{}]} if provider is not None: @@ -365,7 +365,7 @@ def test_feedstock_token_raises( git_mock.Repo.clone_from.side_effect = ValueError("blarg") user = "foo" os.makedirs(os.path.join(tmpdir, "tokens"), exist_ok=True) - with open(os.path.join(tmpdir, "tokens", "%s.json" % project), "w") as fp: + with open(os.path.join(tmpdir, "tokens", f"{project}.json"), "w") as fp: fp.write("{}") with pytest.raises(FeedstockTokenError) as e: @@ -414,7 +414,7 @@ def test_register_feedstock_token_works( project, provider=ci, ) - token_json_pth = os.path.join(tmpdir, "tokens", "%s.json" % project) + token_json_pth = os.path.join(tmpdir, "tokens", f"{project}.json") try: generate_and_write_feedstock_token(user, project, provider=ci) @@ -434,8 +434,9 @@ def test_register_feedstock_token_works( repo = git_mock.Repo.clone_from.return_value repo.index.add.assert_called_once_with(token_json_pth) repo.index.commit.assert_called_once_with( - "[ci skip] [skip ci] [cf admin skip] ***NO_CI*** added token for %s/%s on provider%s" - % (user, project, "" if ci is None else " " + ci) + "[ci skip] [skip ci] [cf admin skip] ***NO_CI*** added token for {}/{} on provider{}".format( + user, project, "" if ci is None else " " + ci + ) ) repo.remote.return_value.pull.assert_called_once_with(rebase=True) repo.remote.return_value.push.assert_called_once_with() @@ -562,8 +563,9 @@ def test_register_feedstock_token_append( repo = git_mock.Repo.clone_from.return_value repo.index.add.assert_called_once_with(token_json_pth) repo.index.commit.assert_called_once_with( - "[ci skip] [skip ci] [cf admin skip] ***NO_CI*** added token for %s/%s on provider%s" - % (user, project, "" if ci is None else " " + ci) + "[ci skip] [skip ci] [cf admin skip] ***NO_CI*** added token for {}/{} on provider{}".format( + user, project, "" if ci is None else " " + ci + ) ) repo.remote.return_value.pull.assert_called_once_with(rebase=True) repo.remote.return_value.push.assert_called_once_with() @@ -828,7 +830,7 @@ def test_register_feedstock_token_with_providers_error( unique_token_per_provider=unique_token_per_provider, ) - assert "on %s" % provider in str(e.value) + assert f"on {provider}" in str(e.value) finally: for _provider in providers: pth = feedstock_token_local_path(user, project, provider=_provider) diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index 4d74fdb7b..647416559 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -93,7 +93,6 @@ def test_osx_lint(where): def test_stdlib_lints_multi_output(): - expected_message = "You're setting a requirement on sysroot" with tmp_directory() as recipe_dir: with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: @@ -909,7 +908,7 @@ def 
test_jinja_os_environ(self): version: {{ version }} """ ) - lints = linter.main(recipe_dir) + linter.main(recipe_dir) def test_jinja_load_file_regex(self): # Test that we can use load_file_regex in a recipe. We don't care about @@ -933,7 +932,7 @@ def test_jinja_load_file_regex(self): version: {{ version }} """ ) - lints = linter.main(recipe_dir) + linter.main(recipe_dir) def test_jinja_load_file_data(self): # Test that we can use load_file_data in a recipe. We don't care about @@ -950,7 +949,7 @@ def test_jinja_load_file_data(self): version: {{ version }} """ ) - lints = linter.main(recipe_dir) + linter.main(recipe_dir) def test_jinja_load_setup_py_data(self): # Test that we can use load_setup_py_data in a recipe. We don't care about @@ -967,7 +966,7 @@ def test_jinja_load_setup_py_data(self): version: {{ version }} """ ) - lints = linter.main(recipe_dir) + linter.main(recipe_dir) def test_jinja_load_str_data(self): # Test that we can use load_str_data in a recipe. We don't care about @@ -984,7 +983,7 @@ def test_jinja_load_str_data(self): version: {{ version }} """ ) - lints = linter.main(recipe_dir) + linter.main(recipe_dir) def test_jinja_os_sep(self): # Test that we can use os.sep in a recipe. @@ -999,7 +998,7 @@ def test_jinja_os_sep(self): script: {{ os.sep }} """ ) - lints = linter.main(recipe_dir) + linter.main(recipe_dir) def test_target_platform(self): # Test that we can use target_platform in a recipe. We don't care about @@ -1013,7 +1012,7 @@ def test_target_platform(self): version: 1.0 """ ) - lints = linter.main(recipe_dir) + linter.main(recipe_dir) def test_missing_build_number(self): expected_message = "The recipe must have a `build/number` section." From a5cea13caa66aa1068631795da95c6bd54acc1aa Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Thu, 25 Jul 2024 18:24:06 +0200 Subject: [PATCH 4/6] apply manual ruff fixes --- bootstrap-obvious-ci-and-miniconda.py | 10 +-- conda_smithy/configure_feedstock.py | 92 ++++++++++++++------------- conda_smithy/feedstock_io.py | 6 +- conda_smithy/lint_recipe.py | 3 +- conda_smithy/linter/hints.py | 20 +++--- conda_smithy/linter/lints.py | 37 ++++++----- conda_smithy/linter/utils.py | 4 +- conda_smithy/schema.py | 6 +- conda_smithy/validate_schema.py | 3 +- tests/test_configure_feedstock.py | 6 +- tests/test_lint_recipe.py | 28 ++++---- 11 files changed, 113 insertions(+), 102 deletions(-) diff --git a/bootstrap-obvious-ci-and-miniconda.py b/bootstrap-obvious-ci-and-miniconda.py index 752027104..57a245376 100644 --- a/bootstrap-obvious-ci-and-miniconda.py +++ b/bootstrap-obvious-ci-and-miniconda.py @@ -63,10 +63,10 @@ def main( install_obvci=True, ): system = platform.system() - URL = miniconda_url( + url = miniconda_url( system, target_arch, major_py_version, miniconda_version ) - basename = URL.rsplit("/", 1)[1] + basename = url.rsplit("/", 1)[1] if system in ["Linux", "Darwin"]: cmd = ["bash", basename, "-b", "-p", target_dir] bin_dir = "bin" @@ -85,10 +85,10 @@ def main( raise ValueError("Unsupported operating system.") if not os.path.exists(basename): - print(f"Downloading from {URL}") - urlretrieve(URL, basename) + print(f"Downloading from {url}") + urlretrieve(url, basename) else: - print(f"Using cached version of {URL}") + print(f"Using cached version of {url}") # Install with powershell. 
if os.path.exists(target_dir): diff --git a/conda_smithy/configure_feedstock.py b/conda_smithy/configure_feedstock.py index 2156974c3..18470fd64 100644 --- a/conda_smithy/configure_feedstock.py +++ b/conda_smithy/configure_feedstock.py @@ -20,11 +20,6 @@ import requests import yaml -try: - from builtins import ExceptionGroup -except ImportError: - pass - # The `requests` lib uses `simplejson` instead of `json` when available. # In consequence the same JSON library must be used or the `JSONDecodeError` # used when catching an exception won't be the same as the one raised @@ -119,10 +114,10 @@ def _ignore_match(ignore, rel): """ srch = {rel} srch.update(map(fspath, PurePath(rel).parents)) - logger.debug(f"srch:{srch}") - logger.debug(f"ignore:{ignore}") + logger.debug("srch:%s", srch) + logger.debug("ignore:%s", ignore) if srch.intersection(ignore): - logger.info(f"{rel} rendering is skipped") + logger.info("%s rendering is skipped", rel) return True else: return False @@ -332,7 +327,7 @@ def _get_used_key_values_by_input_order( for key in all_used_vars if key in squished_input_variants } - logger.debug(f"initial used_key_values {pprint.pformat(used_key_values)}") + logger.debug("initial used_key_values %s", pprint.pformat(used_key_values)) # we want remove any used key values not in used variants and make sure they follow the # input order @@ -346,8 +341,8 @@ def _get_used_key_values_by_input_order( zip(*[squished_input_variants[k] for k in keyset]) ) zipped_keys |= set(keyset) - logger.debug(f"zipped_keys {pprint.pformat(zipped_keys)}") - logger.debug(f"zipped_tuples {pprint.pformat(zipped_tuples)}") + logger.debug("zipped_keys %s", pprint.pformat(zipped_keys)) + logger.debug("zipped_tuples %s", pprint.pformat(zipped_tuples)) for keyset, tuples in zipped_tuples.items(): # for each set of zipped keys from squished_input_variants, @@ -377,16 +372,17 @@ def _get_used_key_values_by_input_order( for tup in tuples ] ) - logger.debug(f"used_keyset {pprint.pformat(used_keyset)}") - logger.debug(f"used_keyset_inds {pprint.pformat(used_keyset_inds)}") - logger.debug(f"used_tuples {pprint.pformat(used_tuples)}") + logger.debug("used_keyset %s", pprint.pformat(used_keyset)) + logger.debug("used_keyset_inds %s", pprint.pformat(used_keyset_inds)) + logger.debug("used_tuples %s", pprint.pformat(used_tuples)) # this is the set of tuples that we want to keep, but need to be reordered used_tuples_to_be_reordered = set( list(zip(*[squished_used_variants[k] for k in used_keyset])) ) logger.debug( - f"used_tuples_to_be_reordered {pprint.pformat(used_tuples_to_be_reordered)}" + "used_tuples_to_be_reordered %s", + pprint.pformat(used_tuples_to_be_reordered), ) # we double check the logic above by looking to ensure everything in @@ -405,7 +401,7 @@ def _get_used_key_values_by_input_order( final_used_tuples = tuple( [tup for tup in used_tuples if tup in used_tuples_to_be_reordered] ) - logger.debug(f"final_used_tuples {pprint.pformat(final_used_tuples)}") + logger.debug("final_used_tuples %s", pprint.pformat(final_used_tuples)) # now we reconstruct the list of values per key and replace in used_key_values # we keep only keys in all_used_vars @@ -419,7 +415,8 @@ def _get_used_key_values_by_input_order( used_key_values[k] = v logger.debug( - f"post input reorder used_key_values {pprint.pformat(used_key_values)}" + "post input reorder used_key_values %s", + pprint.pformat(used_key_values), ) return used_key_values, zipped_keys @@ -557,7 +554,7 @@ def _collapse_subpackage_variants( if "target_platform" in 
all_used_vars: top_level_loop_vars.add("target_platform") - logger.debug(f"initial all_used_vars {pprint.pformat(all_used_vars)}") + logger.debug("initial all_used_vars %s", pprint.pformat(all_used_vars)) # this is the initial collection of all variants before we discard any. "Squishing" # them is necessary because the input form is already broken out into one matrix @@ -572,16 +569,16 @@ def _collapse_subpackage_variants( conda_build.variants.list_of_dicts_to_dict_of_lists(list(all_variants)) ) logger.debug( - f"squished_input_variants {pprint.pformat(squished_input_variants)}" + "squished_input_variants %s", pprint.pformat(squished_input_variants) ) logger.debug( - f"squished_used_variants {pprint.pformat(squished_used_variants)}" + "squished_used_variants %s", pprint.pformat(squished_used_variants) ) # these are variables that only occur in the top level, and thus won't show up as loops in the # above collection of all variants. We need to transfer them from the input_variants. preserve_top_level_loops = set(top_level_loop_vars) - set(all_used_vars) - logger.debug(f"preserve_top_level_loops {preserve_top_level_loops}") + logger.debug("preserve_top_level_loops %s", preserve_top_level_loops) # Add in some variables that should always be preserved always_keep_keys = { @@ -611,9 +608,9 @@ def _collapse_subpackage_variants( all_used_vars.update(always_keep_keys) all_used_vars.update(top_level_vars) - logger.debug(f"final all_used_vars {pprint.pformat(all_used_vars)}") - logger.debug(f"top_level_vars {pprint.pformat(top_level_vars)}") - logger.debug(f"top_level_loop_vars {pprint.pformat(top_level_loop_vars)}") + logger.debug("final all_used_vars %s", pprint.pformat(all_used_vars)) + logger.debug("top_level_vars %s", pprint.pformat(top_level_vars)) + logger.debug("top_level_loop_vars %s", pprint.pformat(top_level_loop_vars)) used_key_values, used_zipped_vars = _get_used_key_values_by_input_order( squished_input_variants, @@ -643,7 +640,7 @@ def _collapse_subpackage_variants( _trim_unused_zip_keys(used_key_values) _trim_unused_pin_run_as_build(used_key_values) - logger.debug(f"final used_key_values {pprint.pformat(used_key_values)}") + logger.debug("final used_key_values %s", pprint.pformat(used_key_values)) return ( break_up_top_level_values(top_level_loop_vars, used_key_values), @@ -710,7 +707,9 @@ def dump_subspace_config_files( arch, forge_config, ) - logger.debug(f"collapsed subspace config files: {pprint.pformat(configs)}") + logger.debug( + "collapsed subspace config files: %s", pprint.pformat(configs) + ) # get rid of the special object notation in the yaml file for objects that we dump yaml.add_representer(set, yaml.representer.SafeRepresenter.represent_list) @@ -741,7 +740,7 @@ def dump_subspace_config_files( os.makedirs(out_folder) config = finalize_config(config, platform, arch, forge_config) - logger.debug(f"finalized config file: {pprint.pformat(config)}") + logger.debug("finalized config file: %s", pprint.pformat(config)) with write_file(out_path) as f: yaml.dump(config, f, default_flow_style=False) @@ -840,7 +839,8 @@ def migrate_combined_spec(combined_spec, forge_dir, config, forge_config): migration_variants.sort(key=lambda fn_v: (fn_v[1]["migrator_ts"], fn_v[0])) if len(migration_variants): logger.info( - f"Applying migrations: {','.join(k for k, v in migration_variants)}" + "Applying migrations: %s", + ",".join(k for k, v in migration_variants), ) for migrator_file, migration in migration_variants: @@ -1506,16 +1506,15 @@ def _render_template_exe_files( import difflib 
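The hunks in this file replace eager f-string log messages with lazy ``%``-style arguments, which is what the ``G`` (flake8-logging-format) rules selected in ``pyproject.toml`` enforce; G004 specifically flags f-strings inside logging calls. With lazy formatting the message is only interpolated when a handler actually emits the record, so debug-level calls cost almost nothing when debug logging is off. A minimal sketch of the difference (illustrative only, not code from this repo):

    import logging

    logger = logging.getLogger(__name__)
    expensive = {"key": "value"}

    # Flagged by G004: the f-string is rendered even when DEBUG is disabled.
    logger.debug(f"state: {expensive}")

    # Preferred: interpolation is deferred until the record is emitted.
    logger.debug("state: %s", expensive)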
         logger.debug(
-            "diff:\n{}".format(
-                "\n".join(
-                    difflib.unified_diff(
-                        old_file_contents.splitlines(),
-                        new_file_contents.splitlines(),
-                        fromfile=target_fname,
-                        tofile=target_fname,
-                    )
+            "diff:\n%s",
+            "\n".join(
+                difflib.unified_diff(
+                    old_file_contents.splitlines(),
+                    new_file_contents.splitlines(),
+                    fromfile=target_fname,
+                    tofile=target_fname,
                 )
-            )
+            ),
         )
         raise RuntimeError(
             f"Same file {target_fname} is rendered twice with different contents"
@@ -1999,7 +1998,7 @@ def azure_build_id_from_public(forge_config):
     forge_config["azure"]["build_id"] = build_def["id"]
 
 
-def render_README(jinja_env, forge_config, forge_dir, render_info=None):
+def render_readme(jinja_env, forge_config, forge_dir, render_info=None):
     if "README.md" in forge_config["skip_render"]:
         logger.info("README.md rendering is skipped")
         return
@@ -2102,7 +2101,8 @@
     except (OSError, IndexError) as err:
         # We don't want the command to fail if requesting the build_id fails.
         logger.warning(
-            f"Azure build_id can't be retrieved using the Azure token. Exception: {err}"
+            "Azure build_id can't be retrieved using the Azure token. Exception: %s",
+            err,
         )
     except json.decoder.JSONDecodeError:
         azure_build_id_from_token(forge_config)
@@ -2456,7 +2456,8 @@ def commit_changes(forge_file_directory, commit, cs_ver, cfp_ver, cb_ver):
         else:
             logger.info(
                 "You can commit the changes with:\n\n"
-                f'    git commit -m "MNT: {msg}"\n'
+                '    git commit -m "MNT: %s"\n',
+                msg,
             )
             logger.info("These changes need to be pushed to github!\n")
     else:
@@ -2478,14 +2479,14 @@ def get_cfp_file_path(temporary_directory):
         temporary_directory, f"conda-forge-pinning-{ pkg.version }{ext}"
     )
 
-    logger.info(f"Downloading conda-forge-pinning-{ pkg.version }")
+    logger.info("Downloading conda-forge-pinning-%s", pkg.version)
     response = requests.get(pkg.url)
     response.raise_for_status()
 
     with open(dest, "wb") as f:
         f.write(response.content)
 
-    logger.info(f"Extracting conda-forge-pinning to { temporary_directory }")
+    logger.info("Extracting conda-forge-pinning to %s", temporary_directory)
     cmd = ["cph"]
     # If possible, avoid needing to activate the environment to access cph
     if sys.executable:
@@ -2679,14 +2680,15 @@ def set_migration_fns(forge_dir, forge_config):
             new_fn, new_num, _ = migrations_in_cfp[ts]
             if num == new_num:
                 logger.info(
-                    f"{os.path.basename(fn)} from feedstock is ignored and upstream version is used"
+                    "%s from feedstock is ignored and upstream version is used",
+                    os.path.basename(fn),
                 )
                 result.append(new_fn)
             else:
                 result.append(fn)
         else:
             # Delete this as this migration is over.
-            logger.info(f"{os.path.basename(fn)} is closed now. Removing")
+            logger.info("%s is closed now. Removing", os.path.basename(fn))
             remove_file(fn)
     forge_config["migration_fns"] = result
     return
@@ -2787,7 +2789,7 @@ def main(
         tmp = render_info[0]
         render_info[0] = render_info[azure_ind]
         render_info[azure_ind] = tmp
-    render_README(env, config, forge_dir, render_info)
+    render_readme(env, config, forge_dir, render_info)
 
     logger.debug("README rendered")
 
diff --git a/conda_smithy/feedstock_io.py b/conda_smithy/feedstock_io.py
index cdb2c4fb9..473c894c0 100644
--- a/conda_smithy/feedstock_io.py
+++ b/conda_smithy/feedstock_io.py
@@ -28,7 +28,7 @@ def get_repo_root(path):
 
 
 def set_exe_file(filename, set_exe=True):
-    IXALL = stat.S_IXOTH | stat.S_IXGRP | stat.S_IXUSR
+    all_execute_permissions = stat.S_IXOTH | stat.S_IXGRP | stat.S_IXUSR
 
     repo = get_repo(filename)
     if repo:
@@ -37,9 +37,9 @@ def set_exe_file(filename, set_exe=True):
 
     mode = os.stat(filename).st_mode
     if set_exe:
-        mode |= IXALL
+        mode |= all_execute_permissions
     else:
-        mode -= mode & IXALL
+        mode -= mode & all_execute_permissions
     os.chmod(filename, mode)
 
 
diff --git a/conda_smithy/lint_recipe.py b/conda_smithy/lint_recipe.py
index c2e68ebce..9e7f328df 100644
--- a/conda_smithy/lint_recipe.py
+++ b/conda_smithy/lint_recipe.py
@@ -7,6 +7,7 @@
 from textwrap import indent
 
 import github
+import jsonschema
 import requests
 
 from conda_smithy.linter import rattler_linter
@@ -485,7 +486,7 @@ def run_conda_forge_specific(meta, recipe_dir, lints, hints):
     )
 
 
-def _format_validation_msg(error: "jsonschema.ValidationError"):
+def _format_validation_msg(error: jsonschema.ValidationError):
     """Use the data on the validation error to generate improved reporting.
 
     If available, get the help URL from the first level of the JSON path:
diff --git a/conda_smithy/linter/hints.py b/conda_smithy/linter/hints.py
index 54ca4f5c3..a8bec9566 100644
--- a/conda_smithy/linter/hints.py
+++ b/conda_smithy/linter/hints.py
@@ -72,7 +72,7 @@ def hint_shellcheck_usage(recipe_dir, hints):
     )
 
     if shellcheck_enabled and shutil.which("shellcheck") and shell_scripts:
-        MAX_SHELLCHECK_LINES = 50
+        max_shellcheck_lines = 50
         cmd = [
             "shellcheck",
             "--enable=all",
@@ -105,10 +105,10 @@ def hint_shellcheck_usage(recipe_dir, hints):
                 + " recipe/*.sh -f diff | git apply' helps)"
             )
             hints.extend(findings[:50])
-            if len(findings) > MAX_SHELLCHECK_LINES:
+            if len(findings) > max_shellcheck_lines:
                 hints.append(
                     "Output restricted, there are '%s' more lines."
-                    % (len(findings) - MAX_SHELLCHECK_LINES)
+                    % (len(findings) - max_shellcheck_lines)
                 )
         elif p.returncode != 0:
             # Something went wrong.
@@ -128,12 +128,12 @@ def hint_check_spdx(about_section, hints): parsed_licenses_with_exception = licensing.license_symbols( license.strip(), decompose=False ) - for l in parsed_licenses_with_exception: - if isinstance(l, license_expression.LicenseWithExceptionSymbol): - parsed_licenses.append(l.license_symbol.key) - parsed_exceptions.append(l.exception_symbol.key) + for li in parsed_licenses_with_exception: + if isinstance(li, license_expression.LicenseWithExceptionSymbol): + parsed_licenses.append(li.license_symbol.key) + parsed_exceptions.append(li.exception_symbol.key) else: - parsed_licenses.append(l.key) + parsed_licenses.append(li.key) except license_expression.ExpressionError: parsed_licenses = [license] @@ -145,12 +145,12 @@ def hint_check_spdx(about_section, hints): with open(os.path.join(os.path.dirname(__file__), "licenses.txt")) as f: expected_licenses = f.readlines() - expected_licenses = set([l.strip() for l in expected_licenses]) + expected_licenses = set([li.strip() for li in expected_licenses]) with open( os.path.join(os.path.dirname(__file__), "license_exceptions.txt") ) as f: expected_exceptions = f.readlines() - expected_exceptions = set([l.strip() for l in expected_exceptions]) + expected_exceptions = set([li.strip() for li in expected_exceptions]) if set(filtered_licenses) - expected_licenses: hints.append( "License is not an SPDX identifier (or a custom LicenseRef) nor an SPDX license expression.\n\n" diff --git a/conda_smithy/linter/lints.py b/conda_smithy/linter/lints.py index 3f1618f5a..154b73fbf 100644 --- a/conda_smithy/linter/lints.py +++ b/conda_smithy/linter/lints.py @@ -5,6 +5,7 @@ from collections.abc import Sequence from typing import List, Optional +from conda.exceptions import InvalidVersionSpec from conda.models.version import VersionOrder from ruamel.yaml import CommentedSeq @@ -113,46 +114,46 @@ def lint_license_cannot_be_unknown(about_section, lints): def lint_selectors_should_be_in_tidy_form(recipe_fname, lints, hints): bad_selectors, bad_lines = [], [] - pyXY_selectors_lint, pyXY_lines_lint = [], [] - pyXY_selectors_hint, pyXY_lines_hint = [], [] + python_selectors_lint, py_selector_lines_lint = [], [] + python_selectors_hint, py_selector_lines_hint = [], [] # Good selectors look like ".*\s\s#\s[...]" good_selectors_pat = re.compile(r"(.+?)\s{2,}#\s\[(.+)\](?(2).*)$") # Look out for py27, py35 selectors; we prefer py==35 - pyXY_selectors_pat = re.compile(r".+#\s*\[.*?(py\d{2,3}).*\]") + python_selectors_pat = re.compile(r".+#\s*\[.*?(py\d{2,3}).*\]") if os.path.exists(recipe_fname): with open(recipe_fname) as fh: for selector_line, line_number in selector_lines(fh): if not good_selectors_pat.match(selector_line): bad_selectors.append(selector_line) bad_lines.append(line_number) - pyXY_matches = pyXY_selectors_pat.match(selector_line) - if pyXY_matches: - for pyXY in pyXY_matches.groups(): - if int(pyXY[2:]) in (27, 34, 35, 36): + python_matches = python_selectors_pat.match(selector_line) + if python_matches: + for py_selector in python_matches.groups(): + if int(py_selector[2:]) in (27, 34, 35, 36): # py27, py35 and so on are ok up to py36 (included); only warn - pyXY_selectors_hint.append(selector_line) - pyXY_lines_hint.append(line_number) + python_selectors_hint.append(selector_line) + py_selector_lines_hint.append(line_number) else: - pyXY_selectors_lint.append(selector_line) - pyXY_lines_lint.append(line_number) + python_selectors_lint.append(selector_line) + py_selector_lines_lint.append(line_number) if bad_selectors: lints.append( 
"Selectors are suggested to take a " "``#[]`` form." f" See lines {bad_lines}" ) - if pyXY_selectors_hint: + if python_selectors_hint: hints.append( "Old-style Python selectors (py27, py34, py35, py36) are " "deprecated. Instead, consider using the int ``py``. For " - f"example: ``# [py>=36]``. See lines {pyXY_lines_hint}" + f"example: ``# [py>=36]``. See lines {py_selector_lines_hint}" ) - if pyXY_selectors_lint: + if python_selectors_lint: lints.append( "Old-style Python selectors (py27, py35, etc) are only available " "for Python 2.7, 3.4, 3.5, and 3.6. Please use explicit comparisons " "with the integer ``py``, e.g. ``# [py==37]`` or ``# [py>=37]``. " - f"See lines {pyXY_lines_lint}" + f"See lines {py_selector_lines_lint}" ) @@ -331,8 +332,10 @@ def lint_package_version(package_section, lints): ver = str(package_section.get("version")) try: VersionOrder(ver) - except: - lints.append(f"Package version {ver} doesn't match conda spec") + except InvalidVersionSpec as e: + lints.append( + f"Package version {ver} doesn't match conda spec: {e}" + ) def lint_jinja_variables_definitions(meta_fname, lints): diff --git a/conda_smithy/linter/utils.py b/conda_smithy/linter/utils.py index 723481388..17331f6ed 100644 --- a/conda_smithy/linter/utils.py +++ b/conda_smithy/linter/utils.py @@ -6,11 +6,11 @@ from typing import Mapping from conda_build.metadata import ( - FIELDS as cbfields, + FIELDS as _CONDA_BUILD_FIELDS, ) from rattler_build_conda_compat import loader as rattler_loader -FIELDS = copy.deepcopy(cbfields) +FIELDS = copy.deepcopy(_CONDA_BUILD_FIELDS) # Just in case 'extra' moves into conda_build if "extra" not in FIELDS.keys(): diff --git a/conda_smithy/schema.py b/conda_smithy/schema.py index e092b83ac..02d4ed9f6 100644 --- a/conda_smithy/schema.py +++ b/conda_smithy/schema.py @@ -108,8 +108,10 @@ class AzureRunnerSettings(BaseModel): default=None, description="Swapfile size in GiB" ) - timeoutInMinutes: Optional[int] = Field( - default=360, description="Timeout in minutes for the job" + timeout_in_minutes: Optional[int] = Field( + default=360, + description="Timeout in minutes for the job", + alias="timeoutInMinutes", ) variables: Optional[Dict[str, str]] = Field( diff --git a/conda_smithy/validate_schema.py b/conda_smithy/validate_schema.py index 6507f5142..555de599a 100644 --- a/conda_smithy/validate_schema.py +++ b/conda_smithy/validate_schema.py @@ -14,7 +14,8 @@ ) -class DeprecatedFieldWarning(ValidationError): +# this is actually not an error, therefore the naming is okay +class DeprecatedFieldWarning(ValidationError): # noqa: N818 pass diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py index 6cbd0750c..0e4a18139 100644 --- a/tests/test_configure_feedstock.py +++ b/tests/test_configure_feedstock.py @@ -505,7 +505,7 @@ def test_circle_skipped(linux_skipped_recipe, jinja_env): def test_render_with_all_skipped_generates_readme(skipped_recipe, jinja_env): - configure_feedstock.render_README( + configure_feedstock.render_readme( jinja_env=jinja_env, forge_config=skipped_recipe.config, forge_dir=skipped_recipe.recipe, @@ -537,7 +537,7 @@ def test_render_windows_with_skipped_python(python_skipped_recipe, jinja_env): def test_readme_has_terminating_newline(noarch_recipe, jinja_env): - configure_feedstock.render_README( + configure_feedstock.render_readme( jinja_env=jinja_env, forge_config=noarch_recipe.config, forge_dir=noarch_recipe.recipe, @@ -746,7 +746,7 @@ def test_migrator_compiler_version_recipe( def test_files_skip_render(render_skipped_recipe, 
jinja_env): - configure_feedstock.render_README( + configure_feedstock.render_readme( jinja_env=jinja_env, forge_config=render_skipped_recipe.config, forge_dir=render_skipped_recipe.recipe, diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index 647416559..7cdfa315a 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -268,7 +268,7 @@ def test_cbc_osx_lints( assert any(lint.startswith(exp_lint) for lint in lints) -class Test_linter(unittest.TestCase): +class TestLinter(unittest.TestCase): def test_pin_compatible_in_run_exports(self): meta = { @@ -561,10 +561,12 @@ def assert_selector(selector, is_good=True): assert_selector("name: foo_py3 #[py3k]", is_good=False) assert_selector("name: foo_py3 # [py3k]", is_good=False) - def test_pyXY_selectors(self): + def test_python_selectors(self): with tmp_directory() as recipe_dir: - def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): + def assert_python_selector( + meta_string, is_good=False, kind="lint" + ): assert kind in ("lint", "hint") if kind == "hint": expected_start = "Old-style Python selectors (py27, py34, py35, py36) are deprecated" @@ -590,7 +592,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): message, ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -599,7 +601,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): """, kind="hint", ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -608,7 +610,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): """, kind="lint", ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -617,7 +619,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): """, kind="lint", ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -626,7 +628,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): """, kind="hint", ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -635,7 +637,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): """, kind="lint", ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -644,7 +646,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): """, kind="lint", ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -653,7 +655,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): """, kind="lint", ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -662,7 +664,7 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"): """, kind="lint", ) - assert_pyXY_selector( + assert_python_selector( """ build: noarch: python @@ -1730,7 +1732,7 @@ def test_go_license_bundling(self): @pytest.mark.cli -class TestCLI_recipe_lint(unittest.TestCase): +class TestCliRecipeLint(unittest.TestCase): def test_cli_fail(self): with tmp_directory() as recipe_dir: with open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh: From bea1448dd6a942f1667e76e5426c66732c07b9b5 Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Thu, 25 Jul 2024 18:30:34 +0200 Subject: [PATCH 5/6] apply results from discussions --- conda_smithy/linter/lints.py | 4 ++++ tests/test_lint_recipe.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/conda_smithy/linter/lints.py b/conda_smithy/linter/lints.py index 154b73fbf..46a196dcf 100644 --- a/conda_smithy/linter/lints.py +++ 
b/conda_smithy/linter/lints.py @@ -328,6 +328,10 @@ def lint_noarch_and_runtime_dependencies( def lint_package_version(package_section, lints): + version = package_section.get("version") + if not version: + lints.append("Package version is missing.") + return if package_section.get("version") is not None: ver = str(package_section.get("version")) try: diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index 7cdfa315a..c71bbf9b5 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -1500,7 +1500,7 @@ def test_version(self): "Package version 2.0.0~alpha0 doesn't match conda spec" ) lints, hints = linter.lintify_meta_yaml(meta) - self.assertIn(expected_message, lints) + assert any(lint.startswith(expected_message) for lint in lints) @unittest.skipUnless(is_gh_token_set(), "GH_TOKEN not set") def test_examples(self): From a3f1dd06cbd1028766ecf4363afd2db788f43ab2 Mon Sep 17 00:00:00 2001 From: Yannik Tausch Date: Thu, 25 Jul 2024 18:45:38 +0200 Subject: [PATCH 6/6] fix tests --- tests/test_lint_recipe.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py index c71bbf9b5..1e20cd5f6 100644 --- a/tests/test_lint_recipe.py +++ b/tests/test_lint_recipe.py @@ -1761,6 +1761,7 @@ def test_cli_success(self): """ package: name: 'test_package' + version: 1.0.0 build: number: 0 test: @@ -1768,7 +1769,8 @@ def test_cli_success(self): - foo about: home: something - license: something else + license: MIT + license_file: LICENSE summary: a test recipe extra: recipe-maintainers: @@ -1792,6 +1794,7 @@ def test_cli_environ(self): """ package: name: 'test_package' + version: 1.0.0 build: number: 0 test: @@ -1801,7 +1804,8 @@ def test_cli_environ(self): - foo about: home: something - license: something else + license: MIT + license_file: LICENSE summary: a test recipe extra: recipe-maintainers:
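With the two linter changes above, a recipe now fails linting both when ``package/version`` is missing and when the version is not a valid conda version, which is why the CLI-test fixtures in the final patch gain a ``version`` and an SPDX ``license``. A rough, self-contained restatement of the resulting checks (assuming ``conda`` is importable, as the patched module already requires):

    from conda.exceptions import InvalidVersionSpec
    from conda.models.version import VersionOrder


    def version_lints(package_section: dict) -> list:
        # Condensed sketch of lint_package_version after this series.
        lints = []
        version = package_section.get("version")
        if not version:
            lints.append("Package version is missing.")
            return lints
        try:
            VersionOrder(str(version))
        except InvalidVersionSpec as e:
            lints.append(f"Package version {version} doesn't match conda spec: {e}")
        return lints


    print(version_lints({}))                           # -> missing version
    print(version_lints({"version": "2.0.0~alpha0"}))  # -> doesn't match conda spec
    print(version_lints({"version": "1.0.0"}))         # -> []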