diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index c31cc03..b8c497f 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -1,18 +1,10 @@ name: Check lints - -on: [push] - +on: [push, pull_request] jobs: - build: + ruff: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install pylint - pip install -r requirements.txt - - name: Check lints - run: pylint ./src + - uses: actions/checkout@v4 + - uses: chartboost/ruff-action@v1 + with: + args: 'format --check' diff --git a/.gitignore b/.gitignore index a6eef2c..0e8f421 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,4 @@ **__pycache__ .mypy_cache +.ruff_cache +.vscode diff --git a/requirements.txt b/requirements.txt index b4dcac4..822be75 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1 @@ -DateTime==4.3.0 -requests==2.23.0 -toml==0.10.2 - +requests==2.23.0 \ No newline at end of file diff --git a/src/__init__.py b/src/__init__.py index 5394807..a88d18e 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -2,8 +2,8 @@ Script to release new gtk-rs crates version. 
""" -__version__ = '0.0.1' -__author__ = 'Guillaume Gomez' +__version__ = "0.0.1" +__author__ = "Guillaume Gomez" -__all__ = ['consts', 'github', 'my_toml', 'release', 'utils', 'args', 'globals'] +__all__ = ["consts", "github", "release", "utils", "args"] diff --git a/src/args.py b/src/args.py index 22a321e..9c03f39 100644 --- a/src/args.py +++ b/src/args.py @@ -1,192 +1,46 @@ import getopt + # local imports from utils import write_error, write_msg import consts -class UpdateType: - MAJOR = 0 - MEDIUM = 1 - MINOR = 2 - - @staticmethod - def create_from_string(version_s): - version_s = version_s.lower() - if version_s == 'major': - return UpdateType.MAJOR - if version_s == 'medium': - return UpdateType.MEDIUM - if version_s == 'minor': - return UpdateType.MINOR - return None - - @staticmethod - def to_str(update): - if update == UpdateType.MAJOR: - return "MAJOR" - if update == UpdateType.MEDIUM: - return "MEDIUM" - if update == UpdateType.MINOR: - return "MINOR" - return "UNKNOWN" - - -def get_answer(text): - while True: - text = input(f'{text} [Y/n] ').strip().lower() - if len(text) == 0 or text == 'y': - return True - if text == 'n': - return False - write_msg(f'-> Invalid answer "{text}": only "Y" and "n" are expected') - - -def is_sys_crate(crate): - return crate.endswith('-sys') or crate.endswith('-sys-rs') - - -def get_up_type(crate, mode, pick_update_type_for_crates, default_updates): - if mode is None and pick_update_type_for_crates is False: - return None - if is_sys_crate(crate) and default_updates['sys'] is not None: - return default_updates['sys'] - if not is_sys_crate(crate) and default_updates['non-sys'] is not None: - return default_updates['non-sys'] - while pick_update_type_for_crates is True: - text = input(f'Which kind of update do you want for "{crate}"? 
[MINOR/MEDIUM/MAJOR] ') - text = text.strip().lower() - mode = UpdateType.create_from_string(text) - if mode is not None: - if (is_sys_crate(crate) and - get_answer('Do you want to use this release for all other sys crates?')): - default_updates['sys'] = mode - elif (not is_sys_crate(crate) and - get_answer('Do you want to use this release for all other non-sys crates?')): - default_updates['non-sys'] = mode - break - write_msg(f'Invalid update type received: "{text}". Accepted values: (MINOR|MEDIUM|MAJOR)') - return mode - - -def ask_updates_confirmation(crates): - write_msg("Recap' of picked updates:") - for crate in crates: - crate_name = crate['crate']['crate'] - update = UpdateType.to_str(crate['up-type']) - write_msg(f"[{crate_name}] => {update}") - return get_answer('Do you agree with this?') - - def write_help(): write_msg("release.py accepts the following options:") write_msg("") write_msg(" * -h | --help : display this message") write_msg(" * -t | --token= : give the github token") - write_msg(" * -m | --mode= : give the update type (MINOR|MEDIUM|MAJOR)") - write_msg(" * --no-push : performs all operations but doesn't push anything") - write_msg(" * -c | --crate= : only update the given crate (for test purpose" - " mainly)") - write_msg(" * --badges-only : only update the badges on the website") - write_msg(" * --tags-only : only create new tags") - write_msg(" * --blog-only : only create blog post") - write_msg(" * --pick-crates : add an interactive way to pick crates") - write_msg(" * --pick-update-type-for-crates: pick an update type for each crate") class Arguments: def __init__(self): self.token = None - self.mode = None - self.no_push = False - self.specified_crate = None - self.tags_only = False - self.blog_only = False - self.crates = consts.CRATE_LIST @staticmethod def parse_arguments(argv): - # pylint: disable=too-many-branches,too-many-return-statements try: - opts = getopt.getopt(argv, - "ht:m:c:", - ["help", "token=", "mode=", "no-push", 
"crate", - "badges-only", "tags-only", "pick-update-type-for-crates", - "pick-crates", "blog-only"])[0] # second argument is "args" + opts = getopt.getopt(argv, "ht:m:c:", ["help", "token="])[ + 0 + ] # second argument is "args" except getopt.GetoptError: write_help() return None instance = Arguments() - pick_update_type_for_crates = False - for opt, arg in opts: - if opt in ('-h', '--help'): + if opt in ("-h", "--help"): write_help() return None if opt in ("-t", "--token"): instance.token = arg - elif opt in ("-m", "--mode"): - instance.mode = UpdateType.create_from_string(arg) - if instance.mode is None: - write_error(f'{opt}: Invalid update type received. Accepted values: ' - '(MINOR|MEDIUM|MAJOR)') - return None - elif opt == "--no-push": - instance.no_push = True - elif opt in ('-c', '--crate'): - instance.specified_crate = arg - elif opt == '--tags-only': - instance.tags_only = True - elif opt == '--blog-only': - instance.blog_only = True - elif opt == '--pick-crates': - instance.crates = [] - elif opt == '--pick-update-type-for-crates': - pick_update_type_for_crates = True else: write_msg(f'"{opt}": unknown option') write_msg('Use "-h" or "--help" to see help') return None - if instance.token is None and instance.no_push is False and instance.blog_only is False: + if instance.token is None: # In this case, I guess it's not an issue to not have a github token... - write_error('Missing token argument.') - return None - # To make pylint happy. 
- not_only_checks = (instance.tags_only is False or - instance.blog_only is False) - if (instance.mode is None and - not_only_checks is False and - pick_update_type_for_crates is False): - write_error('Missing update type argument.') + write_error("Missing token argument.") return None - default_updates = {"sys": None, "non-sys": None} - if len(instance.crates) == 0: - for crate in consts.CRATE_LIST: - if get_answer(f'Do you want to include "{crate}" in this release?') is True: - instance.crates.append( - { - 'up-type': get_up_type(crate['crate'], - instance.mode, - pick_update_type_for_crates, - default_updates), - 'crate': crate, - }) - if ask_updates_confirmation(instance.crates) is False: - write_msg('OK! Aborting then!') - return None - else: - instance.crates = [ - { - 'up-type': get_up_type(crate['crate'], - instance.mode, - pick_update_type_for_crates, - default_updates), - 'crate': crate, - } for crate in instance.crates] - if (pick_update_type_for_crates is True and - ask_updates_confirmation(instance.crates) is False): - write_msg('OK! 
Aborting then!') - return None return instance diff --git a/src/consts.py b/src/consts.py index 5fd2090..341813f 100644 --- a/src/consts.py +++ b/src/consts.py @@ -1,59 +1,24 @@ -GH_API_URL = 'https://api.github.com' +from datetime import date + + +GH_API_URL = "https://api.github.com" GITHUB_URL = "https://github.com" GIT_URL = "git@github.com:" ORGANIZATION = "gtk-rs" -MASTER_TMP_BRANCH = "master-release-update" BLOG_REPO = "gtk-rs.github.io" -CRATE_LIST = [ - # Sys crates - {"crate": "glib-sys", "repository": "gtk-rs-core", "path": "glib/sys"}, - {"crate": "gobject-sys", "repository": "gtk-rs-core", "path": "glib/gobject-sys"}, - {"crate": "graphene-sys", "repository": "gtk-rs-core", "path": "graphene/sys"}, - {"crate": "gio-sys", "repository": "gtk-rs-core", "path": "gio/sys"}, - {"crate": "pango-sys", "repository": "gtk-rs-core", "path": "pango/sys"}, - {"crate": "gdk-pixbuf-sys", "repository": "gtk-rs-core", "path": "gdk-pixbuf/sys"}, - {"crate": "glib-macros", "repository": "gtk-rs-core", "path": "glib-macros"}, - # glib must be published before cairo-sys (because of macros) - {"crate": "glib", "repository": "gtk-rs-core", "path": "glib"}, - {"crate": "cairo-sys-rs", "repository": "gtk-rs-core", "path": "cairo/sys"}, - {"crate": "pangocairo-sys", "repository": "gtk-rs-core", "path": "pangocairo/sys"}, - {"crate": "atk-sys", "repository": "gtk3-rs", "path": "atk/sys"}, - {"crate": "gdkx11-sys", "repository": "gtk3-rs", "path": "gdkx11/sys"}, - {"crate": "gdk-sys", "repository": "gtk3-rs", "path": "gdk/sys"}, - {"crate": "gdkwayland-sys", "repository": "gtk3-rs", "path": "gdkwayland/sys"}, - {"crate": "gtk-sys", "repository": "gtk3-rs", "path": "gtk/sys"}, - {"crate": "gdk4-sys", "repository": "gtk4-rs", "path": "gdk4/sys"}, - {"crate": "gdk4-wayland-sys", "repository": "gtk4-rs", "path": "gdk4-wayland/sys"}, - {"crate": "gdk4-x11-sys", "repository": "gtk4-rs", "path": "gdk4-x11/sys"}, - {"crate": "gsk4-sys", "repository": "gtk4-rs", "path": "gsk4/sys"}, - 
{"crate": "gtk4-sys", "repository": "gtk4-rs", "path": "gtk4/sys"}, - - # Non-sys crates - {"crate": "gtk3-macros", "repository": "gtk3-rs", "path": "gtk3-macros"}, - {"crate": "gtk4-macros", "repository": "gtk4-rs", "path": "gtk4-macros"}, - {"crate": "graphene", "repository": "gtk-rs-core", "path": "graphene"}, - {"crate": "atk", "repository": "gtk3-rs", "path": "atk"}, - {"crate": "gio", "repository": "gtk-rs-core", "path": "gio"}, - {"crate": "pango", "repository": "gtk-rs-core", "path": "pango"}, - {"crate": "cairo-rs", "repository": "gtk-rs-core", "path": "cairo"}, - {"crate": "gdk-pixbuf", "repository": "gtk-rs-core", "path": "gdk-pixbuf"}, - {"crate": "gdk", "repository": "gtk3-rs", "path": "gdk"}, - {"crate": "gdkwayland", "repository": "gtk3-rs", "path": "gdkwayland"}, - {"crate": "gtk", "repository": "gtk3-rs", "path": "gtk"}, - {"crate": "gdkx11", "repository": "gtk3-rs", "path": "gdkx11"}, - {"crate": "pangocairo", "repository": "gtk-rs-core", "path": "pangocairo"}, - {"crate": "gdk4", "repository": "gtk4-rs", "path": "gdk4"}, - {"crate": "gdk4-wayland", "repository": "gtk4-rs", "path": "gdk4-wayland"}, - {"crate": "gdk4-x11", "repository": "gtk4-rs", "path": "gdk4-x11"}, - {"crate": "gsk4", "repository": "gtk4-rs", "path": "gsk4"}, - {"crate": "gtk4", "repository": "gtk4-rs", "path": "gtk4"}, - # {"crate": "gtk-test", "repository": "gtk-test", "path": ""}, -] +CORE_RELEASE_DATE = date.fromisoformat("2024-02-04") +GTK4_RELEASE_DATE = CORE_RELEASE_DATE -EXAMPLES = [ - {"repository": "gtk3-rs", "path": "examples"}, - {"repository": "gtk4-rs", "path": "examples"}, +REPOSITORIES = [ + { + "name": "gtk-rs-core", + "date": CORE_RELEASE_DATE, + }, + { + "name": "gtk4-rs", + "date": GTK4_RELEASE_DATE, + }, ] diff --git a/src/github.py b/src/github.py index 143555b..fc8b64a 100644 --- a/src/github.py +++ b/src/github.py @@ -1,11 +1,13 @@ from datetime import date + # pip3 install requests import requests + def compare_dates(gh_date, comp_date): if gh_date is 
None or len(gh_date) < 1: return False - gh_date = gh_date.split('T')[0].split('-') + gh_date = gh_date.split("T")[0].split("-") year = int(gh_date[0]) month = int(gh_date[1]) day = int(gh_date[2]) @@ -14,23 +16,23 @@ def compare_dates(gh_date, comp_date): def get_page_number(url): - parts = url.split('?')[-1].split('&') + parts = url.split("?")[-1].split("&") for part in parts: - if part.startswith('page='): + if part.startswith("page="): try: - return int(part.split('=')[-1]) + return int(part.split("=")[-1]) except Exception: break return 1 def get_next_pages_url(link): - parts = link.split(',') + parts = link.split(",") subs = [] for part in parts: - subs.append(part.split(';')) - next_page_url = '' - last_page_url = '' + subs.append(part.split(";")) + next_page_url = "" + last_page_url = "" for sub in subs: if len(sub) != 2: continue @@ -43,15 +45,15 @@ def get_next_pages_url(link): def filter_data(content, to_return, max_date): total = 0 - if content.__class__.__name__ == 'dict': + if content.__class__.__name__ == "dict": return 0 for pull_request in content: - if 'closed_at' in pull_request and pull_request['closed_at'] is not None: - if compare_dates(pull_request['closed_at'], max_date): + if "closed_at" in pull_request and pull_request["closed_at"] is not None: + if compare_dates(pull_request["closed_at"], max_date): to_return.append(pull_request) total += 1 - elif 'updated_at' in pull_request: - if compare_dates(pull_request['updated_at'], max_date): + elif "updated_at" in pull_request: + if compare_dates(pull_request["updated_at"], max_date): to_return.append(pull_request) total += 1 return total @@ -62,39 +64,45 @@ def get_url_data(url, headers, params): if res.status_code != 200: if res.status_code == 403: # We reached the rate limit. 
- if ('X-RateLimit-Limit' in res.headers and - 'X-RateLimit-Remaining' in res.headers and - 'X-RateLimit-Reset' in res.headers): - limit = res.headers['X-RateLimit-Limit'] - remaining = res.headers['X-RateLimit-Remaining'] - reset = res.headers['X-RateLimit-Reset'] - raise Exception("Github rate limit exceeded...\n" - f"X-RateLimit-Limit: {limit}\n" - f"X-RateLimit-Remaining: {remaining}\n" - f"X-RateLimit-Reset: {reset}") + if ( + "X-RateLimit-Limit" in res.headers + and "X-RateLimit-Remaining" in res.headers + and "X-RateLimit-Reset" in res.headers + ): + limit = res.headers["X-RateLimit-Limit"] + remaining = res.headers["X-RateLimit-Remaining"] + reset = res.headers["X-RateLimit-Reset"] + raise Exception( + "Github rate limit exceeded...\n" + f"X-RateLimit-Limit: {limit}\n" + f"X-RateLimit-Remaining: {remaining}\n" + f"X-RateLimit-Reset: {reset}" + ) raise Exception( - f"Get request failed: '{url}', got: [{res.status_code}]: {str(res.content)}") + f"Get request failed: '{url}', got: [{res.status_code}]: {str(res.content)}" + ) return res # This function tries to get as much github data as possible by running # "parallel" requests. -def get_all_contents(url, state=None, max_date=None, token=None, recursive=True, params=None): - # pylint: disable=too-many-branches,too-many-locals +def get_all_contents( + url, state=None, max_date=None, token=None, recursive=True, params=None +): if params is None: params = {} headers = { - 'User-Agent': 'GuillaumeGomez', - 'Accept': 'application/vnd.github.v3+json', + "User-Agent": "GuillaumeGomez", + "Accept": "application/vnd.github.v3+json", } - params['per_page'] = 100 + params["per_page"] = 100 if state is not None: - params['sort'] = 'updated' - params['state'] = state - params['direction'] = 'desc' + params["sort"] = "updated" + params["state"] = state + params["direction"] = "desc" if token is not None: # Authentication to github. 
- headers['Authorization'] = f'token {token}' + headers["Authorization"] = f"token {token}" res = get_url_data(url, headers, params) content = res.json() to_return = [] @@ -104,11 +112,11 @@ def get_all_contents(url, state=None, max_date=None, token=None, recursive=True, else: for line in content: to_return.append(line) - if 'Link' not in res.headers or not recursive: + if "Link" not in res.headers or not recursive: # If there are no other pages, we can return the current content. return to_return - header_link = res.headers.get('Link') + header_link = res.headers.get("Link") if header_link is None or len(header_link) < 1: return content @@ -120,9 +128,9 @@ def get_all_contents(url, state=None, max_date=None, token=None, recursive=True, to_replace = f"page={next_page}" while next_page <= last_page: - res = get_url_data(next_page_url.replace(f"&{to_replace}", f"&page={next_page}"), - headers, - None) + res = get_url_data( + next_page_url.replace(f"&{to_replace}", f"&page={next_page}"), headers, None + ) if res.status_code != 200: break content = res.json() @@ -144,11 +152,9 @@ def get_pull(self, repo_name, repo_owner, pull_number): return Repository(self, repo_name, repo_owner).get_pull(pull_number) def get_pulls(self, repo_name, repo_owner, state, max_date, only_merged=False): - return Repository(self, - repo_name, - repo_owner).get_pulls(state, - max_date, - only_merged=only_merged) + return Repository(self, repo_name, repo_owner).get_pulls( + state, max_date, only_merged=only_merged + ) def get_organization(self, organization_name): return Organization(self, organization_name) @@ -160,12 +166,15 @@ def __init__(self, gh_obj, name): self.name = name def get_repositories(self): - repos = get_all_contents(f'https://api.github.com/orgs/{self.name}/repos', - token=self.gh_obj.token) + repos = get_all_contents( + f"https://api.github.com/orgs/{self.name}/repos", token=self.gh_obj.token + ) if repos is None: return [] - return [Repository(self.gh_obj, repo['name'], 
repo['owner']['login']) - for repo in repos] + return [ + Repository(self.gh_obj, repo["name"], repo["owner"]["login"]) + for repo in repos + ] class Repository: @@ -175,54 +184,79 @@ def __init__(self, gh_obj, name, owner): self.owner = owner def get_pulls(self, state, max_date, only_merged=False): - prs = get_all_contents(f'https://api.github.com/repos/{self.owner}/{self.name}/pulls', - state, max_date, - token=self.gh_obj.token) + prs = get_all_contents( + f"https://api.github.com/repos/{self.owner}/{self.name}/pulls", + state, + max_date, + token=self.gh_obj.token, + params={ + "per_page": 100, + }, + ) if prs is None: return [] - return [self.create_pr_obj(pull_request) for pull_request in prs - if (only_merged is False or (pull_request['merged_at'] is not None and - len(pull_request['merged_at']) > 0))] + return [ + self.create_pr_obj(pull_request) + for pull_request in prs + if ( + only_merged is False + or ( + pull_request["merged_at"] is not None + and len(pull_request["merged_at"]) > 0 + ) + ) + ] def get_pull(self, pull_number): pull_request = get_all_contents( - f'https://api.github.com/repos/{self.owner}/{self.name}/pulls/{pull_number}', - 'all', None, + f"https://api.github.com/repos/{self.owner}/{self.name}/pulls/{pull_number}", + "all", + None, token=self.gh_obj.token, ) return self.create_pr_obj(pull_request, pull_number) def get_commits(self, branch, since, until): commits = get_all_contents( - f'https://api.github.com/repos/{self.owner}/{self.name}/commits', + f"https://api.github.com/repos/{self.owner}/{self.name}/commits", token=self.gh_obj.token, - params={'sha': branch, - 'since': f'{since.year}-{since.month:02d}-{since.day:02d}T00:00:00Z', - 'until': f'{until.year}-{until.month:02d}-{until.day:02d}T00:00:00Z'}) + params={ + "sha": branch, + "since": f"{since.year}-{since.month:02d}-{since.day:02d}T00:00:00Z", + "until": f"{until.year}-{until.month:02d}-{until.day:02d}T00:00:00Z", + }, + ) if commits is None: return [] - return 
[Commit(x['commit']['author']['name'], x['commit']['committer']['name'], - x['sha'], x['commit']['message']) - for x in commits] + return [ + Commit( + x["commit"]["author"]["name"], + x["commit"]["committer"]["name"], + x["sha"], + x["commit"]["message"], + ) + for x in commits + ] def create_pr_obj(self, pull_request, pull_number=None): if pull_request is None: return None if pull_number is None: - pull_number = pull_request['number'] + pull_number = pull_request["number"] return PullRequest( self.gh_obj, self.name, self.owner, pull_number, - pull_request['base']['ref'], - pull_request['head']['ref'], - pull_request['head']['sha'], - pull_request['title'], - pull_request['user']['login'], - pull_request['state'], - pull_request['merged_at'], - pull_request['closed_at']) + pull_request["base"]["ref"], + pull_request["head"]["ref"], + pull_request["head"]["sha"], + pull_request["title"], + pull_request["user"]["login"], + pull_request["state"], + pull_request["merged_at"], + pull_request["closed_at"], + ) class Commit: @@ -235,9 +269,21 @@ def __init__(self, author, committer, sha, message): # Represent a Github Pull Request. 
class PullRequest: - def __init__(self, gh_obj, repo_name, repo_owner, - pull_number, target_branch, from_branch, head_commit, - title, author, open_state, merged_at, closed_at): + def __init__( + self, + gh_obj, + repo_name, + repo_owner, + pull_number, + target_branch, + from_branch, + head_commit, + title, + author, + open_state, + merged_at, + closed_at, + ): self.repo_name = repo_name self.gh_obj = gh_obj self.repo_owner = repo_owner @@ -250,10 +296,12 @@ def __init__(self, gh_obj, repo_name, repo_owner, self.open_state = open_state self.merged_at = merged_at if self.merged_at is None: - self.merged_at = '' + self.merged_at = "" self.closed_at = closed_at if self.closed_at is None: - self.closed_at = '' + self.closed_at = "" def get_url(self): - return f"https://github.com/{self.repo_owner}/{self.repo_name}/pull/{self.number}" + return ( + f"https://github.com/{self.repo_owner}/{self.repo_name}/pull/{self.number}" + ) diff --git a/src/globals.py b/src/globals.py deleted file mode 100644 index 7fa2143..0000000 --- a/src/globals.py +++ /dev/null @@ -1,3 +0,0 @@ -# globals used around, will be removed once we switch to a class for handling most of the stuff -CRATES_VERSION = {} -PULL_REQUESTS = [] diff --git a/src/my_toml.py b/src/my_toml.py deleted file mode 100644 index 2b7df37..0000000 --- a/src/my_toml.py +++ /dev/null @@ -1,77 +0,0 @@ -# Very simple Toml parser. 
- -def create_section(content_line): - if content_line.endswith(']'): - return Section(content_line[1:-1]) - return Section(content_line[1:]) - - -class Section: - def __init__(self, name): - self.name = name - self.entries = [] - - def add_entry(self, entry): - if len(entry) > 0: - elems = entry.split('=') - key = elems[0].strip() - elems = '='.join(elems[1:]).strip() - self.set(key, elems) - - def set(self, key, value): - for entry in self.entries: - if entry['key'] == key: - entry['value'] = value - return - self.entries.append({'key': key, 'value': value}) - - def remove(self, key): - for (pos, entry) in enumerate(self.entries): - if entry['key'] == key: - self.entries.pop(pos) - return - - def get(self, key, default_value): - for entry in self.entries: - if entry['key'] == key: - return entry['value'] - return default_value - - def __str__(self): - content = '\n'.join([f'{x["key"]} = {x["value"]}' for x in self.entries]) - return f'[{self.name}]\n{content}' - - -class TomlHandler: - def __init__(self, content): - self.sections = [] - filler = [] - multilines = { - '[': ']', - '"""': '"""', - '{': '}', - } - stop_str = None - for line in content.split('\n'): - if len(filler) > 0: - filler.append(line) - if line.endswith(stop_str): - self.sections[-1].add_entry('\n'.join(filler)) - filler = [] - elif line.startswith('['): - self.sections.append(create_section(line)) - elif len(self.sections) > 0: - add_entry = True - for key, end_str in multilines.items(): - if line.endswith(key): - stop_str = end_str - filler.append(line) - add_entry = False - break - if add_entry is True: - self.sections[-1].add_entry(line) - continue - - - def __str__(self): - return '\n\n'.join([str(x) for x in self.sections]) + '\n' diff --git a/src/release.py b/src/release.py index bc5684f..eb29110 100644 --- a/src/release.py +++ b/src/release.py @@ -1,28 +1,21 @@ #!/bin/python3 from contextlib import contextmanager -# pip3 install datetime -import datetime import errno import time import 
shutil import sys import tempfile -from os import sep as os_sep -from os import listdir -from os.path import isdir, join +from os.path import join # local imports import consts -from args import Arguments, UpdateType +from args import Arguments from github import Github -from globals import CRATES_VERSION, PULL_REQUESTS -from my_toml import TomlHandler from utils import add_to_commit, clone_repo -from utils import checkout_target_branch, get_file_content, write_error, write_into_file -from utils import commit, commit_and_push, create_pull_request, push, write_msg -from utils import create_tag_and_push, publish_crate, get_last_commit_date -from utils import check_if_up_to_date, checkout_to_new_branch, revert_git_history +from utils import write_error +from utils import commit, write_msg +from utils import check_if_up_to_date @contextmanager @@ -39,242 +32,32 @@ def temporary_directory(): raise -# Doesn't handle version number containing something else than numbers and '.'! -def update_version(version, update_type, section_name, place_type="section"): - version_split = version.replace('"', '').split('.') - if len(version_split) != 3: - # houston, we've got a problem! 
- write_error(f'Invalid version in {place_type} "{section_name}": {version}') - return None - if update_type == UpdateType.MINOR: - version_split[update_type] = str(int(version_split[update_type]) + 1) - elif update_type == UpdateType.MEDIUM: - version_split[update_type] = str(int(version_split[update_type]) + 1) - version_split[UpdateType.MINOR] = '0' - else: - version_split[update_type] = str(int(version_split[update_type]) + 1) - version_split[UpdateType.MEDIUM] = '0' - version_split[UpdateType.MINOR] = '0' - new_version = '.'.join(version_split) - return f'"{new_version}"' - - -def update_crate_version(repo_name, crate_dir_path, temp_dir, update_type): - file_path = join(join(join(temp_dir, repo_name), crate_dir_path), "Cargo.toml") - output = file_path.replace(temp_dir, "") - if output.startswith('/'): - output = output[1:] - write_msg(f'=> Updating crate versions for {file_path}') - content = get_file_content(file_path) - if content is None: - return False - toml = TomlHandler(content) - for section in toml.sections: - if section.name == 'package': - new_version = update_version( - section.get('version', '0.0.0'), - update_type, - 'version', - place_type="package") - if new_version is None: - return False - section.set('version', new_version) - break - result = write_into_file(file_path, str(toml)) - res = output.split(os_sep)[-2] - status = 'Failure' if result is False else 'Success' - write_msg(f'=> {res}: {status}') - return result - - -def get_all_versions(args, temp_dir): - write_msg('=> Getting crates version...') - for crate in args.crates: - crate = crate['crate'] - if args.specified_crate is not None and crate['crate'] != args.specified_crate: - continue - if not get_crate_version(crate["repository"], crate["crate"], crate["path"], temp_dir): - folder = join(temp_dir, crate['path']) - input(f"Couldn't find version for in `{folder}`...") - write_msg('Done!') - - -def get_crate_version(repo_name, crate_name, crate_dir_path, temp_dir): - file_path = 
join(join(join(temp_dir, repo_name), crate_dir_path), "Cargo.toml") - output = file_path.replace(temp_dir, "") - if output.startswith('/'): - output = output[1:] - write_msg(f'=> Updating versions for {file_path}') - content = get_file_content(file_path) - if content is None: - return False - toml = TomlHandler(content) - for section in toml.sections: - if (section.name == 'package' or - (section.name.startswith('dependencies.') and find_crate(section.name[13:]))): - version = section.get('version', None) - if version is None: - continue - CRATES_VERSION[crate_name] = version - return True - return False - - -def update_examples(path, temp_dir): - for entry in listdir(path): - if entry == "Cargo.toml": - update_crate_cargo_file(path, temp_dir) - continue - full_path = join(path, entry) - if isdir(full_path): - update_examples(full_path, temp_dir) - - -def update_crates_cargo_file(args, temp_dir): - write_msg('==> Updating versions in crates...') - for crate in args.crates: - crate = crate['crate'] - update_crate_cargo_file(join(join(temp_dir, crate['repository']), crate['path']), temp_dir) - write_msg('Done!') - write_msg('==> Now updating versions in examples...') - for example in consts.EXAMPLES: - update_examples(join(join(temp_dir, example['repository']), example['path']), temp_dir) - write_msg('Done!') - - -def get_crate(crate_name): - for entry in consts.CRATE_LIST: - if entry['crate'] == crate_name: - return crate_name - return None - - -def find_crate(crate_name): - return get_crate(crate_name) is not None - - -def get_crate_in_package(value): - if not value.strip().startswith('{'): - return None - parts = [y.strip() for y in value[1:-1].split('",')] - for part in parts: - if part.split('=')[0].strip() == 'package': - return get_crate(part.split('=')[1].replace('"', '').strip()) - return None - - -def update_crate_cargo_file(path, temp_dir): - # pylint: disable=too-many-branches,too-many-locals,too-many-nested-blocks - file_path = join(path, "Cargo.toml") - 
output = file_path.replace(temp_dir, "") - if output.startswith('/'): - output = output[1:] - write_msg(f'=> Updating versions for {file_path}') - content = get_file_content(file_path) - if content is None: - return False - toml = TomlHandler(content) - for section in toml.sections: - if section.name.startswith('dependencies.'): - real = section.get('package', None) - if real is None: - real = section.name[13:] - real = real.replace('"', '') - if find_crate(real): - section.remove("git") - section.set('version', CRATES_VERSION[real]) - elif section.name == 'dependencies': - for entry in section.entries: - info = entry['value'].strip() - crate_name = get_crate_in_package(info) - if crate_name is None: - crate_name = get_crate(entry['key']) - if crate_name is not None: - if info.strip().startswith('{'): - parts = [y.strip() for y in info[1:-1].split(',')] - parts = [y for y in parts - if not y.startswith("git ") and not y.startswith("git=")] - version = CRATES_VERSION[crate_name] - parts.append(f'version = {version}') - if len(parts) > 1: - joined = ', '.join(parts) - entry['value'] = f'{{{joined}}}' - else: - entry['value'] = CRATES_VERSION[entry['key']] - else: - entry['value'] = CRATES_VERSION[entry] - out = str(toml) - if not out.endswith("\n"): - out += '\n' - result = True - result = write_into_file(file_path, out) - res = output.split(os_sep)[-2] - status = 'Failure' if result is False else 'Success' - write_msg(f'=> {res}: {status}') - return result - - def write_merged_prs(merged_prs, contributors, repo_url): - content = '' + content = "" for merged_pr in reversed(merged_prs): - if merged_pr.title.startswith('[release] '): + if merged_pr.title.startswith("[release] "): continue if merged_pr.author not in contributors: contributors.append(merged_pr.author) - md_content = (merged_pr.title.replace('<', '<') - .replace('>', '>') - .replace('[', '\\[') - .replace(']', '\\]') - .replace('*', '\\*') - .replace('_', '\\_')) - content += f' * 
[{md_content}]({repo_url}/pull/{merged_pr.number})\n' - return content + '\n' - - -def downgrade_version(version): - # We need to remove the '"' from the version number. - parts = version.replace('"', '').split(".") - while len(parts) < 3: - parts.append('0') - for pos, part in enumerate(parts): - tmp = int(part) - if tmp > 0: - tmp -= 1 - parts[pos] = str(tmp) - pos += 1 - while pos < len(parts): - parts[pos] = '0' - pos += 1 - break - return '.'.join(parts) - - -def checkout_to_previous_release_branch(repo_name, temp_dir): - for crate in consts.CRATE_LIST: - if not crate['crate'].endswith('-sys') and crate['repository'] == repo_name: - original_version = CRATES_VERSION[crate['crate']] - # In this case, we keep all three version digits because we want the previous major - # tag. - version = downgrade_version(original_version) - write_msg( - f'For repository `{repo_name}`, the previous major release tag was guessed as ' - f'`{version}`, (from `{original_version}`) let\'s try to checkout to it...') - if not checkout_target_branch(repo_name, temp_dir, version, ask_input=False): - input("Failed to checkout to this branch... 
Press ENTER to continue")
-                return False
-            return True
-    write_error(f'No crate matches the repository `{repo_name}` apparently...')
-    return False
-
-
-def build_blog_post(repositories, temp_dir, token, args):
-    # pylint: disable=too-many-locals,too-many-statements
-    write_msg('=> Building blog post...')
-
-    author = input('Enter author name: ')
-    title = input('Enter title: ')
+        md_content = (
+            merged_pr.title.replace("<", "&lt;")
+            .replace(">", "&gt;")
+            .replace("[", "\\[")
+            .replace("]", "\\]")
+            .replace("*", "\\*")
+            .replace("_", "\\_")
+        )
+        content += f" * [{md_content}]({repo_url}/pull/{merged_pr.number})\n"
+    return content + "\n"
+
+
+def build_blog_post(temp_dir, token):
+    write_msg("=> Building blog post...")
+
+    author = input("Enter author name: ")
+    title = input("Enter title: ")
     blog_post_date = time.strftime("%Y-%m-%d %H:00:00 +0000")
-    content = f'''---
+    content = f"""---
 layout: post
 author: {author}
 title: {title}
@@ -288,219 +71,79 @@ def build_blog_post(repositories, temp_dir, token, args):
 For the interested ones, here is the list of the merged pull requests:
-'''
+"""
     contributors = []
     git = Github(token)
     oldest_date = None
-    for repo in repositories:
-        need_revert = checkout_to_previous_release_branch(repo, temp_dir)
-        success, out, err = get_last_commit_date(repo, temp_dir)
-        if not success:
-            write_msg(f"Couldn't get PRs for '{repo}': {err}")
-        else:
-            max_date = datetime.date.fromtimestamp(int(out))
-            if oldest_date is None or max_date < oldest_date:
-                oldest_date = max_date
-            write_msg(f"Gettings merged PRs from {repo}...")
-            merged_prs = git.get_pulls(
-                repo, consts.ORGANIZATION, 'closed', max_date, only_merged=True)
-            write_msg(f"=> Got {len(merged_prs)} merged PRs")
-            if len(merged_prs) > 0:
-                repo_url = f'{consts.GITHUB_URL}/{consts.ORGANIZATION}/{repo}'
-                content += f'[{repo}]({repo_url}):\n\n'
-                content += write_merged_prs(merged_prs, contributors, repo_url)
-        if need_revert:
-            # If we switched back to the tag, we need to cancel 
this to come back to the previous
-            # repository state.
-            revert_git_history(repo, temp_dir, 1)
+    for repo in consts.REPOSITORIES:
+        release_date = repo["date"]
+        repo_name = repo["name"]
+        if oldest_date is None or release_date < oldest_date:
+            oldest_date = release_date
+        write_msg(f"Getting merged PRs from {repo_name}...")
+        merged_prs = git.get_pulls(
+            repo_name, consts.ORGANIZATION, "closed", release_date, only_merged=True
+        )
+        write_msg(f"=> Got {len(merged_prs)} merged PRs")
+        if len(merged_prs) > 0:
+            repo_url = f"{consts.GITHUB_URL}/{consts.ORGANIZATION}/{repo_name}"
+            content += f"[{repo_name}]({repo_url}):\n\n"
+            content += write_merged_prs(merged_prs, contributors, repo_url)
     write_msg("Gettings merged PRs from gir...")
-    merged_prs = git.get_pulls('gir', consts.ORGANIZATION, 'closed', oldest_date, only_merged=True)
+    merged_prs = git.get_pulls(
+        "gir", consts.ORGANIZATION, "closed", oldest_date, only_merged=True
+    )
     write_msg(f"=> Got {len(merged_prs)} merged PRs")
     if len(merged_prs) > 0:
-        repo_url = f'{consts.GITHUB_URL}/{consts.ORGANIZATION}/gir'
-        content += f'All this was possible thanks to the [gtk-rs/gir]({repo_url}) project as well:'
-        content += '\n\n'
+        repo_url = f"{consts.GITHUB_URL}/{consts.ORGANIZATION}/gir"
+        content += f"All this was possible thanks to the [gtk-rs/gir]({repo_url}) project as well:"
+        content += "\n\n"
         content += write_merged_prs(merged_prs, contributors, repo_url)
-    content += 'Thanks to all of our contributors for their (awesome!) work on this release:\n\n'
+    content += "Thanks to all of our contributors for their (awesome!) work on this release:\n\n"
     # Sort contributors list alphabetically with case insensitive. 
contributors = sorted(contributors, key=lambda s: s.casefold()) - content += '\n'.join([f' * [@{contributor}]({consts.GITHUB_URL}/{contributor})' - for contributor in contributors]) - content += '\n' + content += "\n".join( + [ + f" * [@{contributor}]({consts.GITHUB_URL}/{contributor})" + for contributor in contributors + ] + ) + content += "\n" current_date = time.strftime("%Y-%m-%d") - file_name = join(join(temp_dir, consts.BLOG_REPO), f'_posts/{current_date}-new-release.md') + file_name = join( + join(temp_dir, consts.BLOG_REPO), f"_posts/{current_date}-new-release.md" + ) try: - with open(file_name, 'w', encoding='utf-8') as outfile: + with open(file_name, "w", encoding="utf-8") as outfile: outfile.write(content) write_msg(f'New blog post written into "{file_name}".') add_to_commit(consts.BLOG_REPO, temp_dir, [file_name]) commit(consts.BLOG_REPO, temp_dir, "Add new blog post") - if not args.no_push: - branch_name = f"release-{current_date}" - push(consts.BLOG_REPO, temp_dir, branch_name) - create_pull_request(consts.BLOG_REPO, branch_name, "master", token) except Exception as err: - write_error(f'build_blog_post failed: {err}') - write_msg(f'\n=> Here is the blog post content:\n{content}\n<=') - write_msg('Done!') - - -def shorter_version(version): - return '.'.join(version.split('.')[:2]).replace('"', '') + write_error(f"build_blog_post failed: {err}") + write_msg(f"\n=> Here is the blog post content:\n{content}\n<=") + write_msg("Done!") -def generate_new_branches(repository, temp_dir, specified_crate, args): - # We make a new branch for every crate based on the current "crate" branch: - # - # * If it is a "sys" crate or a "macro" crate, then we ignore it. 
- # * If not, then we create a new branch - for crate in args.crates: - crate = crate['crate'] - if crate['repository'] == repository: - if specified_crate is not None and crate['crate'] != specified_crate: - continue - if (crate['crate'].endswith('-sys') or - crate['crate'].endswith('-sys-rs') or - "-macro" in crate['crate']): - continue - # We only keep major and medium version numbers, so "0.9.0" becomes "0.9". - branch_name = shorter_version(CRATES_VERSION[crate['crate']]) - write_msg(f'==> Creating new branch "{branch_name}" for repository "{repository}"...') - checkout_to_new_branch(repository, temp_dir, branch_name) - return - - -def clone_repositories(args, temp_dir): - write_msg('=> Cloning the repositories...') - repositories = [] - for crate in args.crates: - crate = crate['crate'] - if args.specified_crate is not None and crate['crate'] != args.specified_crate: - continue - if crate["repository"] not in repositories: - repositories.append(crate["repository"]) - if clone_repo(crate["repository"], temp_dir) is False: - write_error(f'Cannot clone the "{crate["repository"]}" repository...') - return [] - if len(repositories) < 1: - write_msg(f'No crate "{args.specified_crate}" found. 
Aborting...') - return [] +def clone_website_repo(temp_dir): + write_msg("=> Cloning the repositories...") if clone_repo(consts.BLOG_REPO, temp_dir, depth=1) is False: write_error(f'Cannot clone the "{consts.BLOG_REPO}" repository...') return [] - write_msg('Done!') - return repositories - - -def update_crates_versions(args, temp_dir, repositories): - if args.tags_only: - return - write_msg('=> Updating [master] crates version...') - for repository in repositories: - checkout_target_branch(repository, temp_dir, 'master') - for crate in args.crates: - update_type = crate['up-type'] - crate = crate['crate'] - if args.specified_crate is not None and crate['crate'] != args.specified_crate: - continue - if update_crate_version(crate["repository"], crate["path"], temp_dir, update_type) is False: - write_error(f'The update for the "{crate["crate"]}" crate failed...') - input('Press ENTER to continue...') - write_msg('Done!') - extra = " and pushing" if args.no_push is False else "" - write_msg(f'=> Committing{extra} to the "{consts.MASTER_TMP_BRANCH}" branch...') - for repo in repositories: - commit(repo, temp_dir, "Update versions for next release [ci skip]") - if args.no_push is False: - push(repo, temp_dir, consts.MASTER_TMP_BRANCH) - write_msg('Done!') - - if args.no_push is False: - write_msg('=> Creating PRs on master branch...') - for repo in repositories: - create_pull_request(repo, consts.MASTER_TMP_BRANCH, "master", args.token) - write_msg('Done!') - - -def publish_crates(args, temp_dir): - write_msg('+++++++++++++++') - write_msg('++ IMPORTANT ++') - write_msg('+++++++++++++++') - write_msg('Almost everything has been done.') - input('Check the generated branches then press ENTER to continue...') - write_msg('=> Publishing crates...') - for crate in args.crates: - crate = crate['crate'] - if args.specified_crate is not None and crate['crate'] != args.specified_crate: - continue - if not crate.get('ignore', False): - publish_crate(crate['repository'], 
crate['path'], temp_dir, crate['crate']) - write_msg('Done!') - - -def generate_version_branches(args, temp_dir, repositories): - write_msg("=> Generating branches...") - for repo in repositories: - generate_new_branches(repo, temp_dir, args.specified_crate, args) - write_msg('Done!') - - -def push_new_version_branches_and_tags(args, temp_dir, repositories): - for repository in repositories: - for crate in args.crates: - crate = crate['crate'] - if (crate['repository'] != repository or - crate['crate'].endswith('-sys') or - crate['crate'].endswith('-sys-rs')): - continue - if args.tags_only is False: - commit_and_push( - repository, - temp_dir, - 'Update Cargo.toml format for release', - shorter_version(CRATES_VERSION[crate['crate']])) - create_tag_and_push( - CRATES_VERSION[crate['crate']], - repository, - temp_dir) - break + write_msg("Done!") def start(args, temp_dir): - repositories = clone_repositories(args, temp_dir) - if len(repositories) < 1: - return - get_all_versions(args, temp_dir) - if args.blog_only is False: - generate_version_branches(args, temp_dir, repositories) - if args.blog_only is False: - update_crates_cargo_file(args, temp_dir) - if args.no_push is False and args.blog_only is False: - push_new_version_branches_and_tags(args, temp_dir, repositories) - - if args.tags_only is False: - build_blog_post(repositories, temp_dir, args.token, args) - if args.blog_only: - input("Blog post generated, press ENTER to quit (it'll remove the tmp folder and " - "its content!)") - return - - if args.tags_only is False and args.no_push is False: - publish_crates(args, temp_dir) - - update_crates_versions(args, temp_dir, repositories) - - write_msg("Everything is almost done now. Just need to merge the remaining pull requests...") - pr_list = '\n'.join(PULL_REQUESTS) - write_msg(f"\n{pr_list}\n") - - write_msg('Seems like most things are done! 
Now remains:') + clone_website_repo(temp_dir) + build_blog_post(temp_dir, args.token) input( - f'Press ENTER to leave (once done, the temporary directory "{temp_dir}" will be destroyed)') + "Blog post generated, press ENTER to quit (it'll remove the tmp folder and " + "its content!)" + ) def main(argv): @@ -509,7 +152,7 @@ def main(argv): sys.exit(1) if check_if_up_to_date() is False: return - write_msg('=> Creating temporary directory...') + write_msg("=> Creating temporary directory...") with temporary_directory() as temp_dir: write_msg(f'Temporary directory created in "{temp_dir}"') start(args, temp_dir) diff --git a/src/utils.py b/src/utils.py index 4c81c3d..8f03b8d 100644 --- a/src/utils.py +++ b/src/utils.py @@ -1,61 +1,38 @@ from os.path import join -import json import subprocess import sys -import time -# pip3 install requests -import requests + # local import import consts -from globals import PULL_REQUESTS -from my_toml import TomlHandler def write_error(error_msg): - sys.stderr.write(f'{error_msg}\n') + sys.stderr.write(f"{error_msg}\n") def write_msg(msg): - sys.stdout.write(f'{msg}\n') + sys.stdout.write(f"{msg}\n") def convert_to_string(content): - if content.__class__.__name__ == 'bytes': - return content.decode('utf-8') + if content.__class__.__name__ == "bytes": + return content.decode("utf-8") return content -def get_file_content(file_path): - try: - with open(file_path, 'r', encoding='utf-8') as file: - return file.read() - except Exception as err: - write_error(f'get_file_content failed: "{file_path}": {err}') - return None - - -def write_into_file(file_path, content): - try: - with open(file_path, 'w', encoding='utf-8') as file: - file.write(content) - return True - except Exception as err: - write_error(f'write_into_file failed: "{file_path}": {err}') - return False - - def exec_command(command, timeout=None, show_output=False, cwd=None): if show_output: write_msg(f"Executing command {command} with cwd: {cwd}") - # pylint: 
disable=consider-using-with - child = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd) + child = subprocess.Popen( + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd + ) if timeout is not None: stdout, stderr = child.communicate(timeout=timeout) else: stdout, stderr = child.communicate() if show_output: - write_msg(f'== STDOUT == {stdout}') - write_msg(f'== STDERR == {stderr}') + write_msg(f"== STDOUT == {stdout}") + write_msg(f"== STDERR == {stderr}") stdout = convert_to_string(stdout) stderr = convert_to_string(stderr) return (child.returncode == 0, stdout, stderr) @@ -64,309 +41,74 @@ def exec_command(command, timeout=None, show_output=False, cwd=None): def exec_command_and_print_error(command, timeout=None, cwd=None): ret, stdout, stderr = exec_command(command, timeout=timeout, cwd=cwd) if not ret: - full_command = ' '.join(command) + full_command = " ".join(command) write_error(f'Command "{full_command}" failed:') if len(stdout) > 0: - write_error(f'=== STDOUT ===\n{stdout}\n') + write_error(f"=== STDOUT ===\n{stdout}\n") if len(stderr) > 0: - write_error(f'=== STDERR ===\n{stderr}\n') + write_error(f"=== STDERR ===\n{stderr}\n") return ret def clone_repo(repo_name, temp_dir, depth=None): - repo_url = f'{consts.GIT_URL}/{consts.ORGANIZATION}/{repo_name}.git' + repo_url = f"{consts.GIT_URL}/{consts.ORGANIZATION}/{repo_name}.git" target_dir = join(temp_dir, repo_name) try: write_msg(f'=> Cloning "{repo_name}" from "{repo_url}"') - command = ['git', 'clone', repo_url, target_dir] + command = ["git", "clone", repo_url, target_dir] if depth is not None: - command = ['git', 'clone', '--depth', str(depth), repo_url, target_dir] + command = ["git", "clone", "--depth", str(depth), repo_url, target_dir] ret, stdout, stderr = exec_command(command, timeout=300) if not ret: - full_command = ' '.join(command) + full_command = " ".join(command) write_error( - f'command "{full_command}" failed: 
===STDOUT===\n{stdout}\n===STDERR===\n{stderr}') + f'command "{full_command}" failed: ===STDOUT===\n{stdout}\n===STDERR===\n{stderr}' + ) return False - command = ['git', 'submodule', 'update', '--init'] + command = ["git", "submodule", "update", "--init"] if not exec_command_and_print_error(command, cwd=target_dir): - input('Failed to init submodule... Press ENTER to continue') + input("Failed to init submodule... Press ENTER to continue") return True except subprocess.TimeoutExpired: - full_command = ' '.join(command) - write_error(f'command timed out: {full_command}') + full_command = " ".join(command) + write_error(f"command timed out: {full_command}") except Exception as err: - full_command = ' '.join(command) + full_command = " ".join(command) write_error(f'command "{full_command}" got an exception: {err}') return False -def create_headers(token): - headers = { - 'User-Agent': 'gtk-rs', - 'Accept': 'application/vnd.github.v3+json', - } - if token is not None: - # Authentication to github. 
- headers['Authorization'] = f'token {token}' - return headers - - -def post_content(url, token, details, method='post', header_extras=None): - if header_extras is None: - header_extras = {} - headers = create_headers(token) - for extra in header_extras: - headers[extra] = header_extras[extra] - try: - req = None - if method == 'post': - req = requests.post(url, data=json.dumps(details), headers=headers, timeout=30) - else: - req = requests.put(url, data=json.dumps(details), headers=headers, timeout=30) - try: - req.raise_for_status() - except Exception: - write_msg(f'Sent by bithub api: {req.json()}') - req.raise_for_status() - return req.json() - except Exception as err: - write_error(f'post_content: An error occurred: {err}') - return None - - -def get_highest_feature_version(v1_feature, v2_feature): - t_v1 = v1_feature[1:].split('_') - t_v2 = v2_feature[1:].split('_') - i = 0 - while i < len(t_v1) and i < len(t_v2): - try: - x1_version = int(t_v1[i]) - x2_version = int(t_v2[i]) - if x1_version > x2_version: - return v1_feature - if x1_version < x2_version: - return v2_feature - i += 1 - except Exception: - write_error(f'get_highest_feature_version int conversion error: int("{t_v1[i]}") vs ' - f'int("{t_v2[i]}") from "{v1_feature}" and "{v2_feature}"') - break - return v1_feature - - -# This function does two things: -# -# 1. Check if dox feature is present or try getting the highest version feature -# 2. 
Getting all the other features (for cairo it's very important) -def get_features(path): - # pylint: disable=too-many-branches - features = [] - highest_version = None - content = get_file_content(path) - if content is None: - return '' - toml = TomlHandler(content) - dox_present = False - for section in toml.sections: - if section.name == 'features': - for entry in section.entries: - if entry['key'] in ['purge-lgpl-docs', 'default']: - continue - if entry['key'] == 'dox': - dox_present = True - if entry['key'].startswith('v'): - if highest_version is None: - highest_version = entry['key'] - else: - highest_version = get_highest_feature_version(highest_version, entry['key']) - else: - features.append(entry['key']) - if dox_present is True: - if 'dox' not in features: - features.append('dox') - elif highest_version is not None: - write_msg("/!\\ Seems there is no dox feature so let's just use the highest version " - "instead...") - features.append(highest_version) - else: - write_msg("/!\\ That's weird: no dox or version feature. Is everything fine with this one?") - return ' '.join(features) - - -# def compare_versions(v1, v2): -# v1 = v1.split('.') -# v2 = v2.split('.') -# -# for x in range(0, min(len(v1), len(v2))): -# try: -# entry1 = int(v1) -# entry2 = int(v2) -# except Exception: -# # If it cannot be converted into a number, better just compare strings then. -# entry1 = v1 -# entry2 = v2 -# if entry1 > entry2: -# return 1 -# if entry1 < entry2: -# return -1 -# # In here, "3.2" is considered littler than "3.2.0". That's how life goes. 
-# return len(v1) - len(v2) - - -def commit_and_push(repo_name, temp_dir, commit_msg, target_branch): - commit(repo_name, temp_dir, commit_msg) - push(repo_name, temp_dir, target_branch) - - def commit(repo_name, temp_dir, commit_msg): repo_path = join(temp_dir, repo_name) - command = ['git', 'commit', '.', '-m', commit_msg] - if not exec_command_and_print_error(command, cwd=repo_path): - input("Fix the error and then press ENTER") - - -def push(repo_name, temp_dir, target_branch): - repo_path = join(temp_dir, repo_name) - command = ['git', 'push', 'origin', f'HEAD:{target_branch}'] + command = ["git", "commit", ".", "-m", commit_msg] if not exec_command_and_print_error(command, cwd=repo_path): input("Fix the error and then press ENTER") def add_to_commit(repo_name, temp_dir, files_to_add): repo_path = join(temp_dir, repo_name) - command = ['git', 'add'] + command = ["git", "add"] for file in files_to_add: command.append(file) if not exec_command_and_print_error(command, cwd=repo_path): input("Fix the error and then press ENTER") -def revert_changes(repo_name, temp_dir, files): - repo_path = join(temp_dir, repo_name) - files = ' '.join([f'"{f}"' for f in files]) - command = ['bash', '-c', f'cd {repo_path} && git rm -f {files} && git checkout -- {files}'] - if not exec_command_and_print_error(command): - input("Fix the error and then press ENTER") - - -def checkout_target_branch(repo_name, temp_dir, target_branch, ask_input=True): - repo_path = join(temp_dir, repo_name) - command = ['git', 'checkout', target_branch] - if not exec_command_and_print_error(command, cwd=repo_path): - if ask_input: - input("Fix the error and then press ENTER") - return False - return True - - -def checkout_to_new_branch(repo_name, temp_dir, target_branch): - repo_path = join(temp_dir, repo_name) - command = ['git', 'checkout', '-b', target_branch] - if not exec_command_and_print_error(command, cwd=repo_path): - input("Fix the error and then press ENTER") - - -def 
revert_git_history(repo_name, temp_dir, nb_actions_to_revert): - repo_path = join(temp_dir, repo_name) - command = ['git', 'reset', '--hard', f'HEAD@{{{nb_actions_to_revert}}}'] - if not exec_command_and_print_error(command, cwd=repo_path): - input("Fix the error and then press ENTER") - - -def get_last_commit_date(repo_name, temp_dir): - repo_path = join(temp_dir, repo_name) - success, out, err = exec_command( - ['git', 'log', '--format=%at', '--no-merges', '-n', '1'], - show_output=True, - cwd=repo_path) - return (success, out, err) - - def get_last_commit_hash(repo_path): - success, out, _ = exec_command(['git', 'rev-parse', 'HEAD'], cwd=repo_path) + success, out, _ = exec_command(["git", "rev-parse", "HEAD"], cwd=repo_path) if success is True: return out.strip() - return '' + return "" def get_repo_last_commit_hash(repo_url): success, out, _ = exec_command( - ['git', 'ls-remote', repo_url, 'HEAD'], show_output=True) + ["git", "ls-remote", repo_url, "HEAD"], show_output=True + ) if success is True: - out = out.split('\n', maxsplit=1)[0].strip() - return out.split('\t', maxsplit=1)[0].split(' ', maxsplit=1)[0] - return '' - - -def merging_branches(repo_name, temp_dir, merge_branch): - repo_path = join(temp_dir, repo_name) - command = ['git', 'merge', f'origin/{merge_branch}'] - if not exec_command_and_print_error(command, cwd=repo_path): - input("Fix the error and then press ENTER") - - -def publish_crate(repository, crate_dir_path, temp_dir, crate_name): - # pylint: disable=too-many-locals - write_msg(f'=> publishing crate {crate_name}') - path = join(join(temp_dir, repository), crate_dir_path) - command = ['cargo', 'publish'] - retry = 3 - error_messages = [] - final_success = False - wait_time = 30 - while retry > 0: - ret, stdout, stderr = exec_command(command, cwd=path) - if not ret: - full_command = ' '.join(command) - error_messages.append(f'Command "{full_command}" failed:') - if len(stdout) > 0: - error_messages[len(error_messages) - 1] += f'\n=== STDOUT 
===\n{stdout}\n' - if len(stderr) > 0: - error_messages[len(error_messages) - 1] += f'\n=== STDERR ===\n{stderr}\n' - retry -= 1 - if retry > 0: - extra = 'ies' if retry > 0 else 'y' - write_msg( - f"Let's sleep for {wait_time} seconds before retrying, {retry + 1} " - f"retr{extra} remaining...") - time.sleep(wait_time) - else: - final_success = True - break - if final_success is False: - errors = set(error_messages) - errors = '====\n'.join(errors) - write_msg(f'== ERRORS ==\n{errors}') - input("Something bad happened! Try to fix it and then press ENTER to continue...") - write_msg(f'> crate {crate_name} has been published') - - -def create_tag_and_push(tag_name, repository, temp_dir): - path = join(temp_dir, repository) - command = ['bash', '-c', f'cd {path} && git tag "{tag_name}" && git push origin "{tag_name}"'] - if not exec_command_and_print_error(command): - input("Something bad happened! Try to fix it and then press ENTER to continue...") - - -def create_pull_request(repo_name, from_branch, target_branch, token, add_to_list=True): - req = post_content(f'{consts.GH_API_URL}/repos/{consts.ORGANIZATION}/{repo_name}/pulls', - token, - {'title': f'[release] merging {from_branch} into {target_branch}', - 'body': 'cc @GuillaumeGomez @sdroege @bilelmoussaoui', - 'base': target_branch, - 'head': from_branch, - 'maintainer_can_modify': True}) - if req is None: - write_error(f"Pull request from {repo_name}/{from_branch} to {repo_name}/{target_branch} " - "couldn't be created. You need to do it yourself... 
(url provided at the end)") - input("Press ENTER once done to continue...") - PULL_REQUESTS.append( - f'|=> "{consts.GITHUB_URL}/{consts.ORGANIZATION}/{repo_name}' - f'/compare/{target_branch}...{from_branch}?expand=1"') - else: - write_msg(f"===> Pull request created: {req['html_url']}") - if add_to_list is True: - PULL_REQUESTS.append(f'> {req["html_url"]}') + out = out.split("\n", maxsplit=1)[0].strip() + return out.split("\t", maxsplit=1)[0].split(" ", maxsplit=1)[0] + return "" def check_if_up_to_date(): @@ -377,9 +119,10 @@ def check_if_up_to_date(): if last_commit != remote_last_commit: write_msg( f"Remote repository `{remote_repo}` has a different last commit than local: `" - f"{remote_last_commit}` != `{last_commit}`") + f"{remote_last_commit}` != `{last_commit}`" + ) text = input("Do you want to continue anyway? [y/N] ").strip().lower() - if len(text) == 0 or text != 'y': + if len(text) == 0 or text != "y": write_msg("Ok, leaving then. Don't forget to update!") return False return True