diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 0000000..0f8cbd1 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,93 @@ +name: Deploy Repository + +on: + # Runs on pushes targeting the default branch + push: + branches: [$default-branch] + + # Runs dayly at 00:00 UTC + schedule: + - cron: "0 0 * * *" + + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + outputs: + updated: ${{ steps.resolve-repository.outputs.updated }} + steps: + - name: Set up python + id: setup-python + uses: actions/setup-python@v4 + with: + python-version: 3.11 + + - name: Checkout repository + id: checkout-repository + uses: actions/checkout@v4 + + - name: Restore cache + id: restore-cache + uses: actions/cache/restore@v3 + with: + path: |- + cache/ + _site/libraries.json.sha512 + blacklist.json + key: ${{ runner.os }}-state + restore-keys: | + ${{ runner.os }}-state + + - name: Resolve repository + id: resolve-repository + env: + GH_USER: ${{secrets.GH_APP_ID}} + GH_PASS: ${{secrets.GH_APP_TOKEN}} + run: python3 tasks crawl + + - name: Save cache + id: save-cache + uses: actions/cache/save@v3 + if: always() + with: + path: |- + cache/ + _site/libraries.json.sha512 + blacklist.json + key: ${{ runner.os }}-state-${{ hashFiles('cache/*') }} + + - name: Setup pages + id: setup-pages + if: steps.resolve-repository.outputs.updated == true + uses: actions/configure-pages@v3 + + - name: Upload pages + id: upload-pages + if: steps.resolve-repository.outputs.updated == true + uses: actions/upload-pages-artifact@v2 + with: + path: _site/ + + # Deploy job + deploy: + # Add a dependency to the build job + needs: build + if: needs.build.outputs.updated == true + + # Grant GITHUB_TOKEN the permissions required to make a Pages deployment + permissions: + pages: write # to deploy to Pages + id-token: write # to verify the deployment originates from an appropriate source + + # Deploy to the github-pages environment + environment: + 
name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + + # Specify runner + deployment step + runs-on: ubuntu-latest + steps: + - name: Deploy pages + id: deployment + uses: actions/deploy-pages@v2 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a405cdd --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +# python cache +__pycache__/ +*.pyc + +.sublime/ diff --git a/_site/index.html b/_site/index.html new file mode 100644 index 0000000..914b18a --- /dev/null +++ b/_site/index.html @@ -0,0 +1,12 @@ + + +
+ + +# pre-release + [-_.]? + (?Palpha|a|beta|b|prerelease|preview|pre|c|rc) + [-_.]? + (?P [0-9]+)? + )? + (?P # post release + (?:-(?P [0-9]+)) + | + (?: + [-_.]? + (?P patch|post|rev|r) + [-_.]? + (?P [0-9]+)? + ) + )? + (?P # dev release + [-_.]? + (?P development|develop|devel|dev) + [-_.]? + (?P [0-9]+)? + )? + (?:\+(?P [a-z0-9]+(?:[-_.][a-z0-9]+)*))? # local version + \s*$ + """, + re.VERBOSE, + ) + + def __init__(self, string): + """ + Constructs a new ``PEP440Version`` instance. + + :param string: + An unicode string of the pep44Ăź version. + """ + match = self._regex.match(string.lower()) + if not match: + raise PEP440InvalidVersionError("'{}' is not a valid PEP440 version string".format(string)) + + ( + epoch, + release, + pre, + pre_l, + pre_n, + post, + post_n1, + _, + post_n2, + dev, + _, + dev_n, + local, + ) = match.groups() + + epoch = int(epoch or 0) + release = tuple(map(int, release.split("."))) + + prerelease = () + + if pre: + if pre_l == "a" or pre_l == "alpha": + pre_l = -3 + elif pre_l == "b" or pre_l == "beta": + pre_l = -2 + else: + pre_l = -1 + prerelease += ((pre_l, int(pre_n or 0)),) + + if post: + prerelease += ((1, int(post_n1 or post_n2 or 0)),) + + if dev: + prerelease += ((-4, int(dev_n or 0)),) + + while len(prerelease) < 3: + prerelease += ((0, 0),) + + tup = () + if local: + # Versions with a local segment need that segment parsed to implement + # the sorting rules in PEP440. 
+ # - Alpha numeric segments sort before numeric segments + # - Alpha numeric segments sort lexicographically + # - Numeric segments sort numerically + # - Shorter versions sort before longer versions when the prefixes + # match exactly + for seg in _local_version_separators.split(local): + try: + tup += ((int(seg), ""),) + except ValueError: + tup += ((-1, seg),) + + local = tup + + self._tup = (epoch, release, prerelease, local) + + def __repr__(self): + return "<{0.__class__.__name__}('{0!s}')>".format(self) + + def __str__(self): + return self.version_string() + + def __eq__(self, rhs): + a, b = _norm_tuples(self._tup, rhs._tup) + return a == b + + def __ne__(self, rhs): + a, b = _norm_tuples(self._tup, rhs._tup) + return a != b + + def __lt__(self, rhs): + a, b = _norm_tuples(self._tup, rhs._tup) + return a < b + + def __le__(self, rhs): + a, b = _norm_tuples(self._tup, rhs._tup) + return a <= b + + def __gt__(self, rhs): + a, b = _norm_tuples(self._tup, rhs._tup) + return a > b + + def __ge__(self, rhs): + a, b = _norm_tuples(self._tup, rhs._tup) + return a >= b + + def __hash__(self): + return hash(self._tup) + + def version_info(self, verbose=False): + return _version_info(*self._tup, verbose=verbose) + + def version_string(self, verbose=False): + return _version_string(*self._tup, verbose=verbose) + + @property + def epoch(self): + return self._tup[0] + + @property + def release(self): + return self._tup[1] + + @property + def major(self): + try: + return self._tup[1][0] + except IndexError: + return 0 + + @property + def minor(self): + try: + return self._tup[1][1] + except IndexError: + return 0 + + @property + def micro(self): + try: + return self._tup[1][2] + except IndexError: + return 0 + + @property + def prerelease(self): + tup = () + pre = self._tup[2] + if pre and pre[0][0] != 0: + tag = ("dev", "a", "b", "rc", "", "post") + for t, n in pre: + if t != 0: + tup += (tag[t + 4], n) + + return tup + + @property + def local(self): + return 
".".join(str(n) if n > -1 else s for n, s in self._tup[3]) + + @property + def is_final(self): + """Version represents a final release.""" + return self._tup[2][0][0] == 0 + + @property + def is_dev(self): + """Version represents a pre release.""" + return any(t[0] == -4 for t in self._tup[2]) + + @property + def is_prerelease(self): + """Version represents a pre release.""" + return self._tup[2][0][0] < 0 + + @property + def is_postrelease(self): + """Version represents a post final release.""" + return self._tup[2][0][0] > 0 + + +class PEP440InvalidVersionSpecifierError(ValueError): + pass + + +class PEP440VersionSpecifier: + __slots__ = ["_operator", "_prefix", "_prereleases", "_tup"] + + _regex = re.compile( + r""" + ^\s* + (?: (?P ===|==|!=|~=|<=?|>=?) \s* )? # operator + v? + (?:(?P [0-9]+)!)? # epoch + (?P [0-9]+(?:\.[0-9]+)*) # release segment + (?: + \.(?P \*) # prefix-release + | + (?P # pre-release + [-_.]? + (?Palpha|a|beta|b|preview|pre|c|rc) + [-_.]? + (?P [0-9]+)? + )? + (?P # post release + (?:-(?P [0-9]+)) + | + (?: + [-_.]? + (?P post|rev|r) + [-_.]? + (?P [0-9]+)? + ) + )? + (?P # dev release + [-_.]? + (?P dev) + [-_.]? + (?P [0-9]+)? + )? + (?:\+(?P [a-z0-9]+(?:[-_.][a-z0-9]+)*))? # local version + ) + \s*$ + """, + re.VERBOSE, + ) + + _op_str = ("", "===", "==", "!=", "~=", "<", "<=", ">", ">=") + + OP_ITY = 1 + OP_EQ = 2 + OP_NE = 3 + OP_CPE = 4 + OP_LT = 5 + OP_LTE = 6 + OP_GT = 7 + OP_GTE = 8 + + def __init__(self, string, prereleases=True): + """ + Constructs a new ``PEP440VersionSpecifier`` instance. + + :param string: + An unicode string of the pep44Ăź version specifier. 
+ """ + match = self._regex.match(string.lower()) + if not match: + raise PEP440InvalidVersionSpecifierError( + "'{}' is not a valid PEP 440 version specifier string".format(string) + ) + + ( + op, + epoch, + release, + wildcard, + pre, + pre_l, + pre_n, + post, + post_n1, + _, + post_n2, + dev, + _, + dev_n, + local, + ) = match.groups() + + self._operator = self._op_str.index(op) if op else self.OP_EQ + self._prefix = bool(wildcard) + self._prereleases = prereleases + + epoch = int(epoch or 0) + release = tuple(map(int, release.split("."))) + + if self._prefix: + if self._operator not in (self.OP_EQ, self.OP_NE): + raise PEP440InvalidVersionSpecifierError( + "'{}' is not a valid PEP 440 version specifier string".format(string) + ) + + self._tup = (epoch, release) + return + + if self._operator == self.OP_CPE and len(release) < 2: + raise PEP440InvalidVersionSpecifierError( + "'{}' is not a valid PEP 440 version specifier string".format(string) + ) + + prerelease = () + + if pre: + if pre_l == "a" or pre_l == "alpha": + pre_l = -3 + elif pre_l == "b" or pre_l == "beta": + pre_l = -2 + else: + pre_l = -1 + prerelease += ((pre_l, int(pre_n or 0)),) + + if post: + prerelease += ((1, int(post_n1 or post_n2 or 0)),) + + if dev: + prerelease += ((-4, int(dev_n or 0)),) + + while len(prerelease) < 3: + prerelease += ((0, 0),) + + tup = () + if local: + if self._operator not in (self.OP_EQ, self.OP_NE, self.OP_ITY): + raise PEP440InvalidVersionSpecifierError( + "'{}' is not a valid PEP 440 version specifier string".format(string) + ) + + for seg in _local_version_separators.split(local): + try: + tup += ((int(seg), ""),) + except ValueError: + tup += ((-1, seg),) + local = tup + + self._tup = (epoch, release, prerelease, local) + + def __repr__(self): + return "<{0.__class__.__name__}('{0!s}')>".format(self) + + def __str__(self): + return self._op_str[self._operator] + self.version_string() + + def __contains__(self, version): + return self.contains(version) + + def 
__hash__(self): + return hash((self._operator, self._tup)) + + def contains(self, version): + """ + Ensures the version matches this specifier + + :param version: + A ``PEP440Version`` object to check. + + :return: + Returns ``True`` if ``version`` satisfies the ``specifier``. + """ + if not self._prereleases and version.is_prerelease: + return False + + if self._prefix: + # The specifier is a version prefix (aka. wildcard present). + # Trim and normalize version to ( epoch, ( major [, minor [, micro ] ] ) ), + # so it matches exactly the specifier's length. + + self_tup, ver_tup = _trim_tuples(self._tup, version._tup) + + if self._operator == self.OP_EQ: + return ver_tup == self._tup + + if self._operator == self.OP_NE: + return ver_tup != self._tup + + else: + if self._operator == self.OP_ITY: + return version.version_string(False) == self.version_string(False) + + self_tup, ver_tup = _norm_tuples(self._tup, version._tup) + + if self._operator == self.OP_CPE: + # Compatible releases have an equivalent combination of >= and ==. + # That is that ~=2.2 is equivalent to >=2.2,==2.*. + if ver_tup < self_tup: + return False + + # create prefix specifier with last digit removed. 
+ self_tup, ver_tup = _trim_tuples((self._tup[0], self._tup[1][:-1]), version._tup) + return ver_tup == self_tup + + if self._operator == self.OP_EQ: + return ver_tup == self_tup + + if self._operator == self.OP_NE: + return ver_tup != self_tup + + if self._operator == self.OP_GTE: + return ver_tup >= self_tup + + if self._operator == self.OP_GT: + # TODO: + # - parse local version and include into comparison result + # - drop only invalid local versions + return ver_tup[:2] > self_tup[:2] + + if self._operator == self.OP_LTE: + return ver_tup <= self_tup + + if self._operator == self.OP_LT: + # TODO: + # - parse local version and include into comparison result + # - drop only invalid local versions + return ver_tup[:2] < self_tup[:2] + + raise PEP440InvalidVersionSpecifierError( + "Invalid PEP 440 version specifier operator: {!r}".format(self._operator) + ) + + def filter(self, iterable): + return filter(self.contains, iterable) + + def version_string(self, verbose=False): + return _version_string(*self._tup, prefix=self._prefix, verbose=verbose) + + +def check_version(spec, version, include_prereleases=False): + """ + Check if version satisfies specifications + + :param spec: + The pep440 version specifier string. + + :param version: + The pep440 version string or ``PEP440Version`` ojbect to check. + + :param include_prereleases: + If ``True`` succeed also, if version is a pre-release. + If ``False`` (default) succeed only, if version is a final release. + + :returns: + Returns ``True`` if ``version`` satisfies the ``specifier``. 
+ """ + if isinstance(version, str): + version = PEP440Version(version) + return PEP440VersionSpecifier(spec, include_prereleases).contains(version) diff --git a/tasks/lib/package_control/providers/__init__.py b/tasks/lib/package_control/providers/__init__.py new file mode 100644 index 0000000..416ccd0 --- /dev/null +++ b/tasks/lib/package_control/providers/__init__.py @@ -0,0 +1,20 @@ +from .bitbucket_repository_provider import BitBucketRepositoryProvider +from .github_repository_provider import GitHubRepositoryProvider +from .github_user_provider import GitHubUserProvider +from .gitlab_repository_provider import GitLabRepositoryProvider +from .gitlab_user_provider import GitLabUserProvider +from .json_repository_provider import JsonRepositoryProvider + +from .channel_provider import ChannelProvider + + +REPOSITORY_PROVIDERS = [ + BitBucketRepositoryProvider, + GitHubRepositoryProvider, + GitHubUserProvider, + GitLabRepositoryProvider, + GitLabUserProvider, + JsonRepositoryProvider +] + +CHANNEL_PROVIDERS = [ChannelProvider] diff --git a/tasks/lib/package_control/providers/base_repository_provider.py b/tasks/lib/package_control/providers/base_repository_provider.py new file mode 100644 index 0000000..6517534 --- /dev/null +++ b/tasks/lib/package_control/providers/base_repository_provider.py @@ -0,0 +1,129 @@ +class BaseRepositoryProvider: + """ + Base repository downloader that fetches package info + + This base class acts as interface to ensure all providers expose the same + set of methods. All providers should therefore derive from this base class. + + The structure of the JSON a repository should contain is located in + example-packages.json. 
+ + :param repo_url: + The URL of the package repository + + :param settings: + A dict containing at least the following fields: + `cache_length`, + `debug`, + `timeout`, + `user_agent` + Optional fields: + `http_proxy`, + `https_proxy`, + `proxy_username`, + `proxy_password`, + `query_string_params` + """ + + __slots__ = [ + 'broken_libriaries' + 'broken_packages', + 'failed_sources', + 'libraries', + 'packages', + 'repo_url', + 'settings', + ] + + def __init__(self, repo_url, settings): + self.broken_libriaries = {} + self.broken_packages = {} + self.failed_sources = {} + self.libraries = None + self.packages = None + self.repo_url = repo_url + self.settings = settings + + @classmethod + def match_url(cls, repo_url): + """ + Indicates if this provider can handle the provided repo_url + """ + + return True + + def prefetch(self): + """ + Go out and perform HTTP operations, caching the result + """ + + [name for name, info in self.get_packages()] + + def fetch(self): + """ + Retrieves and loads the JSON for other methods to use + + :raises: + NotImplementedError: when called + """ + + raise NotImplementedError() + + def get_broken_libraries(self): + """ + List of library names for libraries that are missing information + + :return: + A generator of ("Library Name", Exception()) tuples + """ + + return self.broken_libriaries.items() + + def get_broken_packages(self): + """ + List of package names for packages that are missing information + + :return: + A generator of ("Package Name", Exception()) tuples + """ + + return self.broken_packages.items() + + def get_failed_sources(self): + """ + List of any URLs that could not be accessed while accessing this repository + + :return: + A generator of ("https://example.com", Exception()) tuples + """ + + return self.failed_sources.items() + + def get_libraries(self, invalid_sources=None): + """ + For API-compatibility with RepositoryProvider + """ + + return {}.items() + + def get_packages(self, invalid_sources=None): + """ 
+ For API-compatibility with RepositoryProvider + """ + + return {}.items() + + def get_sources(self): + """ + Return a list of current URLs that are directly referenced by the repo + + :return: + A list of URLs + """ + + return [self.repo_url] + + def get_renamed_packages(self): + """For API-compatibility with RepositoryProvider""" + + return {} diff --git a/tasks/lib/package_control/providers/bitbucket_repository_provider.py b/tasks/lib/package_control/providers/bitbucket_repository_provider.py new file mode 100644 index 0000000..be1e496 --- /dev/null +++ b/tasks/lib/package_control/providers/bitbucket_repository_provider.py @@ -0,0 +1,138 @@ +from ..clients.bitbucket_client import BitBucketClient +from ..clients.client_exception import ClientException +from ..downloaders.downloader_exception import DownloaderException +from .base_repository_provider import BaseRepositoryProvider +from .provider_exception import ( + GitProviderDownloadInfoException, + GitProviderRepoInfoException, + ProviderException, +) + + +class BitBucketRepositoryProvider(BaseRepositoryProvider): + """ + Allows using a public BitBucket repository as the source for a single package. + For legacy purposes, this can also be treated as the source for a Package + Control "repository". + + :param repo: + The public web URL to the BitBucket repository. Should be in the format + `https://bitbucket.org/user/package`. 
+ + :param settings: + A dict containing at least the following fields: + `cache_length`, + `debug`, + `timeout`, + `user_agent` + Optional fields: + `http_proxy`, + `https_proxy`, + `proxy_username`, + `proxy_password`, + `query_string_params`, + `http_basic_auth` + """ + + @classmethod + def match_url(cls, repo_url): + """ + Indicates if this provider can handle the provided repo_url + + :param repo_url: + The URL to the repository, in one of the forms: + https://bitbucket.org/{user}/{repo}.git + https://bitbucket.org/{user}/{repo} + https://bitbucket.org/{user}/{repo}/ + https://bitbucket.org/{user}/{repo}/src/{branch} + https://bitbucket.org/{user}/{repo}/src/{branch}/ + + :return: + True if repo_url matches an supported scheme. + """ + user, repo, _ = BitBucketClient.user_repo_branch(repo_url) + return bool(user and repo) + + def get_packages(self, invalid_sources=None): + """ + Uses the BitBucket API to construct necessary info for a package + + :param invalid_sources: + A list of URLs that should be ignored + + :return: + A generator of + ( + 'Package Name', + { + 'name': name, + 'description': description, + 'author': author, + 'homepage': homepage, + 'last_modified': last modified date, + 'releases': [ + { + 'sublime_text': '*', + 'platforms': ['*'], + 'url': url, + 'date': date, + 'version': version + }, ... 
+ ], + 'previous_names': [], + 'labels': [], + 'sources': [the repo URL], + 'readme': url, + 'issues': url, + 'donate': url, + 'buy': None + } + ) + tuples + """ + + if self.packages is not None: + for key, value in self.packages.items(): + yield (key, value) + return + + if invalid_sources is not None and self.repo_url in invalid_sources: + return + + client = BitBucketClient(self.settings) + + try: + repo_info = client.repo_info(self.repo_url) + if not repo_info: + raise GitProviderRepoInfoException(self) + + downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch']) + if not downloads: + raise GitProviderDownloadInfoException(self) + + for download in downloads: + download['sublime_text'] = '*' + download['platforms'] = ['*'] + + name = repo_info['name'] + details = { + 'name': name, + 'description': repo_info['description'], + 'homepage': repo_info['homepage'], + 'author': repo_info['author'], + 'last_modified': downloads[0].get('date'), + 'releases': downloads, + 'previous_names': [], + 'labels': [], + 'sources': [self.repo_url], + 'readme': repo_info['readme'], + 'issues': repo_info['issues'], + 'donate': repo_info['donate'], + 'buy': None + } + self.packages = {name: details} + yield (name, details) + + except (DownloaderException, ClientException, ProviderException) as e: + self.failed_sources[self.repo_url] = e + self.packages = {} diff --git a/tasks/lib/package_control/providers/channel_provider.py b/tasks/lib/package_control/providers/channel_provider.py new file mode 100644 index 0000000..fb26030 --- /dev/null +++ b/tasks/lib/package_control/providers/channel_provider.py @@ -0,0 +1,415 @@ +import json +import os +import re +from itertools import chain + +from ..console_write import console_write +from ..download_manager import http_get, resolve_urls, update_url +from ..package_version import version_sort +from .provider_exception import ProviderException +from .schema_version import SchemaVersion + + +class 
InvalidChannelFileException(ProviderException): + + def __init__(self, channel, reason_message): + super().__init__( + 'Channel %s does not appear to be a valid channel file because' + ' %s' % (channel.channel_url, reason_message)) + + +class UncachedChannelRepositoryError(ProviderException): + pass + + +class ChannelProvider: + """ + Retrieves a channel and provides an API into the information + + The current channel/repository infrastructure caches repository info into + the channel to improve the Package Control client performance. This also + has the side effect of lessening the load on the GitHub and BitBucket APIs + and getting around not-infrequent HTTP 503 errors from those APIs. + + :param channel_url: + The URL of the channel + + :param settings: + A dict containing at least the following fields: + `cache_length`, + `debug`, + `timeout`, + `user_agent` + Optional fields: + `http_proxy`, + `https_proxy`, + `proxy_username`, + `proxy_password`, + `query_string_params`, + `http_basic_auth` + """ + + __slots__ = [ + 'channel_url', + 'schema_version', + 'repositories', + 'libraries_cache', + 'packages_cache', + 'settings', + ] + + def __init__(self, channel_url, settings): + self.channel_url = channel_url + self.schema_version = SchemaVersion('4.0.0') + self.repositories = None + self.libraries_cache = {} + self.packages_cache = {} + self.settings = settings + + @classmethod + def match_url(cls, channel_url): + """ + Indicates if this provider can handle the provided channel_url. 
+ """ + + return True + + def prefetch(self): + """ + Go out and perform HTTP operations, caching the result + + :raises: + ProviderException: when an error occurs trying to open a file + DownloaderException: when an error occurs trying to open a URL + """ + + self.fetch() + + def fetch(self): + """ + Retrieves and loads the JSON for other methods to use + + :raises: + InvalidChannelFileException: when parsing or validation file content fails + ProviderException: when an error occurs trying to open a file + DownloaderException: when an error occurs trying to open a URL + """ + + if self.repositories is not None: + return + + if re.match(r'https?://', self.channel_url, re.I): + json_string = http_get(self.channel_url, self.settings, 'Error downloading channel.') + + # All other channels are expected to be filesystem paths + else: + if not os.path.exists(self.channel_url): + raise ProviderException('Error, file %s does not exist' % self.channel_url) + + if self.settings.get('debug'): + console_write( + ''' + Loading %s as a channel + ''', + self.channel_url + ) + + # We open as binary so we get bytes like the DownloadManager + with open(self.channel_url, 'rb') as f: + json_string = f.read() + + try: + channel_info = json.loads(json_string.decode('utf-8')) + except ValueError: + raise InvalidChannelFileException(self, 'parsing JSON failed.') + + try: + schema_version = SchemaVersion(channel_info['schema_version']) + except KeyError: + raise InvalidChannelFileException(self, 'the "schema_version" JSON key is missing.') + except ValueError as e: + raise InvalidChannelFileException(self, e) + + if 'repositories' not in channel_info: + raise InvalidChannelFileException(self, 'the "repositories" JSON key is missing.') + + self.repositories = self._migrate_repositories(channel_info, schema_version) + self.packages_cache = self._migrate_packages_cache(channel_info, schema_version) + self.libraries_cache = self._migrate_libraries_cache(channel_info, schema_version) + + def 
get_renamed_packages(self): + """ + :raises: + ProviderException: when an error occurs with the channel contents + DownloaderException: when an error occurs trying to open a URL + + :return: + A dict of the packages that have been renamed + """ + + self.fetch() + + output = {} + for package in chain(*self.packages_cache.values()): + previous_names = package.get('previous_names', []) + if not isinstance(previous_names, list): + previous_names = [previous_names] + for previous_name in previous_names: + output[previous_name] = package['name'] + + return output + + def get_repositories(self): + """ + :raises: + ProviderException: when an error occurs with the channel contents + DownloaderException: when an error occurs trying to open a URL + + :return: + A list of the repository URLs + """ + + self.fetch() + + return self.repositories + + def get_sources(self): + """ + Return a list of current URLs that are directly referenced by the + channel + + :return: + A list of URLs and/or file paths + """ + + return self.get_repositories() + + def get_packages(self, repo_url): + """ + Provides access to the repository info that is cached in a channel + + :param repo_url: + The URL of the repository to get the cached info of + + :raises: + DownloaderException: when an error occurs trying to open a URL + UncachedChannelRepositoryError when no cache entry exists for repo_url + + :return: + A generator of + ( + 'Package Name', + { + 'name': name, + 'description': description, + 'author': author, + 'homepage': homepage, + 'previous_names': [old_name, ...], + 'labels': [label, ...], + 'readme': url, + 'issues': url, + 'donate': url, + 'buy': url, + 'last_modified': last modified date, + 'releases': [ + { + 'sublime_text': compatible version, + 'platforms': [platform name, ...], + 'python_versions': ['3.3', '3.8'], + 'url': url, + 'date': date, + 'version': version, + 'libraries': [library name, ...] + }, ... 
+ ] + } + ) + tuples + """ + + self.fetch() + + if repo_url not in self.packages_cache: + raise UncachedChannelRepositoryError(repo_url) + + for package in self.packages_cache[repo_url]: + if package['releases']: + yield (package['name'], package) + + def get_libraries(self, repo_url): + """ + Provides access to the library info that is cached in a channel + + :param repo_url: + The URL of the repository to get the cached info of + + :raises: + DownloaderException: when an error occurs trying to open a URL + UncachedChannelRepositoryError when no cache entry exists for repo_url + + :return: + A generator of + ( + 'Library Name', + { + 'name': name, + 'description': description, + 'author': author, + 'issues': URL, + 'releases': [ + { + 'sublime_text': compatible version, + 'platforms': [platform name, ...], + 'python_versions': ['3.3', '3.8'], + 'url': url, + 'version': version, + 'sha256': hex hash + }, ... + ] + } + ) + tuples + """ + + self.fetch() + + if repo_url not in self.libraries_cache: + raise UncachedChannelRepositoryError(repo_url) + + for library in self.libraries_cache[repo_url]: + if library['releases']: + yield (library['name'], library) + + def get_broken_packages(self): + """ + Provide package names without releases. + + :raises: + ProviderException: when an error occurs with the channel contents + DownloaderException: when an error occurs trying to open a URL + + :return: + A generator of 'package names' + """ + + self.fetch() + + for package in chain(*self.packages_cache.values()): + if not package['releases']: + yield package['name'] + + def get_broken_libraries(self): + """ + Provide library names without releases. 
+ + :raises: + ProviderException: when an error occurs with the channel contents + DownloaderException: when an error occurs trying to open a URL + + :return: + A generator of 'library names' + """ + + self.fetch() + + for library in chain(*self.libraries_cache.values()): + if not library['releases']: + yield library['name'] + + def _migrate_repositories(self, channel_info, schema_version): + + debug = self.settings.get('debug') + + return [ + update_url(url, debug) + for url in resolve_urls(self.channel_url, channel_info['repositories']) + ] + + def _migrate_packages_cache(self, channel_info, schema_version): + """ + Transform input packages cache to scheme version 4.0.0 + + :param channel_info: + The input channel information of any scheme version + + :param schema_version: + The schema version of the input channel information + + :returns: + packages_cache object of scheme version 4.0.0 + """ + + debug = self.settings.get('debug') + + package_cache = channel_info.get('packages_cache', {}) + + defaults = { + 'buy': None, + 'issues': None, + 'labels': [], + 'previous_names': [], + 'readme': None, + 'donate': None + } + + for package in chain(*package_cache.values()): + + for field in defaults: + if field not in package: + package[field] = defaults[field] + + # Workaround for packagecontrol.io, which adds `authors` instead of `author` + # to cached packages and libraries. + if 'authors' in package: + package['author'] = package.pop('authors') + + releases = version_sort(package.get('releases', []), 'platforms', reverse=True) + package['releases'] = releases + package['last_modified'] = releases[0]['date'] if releases else None + + # The 4.0.0 channel schema renamed the `dependencies` key to `libraries`. 
+ if schema_version.major < 4: + for release in package['releases']: + if 'dependencies' in release: + release['libraries'] = release.pop('dependencies') + + # Fix any out-dated repository URLs in packages cache + return {update_url(name, debug): info for name, info in package_cache.items()} + + def _migrate_libraries_cache(self, channel_info, schema_version): + """ + Transform input libraries cache to scheme version 4.0.0 + + :param channel_info: + The input channel information of any scheme version + + :param schema_version: + The schema version of the input channel information + + :returns: + libraries_cache object of scheme version 4.0.0 + """ + + debug = self.settings.get('debug') + + if schema_version.major < 4: + # The 4.0.0 channel schema renamed the key cached package info was + # stored under in order to be more clear to new users. + libraries_cache = channel_info.pop('dependencies_cache', {}) + + # The 4.0.0 channel scheme drops 'load_order' from each library + # and adds a required 'python_versions' list to each release. 
class GitHubRepositoryProvider(BaseRepositoryProvider):
    """
    Allows using a public GitHub repository as the source for a single package.
    For legacy purposes, this can also be treated as the source for a Package
    Control "repository".

    :param repo_url:
        The public web URL to the GitHub repository. Should be in the format
        `https://github.com/user/package` for the master branch, or
        `https://github.com/user/package/tree/{branch_name}` for any other
        branch.

    :param settings:
        A dict containing at least the following fields:
        `cache_length`, `debug`, `timeout`, `user_agent`
        Optional fields:
        `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`,
        `query_string_params`, `http_basic_auth`
    """

    def __init__(self, repo_url, settings):
        # Clean off the trailing .git to be more forgiving
        super().__init__(re.sub(r'\.git$', '', repo_url), settings)

    @classmethod
    def match_url(cls, repo_url):
        """
        Indicates if this provider can handle the provided repo_url

        :param repo_url:
            The URL to the repository, in one of the forms:
            https://github.com/{user}/{repo}.git
            https://github.com/{user}/{repo}
            https://github.com/{user}/{repo}/
            https://github.com/{user}/{repo}/tree/{branch}
            https://github.com/{user}/{repo}/tree/{branch}/

        :return:
            True if repo_url matches a supported scheme.
        """
        # URLs with a user but no repo component are handled by
        # GitHubUserProvider instead.
        user, repo, _ = GitHubClient.user_repo_branch(repo_url)
        return bool(user and repo)

    def get_packages(self, invalid_sources=None):
        """
        Uses the GitHub API to construct necessary info for a package

        :param invalid_sources:
            A list of URLs that should be ignored

        :return:
            A generator of ('Package Name', details) tuples, where details is
            a dict with name, description, author, homepage, last_modified,
            releases (each carrying sublime_text, platforms, url, date and
            version), previous_names, labels, sources, readme, issues,
            donate and buy entries.
        """

        # Results are memoized in self.packages after the first full pass.
        if self.packages is not None:
            for key, value in self.packages.items():
                yield (key, value)
            return

        if invalid_sources is not None and self.repo_url in invalid_sources:
            return

        client = GitHubClient(self.settings)

        try:
            repo_info = client.repo_info(self.repo_url)
            if not repo_info:
                raise GitProviderRepoInfoException(self)

            # Releases are derived from the repository's default branch.
            downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch'])
            if not downloads:
                raise GitProviderDownloadInfoException(self)

            # Branch-based releases are assumed compatible with any ST
            # version and any platform.
            for download in downloads:
                download['sublime_text'] = '*'
                download['platforms'] = ['*']

            name = repo_info['name']
            details = {
                'name': name,
                'description': repo_info['description'],
                'homepage': repo_info['homepage'],
                'author': repo_info['author'],
                'last_modified': downloads[0].get('date'),
                'releases': downloads,
                'previous_names': [],
                'labels': [],
                'sources': [self.repo_url],
                'readme': repo_info['readme'],
                'issues': repo_info['issues'],
                'donate': repo_info['donate'],
                'buy': None
            }
            self.packages = {name: details}
            yield (name, details)

        except (DownloaderException, ClientException, ProviderException) as e:
            # Record the failure so later calls short-circuit via the
            # memoized empty dict.
            self.failed_sources[self.repo_url] = e
            self.packages = {}
class GitHubUserProvider(BaseRepositoryProvider):
    """
    Allows using a GitHub user/organization as the source for multiple packages,
    or in Package Control terminology, a "repository".

    :param repo_url:
        The public web URL to the GitHub user/org. Should be in the format
        `https://github.com/user`.

    :param settings:
        A dict containing at least the following fields:
        `cache_length`, `debug`, `timeout`, `user_agent`
        Optional fields:
        `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`,
        `query_string_params`, `http_basic_auth`
    """

    @classmethod
    def match_url(cls, repo_url):
        """
        Indicates if this provider can handle the provided repo_url

        :param repo_url:
            The URL to the repository, in one of the forms:
            https://github.com/{user}
            https://github.com/{user}/

        :return:
            True if repo_url matches a supported scheme.
        """
        # Only a bare user/org URL matches; URLs that also name a repo are
        # handled by GitHubRepositoryProvider.
        user, repo, _ = GitHubClient.user_repo_branch(repo_url)
        return bool(user and not repo)

    def get_packages(self, invalid_sources=None):
        """
        Uses the GitHub API to construct necessary info for all packages

        :param invalid_sources:
            A list of URLs that should be ignored

        :return:
            A generator of ('Package Name', details) tuples, where details is
            a dict with name, description, author, homepage, last_modified,
            releases (each carrying sublime_text, platforms, url, date and
            version), previous_names, labels, sources, readme, issues,
            donate and buy entries.
        """

        # Results are memoized in self.packages after the first full pass.
        if self.packages is not None:
            for key, value in self.packages.items():
                yield (key, value)
            return

        if invalid_sources is not None and self.repo_url in invalid_sources:
            return

        client = GitHubClient(self.settings)

        try:
            user_repos = client.user_info(self.repo_url)
            if not user_repos:
                raise GitProviderUserInfoException(self)
        except (DownloaderException, ClientException, ProviderException) as e:
            # Failing to list the user's repositories invalidates the whole
            # source; memoize an empty result.
            self.failed_sources[self.repo_url] = e
            self.packages = {}
            return

        output = {}
        for repo_info in user_repos:
            author = repo_info['author']
            name = repo_info['name']
            repo_url = client.repo_url(author, name)

            if invalid_sources is not None and repo_url in invalid_sources:
                continue

            try:
                downloads = client.download_info_from_branch(repo_url, repo_info['default_branch'])
                if not downloads:
                    raise GitProviderDownloadInfoException(self)

                # Branch-based releases are assumed compatible with any ST
                # version and any platform.
                for download in downloads:
                    download['sublime_text'] = '*'
                    download['platforms'] = ['*']

                details = {
                    'name': name,
                    'description': repo_info['description'],
                    'homepage': repo_info['homepage'],
                    'author': author,
                    'last_modified': downloads[0].get('date'),
                    'releases': downloads,
                    'previous_names': [],
                    'labels': [],
                    'sources': [self.repo_url],
                    'readme': repo_info['readme'],
                    'issues': repo_info['issues'],
                    'donate': repo_info['donate'],
                    'buy': None
                }
                output[name] = details
                yield (name, details)

            except (DownloaderException, ClientException, ProviderException) as e:
                # A single broken repo doesn't fail the whole user; record it
                # keyed by the individual repo URL and keep going.
                self.failed_sources[repo_url] = e

        self.packages = output
class GitLabRepositoryProvider(BaseRepositoryProvider):
    """
    Allows using a public GitLab repository as the source for a single package.
    For legacy purposes, this can also be treated as the source for a Package
    Control "repository".

    :param repo_url:
        The public web URL to the GitLab repository. Should be in the format
        `https://gitlab.com/user/package` for the master branch, or
        `https://gitlab.com/user/package/-/tree/{branch_name}` for any other
        branch.

    :param settings:
        A dict containing at least the following fields:
        `cache_length`, `debug`, `timeout`, `user_agent`
        Optional fields:
        `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`,
        `query_string_params`, `http_basic_auth`
    """

    def __init__(self, repo_url, settings):
        # Clean off the trailing .git to be more forgiving
        super().__init__(re.sub(r'\.git$', '', repo_url), settings)

    @classmethod
    def match_url(cls, repo_url):
        """
        Indicates if this provider can handle the provided repo_url

        :param repo_url:
            The URL to the repository, in one of the forms:
            https://gitlab.com/{user}/{repo}.git
            https://gitlab.com/{user}/{repo}
            https://gitlab.com/{user}/{repo}/
            https://gitlab.com/{user}/{repo}/-/tree/{branch}
            https://gitlab.com/{user}/{repo}/-/tree/{branch}/

        :return:
            True if repo_url matches a supported scheme.
        """
        # URLs with a user but no repo component are handled by
        # GitLabUserProvider instead.
        user, repo, _ = GitLabClient.user_repo_branch(repo_url)
        return bool(user and repo)

    def get_packages(self, invalid_sources=None):
        """
        Uses the GitLab API to construct necessary info for a package

        :param invalid_sources:
            A list of URLs that should be ignored

        :return:
            A generator of ('Package Name', details) tuples, where details is
            a dict with name, description, author, homepage, last_modified,
            releases (each carrying sublime_text, platforms, url, date and
            version), previous_names, labels, sources, readme, issues,
            donate and buy entries.
        """

        # Results are memoized in self.packages after the first full pass.
        if self.packages is not None:
            for key, value in self.packages.items():
                yield (key, value)
            return

        if invalid_sources is not None and self.repo_url in invalid_sources:
            return

        client = GitLabClient(self.settings)

        try:
            repo_info = client.repo_info(self.repo_url)
            if not repo_info:
                raise GitProviderRepoInfoException(self)

            # Releases are derived from the repository's default branch.
            downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch'])
            if not downloads:
                raise GitProviderDownloadInfoException(self)

            # Branch-based releases are assumed compatible with any ST
            # version and any platform.
            for download in downloads:
                download['sublime_text'] = '*'
                download['platforms'] = ['*']

            name = repo_info['name']
            details = {
                'name': name,
                'description': repo_info['description'],
                'homepage': repo_info['homepage'],
                'author': repo_info['author'],
                'last_modified': downloads[0].get('date'),
                'releases': downloads,
                'previous_names': [],
                'labels': [],
                'sources': [self.repo_url],
                'readme': repo_info['readme'],
                'issues': repo_info['issues'],
                'donate': repo_info['donate'],
                'buy': None
            }
            self.packages = {name: details}
            yield (name, details)

        except (DownloaderException, ClientException, ProviderException) as e:
            # Record the failure so later calls short-circuit via the
            # memoized empty dict.
            self.failed_sources[self.repo_url] = e
            self.packages = {}
class GitLabUserProvider(BaseRepositoryProvider):
    """
    Allows using a GitLab user/organization as the source for multiple packages,
    or in Package Control terminology, a "repository".

    :param repo_url:
        The public web URL to the GitLab user/org. Should be in the format
        `https://gitlab.com/user`.

    :param settings:
        A dict containing at least the following fields:
        `cache_length`, `debug`, `timeout`, `user_agent`
        Optional fields:
        `http_proxy`, `https_proxy`, `proxy_username`, `proxy_password`,
        `query_string_params`, `http_basic_auth`
    """

    @classmethod
    def match_url(cls, repo_url):
        """
        Indicates if this provider can handle the provided repo_url

        :param repo_url:
            The URL to the repository, in one of the forms:
            https://gitlab.com/{user}
            https://gitlab.com/{user}/

        :return:
            True if repo_url matches a supported scheme.
        """
        # Only a bare user/org URL matches; URLs that also name a repo are
        # handled by GitLabRepositoryProvider.
        user, repo, _ = GitLabClient.user_repo_branch(repo_url)
        return bool(user and not repo)

    def get_packages(self, invalid_sources=None):
        """
        Uses the GitLab API to construct necessary info for all packages

        :param invalid_sources:
            A list of URLs that should be ignored

        :return:
            A generator of ('Package Name', details) tuples, where details is
            a dict with name, description, author, homepage, last_modified,
            releases (each carrying sublime_text, platforms, url, date and
            version), previous_names, labels, sources, readme, issues,
            donate and buy entries.
        """

        # Results are memoized in self.packages after the first full pass.
        if self.packages is not None:
            for key, value in self.packages.items():
                yield (key, value)
            return

        if invalid_sources is not None and self.repo_url in invalid_sources:
            return

        client = GitLabClient(self.settings)

        try:
            user_repos = client.user_info(self.repo_url)
            if not user_repos:
                raise GitProviderUserInfoException(self)
        except (DownloaderException, ClientException, ProviderException) as e:
            # Failing to list the user's repositories invalidates the whole
            # source; memoize an empty result.
            self.failed_sources[self.repo_url] = e
            self.packages = {}
            return

        output = {}
        for repo_info in user_repos:
            author = repo_info['author']
            name = repo_info['name']
            repo_url = client.repo_url(author, name)

            if invalid_sources is not None and repo_url in invalid_sources:
                continue

            try:
                downloads = client.download_info_from_branch(repo_url, repo_info['default_branch'])
                if not downloads:
                    raise GitProviderDownloadInfoException(self)

                # Branch-based releases are assumed compatible with any ST
                # version and any platform.
                for download in downloads:
                    download['sublime_text'] = '*'
                    download['platforms'] = ['*']

                details = {
                    'name': name,
                    'description': repo_info['description'],
                    'homepage': repo_info['homepage'],
                    'author': author,
                    'last_modified': downloads[0].get('date'),
                    'releases': downloads,
                    'previous_names': [],
                    'labels': [],
                    'sources': [self.repo_url],
                    'readme': repo_info['readme'],
                    'issues': repo_info['issues'],
                    'donate': repo_info['donate'],
                    'buy': None
                }
                output[name] = details
                yield (name, details)

            except (DownloaderException, ClientException, ProviderException) as e:
                # A single broken repo doesn't fail the whole user; record it
                # keyed by the individual repo URL and keep going.
                self.failed_sources[repo_url] = e

        self.packages = output
[self.repo_url], + 'readme': repo_info['readme'], + 'issues': repo_info['issues'], + 'donate': repo_info['donate'], + 'buy': None + } + output[name] = details + yield (name, details) + + except (DownloaderException, ClientException, ProviderException) as e: + self.failed_sources[repo_url] = e + + self.packages = output diff --git a/tasks/lib/package_control/providers/json_repository_provider.py b/tasks/lib/package_control/providers/json_repository_provider.py new file mode 100644 index 0000000..824218f --- /dev/null +++ b/tasks/lib/package_control/providers/json_repository_provider.py @@ -0,0 +1,931 @@ +import json +import re +import os +from itertools import chain +from urllib.parse import urlparse + +from ..clients.bitbucket_client import BitBucketClient +from ..clients.client_exception import ClientException +from ..clients.github_client import GitHubClient +from ..clients.gitlab_client import GitLabClient +from ..clients.pypi_client import PyPiClient +from ..console_write import console_write +from ..download_manager import http_get, resolve_url, resolve_urls, update_url +from ..downloaders.downloader_exception import DownloaderException +from ..package_version import version_sort +from .base_repository_provider import BaseRepositoryProvider +from .provider_exception import ProviderException +from .schema_version import SchemaVersion + +try: + # running within ST + from ..selectors import is_compatible_platform, is_compatible_version + IS_ST = True +except ImportError: + # running on CLI or server + IS_ST = False + + +class InvalidRepoFileException(ProviderException): + def __init__(self, repo, reason_message): + super().__init__( + 'Repository {} does not appear to be a valid repository file because' + ' {}'.format(repo.repo_url, reason_message)) + + +class InvalidLibraryReleaseKeyError(ProviderException): + def __init__(self, repo, name, key): + super().__init__( + 'Invalid or missing release-level key "{}" in library "{}"' + ' in repository 
"{}".'.format(key, name, repo)) + + +class InvalidPackageReleaseKeyError(ProviderException): + def __init__(self, repo, name, key): + super().__init__( + 'Invalid or missing release-level key "{}" in package "{}"' + ' in repository "{}".'.format(key, name, repo)) + + +class JsonRepositoryProvider(BaseRepositoryProvider): + """ + Generic repository downloader that fetches package info + + With the current channel/repository architecture where the channel file + caches info from all includes repositories, these package providers just + serve the purpose of downloading packages not in the default channel. + + The structure of the JSON a repository should contain is located in + example-packages.json. + + :param repo_url: + The URL of the package repository + + :param settings: + A dict containing at least the following fields: + `cache_length`, + `debug`, + `timeout`, + `user_agent` + Optional fields: + `http_proxy`, + `https_proxy`, + `proxy_username`, + `proxy_password`, + `query_string_params`, + `http_basic_auth` + """ + + def __init__(self, repo_url, settings): + super().__init__(repo_url, settings) + self.included_urls = set() + self.repo_info = None + self.schema_version = None + + def fetch(self): + """ + Retrieves and loads the JSON for other methods to use + + :raises: + InvalidChannelFileException: when parsing or validation file content fails + ProviderException: when an error occurs trying to open a file + DownloaderException: when an error occurs trying to open a URL + """ + + if self.repo_info is not None: + return True + + if self.repo_url in self.failed_sources: + return False + + try: + self.repo_info = self.fetch_repo(self.repo_url) + self.schema_version = self.repo_info['schema_version'] + except (DownloaderException, ClientException, ProviderException) as e: + self.failed_sources[self.repo_url] = e + self.libraries = {} + self.packages = {} + return False + + return True + + def fetch_repo(self, location): + """ + Fetches the contents of a URL of 
file path + + :param location: + The URL or file path + + :raises: + ProviderException: when an error occurs trying to open a file + DownloaderException: when an error occurs trying to open a URL + + :return: + A dict of the parsed JSON + """ + + # Prevent circular includes + if location in self.included_urls: + raise ProviderException('Error, repository "%s" already included.' % location) + + self.included_urls.add(location) + + if re.match(r'https?://', location, re.I): + json_string = http_get(location, self.settings, 'Error downloading repository.') + + # Anything that is not a URL is expected to be a filesystem path + else: + if not os.path.exists(location): + raise ProviderException('Error, file %s does not exist' % location) + + if self.settings.get('debug'): + console_write( + ''' + Loading %s as a repository + ''', + location + ) + + # We open as binary so we get bytes like the DownloadManager + with open(location, 'rb') as f: + json_string = f.read() + + try: + repo_info = json.loads(json_string.decode('utf-8')) + except (ValueError): + raise InvalidRepoFileException(self, 'parsing JSON failed.') + + try: + schema_version = repo_info['schema_version'] = SchemaVersion(repo_info['schema_version']) + except KeyError: + raise InvalidRepoFileException( + self, 'the "schema_version" JSON key is missing.') + except ValueError as e: + raise InvalidRepoFileException(self, e) + + # Main keys depending on scheme version + if schema_version.major < 4: + repo_keys = {'packages', 'dependencies', 'includes'} + else: + repo_keys = {'packages', 'libraries', 'includes'} + + # Check existence of at least one required main key + if not set(repo_info.keys()) & repo_keys: + raise InvalidRepoFileException(self, 'it doesn\'t look like a repository.') + + # Check type of existing main keys + for key in repo_keys: + if key in repo_info and not isinstance(repo_info[key], list): + raise InvalidRepoFileException(self, 'the "%s" key is not an array.' 
    def get_libraries(self, invalid_sources=None):
        """
        Provides access to the libraries in this repository

        :param invalid_sources:
            A list of URLs that are permissible to fetch data from

        :return:
            A generator of ('Library Name', info) tuples, where info is a dict
            with name, description, author, issues, releases (each carrying
            sublime_text, platforms, python_versions, url, version and
            sha256) and sources entries.
        """

        # Results are memoized in self.libraries after the first full pass.
        if self.libraries is not None:
            for key, value in self.libraries.items():
                yield (key, value)
            return

        if invalid_sources is not None and self.repo_url in invalid_sources:
            return

        if not self.fetch():
            return

        if not self.repo_info:
            return

        # The set of keys a library and its releases may carry depends on
        # the repository's declared schema version.
        if self.schema_version.major >= 4:
            allowed_library_keys = {
                'name', 'description', 'author', 'homepage', 'issues', 'releases'
            }
            allowed_release_keys = {  # todo: remove 'branch'
                'base', 'version', 'sublime_text', 'platforms', 'python_versions',
                'branch', 'tags', 'asset', 'url', 'sha256'
            }
        else:
            allowed_library_keys = {
                'name', 'description', 'author', 'issues', 'load_order', 'releases'
            }
            allowed_release_keys = {
                'base', 'version', 'sublime_text', 'platforms',
                'branch', 'tags', 'url', 'sha256'
            }

        copied_library_keys = ('name', 'description', 'author', 'homepage', 'issues')
        copied_release_keys = ('date', 'version', 'sha256')
        default_platforms = ['*']
        default_python_versions = ['3.3']
        default_sublime_text = '*'

        debug = self.settings.get('debug')

        # Clients are tried in order until one recognizes a given base URL.
        clients = [
            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient, PyPiClient)
        ]

        output = {}
        for library in self.repo_info.get('libraries', []):
            info = {
                'releases': [],
                'sources': [self.repo_url]
            }

            for field in copied_library_keys:
                field_value = library.get(field)
                if field_value:
                    info[field] = field_value

            if 'name' not in info:
                self.failed_sources[self.repo_url] = ProviderException(
                    'No "name" value for one of libraries'
                    ' in repository "{}".'.format(self.repo_url)
                )
                continue

            try:
                unknown_keys = set(library) - allowed_library_keys
                if unknown_keys:
                    raise ProviderException(
                        'The "{}" key(s) in library "{}" in repository {} are not supported.'.format(
                            '", "'.join(sorted(unknown_keys)), info['name'],
                            self.repo_url
                        )
                    )

                releases = library.get('releases', [])
                if releases and not isinstance(releases, list):
                    raise ProviderException(
                        'The "releases" value is not an array for library "{}"'
                        ' in repository {}.'.format(info['name'], self.repo_url)
                    )

                # Releases using asset templates are grouped and resolved
                # in a second pass (see bottom of the try block).
                staged_releases = {}

                for release in releases:
                    download_info = {}

                    unknown_keys = set(release) - allowed_release_keys
                    if unknown_keys:
                        raise ProviderException(
                            'The "{}" key(s) in one of the releases of library "{}"'
                            ' in repository {} are not supported.'.format(
                                '", "'.join(sorted(unknown_keys)), info['name'], self.repo_url
                            )
                        )

                    # Validate libraries
                    # the key can be used to specify dependencies, upstream via repositories
                    key = 'libraries' if self.schema_version.major >= 4 else 'dependencies'
                    value = release.get(key, [])
                    if value:
                        if not isinstance(value, list):
                            raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
                        download_info['libraries'] = value

                    # Validate supported platforms
                    key = 'platforms'
                    value = release.get(key, default_platforms)
                    if isinstance(value, str):
                        value = [value]
                    elif not isinstance(value, list):
                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
                    # ignore incompatible release (avoid downloading/evaluating further information)
                    if IS_ST and not is_compatible_platform(value):
                        continue
                    download_info[key] = value

                    # Validate supported python_versions
                    key = 'python_versions'
                    value = release.get(key, default_python_versions)
                    if isinstance(value, str):
                        value = [value]
                    elif not isinstance(value, list):
                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
                    download_info[key] = value

                    # Validate supported ST version
                    key = 'sublime_text'
                    value = release.get(key, default_sublime_text)
                    if not isinstance(value, str):
                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
                    # ignore incompatible release (avoid downloading/evaluating further information)
                    if IS_ST and not is_compatible_version(value):
                        continue
                    download_info[key] = value

                    # Validate url
                    # if present, it is an explicit or resolved release
                    url = release.get('url')
                    if url:
                        for key in copied_release_keys:
                            if key in release:
                                value = release[key]
                                if not value or not isinstance(value, str):
                                    raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
                                download_info[key] = value

                        if 'version' not in download_info:
                            raise ProviderException(
                                'Missing "version" key in release with explicit "url" of library "{}"'
                                ' in repository "{}".'.format(info['name'], self.repo_url)
                            )

                        download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
                        # Plain-http downloads must be integrity-checked.
                        is_http = urlparse(download_info['url']).scheme == 'http'
                        if is_http and 'sha256' not in download_info:
                            raise ProviderException(
                                'No "sha256" key for the non-secure "url" value in one of the releases'
                                ' of the library "{}" in repository {}.'.format(info['name'], self.repo_url)
                            )

                        info['releases'].append(download_info)
                        continue

                    # Resolve release template using `base` and `branch` or `tags` keys

                    base = release.get('base')
                    if not base:
                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], 'base')

                    base_url = resolve_url(self.repo_url, base)
                    downloads = None

                    # Evaluate and resolve "tags" and "branch" release templates
                    asset = release.get('asset')
                    branch = release.get('branch')
                    tags = release.get('tags')
                    extra = None if tags is True else tags

                    if asset:
                        if branch:
                            raise ProviderException(
                                'Illegal "asset" key "{}" for branch based release of library "{}"'
                                ' in repository "{}".'.format(base, info['name'], self.repo_url)
                            )
                        # group releases with assets by base_url and tag-prefix
                        # to prepare gathering download_info with a single API call
                        staged_releases.setdefault((base_url, extra), []).append((asset, download_info))
                        continue

                    elif tags:
                        for client in clients:
                            downloads = client.download_info_from_tags(base_url, extra)
                            if downloads is not None:
                                break

                    elif branch:
                        for client in clients:
                            downloads = client.download_info_from_branch(base_url, branch)
                            if downloads is not None:
                                break
                    else:
                        raise ProviderException(
                            'Missing "branch", "tags" or "url" key in release of library "{}"'
                            ' in repository "{}".'.format(info['name'], self.repo_url)
                        )

                    # None means no client recognized the base URL;
                    # False means the client found no usable tags.
                    if downloads is None:
                        raise ProviderException(
                            'Invalid "base" value "{}" for one of the releases of library "{}"'
                            ' in repository "{}".'.format(base, info['name'], self.repo_url)
                        )

                    if downloads is False:
                        raise ProviderException(
                            'No valid semver tags found at "{}" for library "{}"'
                            ' in repository "{}".'.format(base, info['name'], self.repo_url)
                        )

                    for download in downloads:
                        download.update(download_info)
                        info['releases'].append(download)

                # gather download_info from releases
                for (base_url, extra), asset_templates in staged_releases.items():
                    for client in clients:
                        downloads = client.download_info_from_releases(base_url, asset_templates, extra)
                        if downloads is not None:
                            info['releases'].extend(downloads)
                            break

                # check required library keys
                for key in ('description', 'author', 'issues'):
                    if not info.get(key):
                        raise ProviderException(
                            'Missing or invalid "{}" key for library "{}"'
                            ' in repository "{}".'.format(key, info['name'], self.repo_url)
                        )

                # Empty releases means package is unavailable on current platform or for version of ST
                if not info['releases']:
                    continue

                info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)

                output[info['name']] = info
                yield (info['name'], info)

            except (DownloaderException, ClientException, ProviderException) as e:
                # NOTE(review): "broken_libriaries" looks misspelled — confirm it
                # matches the attribute actually defined on BaseRepositoryProvider.
                self.broken_libriaries[info['name']] = e

        self.libraries = output
description, + 'author': author, + 'homepage': homepage, + 'previous_names': [old_name, ...], + 'labels': [label, ...], + 'sources': [url, ...], + 'readme': url, + 'issues': url, + 'donate': url, + 'buy': url, + 'last_modified': last modified date, + 'releases': [ + { + 'sublime_text': compatible version, + 'platforms': [platform name, ...], + 'url': url, + 'date': date, + 'version': version, + 'libraries': [library name, ...] + }, ... + ] + } + ) + tuples + """ + + if self.packages is not None: + for key, value in self.packages.items(): + yield (key, value) + return + + if invalid_sources is not None and self.repo_url in invalid_sources: + return + + if not self.fetch(): + return + + if not self.repo_info: + return + + copied_package_keys = ( + 'name', + 'description', + 'author', + 'last_modified', + 'previous_names', + 'labels', + 'homepage', + 'readme', + 'issues', + 'donate', + 'buy' + ) + copied_release_keys = ('date', 'version') + default_platforms = ['*'] + default_sublime_text = '*' + + debug = self.settings.get('debug') + + clients = [ + Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient) + ] + + output = {} + for package in self.repo_info.get('packages', []): + info = { + 'releases': [], + 'sources': [self.repo_url] + } + + for field in copied_package_keys: + if package.get(field): + info[field] = package.get(field) + + # Try to grab package-level details from GitHub or BitBucket + details = package.get('details') + if details: + details = resolve_url(self.repo_url, details) + + if invalid_sources is not None and details in invalid_sources: + continue + + if details not in info['sources']: + info['sources'].append(details) + + try: + repo_info = None + + for client in clients: + repo_info = client.repo_info(details) + if repo_info: + break + else: + raise ProviderException( + 'Invalid "details" value "{}" for one of the packages' + ' in the repository {}.'.format(details, self.repo_url) + ) + + del 
repo_info['default_branch'] + + # When grabbing details, prefer explicit field values over the values + # from the GitHub or BitBucket API + info = dict(chain(repo_info.items(), info.items())) + + except (DownloaderException, ClientException, ProviderException) as e: + if 'name' in info: + self.broken_packages[info['name']] = e + self.failed_sources[details] = e + continue + + if 'name' not in info: + self.failed_sources[self.repo_url] = ProviderException( + 'No "name" value for one of the packages' + ' in the repository {}.'.format(self.repo_url) + ) + continue + + try: + if not info.get('author'): + raise ProviderException( + 'Missing or invalid "author" key for package "{}"' + ' in repository "{}".'.format(info['name'], self.repo_url) + ) + + # evaluate releases + + releases = package.get('releases') + + # If no releases info was specified, also grab the download info from GH or BB + if self.schema_version.major == 2 and not releases and details: + releases = [{'details': details}] + + if not releases: + raise ProviderException( + 'No "releases" value for the package "{}"' + ' in the repository {}.'.format(info['name'], self.repo_url) + ) + + if not isinstance(releases, list): + raise ProviderException( + 'The "releases" value is not an array for the package "{}"' + ' in the repository {}.'.format(info['name'], self.repo_url) + ) + + staged_releases = {} + + # This allows developers to specify a GH or BB location to get releases from, + # especially tags URLs (https://github.com/user/repo/tags or + # https://bitbucket.org/user/repo#tags) + for release in releases: + download_info = {} + + # Validate libraries + # the key can be used to specify dependencies, upstream via repositories + key = 'libraries' if self.schema_version.major >= 4 else 'dependencies' + value = release.get(key, []) + if value: + if not isinstance(value, list): + raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key) + download_info['libraries'] = value + + # Validate supported 
platforms
+ key = 'platforms'
+ value = release.get(key, default_platforms)
+ if isinstance(value, str):
+ value = [value]
+ elif not isinstance(value, list):
+ raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+ # ignore incompatible release (avoid downloading/evaluating further information)
+ if IS_ST and not is_compatible_platform(value):
+ continue
+ download_info[key] = value
+
+ # Validate supported python_versions (requires scheme 4.0.0!)
+ key = 'python_versions'
+ value = release.get(key)
+ if value:
+ # Package releases may optionally contain `python_versions` list to tell
+ # which python version they are compatible with.
+ # The main purpose is to be able to opt-in unmaintained packages to python 3.8
+ # if they are known not to cause trouble.
+ if isinstance(value, str):
+ value = [value]
+ elif not isinstance(value, list):
+ raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+ download_info[key] = value
+
+ if self.schema_version.major >= 3:
+ # Validate supported ST version
+ # missing key indicates any ST3+ build is supported
+ key = 'sublime_text'
+ value = release.get(key, default_sublime_text)
+ if not isinstance(value, str):
+ raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+ # ignore incompatible release (avoid downloading/evaluating further information)
+ if IS_ST and not is_compatible_version(value):
+ continue
+ download_info[key] = value
+
+ # Validate url
+ # if present, it is an explicit or resolved release
+ url = release.get('url')
+ if url:
+ # Validate date and version
+ for key in copied_release_keys:
+ if key in release:
+ value = release[key]
+ if not value or not isinstance(value, str):
+ raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+ download_info[key] = value
+
+ if 'version' not in download_info:
+ raise ProviderException(
+ 'Missing "version" key in release with explicit "url" of package "{}"'
+ ' in repository "{}".'.format(info['name'], 
self.repo_url) + ) + + download_info['url'] = update_url(resolve_url(self.repo_url, url), debug) + info['releases'].append(download_info) + continue + + # Resolve release template using `base` and `branch` or `tags` keys + + base = release.get('base') + if not base: + base = details + if not base: + raise ProviderException( + 'Missing root-level "details" key, or release-level "base" key' + ' for one of the releases of package "{}"' + ' in repository {}.'.format(info['name'], self.repo_url) + ) + + base_url = resolve_url(self.repo_url, base) + downloads = None + + asset = release.get('asset') + branch = release.get('branch') + tags = release.get('tags') + extra = None if tags is True else tags + + if asset: + if branch: + raise ProviderException( + 'Illegal "asset" key "{}" for branch based release of library "{}"' + ' in repository "{}".'.format(base, info['name'], self.repo_url) + ) + # group releases with assets by base_url and tag-prefix + # to prepare gathering download_info with a single API call + staged_releases.setdefault((base_url, extra), []).append((asset, download_info)) + continue + + elif tags: + for client in clients: + downloads = client.download_info_from_tags(base_url, extra) + if downloads is not None: + break + + elif branch: + for client in clients: + downloads = client.download_info_from_branch(base_url, branch) + if downloads is not None: + break + else: + raise ProviderException( + 'Missing "branch", "tags" or "url" key in release of package "{}"' + ' in repository "{}".'.format(info['name'], self.repo_url) + ) + + if downloads is None: + raise ProviderException( + 'Invalid "base" value "{}" for one of the releases of package "{}"' + ' in repository "{}".'.format(base, info['name'], self.repo_url) + ) + + if downloads is False: + raise ProviderException( + 'No valid semver tags found at "{}" for package "{}"' + ' in repository "{}".'.format(base, info['name'], self.repo_url) + ) + + for download in downloads: + 
download.update(download_info) + info['releases'].append(download) + + elif self.schema_version.major == 2: + # missing key indicates ST2 release; no longer supported + key = 'sublime_text' + value = release.get(key) + if not value: + continue + if not isinstance(value, str): + raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key) + # ignore incompatible release (avoid downloading/evaluating further information) + if IS_ST and not is_compatible_version(value): + continue + download_info[key] = value + + # Validate url + # if present, it is an explicit or resolved release + url = release.get('url') + if url: + for key in copied_release_keys: + if key in release: + value = release[key] + if not value or not isinstance(value, str): + raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key) + download_info[key] = value + + if 'version' not in download_info: + raise ProviderException( + 'Missing "version" key in release with explicit "url" of package "{}"' + ' in repository "{}".'.format(info['name'], self.repo_url) + ) + + download_info['url'] = update_url(resolve_url(self.repo_url, url), debug) + info['releases'].append(download_info) + continue + + # Evaluate and resolve "tags" and "branch" release templates + + download_details = release.get('details') + if not download_details or not isinstance(download_details, str): + raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], 'details') + + download_details = resolve_url(self.repo_url, release['details']) + + downloads = None + + for client in clients: + downloads = client.download_info(download_details) + if downloads is not None: + break + + if downloads is None: + raise ProviderException( + 'Invalid "details" value "{}" for one of the releases of package "{}"' + ' in repository "{}".'.format(download_details, info['name'], self.repo_url) + ) + + if downloads is False: + raise ProviderException( + 'No valid semver tags found at "{}" for package "{}"' + ' in repository 
"{}".'.format(download_details, info['name'], self.repo_url) + ) + + for download in downloads: + download.update(download_info) + info['releases'].append(download) + + # gather download_info from releases + for (base_url, extra), asset_templates in staged_releases.items(): + for client in clients: + downloads = client.download_info_from_releases(base_url, asset_templates, extra) + if downloads is not None: + info['releases'].extend(downloads) + break + + # Empty releases means package is unavailable on current platform or for version of ST + if not info['releases']: + continue + + info['releases'] = version_sort(info['releases'], 'platforms', reverse=True) + + for field in ('previous_names', 'labels'): + if field not in info: + info[field] = [] + + if 'readme' in info: + info['readme'] = update_url(resolve_url(self.repo_url, info['readme']), debug) + + for field in ('description', 'readme', 'issues', 'donate', 'buy'): + if field not in info: + info[field] = None + + if 'homepage' not in info: + info['homepage'] = details if details else self.repo_url + + if 'last_modified' not in info: + # Extract a date from the newest release + date = '1970-01-01 00:00:00' + for release in info['releases']: + release_date = release.get('date') + if release_date and isinstance(release_date, str) and release_date > date: + date = release_date + info['last_modified'] = date + + output[info['name']] = info + yield (info['name'], info) + + except (DownloaderException, ClientException, ProviderException) as e: + self.broken_packages[info['name']] = e + + self.packages = output + + def get_sources(self): + """ + Return a list of current URLs that are directly referenced by the repo + + :return: + A list of URLs and/or file paths + """ + + if not self.fetch(): + return [] + + output = [self.repo_url] + for package in self.repo_info['packages']: + details = package.get('details') + if details: + output.append(details) + return output + + def get_renamed_packages(self): + """:return: A 
dict of the packages that have been renamed""" + + if not self.fetch(): + return {} + + output = {} + for package in self.repo_info['packages']: + if 'previous_names' not in package: + continue + + previous_names = package['previous_names'] + if not isinstance(previous_names, list): + previous_names = [previous_names] + + for previous_name in previous_names: + output[previous_name] = package['name'] + + return output diff --git a/tasks/lib/package_control/providers/provider_exception.py b/tasks/lib/package_control/providers/provider_exception.py new file mode 100644 index 0000000..e327f59 --- /dev/null +++ b/tasks/lib/package_control/providers/provider_exception.py @@ -0,0 +1,54 @@ +class ProviderException(Exception): + + """If a provider could not return information""" + + def __bytes__(self): + return self.__str__().encode('utf-8') + + +class GitProviderUserInfoException(ProviderException): + """ + Exception for signalling user information download error. + + The exception is used to indicate a given URL not being in expected form + to be used by given provider to download user info from. + """ + + def __init__(self, provider): + self.provider_name = provider.__class__.__name__ + self.url = provider.repo_url + + def __str__(self): + return '%s unable to fetch user information from "%s".' % (self.provider_name, self.url) + + +class GitProviderRepoInfoException(ProviderException): + """ + Exception for signalling repository information download error. + + The exception is used to indicate a given URL not being in expected form + to be used by given provider to download repo info from. + """ + + def __init__(self, provider): + self.provider_name = provider.__class__.__name__ + self.url = provider.repo_url + + def __str__(self): + return '%s unable to fetch repo information from "%s".' % (self.provider_name, self.url) + + +class GitProviderDownloadInfoException(ProviderException): + """ + Exception for signalling download information download error. 
+ + The exception is used to indicate a given URL not being in expected form + to be used by given provider to download release information from. + """ + + def __init__(self, provider, url=None): + self.provider_name = provider.__class__.__name__ + self.url = url or provider.repo_url + + def __str__(self): + return '%s unable to fetch download information from "%s".' % (self.provider_name, self.url) diff --git a/tasks/lib/package_control/providers/schema_version.py b/tasks/lib/package_control/providers/schema_version.py new file mode 100644 index 0000000..3e5efbb --- /dev/null +++ b/tasks/lib/package_control/providers/schema_version.py @@ -0,0 +1,33 @@ +from ..pep440 import PEP440Version + + +class SchemaVersion(PEP440Version): + supported_versions = ('2.0', '3.0.0', '4.0.0') + + def __init__(self, ver): + """ + Custom version string parsing to maintain backward compatibility. + + SemVer needs all of major, minor and patch parts being present in `ver`. + + :param ver: + An integer, float or string containing a version string. + + :returns: + List of (major, minor, patch) + """ + try: + if isinstance(ver, int): + ver = float(ver) + if isinstance(ver, float): + ver = str(ver) + except ValueError: + raise ValueError('the "schema_version" is not a valid number.') + + if ver not in self.supported_versions: + raise ValueError( + 'the "schema_version" is not recognized. Must be one of: %s or %s.' + % (', '.join(self.supported_versions[:-1]), self.supported_versions[-1]) + ) + + super().__init__(ver) diff --git a/tasks/lib/package_control/readme.md b/tasks/lib/package_control/readme.md new file mode 100644 index 0000000..23eff38 --- /dev/null +++ b/tasks/lib/package_control/readme.md @@ -0,0 +1,60 @@ +# Package Control + +The [Sublime Text](http://www.sublimetext.com) package manager. 
Visit +[packagecontrol.io](https://packagecontrol.io) for +[installation instructions](https://packagecontrol.io/installation), a list of +[available packages](https://packagecontrol.io/browse) and detailed +[documentation](https://packagecontrol.io/docs). + +## License + +Package Control is licensed under the MIT license. + +All of the source code (except for `package_control/semver.py`), is under the +license: + +``` +Copyright (c) 2011-2020 Will Bond + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+``` + +`package_control/semver.py` is under the license: + +``` +Copyright (c) 2013 Zachary King, FichteFoll + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +``` diff --git a/tasks/lib/package_control/show_error.py b/tasks/lib/package_control/show_error.py new file mode 100644 index 0000000..9ea608e --- /dev/null +++ b/tasks/lib/package_control/show_error.py @@ -0,0 +1,24 @@ +# Not shared with Package Control + +from . 
import text
+
+
+def show_error(string, params=None, strip=True, indent=None):
+ """
+ Sends an error message to rollbar after running the string through
+ text.format()
+
+ :param string:
+ The error to display
+
+ :param params:
+ Params to interpolate into the string
+
+ :param strip:
+ If the last newline in the string should be removed
+
+ :param indent:
+ If all lines should be indented by a set indent after being dedented
+ """
+
+ print(text.format(string, params, strip=strip, indent=indent))
diff --git a/tasks/lib/package_control/sys_path.py b/tasks/lib/package_control/sys_path.py
new file mode 100644
index 0000000..d182aef
--- /dev/null
+++ b/tasks/lib/package_control/sys_path.py
@@ -0,0 +1,18 @@
+# Not shared with Package Control
+
+import os
+
+__cache_path = os.path.join(os.path.expanduser('~'), '.package_control')
+
+
+def set_cache_dir(cache_path):
+ global __cache_path
+ __cache_path = cache_path
+
+
+def pc_cache_dir():
+ return __cache_path
+
+
+def user_config_dir():
+ return pc_cache_dir()
diff --git a/tasks/lib/package_control/text.py b/tasks/lib/package_control/text.py
new file mode 100644
index 0000000..103c20b
--- /dev/null
+++ b/tasks/lib/package_control/text.py
@@ -0,0 +1,64 @@
+from __future__ import unicode_literals
+
+from textwrap import dedent
+import re
+
+
+def format(string, params=None, strip=True, indent=None):
+ """
+ Takes a multi-line string and does the following:
+
+ - dedents
+ - removes a single leading newline if the second character is not a newline also
+ - converts newlines with text before and after into a single line
+ - removes a single trailing newline if the second-to-last character is not a newline also
+
+ :param string:
+ The string to format
+
+ :param params:
+ Params to interpolate into the string
+
+ :param strip:
+ If the last newline in the string should be removed
+
+ :param indent:
+ If all lines should be indented by a set indent after being dedented
+
+ :return:
+ The formatted string
+ """
+
+ 
output = string + + # Only dedent if not a single-line string. This allows for + # single-line-formatted string to be printed that include intentional + # whitespace. + if output.find('\n') != -1: + output = dedent(output) + + # If the string starts with just a newline, we want to trim it because + # it is a side-effect of the code formatting, but if there are two newlines + # then that means we intended there to be newlines at the beginning + if output[0] == '\n' and output[1] != '\n': + output = output[1:] + + # Unwrap lines, taking into account bulleted lists, ordered lists and + # underlines consisting of = signs + if output.find('\n') != -1: + output = re.sub(r'(?<=\S)\n(?=[^ \n\t\d\*\-=])', ' ', output) + + # By default we want to trim a single trailing newline from a string since + # that is likely from the code formatting, but that trimming is prevented + # if strip == False, or if there are two trailing newlines, which means we + # actually wanted whitespace at the end + if output[-1] == '\n' and strip and output[-2] != '\n': + output = output[0:-1] + + if params is not None: + output = output % params + + if indent is not None: + output = indent + output.replace('\n', '\n' + indent) + + return output