diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..037e15f2 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,31 @@ +# config for Dependabot updates -- see docs: +# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + +version: 2 +updates: + # python dependencies + - package-ecosystem: "pip" + directory: "/requirements/" + schedule: + interval: "daily" + labels: + - "update" + target-branch: "develop" + + # Dockerfile dependencies + - package-ecosystem: "docker" + directory: "/" + schedule: + interval: "daily" + labels: + - "update" + target-branch: "develop" + + # github actions used in .github/workflows/ + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" + labels: + - "update" + target-branch: "develop" diff --git a/.github/workflows/run_gravyvalet_tests.yml b/.github/workflows/run_gravyvalet_tests.yml new file mode 100644 index 00000000..fad88303 --- /dev/null +++ b/.github/workflows/run_gravyvalet_tests.yml @@ -0,0 +1,60 @@ +name: run_gravyvalet_tests + +on: + push: + pull_request: + workflow_dispatch: + +jobs: + run_gravyvalet_tests: + strategy: + fail-fast: false + matrix: # use to test upgrades before upgrading + python-version: ['3.12'] + postgres-version: ['15'] + runs-on: ubuntu-latest + services: + postgres: + image: postgres:${{ matrix.postgres-version }} + env: + POSTGRES_HOST_AUTH_METHOD: trust + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + steps: + - uses: actions/checkout@v4 + + - name: set up python${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: pip + cache-dependency-path: | + requirements/requirements.txt + requirements/dev-requirements.txt + + - name: install py dependencies + run: pip install -r requirements/dev-requirements.txt + + - name: set up pre-commit cache + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: pre-commit|${{ matrix.python-version }}|${{ hashFiles('.pre-commit-config.yaml') }} + + - name: run pre-commit checks + run: pre-commit run --all-files --show-diff-on-failure + + - name: run tests + run: python3 manage.py test + env: + DEBUG: 1 + POSTGRES_HOST: localhost + POSTGRES_DB: gravyvalettest + POSTGRES_USER: postgres + SECRET_KEY: oh-so-secret diff --git a/.gitignore b/.gitignore index c8e4233c..2a6d0162 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ .python-version db.sqlite3 __pycache__ +.venv diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..686a9c8c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,13 @@ +repos: + - repo: https://github.com/psf/black + rev: 23.11.0 + hooks: + - id: black + - repo: https://github.com/pycqa/flake8 + rev: 6.1.0 + hooks: + - id: flake8 + - repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..0495b9d9 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,18 @@ +# Use the official Python image as the base image +FROM python:3.12 + +# System Dependencies: +RUN apt-get update && apt-get install -y libpq-dev + +WORKDIR /code +COPY requirements/ /code/requirements/ + +# Python dependencies: +RUN pip3 install --no-cache-dir -r requirements/requirements.txt + +COPY . 
/code/ + +EXPOSE 8000 + +# Start the Django development server +CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"] \ No newline at end of file diff --git a/Makefile b/Makefile deleted file mode 100644 index 0c351143..00000000 --- a/Makefile +++ /dev/null @@ -1,10 +0,0 @@ -flake: - flake8 - -black: - black -S gravyvalet charon - -isort: - isort . - -lintall: black isort flake diff --git a/README.md b/README.md index 678b705b..8484199e 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,42 @@ -# Gravyvalet +![Center for Open Science Logo](https://mfr.osf.io/export?url=https://osf.io/download/24697/?direct=%26mode=render&format=2400x2400.jpeg) -A thicker, more hands-on counterpart to waterbutler. +# OSF Addon Service (GravyValet) -# Reason for being +Welcome to the Open Science Framework's base server for addon integration with our RESTful API (osf.io). This server acts as a gateway between the OSF and external APIs. Authenticated users or machines can access various resources through common file storage and citation management APIs via the OSF. Institutional members can also add their own integrations, tailoring addon usage to their specific communities. -The goal is to split out OSF addons into their own well-encapsulated service. This is the prototype/initial version +## Setting up GravyValet Locally -## Approach +1. Start your PostgreSQL and Django containers with `docker compose up -d`. +2. Enter the Django container: `docker compose exec addon_service /bin/bash`. +3. Migrate the existing models: `python3 manage.py migrate`. +4. Visit [http://0.0.0.0:8004/](http://0.0.0.0:8004/). -Mostly just started off by figuring out the necessary endpoints and putting in stubs. Then started inlining code from the OSF, chasing down things through base classes, decorators, and utility functions. Foolishly attempted to inline the actual django model code for addons. That broke me. Moved that into a side file and just stubbed out the called model code. Currently trying to fill out the stubs with simple impls & fixtures. +## Running Tests -Chose box as the first addon to implement, since it is one of the saner, less corner-casey addons. Not currently worrying to much about making it extensible, figure that's part of the actual dev. +To run tests, use the following command: -# Quickstart +```bash +python3 manage.py test +``` + +## Development Tips -It's a Django app. `gravyvalet` is the "root" app, but most of the work is being done in the `charon` app. +Optional but recommended: set up pre-commit hooks that will run formatters and linters on staged files. Install pre-commit using: -For no particular reason, I've chosen `8011` as the default gravyvalet port. +```bash +pip install pre-commit ``` -$ pip install -r requirements.txt -$ python manage.py runserver 8011 + +Then, run: + +```bash + +pre-commit install --allow-missing-config ``` +## Reporting Issues and Questions + +If you encounter a bug, have a technical question, or want to request a feature, please don't hesitate to contact us +at help@osf.io. While we may respond to questions through other channels, reaching out to us at help@osf.io ensures +that your feedback goes to the right person promptly. If you're considering posting an issue on our GitHub issues page, + we recommend sending it to help@osf.io instead. 
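For quick manual exploration of the endpoints this diff adds, the new `fill_garbage` management command and the JSON:API routes built in `addon_service/urls.py` suggest a workflow roughly like the sketch below. This is only a sketch: it assumes the compose setup exposes the dev server on port 8004 (as the README's setup steps imply) and that `DEBUG` is enabled, since `fill_garbage` refuses to run otherwise; the label `demo` is an arbitrary example.

```bash
# inside the container: docker compose exec addon_service /bin/bash
python3 manage.py migrate
python3 manage.py fill_garbage demo   # seeds one connected set of objects for the label "demo" (DEBUG only)

# routes are mounted under /v1/ (see app/urls.py); resource names come from each model's JSONAPIMeta
curl -H "Accept: application/vnd.api+json" http://0.0.0.0:8004/v1/internal-users/
curl -H "Accept: application/vnd.api+json" http://0.0.0.0:8004/v1/external-storage-services/
```

The route prefixes (`internal-users`, `external-storage-services`, and so on) are the `resource_name` values that `_urls_for_viewsets` reads from each viewset's serializer, so they track the models rather than being hard-coded.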
diff --git a/charon/__init__.py b/addon_service/__init__.py similarity index 100% rename from charon/__init__.py rename to addon_service/__init__.py diff --git a/addon_service/apps.py b/addon_service/apps.py new file mode 100644 index 00000000..e0583163 --- /dev/null +++ b/addon_service/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class AddonServiceConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "addon_service" diff --git a/charon/migrations/__init__.py b/addon_service/authorized_storage_account/__init__.py similarity index 100% rename from charon/migrations/__init__.py rename to addon_service/authorized_storage_account/__init__.py diff --git a/addon_service/authorized_storage_account/models.py b/addon_service/authorized_storage_account/models.py new file mode 100644 index 00000000..7ada099e --- /dev/null +++ b/addon_service/authorized_storage_account/models.py @@ -0,0 +1,32 @@ +# from django.contrib.postgres.fields import ArrayField +from django.db import models + +from addon_service.common.base_model import AddonsServiceBaseModel + + +class AuthorizedStorageAccount(AddonsServiceBaseModel): + # TODO: capabilities = ArrayField(...) + default_root_folder = models.CharField(blank=True) + + external_storage_service = models.ForeignKey( + "addon_service.ExternalStorageService", + on_delete=models.CASCADE, + related_name="authorized_storage_accounts", + ) + external_account = models.ForeignKey( + "addon_service.ExternalAccount", + on_delete=models.CASCADE, + related_name="authorized_storage_accounts", + ) + + class Meta: + verbose_name = "Authorized Storage Account" + verbose_name_plural = "Authorized Storage Accounts" + app_label = "addon_service" + + class JSONAPIMeta: + resource_name = "authorized-storage-accounts" + + @property + def account_owner(self): + return self.external_account.owner # TODO: prefetch/select_related diff --git a/addon_service/authorized_storage_account/serializers.py b/addon_service/authorized_storage_account/serializers.py new file mode 100644 index 00000000..aa47cf30 --- /dev/null +++ b/addon_service/authorized_storage_account/serializers.py @@ -0,0 +1,55 @@ +from rest_framework_json_api import serializers +from rest_framework_json_api.relations import ( + HyperlinkedRelatedField, + ResourceRelatedField, +) +from rest_framework_json_api.utils import get_resource_type_from_model + +from addon_service.models import ( + AuthorizedStorageAccount, + ConfiguredStorageAddon, + ExternalStorageService, + InternalUser, +) + + +RESOURCE_NAME = get_resource_type_from_model(AuthorizedStorageAccount) + + +class AuthorizedStorageAccountSerializer(serializers.HyperlinkedModelSerializer): + url = serializers.HyperlinkedIdentityField(view_name=f"{RESOURCE_NAME}-detail") + account_owner = HyperlinkedRelatedField( + many=False, + queryset=InternalUser.objects.all(), + related_link_view_name=f"{RESOURCE_NAME}-related", + ) + external_storage_service = ResourceRelatedField( + queryset=ExternalStorageService.objects.all(), + many=False, + related_link_view_name=f"{RESOURCE_NAME}-related", + ) + configured_storage_addons = HyperlinkedRelatedField( + many=True, + queryset=ConfiguredStorageAddon.objects.all(), + related_link_view_name=f"{RESOURCE_NAME}-related", + ) + + included_serializers = { + "account_owner": "addon_service.serializers.InternalUserSerializer", + "external_storage_service": ( + "addon_service.serializers.ExternalStorageServiceSerializer" + ), + "configured_storage_addons": ( + 
"addon_service.serializers.ConfiguredStorageAddonSerializer" + ), + } + + class Meta: + model = AuthorizedStorageAccount + fields = [ + "url", + "account_owner", + "configured_storage_addons", + "default_root_folder", + "external_storage_service", + ] diff --git a/addon_service/authorized_storage_account/views.py b/addon_service/authorized_storage_account/views.py new file mode 100644 index 00000000..ec2a7a1f --- /dev/null +++ b/addon_service/authorized_storage_account/views.py @@ -0,0 +1,10 @@ +from rest_framework_json_api.views import ModelViewSet + +from .models import AuthorizedStorageAccount +from .serializers import AuthorizedStorageAccountSerializer + + +class AuthorizedStorageAccountViewSet(ModelViewSet): + queryset = AuthorizedStorageAccount.objects.all() + serializer_class = AuthorizedStorageAccountSerializer + # TODO: permissions_classes diff --git a/gravyvalet/__init__.py b/addon_service/common/__init__.py similarity index 100% rename from gravyvalet/__init__.py rename to addon_service/common/__init__.py diff --git a/addon_service/common/base_model.py b/addon_service/common/base_model.py new file mode 100644 index 00000000..52901ad5 --- /dev/null +++ b/addon_service/common/base_model.py @@ -0,0 +1,22 @@ +from django.db import models +from django.utils import timezone + + +class AddonsServiceBaseModel(models.Model): + created = models.DateTimeField(editable=False) + modified = models.DateTimeField() + + def save(self, *args, **kwargs): + if not self.id: + self.created = timezone.now() + self.modified = timezone.now() + super().save(*args, **kwargs) + + def __str__(self): + return f"<{self.__class__.__qualname__}(pk={self.pk})>" + + def __repr__(self): + return self.__str__() + + class Meta: + abstract = True diff --git a/addon_service/configured_storage_addon/__init__.py b/addon_service/configured_storage_addon/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/configured_storage_addon/models.py b/addon_service/configured_storage_addon/models.py new file mode 100644 index 00000000..f1c68fa4 --- /dev/null +++ b/addon_service/configured_storage_addon/models.py @@ -0,0 +1,26 @@ +from django.db import models + +from addon_service.common.base_model import AddonsServiceBaseModel + + +class ConfiguredStorageAddon(AddonsServiceBaseModel): + root_folder = models.CharField() + + authorized_storage_account = models.ForeignKey( + "addon_service.AuthorizedStorageAccount", + on_delete=models.CASCADE, + related_name="configured_storage_addons", + ) + internal_resource = models.ForeignKey( + "addon_service.InternalResource", + on_delete=models.CASCADE, + related_name="configured_storage_addons", + ) + + class Meta: + verbose_name = "Configured Storage Addon" + verbose_name_plural = "Configured Storage Addons" + app_label = "addon_service" + + class JSONAPIMeta: + resource_name = "configured-storage-addons" diff --git a/addon_service/configured_storage_addon/serializers.py b/addon_service/configured_storage_addon/serializers.py new file mode 100644 index 00000000..832be36a --- /dev/null +++ b/addon_service/configured_storage_addon/serializers.py @@ -0,0 +1,41 @@ +from rest_framework_json_api import serializers +from rest_framework_json_api.relations import ResourceRelatedField +from rest_framework_json_api.utils import get_resource_type_from_model + +from addon_service.models import ( + ConfiguredStorageAddon, + InternalResource, +) + + +RESOURCE_NAME = get_resource_type_from_model(ConfiguredStorageAddon) + + +class 
ConfiguredStorageAddonSerializer(serializers.HyperlinkedModelSerializer): + url = serializers.HyperlinkedIdentityField(view_name=f"{RESOURCE_NAME}-detail") + authorized_storage_account = ResourceRelatedField( + queryset=ConfiguredStorageAddon.objects.all(), + many=False, + related_link_view_name=f"{RESOURCE_NAME}-related", + ) + internal_resource = ResourceRelatedField( + queryset=InternalResource.objects.all(), + many=False, + related_link_view_name=f"{RESOURCE_NAME}-related", + ) + + included_serializers = { + "authorized_storage_account": ( + "addon_service.serializers.AuthorizedStorageAccountSerializer" + ), + "internal_resource": "addon_service.serializers.InternalResourceSerializer", + } + + class Meta: + model = ConfiguredStorageAddon + fields = [ + "url", + "root_folder", + "authorized_storage_account", + "internal_resource", + ] diff --git a/addon_service/configured_storage_addon/views.py b/addon_service/configured_storage_addon/views.py new file mode 100644 index 00000000..555bffbd --- /dev/null +++ b/addon_service/configured_storage_addon/views.py @@ -0,0 +1,10 @@ +from rest_framework_json_api.views import ModelViewSet + +from .models import ConfiguredStorageAddon +from .serializers import ConfiguredStorageAddonSerializer + + +class ConfiguredStorageAddonViewSet(ModelViewSet): + queryset = ConfiguredStorageAddon.objects.all() + serializer_class = ConfiguredStorageAddonSerializer + # TODO: permissions_classes diff --git a/addon_service/external_account/__init__.py b/addon_service/external_account/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/external_account/models.py b/addon_service/external_account/models.py new file mode 100644 index 00000000..68497de1 --- /dev/null +++ b/addon_service/external_account/models.py @@ -0,0 +1,30 @@ +from django.db import models + +from addon_service.common.base_model import AddonsServiceBaseModel + + +class ExternalAccount(AddonsServiceBaseModel): + # The user's ID on the remote service + remote_account_id = models.CharField() + remote_account_display_name = models.CharField() + + external_service = models.ForeignKey( + "addon_service.ExternalService", + on_delete=models.CASCADE, + related_name="external_accounts", + ) + owner = models.ForeignKey( + "addon_service.InternalUser", + on_delete=models.CASCADE, + related_name="external_accounts", + ) + credentials = models.ForeignKey( + "addon_service.ExternalCredentials", + on_delete=models.CASCADE, + related_name="external_accounts", + ) + + class Meta: + verbose_name = "External Account" + verbose_name_plural = "External Accounts" + app_label = "addon_service" diff --git a/addon_service/external_credentials/__init__.py b/addon_service/external_credentials/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/external_credentials/models.py b/addon_service/external_credentials/models.py new file mode 100644 index 00000000..d0f6ee5f --- /dev/null +++ b/addon_service/external_credentials/models.py @@ -0,0 +1,22 @@ +from django.db import models + +from addon_service.common.base_model import AddonsServiceBaseModel + + +class ExternalCredentials(AddonsServiceBaseModel): + # TODO: Settle on encryption solution + oauth_key = models.CharField(blank=True, null=True) + + # For OAuth1, this is usually the "oauth_token_secret" + # For OAuth2, this is not used + oauth_secret = models.CharField(blank=True, null=True) + + # Used for OAuth2 only + refresh_token = models.CharField(blank=True, null=True) + date_last_refreshed = 
models.DateTimeField(blank=True, null=True) + expires_at = models.DateTimeField(blank=True, null=True) + + class Meta: + verbose_name = "External Credentials" + verbose_name_plural = "External Credentials" + app_label = "addon_service" diff --git a/addon_service/external_service/__init__.py b/addon_service/external_service/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/external_service/models.py b/addon_service/external_service/models.py new file mode 100644 index 00000000..9c5a3625 --- /dev/null +++ b/addon_service/external_service/models.py @@ -0,0 +1,13 @@ +from django.db import models + +from addon_service.common.base_model import AddonsServiceBaseModel + + +# TODO: consider another name +class ExternalService(AddonsServiceBaseModel): + name = models.CharField(null=False) + + class Meta: + verbose_name = "External Service" + verbose_name_plural = "External Services" + app_label = "addon_service" diff --git a/addon_service/external_storage_service/__init__.py b/addon_service/external_storage_service/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/external_storage_service/models.py b/addon_service/external_storage_service/models.py new file mode 100644 index 00000000..8a2dee1b --- /dev/null +++ b/addon_service/external_storage_service/models.py @@ -0,0 +1,24 @@ +from django.db import models + +from addon_service.common.base_model import AddonsServiceBaseModel + + +class ExternalStorageService(AddonsServiceBaseModel): + max_concurrent_downloads = models.IntegerField(null=False) + max_upload_mb = models.IntegerField(null=False) + + auth_uri = models.URLField(null=False) + + external_service = models.ForeignKey( + "addon_service.ExternalService", + on_delete=models.CASCADE, + related_name="external_storage_services", + ) + + class Meta: + verbose_name = "External Storage Service" + verbose_name_plural = "External Storage Services" + app_label = "addon_service" + + class JSONAPIMeta: + resource_name = "external-storage-services" diff --git a/addon_service/external_storage_service/serializers.py b/addon_service/external_storage_service/serializers.py new file mode 100644 index 00000000..0c05520c --- /dev/null +++ b/addon_service/external_storage_service/serializers.py @@ -0,0 +1,37 @@ +from rest_framework_json_api import serializers +from rest_framework_json_api.relations import HyperlinkedRelatedField +from rest_framework_json_api.utils import get_resource_type_from_model + +from addon_service.models import ( + AuthorizedStorageAccount, + ExternalStorageService, +) + + +RESOURCE_NAME = get_resource_type_from_model(ExternalStorageService) + + +class ExternalStorageServiceSerializer(serializers.HyperlinkedModelSerializer): + url = serializers.HyperlinkedIdentityField(view_name=f"{RESOURCE_NAME}-detail") + + authorized_storage_accounts = HyperlinkedRelatedField( + many=True, + queryset=AuthorizedStorageAccount.objects.all(), + related_link_view_name=f"{RESOURCE_NAME}-related", + ) + + included_serializers = { + "authorized_storage_accounts": ( + "addon_service.serializers.AuthorizedStorageAccountSerializer" + ), + } + + class Meta: + model = ExternalStorageService + fields = [ + "url", + "max_concurrent_downloads", + "max_upload_mb", + "auth_uri", + "authorized_storage_accounts", + ] diff --git a/addon_service/external_storage_service/views.py b/addon_service/external_storage_service/views.py new file mode 100644 index 00000000..73b11c3b --- /dev/null +++ b/addon_service/external_storage_service/views.py @@ -0,0 +1,10 @@ 
+from rest_framework_json_api.views import ModelViewSet + +from .models import ExternalStorageService +from .serializers import ExternalStorageServiceSerializer + + +class ExternalStorageServiceViewSet(ModelViewSet): + queryset = ExternalStorageService.objects.all() + serializer_class = ExternalStorageServiceSerializer + # TODO: permissions_classes diff --git a/addon_service/internal_resource/__init__.py b/addon_service/internal_resource/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/internal_resource/models.py b/addon_service/internal_resource/models.py new file mode 100644 index 00000000..5b9162bd --- /dev/null +++ b/addon_service/internal_resource/models.py @@ -0,0 +1,15 @@ +from django.db import models + +from addon_service.common.base_model import AddonsServiceBaseModel + + +class InternalResource(AddonsServiceBaseModel): + resource_uri = models.URLField(unique=True, db_index=True, null=False) + + class Meta: + verbose_name = "Internal Resource" + verbose_name_plural = "Internal Resources" + app_label = "addon_service" + + class JSONAPIMeta: + resource_name = "internal-resources" diff --git a/addon_service/internal_resource/serializers.py b/addon_service/internal_resource/serializers.py new file mode 100644 index 00000000..035097d7 --- /dev/null +++ b/addon_service/internal_resource/serializers.py @@ -0,0 +1,34 @@ +from rest_framework_json_api import serializers +from rest_framework_json_api.relations import HyperlinkedRelatedField +from rest_framework_json_api.utils import get_resource_type_from_model + +from addon_service.models import ( + ConfiguredStorageAddon, + InternalResource, +) + + +RESOURCE_NAME = get_resource_type_from_model(InternalResource) + + +class InternalResourceSerializer(serializers.HyperlinkedModelSerializer): + url = serializers.HyperlinkedIdentityField(view_name=f"{RESOURCE_NAME}-detail") + configured_storage_addons = HyperlinkedRelatedField( + many=True, + queryset=ConfiguredStorageAddon.objects.all(), + related_link_view_name=f"{RESOURCE_NAME}-related", + ) + + included_serializers = { + "configured_storage_addons": ( + "addon_service.serializers.ConfiguredStorageAddonSerializer" + ), + } + + class Meta: + model = InternalResource + fields = [ + "url", + "resource_uri", + "configured_storage_addons", + ] diff --git a/addon_service/internal_resource/views.py b/addon_service/internal_resource/views.py new file mode 100644 index 00000000..040f946c --- /dev/null +++ b/addon_service/internal_resource/views.py @@ -0,0 +1,10 @@ +from rest_framework_json_api.views import ModelViewSet + +from .models import InternalResource +from .serializers import InternalResourceSerializer + + +class InternalResourceViewSet(ModelViewSet): # TODO: read-only + queryset = InternalResource.objects.all() + serializer_class = InternalResourceSerializer + # TODO: permissions_classes diff --git a/addon_service/internal_user/__init__.py b/addon_service/internal_user/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/internal_user/models.py b/addon_service/internal_user/models.py new file mode 100644 index 00000000..18b231b9 --- /dev/null +++ b/addon_service/internal_user/models.py @@ -0,0 +1,22 @@ +from django.db import models + +from addon_service.authorized_storage_account.models import AuthorizedStorageAccount +from addon_service.common.base_model import AddonsServiceBaseModel + + +class InternalUser(AddonsServiceBaseModel): + user_uri = models.URLField(unique=True, db_index=True, null=False) + + @property + def 
authorized_storage_accounts(self): + return AuthorizedStorageAccount.objects.filter( + external_account__owner=self, + ) + + class Meta: + verbose_name = "Internal User" + verbose_name_plural = "Internal Users" + app_label = "addon_service" + + class JSONAPIMeta: + resource_name = "internal-users" diff --git a/addon_service/internal_user/serializers.py b/addon_service/internal_user/serializers.py new file mode 100644 index 00000000..7c609a9a --- /dev/null +++ b/addon_service/internal_user/serializers.py @@ -0,0 +1,35 @@ +from rest_framework_json_api import serializers +from rest_framework_json_api.relations import HyperlinkedRelatedField +from rest_framework_json_api.utils import get_resource_type_from_model + +from addon_service.models import ( + AuthorizedStorageAccount, + InternalUser, +) + + +RESOURCE_NAME = get_resource_type_from_model(InternalUser) + + +class InternalUserSerializer(serializers.HyperlinkedModelSerializer): + url = serializers.HyperlinkedIdentityField(view_name=f"{RESOURCE_NAME}-detail") + + authorized_storage_accounts = HyperlinkedRelatedField( + many=True, + queryset=AuthorizedStorageAccount.objects.all(), + related_link_view_name=f"{RESOURCE_NAME}-related", + ) + + included_serializers = { + "authorized_storage_accounts": ( + "addon_service.serializers.AuthorizedStorageAccountSerializer" + ), + } + + class Meta: + model = InternalUser + fields = [ + "url", + "user_uri", + "authorized_storage_accounts", + ] diff --git a/addon_service/internal_user/views.py b/addon_service/internal_user/views.py new file mode 100644 index 00000000..387cdae7 --- /dev/null +++ b/addon_service/internal_user/views.py @@ -0,0 +1,10 @@ +from rest_framework_json_api.views import ModelViewSet + +from .models import InternalUser +from .serializers import InternalUserSerializer + + +class InternalUserViewSet(ModelViewSet): # TODO: read-only + queryset = InternalUser.objects.all() + serializer_class = InternalUserSerializer + # TODO: permissions_classes diff --git a/addon_service/management/commands/__init__.py b/addon_service/management/commands/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/management/commands/fill_garbage.py b/addon_service/management/commands/fill_garbage.py new file mode 100644 index 00000000..8a58c108 --- /dev/null +++ b/addon_service/management/commands/fill_garbage.py @@ -0,0 +1,47 @@ +from django.conf import settings +from django.core.management.base import LabelCommand + +from addon_service import models as db + + +class Command(LabelCommand): + """add garbage to the database for local/manual testing + + provide one or more labels; will create one set of connected objects for each label + """ + + def handle_label(self, label, **options): + if not settings.DEBUG: + raise Exception("must have DEBUG set to eat garbage") + _es = db.ExternalService.objects.create(name=f"entity-{label}") + _ess = db.ExternalStorageService.objects.create( + max_concurrent_downloads=2, + max_upload_mb=2, + auth_uri=f"http://foo.example/{label}", + external_service=_es, + ) + for _i in range(3): + _iu, _ = db.InternalUser.objects.get_or_create( + user_uri=f"http://osf.example/u{label}{_i}", + ) + _ec = db.ExternalCredentials.objects.create() + _ea = db.ExternalAccount.objects.create( + remote_account_id=label, + remote_account_display_name=label, + external_service=_es, + owner=_iu, + credentials=_ec, + ) + _asa = db.AuthorizedStorageAccount.objects.create( + external_storage_service=_ess, + external_account=_ea, + ) + for _j in range(5): + _ir, _ = 
db.InternalResource.objects.get_or_create( + resource_uri=f"http://osf.example/r{label}{_j}", + ) + _csa = db.ConfiguredStorageAddon.objects.create( + authorized_storage_account=_asa, + internal_resource=_ir, + ) + return str(_csa) diff --git a/addon_service/migrations/0001_initial.py b/addon_service/migrations/0001_initial.py new file mode 100644 index 00000000..5639ad8c --- /dev/null +++ b/addon_service/migrations/0001_initial.py @@ -0,0 +1,250 @@ +# Generated by Django 4.2.7 on 2023-11-28 19:41 + +import django.db.models.deletion +from django.db import ( + migrations, + models, +) + + +class Migration(migrations.Migration): + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="AuthorizedStorageAccount", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created", models.DateTimeField(editable=False)), + ("modified", models.DateTimeField()), + ("default_root_folder", models.CharField(blank=True)), + ], + options={ + "verbose_name": "Authorized Storage Account", + "verbose_name_plural": "Authorized Storage Accounts", + }, + ), + migrations.CreateModel( + name="ExternalCredentials", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created", models.DateTimeField(editable=False)), + ("modified", models.DateTimeField()), + ("oauth_key", models.CharField(blank=True, null=True)), + ("oauth_secret", models.CharField(blank=True, null=True)), + ("refresh_token", models.CharField(blank=True, null=True)), + ("date_last_refreshed", models.DateTimeField(blank=True, null=True)), + ("expires_at", models.DateTimeField(blank=True, null=True)), + ], + options={ + "verbose_name": "External Credentials", + "verbose_name_plural": "External Credentials", + }, + ), + migrations.CreateModel( + name="ExternalService", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created", models.DateTimeField(editable=False)), + ("modified", models.DateTimeField()), + ("name", models.CharField()), + ], + options={ + "verbose_name": "External Service", + "verbose_name_plural": "External Services", + }, + ), + migrations.CreateModel( + name="InternalResource", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created", models.DateTimeField(editable=False)), + ("modified", models.DateTimeField()), + ("resource_uri", models.URLField(db_index=True, unique=True)), + ], + options={ + "verbose_name": "Internal Resource", + "verbose_name_plural": "Internal Resources", + }, + ), + migrations.CreateModel( + name="InternalUser", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created", models.DateTimeField(editable=False)), + ("modified", models.DateTimeField()), + ("user_uri", models.URLField(db_index=True, unique=True)), + ], + options={ + "verbose_name": "Internal User", + "verbose_name_plural": "Internal Users", + }, + ), + migrations.CreateModel( + name="ExternalStorageService", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created", models.DateTimeField(editable=False)), + ("modified", models.DateTimeField()), + ("max_concurrent_downloads", 
models.IntegerField()), + ("max_upload_mb", models.IntegerField()), + ("auth_uri", models.URLField()), + ( + "external_service", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="addon_service.externalservice", + ), + ), + ], + options={ + "verbose_name": "External Storage Service", + "verbose_name_plural": "External Storage Services", + }, + ), + migrations.CreateModel( + name="ExternalAccount", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created", models.DateTimeField(editable=False)), + ("modified", models.DateTimeField()), + ("remote_account_id", models.CharField()), + ("remote_account_display_name", models.CharField()), + ( + "credentials", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="addon_service.externalcredentials", + ), + ), + ( + "external_service", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="addon_service.externalservice", + ), + ), + ( + "owner", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="addon_service.internaluser", + ), + ), + ], + options={ + "verbose_name": "External Account", + "verbose_name_plural": "External Accounts", + }, + ), + migrations.CreateModel( + name="ConfiguredStorageAddon", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created", models.DateTimeField(editable=False)), + ("modified", models.DateTimeField()), + ("root_folder", models.CharField()), + ( + "authorized_storage_account", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="addon_service.authorizedstorageaccount", + ), + ), + ( + "internal_resource", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="configured_storage_addons", + to="addon_service.internalresource", + ), + ), + ], + options={ + "verbose_name": "Configured Storage Addon", + "verbose_name_plural": "Configured Storage Addons", + }, + ), + migrations.AddField( + model_name="authorizedstorageaccount", + name="external_account", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="addon_service.externalaccount", + ), + ), + migrations.AddField( + model_name="authorizedstorageaccount", + name="external_storage_service", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="addon_service.externalstorageservice", + ), + ), + ] diff --git a/addon_service/migrations/__init__.py b/addon_service/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/models.py b/addon_service/models.py new file mode 100644 index 00000000..c4dd90c3 --- /dev/null +++ b/addon_service/models.py @@ -0,0 +1,24 @@ +""" Import models here so they auto-detect for makemigrations """ +from addon_service.authorized_storage_account.models import AuthorizedStorageAccount +from addon_service.configured_storage_addon.models import ConfiguredStorageAddon +from addon_service.external_account.models import ExternalAccount +from addon_service.external_credentials.models import ExternalCredentials +from addon_service.external_service.models import ExternalService +from addon_service.external_storage_service.models import ExternalStorageService +from addon_service.internal_resource.models import InternalResource +from addon_service.internal_user.models import InternalUser + + +__all__ = ( + "AuthorizedStorageAccount", + # 
'AuthorizedComputeAccount', + "ConfiguredStorageAddon", + # 'ConfiguredComputeAddon', + "ExternalAccount", + "ExternalCredentials", + "ExternalService", + "ExternalStorageService", + # 'ExternalComputeService', + "InternalResource", + "InternalUser", +) diff --git a/addon_service/serializers.py b/addon_service/serializers.py new file mode 100644 index 00000000..a3051d06 --- /dev/null +++ b/addon_service/serializers.py @@ -0,0 +1,21 @@ +""" Import serializers here for convenience """ +from addon_service.authorized_storage_account.serializers import ( + AuthorizedStorageAccountSerializer, +) +from addon_service.configured_storage_addon.serializers import ( + ConfiguredStorageAddonSerializer, +) +from addon_service.external_storage_service.serializers import ( + ExternalStorageServiceSerializer, +) +from addon_service.internal_resource.serializers import InternalResourceSerializer +from addon_service.internal_user.serializers import InternalUserSerializer + + +__all__ = ( + "AuthorizedStorageAccountSerializer", + "ConfiguredStorageAddonSerializer", + "ExternalStorageServiceSerializer", + "InternalResourceSerializer", + "InternalUserSerializer", +) diff --git a/addon_service/tests/__init__.py b/addon_service/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/addon_service/tests/factories.py b/addon_service/tests/factories.py new file mode 100644 index 00000000..b3c43ebf --- /dev/null +++ b/addon_service/tests/factories.py @@ -0,0 +1,8 @@ +from factory.django import DjangoModelFactory + +from addon_service.internal_user.models import InternalUser + + +class InternalUserFactory(DjangoModelFactory): + class Meta: + model = InternalUser diff --git a/addon_service/tests/test_base.py b/addon_service/tests/test_base.py new file mode 100644 index 00000000..dc308e39 --- /dev/null +++ b/addon_service/tests/test_base.py @@ -0,0 +1,15 @@ +from django.test import TestCase + +from .factories import InternalUserFactory + + +class TestTestCase(TestCase): + def test_tests(self): + """Simple base test to test test infrastructure""" + pass + + def test_model(self): + """Simple base test to test test models""" + user = InternalUserFactory(user_uri="http://osf.example/hurts") + user.save() + assert user.user_uri == "http://osf.example/hurts" diff --git a/addon_service/tests/test_internal_user.py b/addon_service/tests/test_internal_user.py new file mode 100644 index 00000000..d6d252a0 --- /dev/null +++ b/addon_service/tests/test_internal_user.py @@ -0,0 +1,14 @@ +from django.urls import reverse +from rest_framework.test import APITestCase + +from addon_service.tests.factories import InternalUserFactory + + +class TestInternalUser(APITestCase): + def test_get(self): + _user = InternalUserFactory(user_uri="http://osf.example/hurts1") + _resp = self.client.get( + reverse("internal-users-detail", kwargs={"pk": _user.pk}), + ) + assert _resp.status_code == 200 + assert _resp.data["user_uri"] == "http://osf.example/hurts1" diff --git a/addon_service/urls.py b/addon_service/urls.py new file mode 100644 index 00000000..c80c6240 --- /dev/null +++ b/addon_service/urls.py @@ -0,0 +1,44 @@ +from django.urls import path +from rest_framework.routers import SimpleRouter +from rest_framework_json_api.utils import get_resource_type_from_serializer + +from addon_service import views + + +def _urls_for_viewsets(*viewsets): + """returns urlpatterns for viewsets that each correspond to a resource type + + includes patterns for jsonapi-style relationships + """ + _router = SimpleRouter() + 
_additional_urlpatterns = [] + for _viewset in viewsets: + # NOTE: assumes each viewset corresponds to a distinct resource_name + _resource_name = get_resource_type_from_serializer(_viewset.serializer_class) + _router.register( + prefix=_resource_name, + viewset=_viewset, + basename=_resource_name, + ) + # add route for all relationship "related" links + # https://django-rest-framework-json-api.readthedocs.io/en/stable/usage.html#related-urls + _additional_urlpatterns.append( + path( + f"{_resource_name}///", + _viewset.as_view({"get": "retrieve_related"}), + name=f"{_resource_name}-related", + ), + ) + return [ + *_router.urls, + *_additional_urlpatterns, + ] + + +urlpatterns = _urls_for_viewsets( + views.AuthorizedStorageAccountViewSet, + views.ConfiguredStorageAddonViewSet, + views.ExternalStorageServiceViewSet, + views.InternalResourceViewSet, + views.InternalUserViewSet, +) diff --git a/addon_service/views.py b/addon_service/views.py new file mode 100644 index 00000000..6331fa92 --- /dev/null +++ b/addon_service/views.py @@ -0,0 +1,17 @@ +""" Import views/viewsets here for convenience """ +from addon_service.authorized_storage_account.views import ( + AuthorizedStorageAccountViewSet, +) +from addon_service.configured_storage_addon.views import ConfiguredStorageAddonViewSet +from addon_service.external_storage_service.views import ExternalStorageServiceViewSet +from addon_service.internal_resource.views import InternalResourceViewSet +from addon_service.internal_user.views import InternalUserViewSet + + +__all__ = ( + "AuthorizedStorageAccountViewSet", + "ConfiguredStorageAddonViewSet", + "ExternalStorageServiceViewSet", + "InternalResourceViewSet", + "InternalUserViewSet", +) diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/gravyvalet/asgi.py b/app/asgi.py similarity index 57% rename from gravyvalet/asgi.py rename to app/asgi.py index e1b6d0ec..2bf746df 100644 --- a/gravyvalet/asgi.py +++ b/app/asgi.py @@ -1,16 +1,17 @@ """ -ASGI config for gravyvalet project. +ASGI config for milkmaid project. It exposes the ASGI callable as a module-level variable named ``application``. For more information on this file, see -https://docs.djangoproject.com/en/4.1/howto/deployment/asgi/ +https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/ """ import os from django.core.asgi import get_asgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gravyvalet.settings') + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings") application = get_asgi_application() diff --git a/app/env.py b/app/env.py new file mode 100644 index 00000000..d85a45a7 --- /dev/null +++ b/app/env.py @@ -0,0 +1,18 @@ +"""settings from environment variables +""" +import os + + +POSTGRES_DB = os.environ.get("POSTGRES_DB") +POSTGRES_USER = os.environ.get("POSTGRES_USER") +POSTGRES_PASSWORD = os.environ.get("POSTGRES_PASSWORD") +POSTGRES_HOST = os.environ.get("POSTGRES_HOST") +POSTGRES_PORT = os.environ.get("POSTGRES_PORT", "5432") + +SECRET_KEY = os.environ.get("SECRET_KEY") + +# any non-empty value enables debug mode: +DEBUG = bool(os.environ.get("DEBUG")) + +# comma-separated list: +ALLOWED_HOSTS = os.environ.get("ALLOWED_HOSTS", "").split(",") diff --git a/app/settings.py b/app/settings.py new file mode 100644 index 00000000..ac910898 --- /dev/null +++ b/app/settings.py @@ -0,0 +1,135 @@ +from pathlib import Path + +from app import env + + +SECRET_KEY = env.SECRET_KEY + +# Build paths inside the project like this: BASE_DIR / 'subdir'. 
+BASE_DIR = Path(__file__).resolve().parent.parent + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = env.DEBUG + +ALLOWED_HOSTS = env.ALLOWED_HOSTS + + +# Application definition + +INSTALLED_APPS = ( + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + # 'django.contrib.staticfiles', + "rest_framework", + "rest_framework_json_api", + "addon_service", +) + +MIDDLEWARE = [ + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", +] + +ROOT_URLCONF = "app.urls" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "app.wsgi.application" + + +# Database +# https://docs.djangoproject.com/en/3.1/ref/settings/#databases + +# Database settings for PostgreSQL +DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": env.POSTGRES_DB, + "USER": env.POSTGRES_USER, + "PASSWORD": env.POSTGRES_PASSWORD, + "HOST": env.POSTGRES_HOST, + "PORT": env.POSTGRES_PORT, + "ATOMIC_REQUESTS": True, + } +} + +EXCEPTION_HANDLER = "rest_framework_json_api.exceptions.exception_handler" +DEFAULT_PAGINATION_CLASS = ( + "rest_framework_json_api.pagination.JsonApiPageNumberPagination" +) + +REST_FRAMEWORK = { + "PAGE_SIZE": 10, + "EXCEPTION_HANDLER": EXCEPTION_HANDLER, + "DEFAULT_PAGINATION_CLASS": DEFAULT_PAGINATION_CLASS, + "DEFAULT_PARSER_CLASSES": ( + "rest_framework_json_api.parsers.JSONParser", + "rest_framework.parsers.FormParser", + "rest_framework.parsers.MultiPartParser", + ), + "DEFAULT_RENDERER_CLASSES": ( + "rest_framework_json_api.renderers.JSONRenderer", + "rest_framework_json_api.renderers.BrowsableAPIRenderer", + ), + "DEFAULT_FILTER_BACKENDS": ( + "rest_framework_json_api.filters.QueryParameterValidationFilter", + "rest_framework_json_api.filters.OrderingFilter", + "rest_framework_json_api.django_filters.DjangoFilterBackend", + "rest_framework.filters.SearchFilter", + ), + "SEARCH_PARAM": "filter[search]", + "TEST_REQUEST_RENDERER_CLASSES": ( + "rest_framework_json_api.renderers.JSONRenderer", + ), + "TEST_REQUEST_DEFAULT_FORMAT": "vnd.api+json", +} + +# Password validation +# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [] + + +# Internationalization +# https://docs.djangoproject.com/en/3.1/topics/i18n/ + +LANGUAGE_CODE = "en-us" + +TIME_ZONE = "UTC" + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/3.1/howto/static-files/ + +STATIC_URL = "/static/" diff --git a/app/urls.py b/app/urls.py new file mode 100644 index 00000000..d8f02233 --- /dev/null +++ b/app/urls.py @@ -0,0 +1,9 @@ +from django.urls import ( + include, + path, +) + + +urlpatterns 
= [ + path("v1/", include("addon_service.urls")), +] diff --git a/gravyvalet/wsgi.py b/app/wsgi.py similarity index 57% rename from gravyvalet/wsgi.py rename to app/wsgi.py index 472457a3..b6e46470 100644 --- a/gravyvalet/wsgi.py +++ b/app/wsgi.py @@ -1,16 +1,17 @@ """ -WSGI config for gravyvalet project. +WSGI config for milkmaid project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see -https://docs.djangoproject.com/en/4.1/howto/deployment/wsgi/ +https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gravyvalet.settings') + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings") application = get_wsgi_application() diff --git a/charon/admin.py b/charon/admin.py deleted file mode 100644 index 8c38f3f3..00000000 --- a/charon/admin.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.contrib import admin - -# Register your models here. diff --git a/charon/apps.py b/charon/apps.py deleted file mode 100644 index a5677129..00000000 --- a/charon/apps.py +++ /dev/null @@ -1,6 +0,0 @@ -from django.apps import AppConfig - - -class CharonConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'charon' diff --git a/charon/migrations/0001_initial.py.onhold b/charon/migrations/0001_initial.py.onhold deleted file mode 100644 index d7826da5..00000000 --- a/charon/migrations/0001_initial.py.onhold +++ /dev/null @@ -1,92 +0,0 @@ -# Generated by Django 4.2 on 2023-05-15 20:44 - -import django.contrib.postgres.fields -import django_extensions.db.fields -from django.db import migrations, models - -import charon.models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="ExternalAccount", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "created", - django_extensions.db.fields.CreationDateTimeField( - auto_now_add=True, verbose_name="created" - ), - ), - ( - "modified", - django_extensions.db.fields.ModificationDateTimeField( - auto_now=True, verbose_name="modified" - ), - ), - ( - "_id", - models.CharField( - db_index=True, - default=charon.models.generate_object_id, - max_length=24, - unique=True, - ), - ), - ("oauth_key", charon.models.EncryptedTextField(blank=True, null=True)), - ( - "oauth_secret", - charon.models.EncryptedTextField(blank=True, null=True), - ), - ( - "refresh_token", - charon.models.EncryptedTextField(blank=True, null=True), - ), - ( - "date_last_refreshed", - charon.models.NonNaiveDateTimeField(blank=True, null=True), - ), - ( - "expires_at", - charon.models.NonNaiveDateTimeField(blank=True, null=True), - ), - ( - "scopes", - django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=128), - blank=True, - default=list, - size=None, - ), - ), - ("provider", models.CharField(max_length=50)), - ("provider_name", models.CharField(max_length=255)), - ("provider_id", models.CharField(max_length=255)), - ( - "display_name", - charon.models.EncryptedTextField(blank=True, null=True), - ), - ( - "profile_url", - charon.models.EncryptedTextField(blank=True, null=True), - ), - ], - options={ - "unique_together": {("provider", "provider_id")}, - }, - bases=(models.Model, charon.models.QuerySetExplainMixin), - ), - ] diff --git a/charon/models-djangostyle.py 
b/charon/models-djangostyle.py deleted file mode 100644 index 5ef45938..00000000 --- a/charon/models-djangostyle.py +++ /dev/null @@ -1,1929 +0,0 @@ -import abc -import logging -import os -import time - -import bson -import jwe -import markupsafe -import requests -from boxsdk import Client, OAuth2 -from boxsdk.exception import BoxAPIException -from django.contrib.contenttypes.fields import GenericForeignKey -from django.contrib.postgres.fields import ArrayField # replace with sqlite equiv? -from django.core.exceptions import ValidationError -from django.db import connections, models -from django.db.models import DateTimeField, ForeignKey, TextField -from django.db.models.query import QuerySet -from django.http import ( - HttpResponse, - HttpResponseBadRequest, - HttpResponseForbidden, - HttpResponseNotFound, -) -from django.utils import timezone -from django_extensions.db.models import TimeStampedModel -from oauthlib.oauth2 import InvalidGrantError -from urllib3.exceptions import MaxRetryError - -import charon.serializer as charon_serializer -import charon.settings as charon_settings -import charon.utils as charon_utils - -logger = logging.getLogger(__name__) - -SENSITIVE_DATA_KEY = jwe.kdf( - charon_settings.SENSITIVE_DATA_SECRET.encode('utf-8'), - charon_settings.SENSITIVE_DATA_SALT.encode('utf-8'), -) - - -# Create your models here. - - -def generate_object_id(): - return str(bson.ObjectId()) - - -def ensure_bytes(value): - """Helper function to ensure all inputs are encoded to the proper value utf-8 value - regardless of input type""" - if isinstance(value, bytes): - return value - return value.encode('utf-8') - - -def ensure_str(value): - if isinstance(value, bytes): - return value.decode() - return value - - -def encrypt_string(value, prefix='jwe:::'): - prefix = ensure_bytes(prefix) - if value: - value = ensure_bytes(value) - if value and not value.startswith(prefix): - value = (prefix + jwe.encrypt(value, SENSITIVE_DATA_KEY)).decode() - return value - - -def decrypt_string(value, prefix='jwe:::'): - prefix = ensure_bytes(prefix) - if value: - value = ensure_bytes(value) - if value.startswith(prefix): - value = jwe.decrypt(value[len(prefix) :], SENSITIVE_DATA_KEY).decode() - return value - - -class NaiveDatetimeException(Exception): - pass - - -class EncryptedTextField(TextField): - """ - This field transparently encrypts data in the database. It should probably only be - used with PG unless the user takes into account the db specific trade-offs with - TextFields. 
- """ - - prefix = 'jwe:::' - - def get_db_prep_value(self, value, **kwargs): - return encrypt_string(value, prefix=self.prefix) - - def to_python(self, value): - return decrypt_string(value, prefix=self.prefix) - - def from_db_value(self, value, expression, connection): - return self.to_python(value) - - -class NonNaiveDateTimeField(DateTimeField): - def get_prep_value(self, value): - value = super(NonNaiveDateTimeField, self).get_prep_value(value) - if value is not None and ( - value.tzinfo is None or value.tzinfo.utcoffset(value) is None - ): - raise NaiveDatetimeException('Tried to encode a naive datetime.') - return value - - -class QuerySetExplainMixin: - def explain(self, *args): - extra_arguments = '' - for item in args: - extra_arguments = ( - '{} {}'.format(extra_arguments, item) - if isinstance(item, str) - else extra_arguments - ) - cursor = connections[self.db].cursor() - query, params = self.query.sql_with_params() - cursor.execute('explain analyze verbose %s' % query, params) - return '\n'.join(r[0] for r in cursor.fetchall()) - - -QuerySet = type('QuerySet', (QuerySetExplainMixin, QuerySet), dict(QuerySet.__dict__)) - - -class BaseModel(TimeStampedModel, QuerySetExplainMixin): - migration_page_size = 50000 - - objects = models.QuerySet.as_manager() - - class Meta: - abstract = True - - def __unicode__(self): - return '{}'.format(self.id) - - def to_storage(self, include_auto_now=True): - local_django_fields = set( - [ - x.name - for x in self._meta.concrete_fields - if include_auto_now or not getattr(x, 'auto_now', False) - ] - ) - return {name: self.serializable_value(name) for name in local_django_fields} - - @classmethod - def get_fk_field_names(cls): - return [ - field.name - for field in cls._meta.get_fields() - if field.is_relation - and not field.auto_created - and (field.many_to_one or field.one_to_one) - and not isinstance(field, GenericForeignKey) - ] - - @classmethod - def get_m2m_field_names(cls): - return [ - field.attname or field.name - for field in cls._meta.get_fields() - if field.is_relation and field.many_to_many and not hasattr(field, 'field') - ] - - @classmethod - def load(cls, data, select_for_update=False): - try: - return ( - cls.objects.get(pk=data) - if not select_for_update - else cls.objects.filter(pk=data).select_for_update().get() - ) - except cls.DoesNotExist: - return None - - @property - def _primary_name(self): - return '_id' - - @property - def _is_loaded(self): - return bool(self.pk) - - def reload(self): - return self.refresh_from_db() - - def refresh_from_db(self, **kwargs): - super(BaseModel, self).refresh_from_db(**kwargs) - # Since Django 2.2, any cached relations are cleared from the reloaded instance. - # - # See https://docs.djangoproject.com/en/2.2/ref/models/instances/#django.db.models.Model.refresh_from_db # noqa: E501 - # - # However, the default `refresh_from_db()` doesn't refresh related fields. - # Neither can we refresh related field(s) since it will inevitably cause - # infinite loop; and Many/One-to-Many relations add to the complexity. - # - # The recommended behavior is to explicitly refresh the fields when necessary. 
- # In order to preserve pre-upgrade behavior, our customization only reloads GFKs - for f in self._meta._get_fields(reverse=False): - # Note: the following `if` condition is how django internally identifies GFK - if ( - f.is_relation - and f.many_to_one - and not (hasattr(f.remote_field, 'model') and f.remote_field.model) - ): - if hasattr(self, f.name): - try: - getattr(self, f.name).refresh_from_db() - except AttributeError: - continue - - def clone(self): - """Create a new, unsaved copy of this object.""" - copy = self.__class__.objects.get(pk=self.pk) - copy.id = None - - # empty all the fks - fk_field_names = [ - f.name - for f in self._meta.model._meta.get_fields() - if isinstance(f, (ForeignKey, GenericForeignKey)) - ] - for field_name in fk_field_names: - setattr(copy, field_name, None) - - try: - copy._id = bson.ObjectId() - except AttributeError: - pass - return copy - - def save(self, *args, **kwargs): - # Make Django validate on save (like modm) - if kwargs.pop('clean', True) and not ( - kwargs.get('force_insert') or kwargs.get('force_update') - ): - try: - self.full_clean() - except ValidationError as err: - raise ValidationError(*err.args) - return super(BaseModel, self).save(*args, **kwargs) - - -class BaseIDMixin(models.Model): - class Meta: - abstract = True - - -class ObjectIDMixin(BaseIDMixin): - primary_identifier_name = '_id' - - _id = models.CharField( - max_length=24, default=generate_object_id, unique=True, db_index=True - ) - - def __unicode__(self): - return '_id: {}'.format(self._id) - - @classmethod - def load(cls, q, select_for_update=False): - try: - return ( - cls.objects.get(_id=q) - if not select_for_update - else cls.objects.filter(_id=q).select_for_update().get() - ) - except cls.DoesNotExist: - # modm doesn't throw exceptions when loading things that don't exist - return None - - class Meta: - abstract = True - - -class ExternalProviderMeta(abc.ABCMeta): - """Keeps track of subclasses of the ``ExternalProvider`` object""" - - def __init__(cls, name, bases, dct): - super(ExternalProviderMeta, cls).__init__(name, bases, dct) - if not isinstance(cls.short_name, abc.abstractproperty): - PROVIDER_LOOKUP[cls.short_name] = cls - - -class ExternalProvider(object, with_metaclass(ExternalProviderMeta)): - """A connection to an external service (ex: GitHub). - - This object contains no credentials, and is not saved in the database. - It provides an unauthenticated session with the provider, unless ``account`` - has been set - in which case, it provides a connection authenticated as the - ``ExternalAccount`` instance. - - Conceptually, this can be thought of as an extension of ``ExternalAccount``. - It's a separate object because this must be subclassed for each provider, - and ``ExternalAccount`` instances are stored within a single collection. - """ - - # Default to OAuth v2.0. - _oauth_version = OAUTH2 - - # Providers that have expiring tokens must override these - auto_refresh_url = None - refresh_time = 0 # When to refresh the oauth_key (seconds) - expiry_time = 0 # If/When the refresh token expires (seconds). 
0 indicates a non-expiring refresh token - - def __init__(self, account=None): - super(ExternalProvider, self).__init__() - - # provide an unauthenticated session by default - self.account = account - - def __repr__(self): - return '<{name}: {status}>'.format( - name=self.__class__.__name__, - status=self.account.provider_id if self.account else 'anonymous', - ) - - @abc.abstractproperty - def auth_url_base(self): - """The base URL to begin the OAuth dance""" - pass - - @property - def auth_url(self): - """The URL to begin the OAuth dance. - - This property method has side effects - it at least adds temporary - information to the session so that callbacks can be associated with - the correct user. For OAuth1, it calls the provider to obtain - temporary credentials to start the flow. - """ - current_session = get_session() - # create a dict on the session object if it's not already there - if current_session.get('oauth_states', None) is None: - current_session['oauth_states'] = {} - - if self._oauth_version == OAUTH2: - # Quirk: Some time between 2019/05/31 and 2019/06/04, Bitbucket's OAuth2 API no longer - # expects the query param `redirect_uri` in the `oauth2/authorize` endpoint. In - # addition, it relies on the "Callback URL" of the "OAuth Consumer" to redirect - # the auth flow after successful authorization. `ADDONS_OAUTH_NO_REDIRECT` is a - # list containing addons that do not use `redirect_uri` in OAuth2 requests. - if self.short_name in ADDONS_OAUTH_NO_REDIRECT: - redirect_uri = None - else: - redirect_uri = charon_utils.web_url_for( - 'oauth_callback', service_name=self.short_name, _absolute=True - ) - # build the URL - oauth = OAuth2Session( - self.client_id, - redirect_uri=redirect_uri, - scope=self.default_scopes, - ) - - url, state = oauth.authorization_url(self.auth_url_base) - - # save state token to the session for confirmation in the callback - current_session['oauth_states'][self.short_name] = {'state': state} - - elif self._oauth_version == OAUTH1: - # get a request token - oauth = OAuth1Session( - client_key=self.client_id, - client_secret=self.client_secret, - ) - - # request temporary credentials from the provider - response = oauth.fetch_request_token(self.request_token_url) - - # store them in the session for use in the callback - current_session['oauth_states'][self.short_name] = { - 'token': response.get('oauth_token'), - 'secret': response.get('oauth_token_secret'), - } - - url = oauth.authorization_url(self.auth_url_base) - - current_session.save() - return url - - @abc.abstractproperty - def callback_url(self): - """The provider URL to exchange the code for a token""" - pass - - @abc.abstractproperty - def client_id(self): - """OAuth Client ID. a/k/a: Application ID""" - pass - - @abc.abstractproperty - def client_secret(self): - """OAuth Client Secret. a/k/a: Application Secret, Application Key""" - pass - - default_scopes = list() - - @abc.abstractproperty - def name(self): - """Human-readable name of the service. e.g.: ORCiD, GitHub""" - pass - - @abc.abstractproperty - def short_name(self): - """Name of the service to be used internally. e.g.: orcid, github""" - pass - - def auth_callback(self, user, **kwargs): - """Exchange temporary credentials for permanent credentials - - This is called in the view that handles the user once they are returned - to the OSF after authenticating on the external service. 
- """ - current_session = get_session() - if 'error' in request.args: - return False - - # make sure the user has temporary credentials for this provider - try: - cached_credentials = current_session['oauth_states'][self.short_name] - except KeyError: - raise PermissionsError('OAuth flow not recognized.') - - if self._oauth_version == OAUTH1: - request_token = request.args.get('oauth_token') - - # make sure this is the same user that started the flow - if cached_credentials.get('token') != request_token: - raise PermissionsError('Request token does not match') - - response = OAuth1Session( - client_key=self.client_id, - client_secret=self.client_secret, - resource_owner_key=cached_credentials.get('token'), - resource_owner_secret=cached_credentials.get('secret'), - verifier=request.args.get('oauth_verifier'), - ).fetch_access_token(self.callback_url) - - elif self._oauth_version == OAUTH2: - state = request.args.get('state') - - # make sure this is the same user that started the flow - if cached_credentials.get('state') != state: - raise PermissionsError('Request token does not match') - - try: - # Quirk: Similarly to the `oauth2/authorize` endpoint, the `oauth2/access_token` - # endpoint of Bitbucket would fail if a not-none or non-empty `redirect_uri` - # were provided in the body of the POST request. - if self.short_name in ADDONS_OAUTH_NO_REDIRECT: - redirect_uri = None - else: - redirect_uri = charon_utils.web_url_for( - 'oauth_callback', service_name=self.short_name, _absolute=True - ) - response = OAuth2Session( - self.client_id, - redirect_uri=redirect_uri, - ).fetch_token( - self.callback_url, - client_secret=self.client_secret, - code=request.args.get('code'), - ) - except (MissingTokenError, RequestsHTTPError): - raise HTTPError(http_status.HTTP_503_SERVICE_UNAVAILABLE) - # pre-set as many values as possible for the ``ExternalAccount`` - info = self._default_handle_callback(response) - # call the hook for subclasses to parse values from the response - info.update(self.handle_callback(response)) - - return self._set_external_account(user, info) - - def _set_external_account(self, user, info): - current_session = get_session() - self.account, created = ExternalAccount.objects.get_or_create( - provider=self.short_name, - provider_id=info['provider_id'], - ) - - # ensure that provider_name is correct - self.account.provider_name = self.name - # required - self.account.oauth_key = info['key'] - - # only for OAuth1 - self.account.oauth_secret = info.get('secret') - - # only for OAuth2 - self.account.expires_at = info.get('expires_at') - self.account.refresh_token = info.get('refresh_token') - self.account.date_last_refreshed = timezone.now() - - # additional information - self.account.display_name = info.get('display_name') - self.account.profile_url = info.get('profile_url') - - self.account.save() - - # add it to the user's list of ``ExternalAccounts`` - if not user.external_accounts.filter(id=self.account.id).exists(): - user.external_accounts.add(self.account) - user.save() - - if self.short_name in current_session.get('oauth_states', {}): - del current_session['oauth_states'][self.short_name] - current_session.save() - - return True - - def _default_handle_callback(self, data): - """Parse as much out of the key exchange's response as possible. - - This should not be over-ridden in subclasses. 
- """ - if self._oauth_version == OAUTH1: - key = data.get('oauth_token') - secret = data.get('oauth_token_secret') - - values = {} - - if key: - values['key'] = key - if secret: - values['secret'] = secret - - return values - - elif self._oauth_version == OAUTH2: - key = data.get('access_token') - refresh_token = data.get('refresh_token') - expires_at = data.get('expires_at') - scopes = data.get('scope') - - values = {} - - if key: - values['key'] = key - if scopes: - values['scope'] = scopes - if refresh_token: - values['refresh_token'] = refresh_token - if expires_at: - values['expires_at'] = dt.datetime.fromtimestamp(float(expires_at)) - - return values - - @abc.abstractmethod - def handle_callback(self, response): - """Hook for allowing subclasses to parse information from the callback. - - Subclasses should implement this method to provide `provider_id` - and `profile_url`. - - Values provided by ``self._default_handle_callback`` can be over-ridden - here as well, in the unexpected case that they are parsed incorrectly - by default. - - :param response: The JSON returned by the provider during the exchange - :return dict: - """ - pass - - def refresh_oauth_key( - self, - force=False, - extra=None, - resp_auth_token_key='access_token', - resp_refresh_token_key='refresh_token', - resp_expiry_fn=None, - ): - """Handles the refreshing of an oauth_key for account associated with this provider. - Not all addons need to use this, as some do not have oauth_keys that expire. - - Subclasses must define the following for this functionality: - `auto_refresh_url` - URL to use when refreshing tokens. Must use HTTPS - `refresh_time` - Time (in seconds) that the oauth_key should be refreshed after. - Typically half the duration of validity. Cannot be 0. - - Providers may have different keywords in their response bodies, kwargs - `resp_*_key` allow subclasses to override these if necessary. - - kwarg `resp_expiry_fn` allows subclasses to specify a function that will return the - datetime-formatted oauth_key expiry key, given a successful refresh response from - `auto_refresh_url`. A default using 'expires_at' as a key is provided. 
- """ - extra = extra or {} - # Ensure this is an authenticated Provider that uses token refreshing - if not (self.account and self.auto_refresh_url): - return False - - # Ensure this Provider is for a valid addon - if not (self.client_id and self.client_secret): - return False - - # Ensure a refresh is needed - if not (force or self._needs_refresh()): - return False - - if self.has_expired_credentials and not force: - return False - - resp_expiry_fn = resp_expiry_fn or ( - lambda x: timezone.now() - + timezone.timedelta(seconds=float(x['expires_in'])) - ) - - client = OAuth2Session( - self.client_id, - token={ - 'access_token': self.account.oauth_key, - 'refresh_token': self.account.refresh_token, - 'token_type': 'Bearer', - 'expires_in': '-30', - }, - ) - - extra.update({'client_id': self.client_id, 'client_secret': self.client_secret}) - - try: - token = client.refresh_token(self.auto_refresh_url, **extra) - except (AccessDeniedError, InvalidGrantError, TokenExpiredError): - if not force: - return False - else: - raise - - self.account.oauth_key = token[resp_auth_token_key] - self.account.refresh_token = token[resp_refresh_token_key] - self.account.expires_at = resp_expiry_fn(token) - self.account.date_last_refreshed = timezone.now() - self.account.save() - return True - - def _needs_refresh(self): - """Determines whether or not an associated ExternalAccount needs - a oauth_key. - - return bool: True if needs_refresh - """ - if self.refresh_time and self.account.expires_at: - return ( - self.account.expires_at - timezone.now() - ).total_seconds() < self.refresh_time - return False - - @property - def has_expired_credentials(self): - """Determines whether or not an associated ExternalAccount has - expired credentials that can no longer be renewed - - return bool: True if cannot be refreshed - """ - if self.expiry_time and self.account.expires_at: - return ( - timezone.now() - self.account.expires_at - ).total_seconds() > self.expiry_time - return False - - -class ExternalAccount(ObjectIDMixin, BaseModel): - """An account on an external service. - - Note that this object is not and should not be aware of what other objects - are associated with it. This is by design, and this object should be kept as - thin as possible, containing only those fields that must be stored in the - database. - - The ``provider`` field is a de facto foreign key to an ``ExternalProvider`` - object, as providers are not stored in the database. - """ - - # The OAuth credentials. One or both of these fields should be populated. - # For OAuth1, this is usually the "oauth_token" - # For OAuth2, this is usually the "access_token" - oauth_key = EncryptedTextField(blank=True, null=True) - - # For OAuth1, this is usually the "oauth_token_secret" - # For OAuth2, this is not used - oauth_secret = EncryptedTextField(blank=True, null=True) - - # Used for OAuth2 only - refresh_token = EncryptedTextField(blank=True, null=True) - date_last_refreshed = NonNaiveDateTimeField(blank=True, null=True) - expires_at = NonNaiveDateTimeField(blank=True, null=True) - scopes = ArrayField(models.CharField(max_length=128), default=list, blank=True) - - # The `name` of the service - # This lets us query for only accounts on a particular provider - # TODO We should make provider an actual FK someday. 
-    provider = models.CharField(max_length=50, blank=False, null=False)
-    # The proper 'name' of the service
-    # Needed for account serialization
-    provider_name = models.CharField(max_length=255, blank=False, null=False)
-
-    # The unique, persistent ID on the remote service.
-    provider_id = models.CharField(max_length=255, blank=False, null=False)
-
-    # The user's name on the external service
-    display_name = EncryptedTextField(blank=True, null=True)
-    # A link to the user's profile on the external service
-    profile_url = EncryptedTextField(blank=True, null=True)
-
-    def __repr__(self):
-        return '<ExternalAccount: {}/{}>'.format(self.provider, self.provider_id)
-
-    def _natural_key(self):
-        if self.pk:
-            return self.pk
-        return hash(str(self.provider_id) + str(self.provider))
-
-    class Meta:
-        unique_together = [
-            (
-                'provider',
-                'provider_id',
-            )
-        ]
-
-
-class Provider(ExternalProvider):
-    name = 'Box'
-    short_name = 'box'
-
-    client_id = charon_settings.BOX_KEY
-    client_secret = charon_settings.BOX_SECRET
-
-    auth_url_base = charon_settings.BOX_OAUTH_AUTH_ENDPOINT
-    callback_url = charon_settings.BOX_OAUTH_TOKEN_ENDPOINT
-    auto_refresh_url = callback_url
-    refresh_time = charon_settings.REFRESH_TIME
-    expiry_time = charon_settings.EXPIRY_TIME
-    default_scopes = ['root_readwrite']
-
-    def handle_callback(self, response):
-        """View called when the Oauth flow is completed. Adds a new UserSettings
-        record to the user and saves the user's access token and account info.
-        """
-
-        client = Client(
-            OAuth2(
-                access_token=response['access_token'],
-                refresh_token=response['refresh_token'],
-                client_id=charon_settings.BOX_KEY,
-                client_secret=charon_settings.BOX_SECRET,
-            )
-        )
-
-        about = client.user().get()
-
-        return {
-            'provider_id': about['id'],
-            'display_name': about['name'],
-            'profile_url': 'https://app.box.com/profile/{0}'.format(about['id']),
-        }
-
-
-class BaseAddonSettings(ObjectIDMixin, BaseModel):
-    is_deleted = models.BooleanField(default=False)
-    deleted = NonNaiveDateTimeField(null=True, blank=True)
-
-    class Meta:
-        abstract = True
-
-    @property
-    def config(self):
-        return self._meta.app_config
-
-    @property
-    def short_name(self):
-        return self.config.short_name
-
-    def delete(self, save=True):
-        self.is_deleted = True
-        self.deleted = timezone.now()
-        self.on_delete()
-        if save:
-            self.save()
-
-    def undelete(self, save=True):
-        self.is_deleted = False
-        self.deleted = None
-        self.on_add()
-        if save:
-            self.save()
-
-    def to_json(self, user):
-        return {
-            'addon_short_name': self.config.short_name,
-            'addon_full_name': self.config.full_name,
-        }
-
-    #############
-    # Callbacks #
-    #############
-
-    def on_add(self):
-        """Called when the addon is added (or re-added) to the owner (User or Node)."""
-        pass
-
-    def on_delete(self):
-        """Called when the addon is deleted from the owner (User or Node)."""
-        pass
-
-
-class BaseUserSettings(BaseAddonSettings):
-    owner = models.OneToOneField(
-        OSFUser,
-        related_name='%(app_label)s_user_settings',
-        blank=True,
-        null=True,
-        on_delete=models.CASCADE,
-    )
-
-    class Meta:
-        abstract = True
-
-    @property
-    def public_id(self):
-        return None
-
-    @property
-    def has_auth(self):
-        """Whether the user has added credentials for this addon."""
-        return False
-
-    # TODO: Test me @asmacdo
-    @property
-    def nodes_authorized(self):
-        """Get authorized, non-deleted nodes. Returns an empty list if the
-        attached add-on does not include a node model.
-        """
-        model = self.config.node_settings
-        if not model:
-            return []
-        return [
-            obj.owner
-            for obj in model.objects.filter(
-                user_settings=self, owner__is_deleted=False
-            ).select_related('owner')
-        ]
-
-    @property
-    def can_be_merged(self):
-        return hasattr(self, 'merge')
-
-    def to_json(self, user):
-        ret = super(BaseUserSettings, self).to_json(user)
-        ret['has_auth'] = self.has_auth
-        ret.update(
-            {
-                'nodes': [
-                    {
-                        '_id': node._id,
-                        'url': node.url,
-                        'title': node.title,
-                        'registered': node.is_registration,
-                        'api_url': node.api_url,
-                    }
-                    for node in self.nodes_authorized
-                ]
-            }
-        )
-        return ret
-
-    def __repr__(self):
-        if self.owner:
-            return '<{cls} owned by user {uid}>'.format(
-                cls=self.__class__.__name__, uid=self.owner._id
-            )
-        return '<{cls} with no owner>'.format(cls=self.__class__.__name__)
-
-
-# TODO: this is a signal - what do we do?
-# @oauth_complete.connect
-def oauth_complete(provider, account, user):
-    if not user or not account:
-        return
-    user.add_addon(account.provider)
-    user.save()
-
-
-class BaseOAuthUserSettings(BaseUserSettings):
-    # Keeps track of what nodes have been given permission to use external
-    # accounts belonging to the user.
-    oauth_grants = DateTimeAwareJSONField(default=dict, blank=True)
-    # example:
-    # {
-    #     '<Node._id>': {
-    #         '<ExternalAccount._id>': {
-    #             <metadata>
-    #         },
-    #     }
-    # }
-    #
-    # metadata here is the specific to each addon.
-
-    # The existence of this property is used to determine whether or not
-    # an addon instance is an "OAuth addon" in
-    # AddonModelMixin.get_oauth_addons().
-    oauth_provider = None
-
-    serializer = charon_serializer.OAuthAddonSerializer  # TODO: import this?
-
-    class Meta:
-        abstract = True
-
-    @property
-    def has_auth(self):
-        return self.external_accounts.exists()
-
-    @property
-    def external_accounts(self):
-        """The user's list of ``ExternalAccount`` instances for this provider"""
-        return self.owner.external_accounts.filter(
-            provider=self.oauth_provider.short_name
-        )
-
-    def delete(self, save=True):
-        for account in self.external_accounts.filter(provider=self.config.short_name):
-            self.revoke_oauth_access(account, save=False)
-        super(BaseOAuthUserSettings, self).delete(save=save)
-
-    def grant_oauth_access(self, node, external_account, metadata=None):
-        """Give a node permission to use an ``ExternalAccount`` instance."""
-        # ensure the user owns the external_account
-        if not self.owner.external_accounts.filter(id=external_account.id).exists():
-            raise charon_utils.PermissionsError()
-
-        metadata = metadata or {}
-
-        # create an entry for the node, if necessary
-        if node._id not in self.oauth_grants:
-            self.oauth_grants[node._id] = {}
-
-        # create an entry for the external account on the node, if necessary
-        if external_account._id not in self.oauth_grants[node._id]:
-            self.oauth_grants[node._id][external_account._id] = {}
-
-        # update the metadata with the supplied values
-        for key, value in metadata.items():
-            self.oauth_grants[node._id][external_account._id][key] = value
-
-        self.save()
-
-    # TODO: this uses the request, yes? why are we checking request in model
-    # @must_be_logged_in
-    def revoke_oauth_access(self, external_account, auth, save=True):
-        """Revoke all access to an ``ExternalAccount``.
-
-        TODO: This should accept node and metadata params in the future, to
-        allow fine-grained revocation of grants. That's not yet been needed,
-        so it's not yet been implemented.
- """ - for node in self.get_nodes_with_oauth_grants(external_account): - try: - node.get_addon(external_account.provider, is_deleted=True).deauthorize( - auth=auth - ) - except AttributeError: - # No associated addon settings despite oauth grant - pass - - if ( - external_account.osfuser_set.count() == 1 - and external_account.osfuser_set.filter(id=auth.user.id).exists() - ): - # Only this user is using the account, so revoke remote access as well. - self.revoke_remote_oauth_access(external_account) - - for key in self.oauth_grants: - self.oauth_grants[key].pop(external_account._id, None) - if save: - self.save() - - def revoke_remote_oauth_access(self, external_account): - """Makes outgoing request to remove the remote oauth grant - stored by third-party provider. - - Individual addons must override this method, as it is addon-specific behavior. - Not all addon providers support this through their API, but those that do - should also handle the case where this is called with an external_account - with invalid credentials, to prevent a user from being unable to disconnect - an account. - """ - pass - - def verify_oauth_access(self, node, external_account, metadata=None): - """Verify that access has been previously granted. - - If metadata is not provided, this checks only if the node can access the - account. This is suitable to check to see if the node's addon settings - is still connected to an external account (i.e., the user hasn't revoked - it in their user settings pane). - - If metadata is provided, this checks to see that all key/value pairs - have been granted. This is suitable for checking access to a particular - folder or other resource on an external provider. - """ - - metadata = metadata or {} - - # ensure the grant exists - try: - grants = self.oauth_grants[node._id][external_account._id] - except KeyError: - return False - - # Verify every key/value pair is in the grants dict - for key, value in metadata.items(): - if key not in grants or grants[key] != value: - return False - - return True - - def get_nodes_with_oauth_grants(self, external_account): - # Generator of nodes which have grants for this external account - for node_id, grants in self.oauth_grants.items(): - node = AbstractNode.load(node_id) - if external_account._id in grants.keys() and not node.is_deleted: - yield node - - def get_attached_nodes(self, external_account): - for node in self.get_nodes_with_oauth_grants(external_account): - if node is None: - continue - node_settings = node.get_addon(self.oauth_provider.short_name) - - if node_settings is None: - continue - - if node_settings.external_account == external_account: - yield node - - def merge(self, user_settings): - """Merge `user_settings` into this instance""" - if user_settings.__class__ is not self.__class__: - raise TypeError('Cannot merge different addons') - - for node_id, data in user_settings.oauth_grants.items(): - if node_id not in self.oauth_grants: - self.oauth_grants[node_id] = data - else: - node_grants = user_settings.oauth_grants[node_id].items() - for ext_acct, meta in node_grants: - if ext_acct not in self.oauth_grants[node_id]: - self.oauth_grants[node_id][ext_acct] = meta - else: - for k, v in meta: - if k not in self.oauth_grants[node_id][ext_acct]: - self.oauth_grants[node_id][ext_acct][k] = v - - user_settings.oauth_grants = {} - user_settings.save() - - try: - config = charon_settings.ADDONS_AVAILABLE_DICT[ - self.oauth_provider.short_name - ] - Model = config.models['nodesettings'] - except KeyError: - pass - else: - 
Model.objects.filter(user_settings=user_settings).update(user_settings=self) - - self.save() - - def to_json(self, user): - ret = super(BaseOAuthUserSettings, self).to_json(user) - - ret['accounts'] = self.serializer(user_settings=self).serialized_accounts - - return ret - - ############# - # Callbacks # - ############# - - def on_delete(self): - """When the user deactivates the addon, clear auth for connected nodes.""" - super(BaseOAuthUserSettings, self).on_delete() - nodes = [AbstractNode.load(node_id) for node_id in self.oauth_grants.keys()] - for node in nodes: - node_addon = node.get_addon(self.oauth_provider.short_name) - if node_addon and node_addon.user_settings == self: - node_addon.clear_auth() - - -class BaseNodeSettings(BaseAddonSettings): - owner = models.OneToOneField( - AbstractNode, - related_name='%(app_label)s_node_settings', - null=True, - blank=True, - on_delete=models.CASCADE, - ) - - class Meta: - abstract = True - - @property - def complete(self): - """Whether or not this addon is properly configured - :rtype bool: - """ - raise NotImplementedError() - - @property - def configured(self): - """Whether or not this addon has had a folder connected. - :rtype bool: - """ - return self.complete - - @property - def has_auth(self): - """Whether the node has added credentials for this addon.""" - return False - - def to_json(self, user): - ret = super(BaseNodeSettings, self).to_json(user) - ret.update( - { - 'user': {'permissions': self.owner.get_permissions(user)}, - 'node': { - 'id': self.owner._id, - 'api_url': self.owner.api_url, - 'url': self.owner.url, - 'is_registration': self.owner.is_registration, - }, - 'node_settings_template': os.path.basename( - self.config.node_settings_template - ), - } - ) - return ret - - ############# - # Callbacks # - ############# - - def before_page_load(self, node, user): - """ - - :param User user: - :param Node node: - - """ - pass - - def before_remove_contributor(self, node, removed): - """ - :param Node node: - :param User removed: - """ - pass - - def after_remove_contributor(self, node, removed, auth=None): - """ - :param Node node: - :param User removed: - """ - pass - - def before_make_public(self, node): - """ - - :param Node node: - :returns: Alert message or None - - """ - pass - - def before_make_private(self, node): - """ - - :param Node node: - :returns: Alert message or None - - """ - pass - - def after_set_privacy(self, node, permissions): - """ - - :param Node node: - :param str permissions: - - """ - pass - - def before_fork(self, node, user): - """Return warning text to display if user auth will be copied to a - fork. - :param Node node: - :param Uder user - :returns Alert message - """ - - if hasattr(self, 'user_settings'): - if self.user_settings is None: - return ( - u'Because you have not configured the {addon} add-on, your ' - u'authentication will not be transferred to the forked {category}. ' - u'You may authorize and configure the {addon} add-on ' - u'in the new fork on the settings page.' - ).format( - addon=self.config.full_name, - category=node.project_or_component, - ) - - elif self.user_settings and self.user_settings.owner == user: - return ( - u'Because you have authorized the {addon} add-on for this ' - u'{category}, forking it will also transfer your authentication to ' - u'the forked {category}.' 
- ).format( - addon=self.config.full_name, - category=node.project_or_component, - ) - else: - return ( - u'Because the {addon} add-on has been authorized by a different ' - u'user, forking it will not transfer authentication to the forked ' - u'{category}. You may authorize and configure the {addon} add-on ' - u'in the new fork on the settings page.' - ).format( - addon=self.config.full_name, - category=node.project_or_component, - ) - - def after_fork(self, node, fork, user, save=True): - """ - - :param Node node: - :param Node fork: - :param User user: - :param bool save: - :returns: cloned settings - - """ - clone = self.clone() - clone.user_settings = None - clone.owner = fork - - if save: - clone.save() - - return clone - - def before_register(self, node, user): - """ - - :param Node node: - :param User user: - :returns: Alert message - - """ - pass - - def after_register(self, node, registration, user, save=True): - """ - - :param Node node: - :param Node registration: - :param User user: - :param bool save: - :returns: Tuple of cloned settings and alert message - - """ - return None, None - - def after_delete(self, user): - """ - - :param Node node: - :param User user: - - """ - pass - - -class BaseStorageAddon(object): - """ - Mixin class for traversing file trees of addons with files - """ - - root_node = GenericRootNode() - - class Meta: - abstract = True - - @property - def archive_folder_name(self): - name = 'Archive of {addon}'.format(addon=self.config.full_name) - folder_name = getattr(self, 'folder_name', '').lstrip('/').strip() - if folder_name: - name = name + ': {folder}'.format(folder=folder_name) - return name - - def _get_fileobj_child_metadata(self, filenode, user, cookie=None, version=None): - from api.base.utils import waterbutler_api_url_for - - kwargs = {} - if version: - kwargs['version'] = version - if cookie: - kwargs['cookie'] = cookie - elif user: - kwargs['cookie'] = user.get_or_create_cookie().decode() - - metadata_url = waterbutler_api_url_for( - self.owner._id, - self.config.short_name, - path=filenode.get('path', '/'), - user=user, - view_only=True, - _internal=True, - base_url=self.owner.osfstorage_region.waterbutler_url, - **kwargs - ) - - res = requests.get(metadata_url) - - if res.status_code != 200: - raise HttpResponse(res.content, status=res.status_code) - - # TODO: better throttling? - time.sleep(1.0 / 5.0) - - data = res.json().get('data', None) - if data: - return [child['attributes'] for child in data] - return [] - - def _get_file_tree(self, filenode=None, user=None, cookie=None, version=None): - """ - Recursively get file metadata - """ - filenode = filenode or { - 'path': '/', - 'kind': 'folder', - 'name': self.root_node.name, - } - if filenode.get('kind') == 'file': - return filenode - - kwargs = { - 'version': version, - 'cookie': cookie, - } - filenode['children'] = [ - self._get_file_tree(child, user, cookie=cookie) - for child in self._get_fileobj_child_metadata(filenode, user, **kwargs) - ] - return filenode - - -class BaseOAuthNodeSettings(BaseNodeSettings): - # TODO: Validate this field to be sure it matches the provider's short_name - # NOTE: Do not set this field directly. Use ``set_auth()`` - external_account = models.ForeignKey( - ExternalAccount, - null=True, - blank=True, - related_name='%(app_label)s_node_settings', - on_delete=models.CASCADE, - ) - - # NOTE: Do not set this field directly. 
Use ``set_auth()`` - # user_settings = fields.AbstractForeignField() - - # The existence of this property is used to determine whether or not - # an addon instance is an "OAuth addon" in - # AddonModelMixin.get_oauth_addons(). - oauth_provider = None - - class Meta: - abstract = True - - @abc.abstractproperty - def folder_id(self): - raise NotImplementedError( - "BaseOAuthNodeSettings subclasses must expose a 'folder_id' property." - ) - - @abc.abstractproperty - def folder_name(self): - raise NotImplementedError( - "BaseOAuthNodeSettings subclasses must expose a 'folder_name' property." - ) - - @abc.abstractproperty - def folder_path(self): - raise NotImplementedError( - "BaseOAuthNodeSettings subclasses must expose a 'folder_path' property." - ) - - def fetch_folder_name(self): - return self.folder_name - - @property - def nodelogger(self): - auth = None - if self.user_settings: - auth = Auth(self.user_settings.owner) - self._logger_class = getattr( - self, - '_logger_class', - type( - '{0}NodeLogger'.format(self.config.short_name.capitalize()), - (logger.AddonNodeLogger,), - {'addon_short_name': self.config.short_name}, - ), - ) - return self._logger_class(node=self.owner, auth=auth) - - @property - def complete(self): - return bool( - self.has_auth - and self.external_account - and self.user_settings.verify_oauth_access( - node=self.owner, - external_account=self.external_account, - ) - ) - - @property - def configured(self): - return bool( - self.complete and (self.folder_id or self.folder_name or self.folder_path) - ) - - @property - def has_auth(self): - """Instance has an external account and *active* permission to use it""" - return bool(self.user_settings and self.user_settings.has_auth) and bool( - self.external_account - and self.user_settings.verify_oauth_access( - node=self.owner, external_account=self.external_account - ) - ) - - def clear_settings(self): - raise NotImplementedError( - "BaseOAuthNodeSettings subclasses must expose a 'clear_settings' method." - ) - - def set_auth(self, external_account, user, metadata=None, log=True): - """Connect the node addon to a user's external account. - - This method also adds the permission to use the account in the user's - addon settings. - """ - # tell the user's addon settings that this node is connected to it - user_settings = user.get_or_add_addon(self.oauth_provider.short_name) - user_settings.grant_oauth_access( - node=self.owner, - external_account=external_account, - metadata=metadata, # metadata can be passed in when forking - ) - user_settings.save() - - # update this instance - self.user_settings = user_settings - self.external_account = external_account - - if log: - self.nodelogger.log(action='node_authorized', save=True) - self.save() - - def deauthorize(self, auth=None, add_log=False): - """Remove authorization from this node. - - This method should be overridden for addon-specific behavior, - such as logging and clearing non-generalizable settings. - """ - self.clear_auth() - - def clear_auth(self): - """Disconnect the node settings from the user settings. - - This method does not remove the node's permission in the user's addon - settings. - """ - self.external_account = None - self.user_settings = None - self.save() - - def before_remove_contributor_message(self, node, removed): - """If contributor to be removed authorized this addon, warn that removing - will remove addon authorization. 
-        """
-        if self.has_auth and self.user_settings.owner == removed:
-            return (
-                u'The {addon} add-on for this {category} is authenticated by {name}. '
-                u'Removing this user will also remove write access to {addon} '
-                u'unless another contributor re-authenticates the add-on.'
-            ).format(
-                addon=self.config.full_name,
-                category=node.project_or_component,
-                name=removed.fullname,
-            )
-
-    # backwards compatibility
-    before_remove_contributor = before_remove_contributor_message
-
-    def after_remove_contributor(self, node, removed, auth=None):
-        """If removed contributor authorized this addon, remove addon authorization
-        from owner.
-        """
-        if self.user_settings and self.user_settings.owner == removed:
-            # Delete OAuth tokens
-            self.user_settings.oauth_grants[self.owner._id].pop(
-                self.external_account._id
-            )
-            self.user_settings.save()
-            self.clear_auth()
-            message = (
-                u'Because the {addon} add-on for {category} "{title}" was '
-                u'authenticated by {user}, authentication information has been deleted.'
-            ).format(
-                addon=self.config.full_name,
-                category=markupsafe.escape(node.category_display),
-                title=markupsafe.escape(node.title),
-                user=markupsafe.escape(removed.fullname),
-            )
-
-            if not auth or auth.user != removed:
-                url = node.web_url_for('node_addons')
-                message += (
-                    u' You can re-authenticate on the <u><a href="{url}">add-ons</a></u>'
-                    u' page.'
-                ).format(url=url)
-            #
-            return message
-
-    def after_fork(self, node, fork, user, save=True):
-        """After forking, copy user settings if the user is the one who authorized
-        the addon.
-
-        :return: the cloned settings
-        """
-        clone = super(BaseOAuthNodeSettings, self).after_fork(
-            node=node,
-            fork=fork,
-            user=user,
-            save=False,
-        )
-        if self.has_auth and self.user_settings.owner == user:
-            metadata = None
-            if self.complete:
-                try:
-                    metadata = self.user_settings.oauth_grants[node._id][
-                        self.external_account._id
-                    ]
-                except (KeyError, AttributeError):
-                    pass
-            clone.set_auth(self.external_account, user, metadata=metadata, log=False)
-        else:
-            clone.clear_settings()
-        if save:
-            clone.save()
-        return clone
-
-    def before_register_message(self, node, user):
-        """Return warning text to display if user auth will be copied to a
-        registration.
-        """
-        if self.has_auth:
-            return (
-                u'The contents of {addon} add-ons cannot be registered at this time; '
-                u'the {addon} add-on linked to this {category} will not be included '
-                u'as part of this registration.'
-            ).format(
-                addon=self.config.full_name,
-                category=node.project_or_component,
-            )
-
-    # backwards compatibility
-    before_register = before_register_message
-
-    def serialize_waterbutler_credentials(self):
-        raise NotImplementedError(
-            "BaseOAuthNodeSettings subclasses must implement a \
-            'serialize_waterbutler_credentials' method."
-        )
-
-    def serialize_waterbutler_settings(self):
-        raise NotImplementedError(
-            "BaseOAuthNodeSettings subclasses must implement a \
-            'serialize_waterbutler_settings' method."
- ) - - -class UserSettings(BaseOAuthUserSettings): - """Stores user-specific box information""" - - oauth_provider = Provider - serializer = charon_serializer.BoxSerializer - - def revoke_remote_oauth_access(self, external_account): - try: - # TODO: write client for box, stop using third-party lib - requests.request( - 'POST', - charon_settings.BOX_OAUTH_REVOKE_ENDPOINT, - params={ - 'client_id': charon_settings.BOX_KEY, - 'client_secret': charon_settings.BOX_SECRET, - 'token': external_account.oauth_key, - }, - ) - except requests.HTTPError: - pass - - -class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon): - oauth_provider = Provider - serializer = charon_serializer.BoxSerializer - - folder_id = models.TextField(null=True, blank=True) - folder_name = models.TextField(null=True, blank=True) - folder_path = models.TextField(null=True, blank=True) - user_settings = models.ForeignKey( - UserSettings, null=True, blank=True, on_delete=models.CASCADE - ) - - _api = None - - @property - def api(self): - """authenticated ExternalProvider instance""" - if self._api is None: - self._api = Provider(self.external_account) - return self._api - - @property - def display_name(self): - return '{0}: {1}'.format(self.config.full_name, self.folder_id) - - def fetch_full_folder_path(self): - return self.folder_path - - def get_folders(self, **kwargs): - folder_id = kwargs.get('folder_id') - if folder_id is None: - return [ - { - 'id': '0', - 'path': '/', - 'addon': 'box', - 'kind': 'folder', - 'name': '/ (Full Box)', - 'urls': { - # 'folders': node.api_url_for('box_folder_list', folderId=0), - 'folders': charon_serializer.api_v2_url( - 'nodes/{}/addons/box/folders/'.format(self.owner._id), - params={'id': '0'}, - ) - }, - } - ] - - try: - Provider(self.external_account).refresh_oauth_key() - oauth = OAuth2( - client_id=charon_settings.BOX_KEY, - client_secret=charon_settings.BOX_SECRET, - access_token=ensure_str(self.external_account.oauth_key), - ) - client = Client(oauth) - except BoxAPIException: - raise HttpResponseForbidden() - - try: - metadata = client.folder(folder_id).get() - except BoxAPIException: - raise HttpResponseNotFound() - except MaxRetryError: - raise HttpResponseBadRequest() - - folder_path = '/'.join( - [x['name'] for x in metadata['path_collection']['entries']] - + [metadata['name']] - ) - - return [ - { - 'addon': 'box', - 'kind': 'folder', - 'id': item['id'], - 'name': item['name'], - 'path': os.path.join(folder_path, item['name']).replace( - 'All Files', '' - ), - 'urls': { - 'folders': charon_serializer.api_v2_url( - 'nodes/{}/addons/box/folders/'.format(self.owner._id), - params={'id': item['id']}, - ) - }, - } - for item in metadata['item_collection']['entries'] - if item['type'] == 'folder' - ] - - def set_folder(self, folder_id, auth): - self.folder_id = str(folder_id) - self.folder_name, self.folder_path = self._folder_data(folder_id) - self.nodelogger.log(action='folder_selected', save=True) - - def _folder_data(self, folder_id): - # Split out from set_folder for ease of testing, due to - # outgoing requests. 
Should only be called by set_folder - try: - Provider(self.external_account).refresh_oauth_key(force=True) - except InvalidGrantError: - raise charon_utils.exceptions.InvalidAuthError() - try: - oauth = OAuth2( - client_id=charon_settings.BOX_KEY, - client_secret=charon_settings.BOX_SECRET, - access_token=ensure_str(self.external_account.oauth_key), - ) - client = Client(oauth) - folder_data = client.folder(self.folder_id).get() - except BoxAPIException: - raise charon_utils.exceptions.InvalidFolderError() - - folder_name = folder_data['name'].replace('All Files', '') or '/ (Full Box)' - folder_path = ( - '/'.join( - [ - x['name'] - for x in folder_data['path_collection']['entries'] - if x['name'] - ] - + [folder_data['name']] - ).replace('All Files', '') - or '/' - ) - - return folder_name, folder_path - - def clear_settings(self): - self.folder_id = None - self.folder_name = None - self.folder_path = None - - def deauthorize(self, auth=None, add_log=True): - """Remove user authorization from this node and log the event.""" - folder_id = self.folder_id - self.clear_settings() - - if add_log: - extra = {'folder_id': folder_id} - self.nodelogger.log(action='node_deauthorized', extra=extra, save=True) - - self.clear_auth() - - def serialize_waterbutler_credentials(self): - if not self.has_auth: - raise charon_utils.exceptions.AddonError('Addon is not authorized') - try: - Provider(self.external_account).refresh_oauth_key() - return {'token': self.external_account.oauth_key} - except BoxAPIException as error: - raise HttpResponse(error.message, status=error.status_code) - - def serialize_waterbutler_settings(self): - if self.folder_id is None: - raise charon_utils.exceptions.AddonError('Folder is not configured') - return {'folder': self.folder_id} - - def create_waterbutler_log(self, auth, action, metadata): - self.owner.add_log( - 'box_{0}'.format(action), - auth=auth, - params={ - 'path': metadata['materialized'], - 'project': self.owner.parent_id, - 'node': self.owner._id, - 'folder': self.folder_id, - 'urls': { - 'view': self.owner.web_url_for( - 'addon_view_or_download_file', - provider='box', - action='view', - path=metadata['path'], - ), - 'download': self.owner.web_url_for( - 'addon_view_or_download_file', - provider='box', - action='download', - path=metadata['path'], - ), - }, - }, - ) - - # #### Callback overrides ##### - def after_delete(self, user=None): - self.deauthorize(Auth(user=user), add_log=True) - self.save() - - def on_delete(self): - self.deauthorize(add_log=False) - self.save() - - - -#### - state = request.args.get('state') - - # make sure this is the same user that started the flow - if cached_credentials.get('state') != state: - raise PermissionsError('Request token does not match') - - try: - # Quirk: Similarly to the `oauth2/authorize` endpoint, the `oauth2/access_token` - # endpoint of Bitbucket would fail if a not-none or non-empty `redirect_uri` - # were provided in the body of the POST request. 
- if self.short_name in ADDONS_OAUTH_NO_REDIRECT: - redirect_uri = None - else: - redirect_uri = web_url_for( - 'oauth_callback', - service_name=self.short_name, - _absolute=True - ) - response = OAuth2Session( - self.client_id, - redirect_uri=redirect_uri, - ).fetch_token( - self.callback_url, - client_secret=self.client_secret, - code=request.args.get('code'), - ) - except (MissingTokenError, RequestsHTTPError): - raise HTTPError(http_status.HTTP_503_SERVICE_UNAVAILABLE) - # pre-set as many values as possible for the ``ExternalAccount`` - info = self._default_handle_callback(response) - # call the hook for subclasses to parse values from the response - info.update(self.handle_callback(response)) - - return self._set_external_account(user, info) - - def _set_external_account(self, user, info): - current_session = get_session() - self.account, created = ExternalAccount.objects.get_or_create( - provider=self.short_name, - provider_id=info['provider_id'], - ) - - # ensure that provider_name is correct - self.account.provider_name = self.name - # required - self.account.oauth_key = info['key'] - - # only for OAuth1 - self.account.oauth_secret = info.get('secret') - - # only for OAuth2 - self.account.expires_at = info.get('expires_at') - self.account.refresh_token = info.get('refresh_token') - self.account.date_last_refreshed = timezone.now() - - # additional information - self.account.display_name = info.get('display_name') - self.account.profile_url = info.get('profile_url') - - self.account.save() - - # add it to the user's list of ``ExternalAccounts`` - if not user.external_accounts.filter(id=self.account.id).exists(): - user.external_accounts.add(self.account) - user.save() - - if self.short_name in current_session.get('oauth_states', {}): - del current_session['oauth_states'][self.short_name] - current_session.save() - - return True - - def _default_handle_callback(self, data): - """Parse as much out of the key exchange's response as possible. - - This should not be over-ridden in subclasses. - """ - if self._oauth_version == OAUTH1: - key = data.get('oauth_token') - secret = data.get('oauth_token_secret') - - values = {} - - if key: - values['key'] = key - if secret: - values['secret'] = secret - - return values - - elif self._oauth_version == OAUTH2: - key = data.get('access_token') - refresh_token = data.get('refresh_token') - expires_at = data.get('expires_at') - scopes = data.get('scope') - - values = {} - - if key: - values['key'] = key - if scopes: - values['scope'] = scopes - if refresh_token: - values['refresh_token'] = refresh_token - if expires_at: - values['expires_at'] = dt.datetime.fromtimestamp( - float(expires_at) - ) - - return values diff --git a/charon/models.py b/charon/models.py deleted file mode 100644 index 12580e8e..00000000 --- a/charon/models.py +++ /dev/null @@ -1,266 +0,0 @@ -# stub objects representing OSF models (mostly) -# some are helper classes - -import json -import logging - -logger = logging.getLogger(__name__) - -DB = None -DB_ROOT = 'db' -with open('{}/charon.json'.format(DB_ROOT)) as json_file: - DB = json.load(json_file) - - -class Auth(object): - def __init__(self, user): - # called in: views - # return User object representing the logged in user implied by the instatiation - # creds - self.user = user - return - - # called in: views - # have valid credentials been passed and a proper user identified? 
- @property - def logged_in(self): - return self.user is not None - - -class User(object): - def __init__(self, _id): - self._id = _id - self._props = DB['users'].get(_id, None) - if self._props is not None: - self.fullname = self._props['fullname'] - self._our_external_accounts = [ - ExternalAccount(_id=x) for x in self._props['external_accounts'] - ] - return - - # called in: views - # returns a user_settings object for the addon - def get_addon(self, addon_name): - return UserAddon(self, addon_name) - - @property - def external_accounts(self): - return ExternalAccountProxy(self._our_external_accounts) - - -class Node(object): - def __init__(self, _id, title): - # called in: serializer - self._id = _id - self._props = DB['nodes'].get(_id, None) - self.title = title - return - - # called in: views - # returns a node_settings object for the addon - def get_addon(self, addon_name): - return NodeAddon(self, addon_name) - - # called in: views - # returns boolean indicateing if User object has `perm` access to the node - def has_permission(self, user, perm): - if DB['permissions'].get(self._id, False): - return DB['permissions'][self._id].get(user._id, False) - - return False - - # called in: serializer - # maybe a property or attribute? - def url(self): - return '' - - # called in: serializer - # auth is an Auth object, but is created in a weird place - def can_view(self, auth): - return '' - - # called in: serializer - # return API url for this node and given endpoint action - def api_url_for(self, endpoint): - return '' - - # called in: serializer - # return API url for this node and given endpoint action - def web_url_for(self, endpoint): - return '' - - -# called in: views -class ExternalAccount(object): - def __init__(self, _id): - # called in: serializer - self._id = _id - self._props = DB['external_accounts'].get(_id, None) - - if self._props is not None: - self.provider_id = self._props['provider_id'] - self.provider_name = self._props['provider_name'] - self.provider = self._props['provider'] - self.display_name = self._props['display_name'] - self.profile_url = self._props['profile_url'] - - return - - # called in: views - @classmethod - def load(cls, external_account_id): - return cls(external_account_id) - - -class ExternalAccountProxy(object): - def __init__(self, external_accounts): - self.external_accounts = external_accounts - - def all(self): - return self.external_accounts - - def filter(self, _id): - filtered = [e for e in self.external_accounts if e._id == _id] - return ExternalAccountProxy(filtered) - - def exists(self): - return len(self.external_accounts) > 0 - - -class UserAddon(object): - def __init__(self, parent, addon_name): - logger.error( - '$$$ UserAddon.__init__ -- parent:({}) addon_name:({})'.format( - parent, addon_name - ) - ) - - self.parent = parent - self.addon_name = addon_name - - if self.parent is not None: - user_addons_props = DB['user_addons'].get(parent._id, None) - self._props = user_addons_props.get(addon_name, None) - self.fake_name = self._props.get('fake_name', None) - self.external_accounts = self.parent.external_accounts - - # called in: serializer - # oauth_provider has subproperty short_name - self.oauth_provider = self._props['oauth_provider'] - - return - - # called in: views, serializer - # TODO: should be a property? 
- # no, calls .external_accounts on user - # return a list or queryset like of external_accounts - # .filter() is called on this in views - # .all() is called on this in serializer - # def external_accounts(self): - # return self.parent.external_accounts - - # called in: views, serializer - # return User object related to this UserAddon, i think - # serializer accesses ._primary_key attr on this object - # serializer accesses .fullname attr on this object - # TODO: property? - def owner(self): - return self.parent - - # called in: serializer - # not sure if retval is a list or queryset - def get_attached_nodes(self, external_account): - return [] - - # called in: serializer - # property or attribute - def has_auth(self): - return False - - -class NodeAddon(object): - def __init__(self, parent, addon_name): - logger.error( - '$$$ NodeAddon.__init__ -- parent:({}) addon_name:({})'.format( - parent, addon_name - ) - ) - - self.parent = parent - self.addon_name = addon_name - - if self.parent is not None: - node_addons_props = DB['node_addons'].get(parent._id, None) - self._props = node_addons_props.get(addon_name, None) - self.fake_name = self._props.get('fake_name', None) - self.folder_id = self._props.get('folder_id', None) - - return - - # called in: views - # set root folder id for nodeAddon - def set_folder(self, folder_id, auth): - self.folder_id = folder_id - return - - # called in: views - # return list of folders under folder with id=folder_id - def get_folders(self, folder_id): - return [] - - # called in: views - # return string representing path of root folder - # TODO: should be a property? - def folder_path(self): - return '' - - # called in: views - # ??? - # external_account should be an ExternalAccount object - # owner is a User object, i think - def set_auth(self, external_account, owner): - # user_settings = owner.get_addon(self.addon_name) - # self.user_settings = user_settings - return - - # called in: views - # save to store - def save(self): - # save current state of DB? 
- with open('{}/charon.json'.format(DB_ROOT), "w") as json_file: - json.dump(DB, json_file) - return - - # called in: views - # auth is an Auth object - def deauthorize(self, auth): - return - - # called in: serializer - # property or attribute - def has_auth(self): - return False - - # called in: serializer - # return linked ExternalAccount object - # TODO: probably a property/attr - def external_account(self): - return {} - - # called in: serializer - # either a property or attribute - # returns UserAddon object related to this NodeAddon - def user_settings(self): - return User('p4r65').get_addon(self.addon_name) - - # called in: serializer - # returns "full" path for folder - # not sure how this differs from folder_path - def fetch_full_folder_path(self): - return '' - - # called in: serializer - # property or attribute - # i'm guessing this just proxies to self.user_settings.owner - def owner(self): - return self.user_settings().owner diff --git a/charon/reference.py b/charon/reference.py deleted file mode 100644 index 7ac80b17..00000000 --- a/charon/reference.py +++ /dev/null @@ -1,199 +0,0 @@ -# ========== CODE BEING REIMPLEMENTED ========== - -# from website.oauth.views -# @must_be_logged_in -# def oauth_connect(service_name, auth): -# service = get_service(service_name) - -# return redirect(service.auth_url) - -# from website.oauth.views -# @must_be_logged_in -# def oauth_callback(service_name, auth): -# user = auth.user -# provider = get_service(service_name) - -# # Retrieve permanent credentials from provider -# if not provider.auth_callback(user=user): -# return {} - -# if provider.account and not user.external_accounts.filter(id=provider.account.id).exists(): -# user.external_accounts.add(provider.account) -# user.save() - -# oauth_complete.send(provider, account=provider.account, user=user) - -# return {} - -# from addons.models.base -# @oauth_complete.connect -# def oauth_complete(provider, account, user): -# if not user or not account: -# return -# user.add_addon(account.provider) -# user.save() - - -# @collect_auth -# def get_auth(auth, **kwargs): -# cas_resp = None -# # Central Authentication Server OAuth Bearer Token -# authorization = request.headers.get('Authorization') -# if authorization and authorization.startswith('Bearer '): -# client = cas.get_client() -# try: -# access_token = cas.parse_auth_header(authorization) -# cas_resp = client.profile(access_token) -# except cas.CasError as err: -# sentry.log_exception() -# # NOTE: We assume that the request is an AJAX request -# return json_renderer(err) -# if cas_resp.authenticated and not getattr(auth, 'user'): -# auth.user = OSFUser.load(cas_resp.user) - -# try: -# data = jwt.decode( -# jwe.decrypt(request.args.get('payload', '').encode('utf-8'), WATERBUTLER_JWE_KEY), -# settings.WATERBUTLER_JWT_SECRET, -# options={'require_exp': True}, -# algorithm=settings.WATERBUTLER_JWT_ALGORITHM -# )['data'] -# except (jwt.InvalidTokenError, KeyError) as err: -# sentry.log_message(str(err)) -# raise HTTPError(http_status.HTTP_403_FORBIDDEN) - -# if not auth.user: -# auth.user = OSFUser.from_cookie(data.get('cookie', '')) - -# try: -# action = data['action'] -# node_id = data['nid'] -# provider_name = data['provider'] -# except KeyError: -# raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - -# node = AbstractNode.load(node_id) or Preprint.load(node_id) -# if node and node.is_deleted: -# raise HTTPError(http_status.HTTP_410_GONE) -# elif not node: -# raise HTTPError(http_status.HTTP_404_NOT_FOUND) - -# check_access(node, auth, 
action, cas_resp) -# provider_settings = None -# if hasattr(node, 'get_addon'): -# provider_settings = node.get_addon(provider_name) -# if not provider_settings: -# raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - -# path = data.get('path') -# credentials = None -# waterbutler_settings = None -# fileversion = None -# if provider_name == 'osfstorage': -# if path: -# file_id = path.strip('/') -# # check to see if this is a file or a folder -# filenode = OsfStorageFileNode.load(path.strip('/')) -# if filenode and filenode.is_file: -# # default to most recent version if none is provided in the response -# version = int(data['version']) if data.get('version') else filenode.versions.count() -# try: -# fileversion = FileVersion.objects.filter( -# basefilenode___id=file_id, -# identifier=version -# ).select_related('region').get() -# except FileVersion.DoesNotExist: -# raise HTTPError(http_status.HTTP_400_BAD_REQUEST) -# if auth.user: -# # mark fileversion as seen -# FileVersionUserMetadata.objects.get_or_create(user=auth.user, file_version=fileversion) -# if not node.is_contributor_or_group_member(auth.user): -# from_mfr = download_is_from_mfr(request, payload=data) -# # version index is 0 based -# version_index = version - 1 -# if action == 'render': -# enqueue_update_analytics(node, filenode, version_index, 'view') -# elif action == 'download' and not from_mfr: -# enqueue_update_analytics(node, filenode, version_index, 'download') -# if waffle.switch_is_active(features.ELASTICSEARCH_METRICS): -# if isinstance(node, Preprint): -# metric_class = get_metric_class_for_action(action, from_mfr=from_mfr) -# if metric_class: -# try: -# metric_class.record_for_preprint( -# preprint=node, -# user=auth.user, -# version=fileversion.identifier if fileversion else None, -# path=path, -# ) -# except es_exceptions.ConnectionError: -# log_exception() -# if fileversion and provider_settings: -# region = fileversion.region -# credentials = region.waterbutler_credentials -# waterbutler_settings = fileversion.serialize_waterbutler_settings( -# node_id=provider_settings.owner._id, -# root_id=provider_settings.root_node._id, -# ) -# # If they haven't been set by version region, use the NodeSettings or Preprint directly -# if not (credentials and waterbutler_settings): -# credentials = node.serialize_waterbutler_credentials(provider_name) -# waterbutler_settings = node.serialize_waterbutler_settings(provider_name) - -# if isinstance(credentials.get('token'), bytes): -# credentials['token'] = credentials.get('token').decode() - -# return {'payload': jwe.encrypt(jwt.encode({ -# 'exp': timezone.now() + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION), -# 'data': { -# 'auth': make_auth(auth.user), # A waterbutler auth dict not an Auth object -# 'credentials': credentials, -# 'settings': waterbutler_settings, -# 'callback_url': node.api_url_for( -# ('create_waterbutler_log' if not getattr(node, 'is_registration', False) else 'registration_callbacks'), -# _absolute=True, -# _internal=True -# ) -# } -# }, settings.WATERBUTLER_JWT_SECRET, algorithm=settings.WATERBUTLER_JWT_ALGORITHM), WATERBUTLER_JWE_KEY).decode()} - -# from: website.project.decorators -# def must_have_addon(addon_name, model): -# """Decorator factory that ensures that a given addon has been added to -# the target node. The decorated function will throw a 404 if the required -# addon is not found. Must be applied after a decorator that adds `node` and -# `project` to the target function's keyword arguments, such as -# `must_be_contributor. 
- -# :param str addon_name: Name of addon -# :param str model: Name of model -# :returns: Decorator function - -# """ -# def wrapper(func): - -# @functools.wraps(func) -# @collect_auth -# def wrapped(*args, **kwargs): -# if model == 'node': -# _inject_nodes(kwargs) -# owner = kwargs['node'] -# elif model == 'user': -# auth = kwargs.get('auth') -# owner = auth.user if auth else None -# if owner is None: -# raise HTTPError(http_status.HTTP_401_UNAUTHORIZED) -# else: -# raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - -# addon = owner.get_addon(addon_name) -# if addon is None: -# raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - -# kwargs['{0}_addon'.format(model)] = addon - -# return func(*args, **kwargs) - -# return wrapped - -# return wrapper diff --git a/charon/serializer.py b/charon/serializer.py deleted file mode 100644 index 0097eb50..00000000 --- a/charon/serializer.py +++ /dev/null @@ -1,313 +0,0 @@ -import abc -import logging - -# from boxsdk import Client, OAuth2 -# from boxsdk.exception import BoxAPIException - -# from . import settings - -logger = logging.getLogger(__name__) - - -# called in: serializer -# there is also a separate web_url_for() method called on a node object -# not sure what the context of this one is, need to look up -def web_url_for(): - return '' - - -# called in: serializer -# return link to static osf endpoint, specifically oauth_endpoint+box -def api_url_for(): - return '' - - -# may have been added for models inline attempt -def api_v2_url(): - return '' - - -class BoxSerializer(object): - # explicit in addons.base.serializer.AddonSerializer - __metaclass__ = abc.ABCMeta - - # abstract in addons.base.serializer.AddonSerializer - # explicit in addons.box.serializer.BoxSerializer - addon_short_name = 'box' - - # from addons.base.serializer.StorageAddonSerializer - REQUIRED_URLS = ( - 'auth', - 'importAuth', - 'folders', - 'files', - 'config', - 'deauthorize', - 'accounts', - ) - - # explicit in addons.base.serializer.AddonSerializer - # copy-over-comment: TODO take addon_node_settings, addon_user_settings - def __init__(self, node_settings=None, user_settings=None): - self.node_settings = node_settings - self.user_settings = user_settings - - # abstract in addons.base.serializer.AddonSerializer - # explicit in addons.base.serializer.OAuthAddonSerializer - @property - def credentials_owner(self): - return self.user_settings.owner if self.user_settings else None - - # abstract in addons.base.serializer.AddonSerializer - # explicit in addons.base.serializer.OAuthAddonSerializer - @property - def user_is_owner(self): - if self.user_settings is None or self.node_settings is None: - return False - - user_accounts = self.user_settings.external_accounts.all() - return bool( - self.node_settings.has_auth - and self.node_settings.external_account in user_accounts - ) - - # abstract in addons.base.serializer.AddonSerializer - # explicit in addons.base.serializer.OAuthAddonSerializer - @property - def serialized_urls(self): - ret = self.addon_serialized_urls - # Make sure developer returns set of needed urls - for url in self.REQUIRED_URLS: - msg = "addon_serialized_urls must include key '{0}'".format(url) - assert url in ret, msg - - # ret.update({'settings': web_url_for('user_addons')}) - ret.update({'settings': 'https://localhost:5000/settings/addons/'}) - - return ret - - # from addons.base.serializer.OAuthAddonSerializer - @property - def serialized_accounts(self): - return [ - self.serialize_account(each) - for each in 
self.user_settings.external_accounts.all() - ] - - # from addons.base.serializer.OAuthAddonSerializer - @property - def serialized_user_settings(self): - # inlined call addons.base.serializer.AddonSerializer.serialized_user_settings - retval = {} - retval['accounts'] = [] - if self.user_settings: - retval['accounts'] = self.serialized_accounts - - return retval - - # explicit in addons.base.serializer.AddonSerializer - @property - def serialized_node_settings(self): - result = { - 'nodeHasAuth': self.node_settings.has_auth, - 'userIsOwner': self.user_is_owner, - 'urls': self.serialized_urls, - } - - if self.user_settings: - result['userHasAuth'] = self.user_settings.has_auth - else: - result['userHasAuth'] = False - - if self.node_settings.has_auth: - owner = self.credentials_owner - if owner: - result['urls']['owner'] = web_url_for( - 'profile_view_id', uid=owner._primary_key - ) - result['ownerName'] = owner.fullname - return result - - # from addons.base.serializer.OAuthAddonSerializer - def serialize_account(self, external_account): - if external_account is None: - return None - return { - 'id': external_account._id, - 'provider_id': external_account.provider_id, - 'provider_name': external_account.provider_name, - 'provider_short_name': external_account.provider, - 'display_name': external_account.display_name, - 'profile_url': external_account.profile_url, - 'nodes': [ - self.serialize_granted_node(node) - for node in self.user_settings.get_attached_nodes( - external_account=external_account - ) - ], - } - - # from addons.base.serializer.OAuthAddonSerializer - # @collect_auth - def serialize_granted_node(self, node, auth): - # inline @collect_auth decorator (sortof) - # this is weird, serialize_granted_node is called by serialize_account, which - # is called by the serialized_accounts property. But why are we fucking w/ the - # request this deep into the serializer? - # serialized_accounts property is called by serialized_user_settings property - # serialized_user_settings is called by addons.views.generic_views.account_list - # - # i think this is bogus, and an auth object should be passed in from outside - # - # request = None # this is a flask request object, what is it doing here? - # kwargs['auth'] = Auth.from_kwargs(request.args.to_dict(), kwargs) - - node_settings = node.get_addon(self.user_settings.oauth_provider.short_name) - serializer = node_settings.serializer(node_settings=node_settings) - urls = serializer.addon_serialized_urls - urls['view'] = node.url - - return { - 'id': node._id, - 'title': node.title if node.can_view(auth) else None, - 'urls': urls, - } - - # from addons.base.serializer.StorageAddonSerializer - def serialize_settings(self, node_settings, current_user, client=None): - # TODO: is this legit? 
Original code doesn't make sense - self.node_settings = node_settings - self.user_settings = current_user.get_addon('box') - - logger.info( - '¢¢¢¢ BoxSerializer.serialize_settings: node_settings:({}) ' - 'current_user:({}) client:({})'.format(node_settings, current_user, client) - ) - - self.node_settings = node_settings - user_settings = node_settings.user_settings() - current_user_settings = current_user.get_addon(self.addon_short_name) - logger.info( - '¢¢¢¢ BoxSerializer.serialize_settings: user_settings-from_node:({}) ' - 'current_user_settings-from_user:({})'.format( - user_settings, current_user_settings - ) - ) - - user_is_owner = ( - user_settings is not None and user_settings.owner() == current_user - ) - logger.info( - '¢¢¢¢ BoxSerializer.serialize_settings: ' - 'user_is_owner:({})'.format(user_is_owner) - ) - - valid_credentials = self.credentials_are_valid(user_settings, client) - logger.info( - '¢¢¢¢ BoxSerializer.serialize_settings: ' - 'valid_credentials:({})'.format(valid_credentials) - ) - - user_has_auth = ( - current_user_settings is not None and current_user_settings.has_auth - ) - logger.info( - '¢¢¢¢ BoxSerializer.serialize_settings: ' - 'user_has_auth:({})'.format(user_has_auth) - ) - - # result = { - # 'userIsOwner': user_is_owner, - # 'nodeHasAuth': node_settings.has_auth, - # 'urls': self.serialized_urls, - # 'validCredentials': valid_credentials, - # 'userHasAuth': current_user_settings is not None - # and current_user_settings.has_auth, - # } - result = { - 'userIsOwner': True, - 'nodeHasAuth': True, - 'urls': self.serialized_urls, - 'validCredentials': valid_credentials, - 'userHasAuth': True, - } - - if node_settings.has_auth: - # Add owner's profile URL - # result['urls']['owner'] = web_url_for( - # 'profile_view_id', uid=user_settings.owner._id - # ) - result['urls']['owner'] = ('https://localhost:5000/profile/p4r65',) - - # result['ownerName'] = user_settings.owner.fullname - result['ownerName'] = user_settings.owner().fullname - - # Show available folders - if node_settings.folder_id is None: - result['folder'] = {'name': None, 'path': None} - elif valid_credentials: - result['folder'] = self.serialized_folder(node_settings) - return result - - # from addons.box.serializer.BoxSerializer - def credentials_are_valid(self, user_settings, client): - # from addons.box.models import Provider as Box # Avoid circular import - - # if self.node_settings.has_auth: - # if Box(self.node_settings.external_account).refresh_oauth_key(): - # return True - - # if user_settings: - # oauth = OAuth2( - # client_id=settings.BOX_KEY, - # client_secret=settings.BOX_SECRET, - # access_token=user_settings.external_accounts[0].oauth_key, - # ) - # client = client or Client(oauth) - # try: - # client.user() - # except (BoxAPIException, IndexError): - # return False - return True - - # from addons.box.serializer.BoxSerializer - def serialized_folder(self, node_settings): - # path = node_settings.fetch_full_folder_path() - path = '/' - return { - 'path': path, - 'name': path.replace('All Files', '', 1) if path != '/' else '/ (Full Box)', - } - - # abstract in addons.base.serializer.AddonSerializer - # explicit in addons.box.serializer.BoxSerializer - @property - def addon_serialized_urls(self): - logger.error( - '§§§§ addon_serialized_urls self.node_settings:({})'.format( - self.node_settings - ) - ) - node = self.node_settings.parent - logger.error('§§§§ addon_serialized_urls node:({})'.format(node)) - # guid = node.guids.first()._id - guid = node._id - CHARON_ROOT = 
'http://localhost:8011/charon' - return { - # 'auth': api_url_for('oauth_connect', service_name='box'), - 'auth': '{}/box/connect'.format(CHARON_ROOT), - # 'importAuth': node.api_url_for('box_import_auth'), - 'importAuth': '{}/projects/{}/box/user_auth/'.format(CHARON_ROOT, guid), - # 'files': node.web_url_for('collect_file_trees'), - 'files': 'https://localhost:5000/project/dve82/files/', - # 'folders': node.api_url_for('box_folder_list'), - 'folders': '{}/projects/{}/box/folders/'.format(CHARON_ROOT, guid), - # 'config': node.api_url_for('box_set_config'), - 'config': '{}/projects/{}/box/settings/'.format(CHARON_ROOT, guid), - # 'configPUT': node.api_url_for('box_set_config'), - 'configPUT': '{}/projects/{}/box/settings/'.format(CHARON_ROOT, guid), - # 'deauthorize': node.api_url_for('box_deauthorize_node'), - 'deauthorize': '{}/projects/{}/box/user_auth/'.format(CHARON_ROOT, guid), - # 'accounts': node.api_url_for('box_account_list'), - 'accounts': '{}/settings/box/accounts/'.format(CHARON_ROOT), - } diff --git a/charon/settings.py b/charon/settings.py deleted file mode 100644 index e6ca2cfd..00000000 --- a/charon/settings.py +++ /dev/null @@ -1,17 +0,0 @@ -SENSITIVE_DATA_SALT = 'yusaltydough' -SENSITIVE_DATA_SECRET = 'TrainglesAre5Squares' - -WATERBUTLER_JWE_SALT = 'yusaltydough' -WATERBUTLER_JWE_SECRET = 'CirclesAre4Squares' - -WATERBUTLER_JWT_SECRET = 'ILiekTrianglesALot' -WATERBUTLER_JWT_ALGORITHM = 'HS256' -WATERBUTLER_JWT_EXPIRATION = 15 - -BOX_KEY = '' -BOX_SECRET = '' - -ADDONS_AVAILABLE_DICT = [] - -CORS_ORIGIN_ALLOW_ALL = True -CORS_ALLOW_ALL_ORIGINS = True diff --git a/charon/templates/charon/callback.html b/charon/templates/charon/callback.html deleted file mode 100644 index aba16cf8..00000000 --- a/charon/templates/charon/callback.html +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - OAuth Complete - - -
-
-    Charon user id: {{ user_id }}
-
- - diff --git a/charon/tests.py b/charon/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/charon/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/charon/urls.py b/charon/urls.py deleted file mode 100644 index 2f03f5ff..00000000 --- a/charon/urls.py +++ /dev/null @@ -1,58 +0,0 @@ -from django.urls import path - -from . import views - -urlpatterns = [ - path('', views.index, name='index'), - path('box/connect', views.connect_box, name='connect_box'), - path('box/callback', views.callback_box, name='callback_box'), - # path('box/import_auth', views.import_auth_box, name='import_auth_box'), - # path( - # 'box/get_root_folder', - # views.get_root_folder_box, - # name='get_root_folder_box', - # ), - # path( - # 'box/get_folder_listing', - # views.get_folder_listing_box, - # name='get_folder_listing_box', - # ), - # GET is $addon_account_list - path( - 'settings/box/accounts/', - views.box_account_list, - name='box_account_list', - ), - # GET is $addon_get_config - # PUT is $addon_set_config - path( - 'projects//box/settings/', - views.box_project_config, - name='box_project_config', - ), - # PUT is $addon_import_auth - # DELETE is $addon_deauthorize_node - path( - 'projects//box/user_auth/', - views.box_user_auth, - name='box_user_auth', - ), - # GET is $addon_folders_list - path( - 'projects//box/folders/', - views.box_folder_list, - name='box_folder_list', - ), - # not specified in addons.base.views - # from website.routes, view is website.project.views.node.node_choose_addons - # which calls .config_addons() on node model object - # .config_addons() is defined in AddonModelMixin - path( - 'projects//settings/addons/', - views.get_project_addons, - name='get_project_addons', - ), - # TODO: osf.addons.box.serializer also refers to `collect_file_trees` endpoint, but - # it is not box-specific. Do we need to add that? Gut feeling is it might be later - # files addon work -] diff --git a/charon/utils.py b/charon/utils.py deleted file mode 100644 index 9be52258..00000000 --- a/charon/utils.py +++ /dev/null @@ -1,164 +0,0 @@ -import datetime -import logging - -import jwe -import jwt -import requests -from django.utils import timezone - -from . 
import settings - -logger = logging.getLogger(__name__) -WATERBUTLER_JWE_KEY = jwe.kdf( - settings.WATERBUTLER_JWE_SECRET.encode('utf-8'), - settings.WATERBUTLER_JWE_SALT.encode('utf-8'), -) - - -# TODO: not sure i love accessing request outside of views -def _get_user(request): - """Take django request object, extract auth properties, and get user identified - by these properties - """ - headers = {'Content-type': 'application/json'} - if "Authorization" in request.headers: - headers['Authorization'] = request.headers['Authorization'] - cookies = request.COOKIES - logger.error( - '¶¶¶¶ in utils._get_user headers:({}) cookies:({})'.format( - dict(request.headers), cookies - ) - ) - resp = requests.get( - 'http://localhost:5000/api/v1/user/auth/', - headers=headers, - cookies=cookies, - ) - logger.error('¶¶¶¶ in utils._get_user resp:({})'.format(resp)) - - if resp.status_code != 200: - logger.error( - '¶¶¶¶ in utils._get_user got bad response data from osf: code:({}) ' - 'content:({})'.format(resp.status_code, resp.content[0:500]) - ) - raise Exception('Couldnt get user properties for current user') - - resp_data = resp.json() - logger.info('¶¶¶¶ in utils._get_user resp-data:({})'.format(resp_data)) - user_id = resp_data['data']['user_id'] - return {'id': user_id} - - -def _get_node_by_guid(request, node_id): - """Take django request object, extract auth properties, and using these auth - properties get user identified by these properties - """ - logger.info( - '¶¶¶¶ in utils._get_node_by_guid headers:({}) cookies:({})'.format( - dict(request.headers), request.COOKIES - ) - ) - headers = {'Content-type': 'application/json'} - if "Authorization" in request.headers: - headers['Authorization'] = request.headers['Authorization'] - cookies = request.COOKIES - url = 'http://localhost:8000/v2/nodes/{}/'.format(node_id) - logger.info('¶¶¶¶ in utils._get_node_by_guid url:({})'.format(url)) - resp = requests.get( - url, - headers=headers, - cookies=cookies, - ) - logger.info('¶¶¶¶ in utils._get_node_by_guid resp:({})'.format(resp)) - - if resp.status_code != 200: - logger.error( - '¶¶¶¶ in utils._get_node_by_guid@ got bad response data from osf: ' - 'code:({}) content:({})'.format(resp.status_code, resp.content[0:500]) - ) - raise Exception( - 'Couldnt get node properties for node:({}) for current user'.format(node_id) - ) - - resp_data = resp.json() - logger.info('¶¶¶¶ in utils._get_node_by_guid resp-data:({})'.format(resp_data)) - - props = { - '_id': node_id, - 'title': resp_data['data']['attributes']['title'], - } - return props - - -def _lookup_creds_and_settings_for(user_id, node_props): - credentials, settings = None, None - return { - 'credentials': credentials, - 'settings': settings, - } - - -def _make_auth(user): - if user is not None: - return { - 'id': user._id, - 'email': '{}@osf.io'.format(user._id), - 'name': user.fullname, - } - return {} - - -def _make_osf_callback_url(node_props): - callback_url = settings.OSF_CALLBACK_BASE - - # _absolute=True, - # _internal=True - - if node_props.is_registration: - callback_url += 'registration_callbacks' - else: - callback_url += 'create_waterbutler_log' - - return callback_url - - -def _make_wb_auth_payload(user, creds_and_settings, callback_url): - return { - 'payload': jwe.encrypt( - jwt.encode( - { - 'exp': timezone.now() - + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION), - 'data': { - 'auth': _make_auth( - user - ), # A waterbutler auth dict not an Auth object - 'credentials': creds_and_settings['credentials'], - 
'settings': creds_and_settings['settings'], - 'callback_url': callback_url, - }, - }, - settings.WATERBUTLER_JWT_SECRET, - algorithm=settings.WATERBUTLER_JWT_ALGORITHM, - ), - WATERBUTLER_JWE_KEY, - ).decode() - } - - -class PermissionsError(Exception): - """Raised if an action cannot be performed due to insufficient permissions""" - - pass - - -class AddonError(Exception): - pass - - -class InvalidFolderError(AddonError): - pass - - -class InvalidAuthError(AddonError): - pass diff --git a/charon/views.py b/charon/views.py deleted file mode 100644 index 4f01396e..00000000 --- a/charon/views.py +++ /dev/null @@ -1,412 +0,0 @@ -import json -import logging - -from django.http import ( - HttpResponse, - HttpResponseBadRequest, - HttpResponseForbidden, - JsonResponse, -) -from django.shortcuts import redirect -from django.template import loader - -from . import models, serializer, utils - -logger = logging.getLogger(__name__) - -CHARON_ROOT = 'http://localhost:8011/charon' -API_ROOT = 'http://localhost:8000/v2' - -# ========== VIEWS ========== - - -def index(request): - return HttpResponse( - "Hello, world. Welcome to the continental, rated two stars on tripadvisor." - ) - - -# pretend to connect to box, but we lie -def connect_box(request): - logger.error('@@@ got request for connect_box') - logger.error('@@@ request ib:({})'.format(request)) - - # user = utils._get_user(request) - # auth_url_base = 'https://www.box.com/api/oauth2/authorize' - # callback_url = 'https://www.box.com/api/oauth2/token' - - # return HttpResponse("You tried to box, but box we didn't.") - response = redirect(callback_box) - response['Cross-Origin-Opener-Policy'] = 'unsafe-none' - return response - - -# pretend like we were called back from box, but we lie -def callback_box(request): - logger.error('@@@ got request for callback_box') - logger.error('@@@ request ib:({})'.format(request)) - logger.error('@@@ headers are:({})'.format(request.headers)) - template = loader.get_template('charon/callback.html') - user = utils._get_user(request) - context = {'user_id': user['id'] if user else '*no user id*'} - return HttpResponse( - template.render(context, request), - headers={'Cross-Origin-Opener-Policy': 'unsafe-none'}, - ) - - -def box_account_list(request): - """ - from addons.views.generic_views.account_list - box versions currys above with args ('box', BoxSerializer) - - impl based off of addons.base.generic_views._account_list - @must_be_logged_in decorator injects `auth` into call - """ - - # must_be_logged_in impl inlined - # auth = Auth.from_kwargs(request.args.to_dict(), kwargs) - auth = _get_auth_from_request(request) - if not auth.logged_in: - return redirect(cas_get_login_url(request.url)) - - user_settings = auth.user.get_addon('box') - our_serializer = serializer.BoxSerializer(user_settings=user_settings) - return JsonResponse(our_serializer.serialized_user_settings) - - -def box_project_config(request, project_guid): - if request.method == 'GET': - return _box_get_config(request, project_guid) - elif request.method == 'PUT': - return _box_set_config(request, project_guid) - - return HttpResponse('Method Not Allowed', status=405) - - -def box_user_auth(request, project_guid): - if request.method == 'PUT': - return _box_import_auth(request, project_guid) - elif request.method == 'DELETE': - return _box_deauthorize_node(request, project_guid) - - return HttpResponse('Method Not Allowed', status=405) - - -def box_folder_list(request, project_guid): - """ - based off of addons.box.views.box_folder_list - - 
*DOESN'T impl or curry generic_views.folder_list or _folder_list* - - impl based off of addons.box.views.box_folder_list - - inlined decorators from website.project.decorators: - @must_have_addon('box', 'node') decorator injects node_addon - @must_be_addon_authorizer('box') decorator does ??? - - Returns all the subsequent folders under the folder id passed. - """ - # TODO: how exactly is this different from generic_views.folder_list curried method? - # inflate node - node = _get_node_by_guid(request, project_guid) - addon_name = 'box' - node_addon = _get_node_addon_for_node(node, addon_name) - folder_id = request.GET.get('folder_id', None) - blef = node_addon.get_folders(folder_id=folder_id) - - # TODO: fixture removal - after secrets are stored - # TODO: fixture removal! - blef.append( - { - "id": "0", - "path": "/", - "addon": "box", - "kind": "folder", - "name": "/ (Full Box)", - "urls": { - # "folders": '{}/nodes/{}/addons/box/folders/?id=0'.format( - # API_ROOT, project_guid - # ), - "folders": '{}/projects/{}/box/folders/?id=0'.format( - CHARON_ROOT, project_guid - ), - }, - } - ) - logger.error('%%%% gonkus: blef ib:({})'.format(blef)) - return JsonResponse(blef, safe=False) - - -# from website.routes, view is website.project.views.node.node_choose_addons -# which calls .config_addons() on node model object -# .config_addons() is defined in AddonModelMixin -def get_project_addons(request, project_guid): - return JsonResponse(['box'], safe=False) - - -def _box_get_config(request, project_guid): - """ - from addons.views.generic_views.get_config - box versions currys above with args ('box', BoxSerializer) - - impl based off of addons.base.generic_views._get_config - @must_be_logged_in decorator injects `auth` into call - @must_have_addon('box', 'node') decorator does ??? - @must_be_valid_project decorator does ??? - @must_have_permission('WRITE') decorator does ??? - - _get_config docstring - API that returns the serialized node settings. - """ - - logger.info('>>> _box_get_config - alpha:({})'.format(None)) - - # auth was injected by @must_be_logged_in - auth = _get_auth_from_request(request) - - # node_addon injected by @must_have_addon('box', 'node') - node = _get_node_by_guid(request, project_guid) - - addon_name = 'box' - node_addon = _get_node_addon_for_node(node, addon_name) - - return JsonResponse( - {'result': serializer.BoxSerializer().serialize_settings(node_addon, auth.user)} - ) - - -def _box_set_config(request, project_guid): - """ - from addons.views.generic_views.set_config - box versions currys above with args ('box', 'Box', BoxSerializer, _set_folder()) - - impl based off of addons.base.generic_views._set_config - @must_not_be_registration - @must_have_addon('user') decorator does ??? - @must_have_addon('node') decorator does ??? - @must_be_addon_authorizer decorator does ??? - @must_have_permission(WRITE) decorator does ??? - - _set_config docstring - View for changing a node's linked folder. - """ - - def set_folder(node_addon, folder, auth): - uid = folder['id'] # TODO: why called `uid`? 
- node_addon.set_folder(uid, auth=auth) - node_addon.save() - - # auth was injected by @must_be_logged_in - auth = _get_auth_from_request(request) - # user = auth.user - - # node_addon injected by @must_have_addon('box', 'node') - node = _get_node_by_guid(request, project_guid) - addon_name = 'box' - node_addon = _get_node_addon_for_node(node, addon_name) - - # user_addon injected by @must_have_addon('box', 'user') - # user_addon = _get_user_addon_for_user(user) # TODO: we dont use it? - - folder = request.json.get('selected') # TODO: flask syntax? - set_folder(node_addon, folder, auth) - - path = node_addon.folder_path - - folder_name = None - if path != '/': - folder_name = path.replace('All Files', '') - else: - folder_name = '/ (Full {0})'.format('Box') - - return JsonResponse( - { - 'result': { - 'folder': { - 'name': folder_name, - 'path': path, - }, - 'urls': serializer.BoxSerializer( - node_settings=node_addon - ).addon_serialized_urls, - }, - 'message': 'Successfully updated settings.', - } - ) - - -def _box_import_auth(request, project_guid): - """ - based off of addons.base.generic_views.import_auth - box versions currys above with args ('box', BoxSerializer) - - impl based off of addons.base.generic_views._import_auth - - inlined decorators from website.project.decorators: - must_have_permission - must_have_addon - """ - - logger.error('### in import_auth_box! request ib:({})'.format(request)) - - # query_params = request.GET - # kwargs = {**query_params} - # kwargs['project_guid'] = project_guid - # kwargs['node'] = node - # kwargs in osf:({'pid': 'dve82', 'parent': None, - # 'node': (title='Provider - S3', category='project') with guid 'dve82'}) - - # ===> utils._verify_permissions('WRITE', user, node, kwargs) - # Auth defined in frameworks.auth.core.Auth - # three params: - # self.user = user - # self.api_node = api_node - # self.private_key = private_key - # @prop.logged_in - # @prop.private_link - # def from_kwargs(cls, request_args, kwargs): - # user = request_args.get('user') or kwargs.get('user') or _get_current_user() - # private_key = request_args.get('view_only') - # cls(user=user, private_key=private_key) - # kwargs['auth_user'] = Auth.from_kwargs(request.args.to_dict(), kwargs) - # auth_user = kwargs['auth_user'].user - auth = _get_auth_from_request(request) - user = auth.user - - # inflate node - node = _get_node_by_guid(request, project_guid) - - addon_name = 'box' - - logger.error('### import_auth_box! alpha null:({})'.format(None)) - - # User must be logged in - if user is None: - raise HttpResponse('Unauthorized', status=401) - - # logger.error('### import_auth_box! beta null:({})'.format(None)) - - # User must have permissions - if not node.has_permission(user, 'WRITE'): - return HttpResponseForbidden('User has not permissions on node') - - # logger.error('### import_auth_box! gamma null:({})'.format(None)) - - # ====> @must_have_addon('box', 'user') - user_addon = user.get_addon(addon_name) - if user_addon is None: - return HttpResponseBadRequest('No user addon found') - - # logger.error('### import_auth_box! delta null:({})'.format(None)) - - # ====> @must_have_addon('box', 'node') - node_addon = node.get_addon(addon_name) - if node_addon is None: - return HttpResponseBadRequest('No node addon found') - - # logger.error('### import_auth_box! epsilon null:({})'.format(None)) - - req_data = json.loads(request.body) - external_account = models.ExternalAccount.load(req_data['external_account_id']) - - # logger.error('### import_auth_box! 
zeta null:({})'.format(None)) - - if not user_addon.external_accounts.filter(_id=external_account._id).exists(): - return HttpResponseForbidden('User has no such account') - - # logger.error('### import_auth_box! eta null:({})'.format(None)) - - try: - node_addon.set_auth(external_account, user_addon.owner()) - except utils.PermissionsError: - raise HttpResponseForbidden('Unable to apply users auth to node') - - # logger.error('### import_auth_box! theta null:({})'.format(None)) - - node_addon.save() - - # logger.error('### import_auth_box! iota null:({})'.format(None)) - - return JsonResponse( - { - 'result': serializer.BoxSerializer().serialize_settings(node_addon, user), - 'message': 'Successfully imported access token from profile.', - } - ) - - -def _box_deauthorize_node(request, project_guid): - """ - based off of addons.base.generic_views.deauthorize_node - box versions currys above with args ('box') - - impl based off of addons.base.generic_views._deauthorize_node - - inlined decorators from website.project.decorators: - @must_not_be_registration decorator does ??? - @must_have_addon('node') decorator does ??? - @must_have_permission(WRITE) decorator does ??? - """ - auth = _get_auth_from_request(request) - - # inflate node - node = _get_node_by_guid(request, project_guid) - addon_name = 'box' - node_addon = node.get_addon(addon_name) - - node_addon.deauthorize(auth=auth) - node_addon.save() - return HttpResponse(status=204) - - -def _get_auth_from_request(request): - # TODO: i think this basically inlines @must_be_logged_in - # did I start doing this with get_credentials? - # i think so - user_params = utils._get_user(request) - user = models.User(user_params['id']) - return models.Auth(user=user) - - -# take a project guid and inflate it into a node object -def _get_node_by_guid(request, project_guid): - node_props = utils._get_node_by_guid(request, project_guid) - node = models.Node(node_props['_id'], node_props['title']) - return node - - -# reimplementation of @must_have_addon('addon_name', 'node') -# broken out in case there is other validation to be incorporated from the decorator -def _get_node_addon_for_node(node, addon_name): - return node.get_addon(addon_name) - - -# reimplementation of @must_have_addon('addon_name', 'node') -# broken out in case there is other validation to be incorporated from the decorator -def _get_user_addon_for_user(user, addon_name): - return user.get_addon(addon_name) - - -def cas_get_login_url(url): - # TODO: implement this! 
- return url - - -# not currently being used -def get_credentials(request): - logger.error('@@@ got request for get_credentials') - - user = utils._get_user(request) - # check_access(node, auth, action, cas_resp) - # provider_settings = None - # if hasattr(node, 'get_addon'): - # provider_settings = node.get_addon(provider_name) - # if not provider_settings: - # raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - - node_id = None - node_props = utils._get_node_by_guid(node_id) - creds_and_settings = utils._lookup_creds_and_settings_for(user['id'], node_props) - callback_url = utils._make_osf_callback_url(node_props) - return utils._make_wb_auth_payload(user, creds_and_settings, callback_url) diff --git a/db/charon.json b/db/charon.json deleted file mode 100644 index cbbb2a7f..00000000 --- a/db/charon.json +++ /dev/null @@ -1,113 +0,0 @@ -{ - "external_accounts" : { - "5a397723ab83f70009839fc1" : { - "_id" : "5a397723ab83f70009839fc1", - "date_last_refreshed" : "", - "display_name" : "Fitcosz Elliott", - "expires_at" : "", - "oauth_key" : "", - "oauth_secret" : "", - "profile_url" : "https://app.box.com/profile/244672277", - "provider" : "box", - "provider_id" : "244672277", - "provider_name" : "Box", - "refresh_token" : "" - }, - "alpha" : { - "_id" : "alpha", - "date_last_refreshed" : "", - "display_name" : "dumpfust-alpha", - "expires_at" : "", - "oauth_key" : "", - "oauth_secret" : "", - "profile_url" : "enchhort-alpha", - "provider" : "crechdolg-alpha", - "provider_id" : "plopsome-alpha", - "provider_name" : "borfhome-alpha", - "refresh_token" : "" - }, - "beta" : { - "_id" : "beta", - "date_last_refreshed" : "", - "display_name" : "dumpfust-beta", - "expires_at" : "", - "oauth_key" : "", - "oauth_secret" : "", - "profile_url" : "enchhort-beta", - "provider" : "crechdolg-beta", - "provider_id" : "plopsome-beta", - "provider_name" : "borfhome-beta", - "refresh_token" : "" - } - }, - "node_addons" : { - "dve82" : { - "oldbox" : { - "created" : "2018-02-27 16:59:43.887806+00", - "deleted" : null, - "external_account_id" : null, - "fake_name" : "squishybits", - "folder_id" : "0", - "folder_name" : "/", - "folder_path" : "/", - "is_deleted" : false, - "modified" : "2018-07-29 21:32:09.574112+00", - "owner_id" : "p4r65", - "user_settings_id" : null - } - } - }, - "nodes" : { - "dve82" : { - "node_addon" : { - "box" : { - "fake_name" : "meow" - } - } - }, - "fbi4u" : {}, - "mst3k" : {} - }, - "permissions" : { - "dve82" : { - "p4r65" : true - } - }, - "user_addons" : { - "p4r65" : { - "box" : { - "created" : "2017-12-19 20:31:31.907425+00", - "deleted" : null, - "is_deleted" : false, - "modified" : "2023-06-13 00:48:59.891857+00", - "oauth_grants" : { - "csab4" : {}, - "dve82" : { - "5a397723ab83f70009839fc1" : {} - } - }, - "oauth_provider" : { - "short_name" : "box" - }, - "owner_id" : "p4r65" - } - } - }, - "users" : { - "fbi4u" : {}, - "mst3k" : {}, - "p4r65" : { - "external_accounts" : [ - "alpha", - "beta", - "5a397723ab83f70009839fc1" - ], - "fullname" : "Fitz Elliott", - "user_addon" : { - "box" : { - "fake_name" : "meow" - } - } - } - } -} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..69c3cd44 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,34 @@ +version: "3.8" + +services: + addon_service: + build: . 
+ restart: unless-stopped + command: python3 manage.py runserver 0.0.0.0:8004 + environment: + DJANGO_SETTINGS_MODULE: app.settings + PYTHONUNBUFFERED: 1 + DEBUG: 1 + POSTGRES_HOST: postgres + POSTGRES_DB: gravyvalet + POSTGRES_USER: postgres + ALLOWED_HOSTS: 0.0.0.0,localhost + SECRET_KEY: so-secret + ports: + - 8004:8004 + stdin_open: true + volumes: + - ./:/code:cached + depends_on: + - postgres + postgres: + image: postgres:latest + volumes: + - "${POSTGRES_DATA_VOL:-postgres_data_vol}:/var/lib/postgresql/data/" + environment: + POSTGRES_HOST_AUTH_METHOD: trust + POSTGRES_DB: gravyvalet + +volumes: + postgres_data_vol: + external: false diff --git a/gravyvalet/settings.py b/gravyvalet/settings.py deleted file mode 100644 index ac62d293..00000000 --- a/gravyvalet/settings.py +++ /dev/null @@ -1,154 +0,0 @@ -""" -Django settings for gravyvalet project. - -Generated by 'django-admin startproject' using Django 4.1.7. - -For more information on this file, see -https://docs.djangoproject.com/en/4.1/topics/settings/ - -For the full list of settings and their values, see -https://docs.djangoproject.com/en/4.1/ref/settings/ -""" - -# import os -from pathlib import Path - -# Build paths inside the project like this: BASE_DIR / 'subdir'. -BASE_DIR = Path(__file__).resolve().parent.parent - - -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/4.1/howto/deployment/checklist/ - -# SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = 'django-insecure-nukgeq%e^p9$$m!vk)&8u^(cfpemg+!-75y#b$r*!#v@vvtl71' - -# SECURITY WARNING: don't run with debug turned on in production! -DEBUG = True - -ALLOWED_HOSTS = ['localhost', '192.168.168.167'] - - -# Application definition - -INSTALLED_APPS = [ - 'charon.apps.CharonConfig', - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'corsheaders', -] - -MIDDLEWARE = [ - 'corsheaders.middleware.CorsMiddleware', - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - # 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - # 'django.middleware.clickjacking.XFrameOptionsMiddleware', -] - -ROOT_URLCONF = 'gravyvalet.urls' - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], - }, - }, -] - -WSGI_APPLICATION = 'gravyvalet.wsgi.application' - - -# Database -# https://docs.djangoproject.com/en/4.1/ref/settings/#databases - -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': BASE_DIR / 'db.sqlite3', - }, - # 'default': { - # 'CONN_MAX_AGE': 0, - # 'ENGINE': 'django.db.backends.postgresql', - # 'NAME': os.environ.get('OSF_DB_NAME', 'gravyvalet'), - # 'USER': os.environ.get('OSF_DB_USER', 'postgres'), - # 'PASSWORD': os.environ.get('OSF_DB_PASSWORD', ''), - # 'HOST': os.environ.get('OSF_DB_HOST', '127.0.0.1'), - # 'PORT': os.environ.get('OSF_DB_PORT', '5432'), - # 'ATOMIC_REQUESTS': True, - # 'TEST': { - # 'SERIALIZE': False, - # }, - 
# }, -} - - -# Password validation -# https://docs.djangoproject.com/en/4.1/ref/settings/#auth-password-validators - -AUTH_PASSWORD_VALIDATORS = [ - { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa: E501 - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', - }, -] - - -# Internationalization -# https://docs.djangoproject.com/en/4.1/topics/i18n/ - -LANGUAGE_CODE = 'en-us' - -TIME_ZONE = 'UTC' - -USE_I18N = True - -USE_TZ = True - - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/4.1/howto/static-files/ - -STATIC_URL = 'static/' - -# Default primary key field type -# https://docs.djangoproject.com/en/4.1/ref/settings/#default-auto-field - -DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' - - -# TODO! this is dangerous, only for development -# CORS_ORIGIN_ALLOW_ALL = True -# CORS_ALLOW_ALL_ORIGINS = True -CORS_ALLOWED_ORIGINS = [ - 'http://localhost:5000', -] -CORS_ALLOW_CREDENTIALS = True - - -CSRF_TRUSTED_ORIGINS = [ - 'http://localhost:5000', -] diff --git a/gravyvalet/urls.py b/gravyvalet/urls.py deleted file mode 100644 index a1e2019e..00000000 --- a/gravyvalet/urls.py +++ /dev/null @@ -1,22 +0,0 @@ -"""gravyvalet URL Configuration - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/4.1/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" -from django.contrib import admin -from django.urls import include, path - -urlpatterns = [ - path('charon/', include('charon.urls')), - path('admin/', admin.site.urls), -] diff --git a/manage.py b/manage.py old mode 100755 new mode 100644 index b2d88523..1a64b14a --- a/manage.py +++ b/manage.py @@ -6,7 +6,7 @@ def main(): """Run administrative tasks.""" - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gravyvalet.settings') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings") try: from django.core.management import execute_from_command_line except ImportError as exc: @@ -18,5 +18,5 @@ def main(): execute_from_command_line(sys.argv) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index aac459c4..00000000 --- a/requirements.txt +++ /dev/null @@ -1,21 +0,0 @@ -# dev-requirements -pip==23.0.1 -black==23.3.0 -flake8==6.0.0 -isort==5.12.0 - -# requirements -Django==4.2 - - -bson==0.5.10 -pyjwe==1.0.0 -django-extensions==3.2.1 -requests==2.30.0 -jwt==1.3.1 - -boxsdk==3.7.2 -markupsafe==2.1.3 -oauthlib==3.2.2 -django-cors-headers==4.0.0 -requests-oauthlib==1.3.1 diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt new file mode 100644 index 00000000..5605da94 --- /dev/null +++ b/requirements/dev-requirements.txt @@ -0,0 +1,12 @@ +-r ./requirements.txt + +# Requirements that are used in the development environment only +# Testing +factory-boy +responses + +# Syntax checking +flake8 +black +isort +pre-commit diff --git a/requirements/release.txt b/requirements/release.txt new file mode 100644 index 00000000..0dbc1238 --- /dev/null +++ b/requirements/release.txt @@ -0,0 +1,3 @@ +-r ./requirements.txt + +# Requirements to be installed on server deployments diff --git a/requirements/requirements.txt b/requirements/requirements.txt new file mode 100644 index 00000000..4bcb4841 --- /dev/null +++ b/requirements/requirements.txt @@ -0,0 +1,5 @@ +Django==4.2.7 +psycopg>=3.1.8 +djangorestframework==3.14.0 +djangorestframework-jsonapi==6.1.0 +django-filter diff --git a/setup.cfg b/setup.cfg index 735b209b..2bfde5ae 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,15 +1,8 @@ -# E203: colons should not have any space before them [flake8] +# match default max line length in `black` max-line-length = 88 -extend-ignore = E203 - -# E501: Line too long -# W503: line break before binary operator -# E731: Do not assign a lambda expression, use a def -# ignore=E501,W503,E731 [isort] profile = black -# filter_files = true -# force_grid_wrap = 2 -# lines_after_imports = 2 +force_grid_wrap = 2 +lines_after_imports = 2
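
For reference on the new `setup.cfg` lint settings: `profile = black` aligns isort's wrapping style with black, `force_grid_wrap = 2` vertically wraps any `from`-import that names two or more objects regardless of line length, and `lines_after_imports = 2` enforces two blank lines between the import block and the first definition. A minimal sketch of the layout these settings are expected to produce (standard-library names used purely for illustration):

```python
# Sketch only: expected isort output with profile=black, force_grid_wrap=2,
# and lines_after_imports=2. A from-import naming two or more objects is
# grid-wrapped even though it would fit on one line.
from os.path import (
    join,
    sep,
)


# lines_after_imports = 2 keeps two blank lines before the first definition.
def example() -> str:
    return join("code", "requirements") + sep
```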