Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

nginx recursive content routing for non-production environments #4855

Open
wants to merge 4 commits into
base: hotfixes
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 4 additions & 5 deletions .github/workflows/pythontest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -65,11 +65,10 @@ jobs:
- name: Set up minio
run: |
docker run -d -p 9000:9000 --name minio \
-e "MINIO_ACCESS_KEY=development" \
-e "MINIO_SECRET_KEY=development" \
-v /tmp/minio_data:/data \
-v /tmp/minio_config:/root/.minio \
minio/minio server /data
-e "MINIO_ROOT_USER=development" \
-e "MINIO_ROOT_PASSWORD=development" \
-e "MINIO_DEFAULT_BUCKETS=content:public" \
bitnami/minio:2024.5.28
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
Expand Down
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,3 +41,9 @@ As soon as you open a pull request, it may take us a week or two to review it as
---

*Thank you for your interest in contributing! Learning Equality was founded by volunteers dedicated to helping make educational materials more accessible to those in need, and every contribution makes a difference.*


## Licensing
Kolibri Studio is licensed under the MIT license. See [LICENSE](./LICENSE) for more details.

Other tools and libraries used in Kolibri Studio are licensed under their respective licenses, and some are only used during development and are not intended for distribution or use in production environments.
7 changes: 0 additions & 7 deletions contentcuration/contentcuration/apps.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,4 @@
from django.apps import AppConfig
from django.conf import settings

from contentcuration.utils.storage_common import is_gcs_backend


class ContentConfig(AppConfig):
Expand All @@ -10,7 +7,3 @@ class ContentConfig(AppConfig):
def ready(self):
# Import signals
import contentcuration.signals # noqa

if settings.AWS_AUTO_CREATE_BUCKET and not is_gcs_backend():
from contentcuration.utils.minio_utils import ensure_storage_bucket_public
ensure_storage_bucket_public()
6 changes: 0 additions & 6 deletions contentcuration/contentcuration/management/commands/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
from contentcuration.utils.db_tools import create_user
from contentcuration.utils.files import duplicate_file
from contentcuration.utils.publish import publish_channel
from contentcuration.utils.storage_common import is_gcs_backend

logging = logmodule.getLogger(__name__)

Expand Down Expand Up @@ -54,11 +53,6 @@ def handle(self, *args, **options):
print("{} is not a valid email".format(email))
sys.exit()

# create the minio bucket
if not is_gcs_backend():
from contentcuration.utils.minio_utils import ensure_storage_bucket_public
ensure_storage_bucket_public()

# create the cache table
try:
call_command("createcachetable")
Expand Down
60 changes: 2 additions & 58 deletions contentcuration/contentcuration/tests/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,47 +18,9 @@

from . import testdata
from contentcuration.models import User
from contentcuration.utils import minio_utils


class BucketTestClassMixin(object):
@classmethod
def create_bucket(cls):
minio_utils.ensure_storage_bucket_public(will_sleep=False)

@classmethod
def delete_bucket(cls):
minio_utils.ensure_bucket_deleted()


class BucketTestMixin:
"""
Handles bucket setup and tear down for test classes. If you want your entire TestCase to share the same bucket,
call create_bucket in setUpClass and then set persist_bucket to True, then make sure you call self.delete_bucket()
in tearDownClass.
"""

persist_bucket = False

@classmethod
def create_bucket(cls):
minio_utils.ensure_storage_bucket_public(will_sleep=False)

@classmethod
def delete_bucket(cls):
minio_utils.ensure_bucket_deleted()

def setUp(self):
raise Exception("Called?")
if not self.persist_bucket:
self.create_bucket()

def tearDown(self):
if not self.persist_bucket:
self.delete_bucket()


class StudioTestCase(TestCase, BucketTestMixin):
class StudioTestCase(TestCase):
@classmethod
def setUpClass(cls):
super(StudioTestCase, cls).setUpClass()
Expand All @@ -67,22 +29,12 @@ def setUpClass(cls):
"big_shot", "[email protected]", "password"
)

def setUp(self):
if not self.persist_bucket:
self.create_bucket()

def setUpBase(self):
if not self.persist_bucket:
self.create_bucket()
self.channel = testdata.channel()
self.user = testdata.user()
self.channel.editors.add(self.user)
self.channel.main_tree.refresh_from_db()

def tearDown(self):
if not self.persist_bucket:
self.delete_bucket()

def admin_client(self):
client = APIClient()
client.force_authenticate(self.admin_user)
Expand Down Expand Up @@ -115,20 +67,12 @@ def get(self, url, data=None, follow=False, secure=False):
)


class StudioAPITestCase(APITestCase, BucketTestMixin):
class StudioAPITestCase(APITestCase):
@classmethod
def setUpClass(cls):
super(StudioAPITestCase, cls).setUpClass()
call_command("loadconstants")

def setUp(self):
if not self.persist_bucket:
self.create_bucket()

def tearDown(self):
if not self.persist_bucket:
self.delete_bucket()

def sign_in(self, user=None):
if not user:
user = self.user
Expand Down
20 changes: 0 additions & 20 deletions contentcuration/contentcuration/tests/viewsets/test_clipboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,16 +17,6 @@


class SyncTestCase(SyncTestMixin, StudioAPITestCase):
@classmethod
def setUpClass(cls):
cls.create_bucket()
super(SyncTestCase, cls).setUpClass()

@classmethod
def tearDownClass(cls):
super(SyncTestCase, cls).tearDownClass()
cls.create_bucket()

@classmethod
def setUpTestData(cls):
call_command("loadconstants")
Expand Down Expand Up @@ -214,16 +204,6 @@ def test_delete_clipboards(self):


class CRUDTestCase(StudioAPITestCase):
@classmethod
def setUpClass(cls):
cls.create_bucket()
super(CRUDTestCase, cls).setUpClass()

@classmethod
def tearDownClass(cls):
super(CRUDTestCase, cls).tearDownClass()
cls.create_bucket()

@classmethod
def setUpTestData(cls):
call_command("loadconstants")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@

from contentcuration import models
from contentcuration.tests import testdata
from contentcuration.tests.base import BucketTestMixin
from contentcuration.tests.base import StudioAPITestCase
from contentcuration.tests.viewsets.base import generate_copy_event
from contentcuration.tests.viewsets.base import generate_create_event
Expand Down Expand Up @@ -68,12 +67,10 @@ def rebuild_tree(tree_id):
models.ContentNode.objects.partial_rebuild(tree_id)


@pytest.mark.skipif(True, reason="Concurrent processes overload Travis VM")
class ConcurrencyTestCase(TransactionTestCase, BucketTestMixin):
@pytest.mark.skipif(True, reason="Concurrent processes overload CI")
class ConcurrencyTestCase(TransactionTestCase):
def setUp(self):
super(ConcurrencyTestCase, self).setUp()
if not self.persist_bucket:
self.create_bucket()
call_command("loadconstants")
self.channel = testdata.channel()
self.user = testdata.user()
Expand All @@ -85,8 +82,6 @@ def setUp(self):
def tearDown(self):
call_command("flush", interactive=False)
super(ConcurrencyTestCase, self).tearDown()
if not self.persist_bucket:
self.delete_bucket()

def test_create_contentnodes_concurrently(self):
results = call_concurrently(
Expand Down
105 changes: 0 additions & 105 deletions contentcuration/contentcuration/utils/minio_utils.py

This file was deleted.

10 changes: 0 additions & 10 deletions contentcuration/contentcuration/utils/storage_common.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import mimetypes
import os
from datetime import timedelta
from urllib.parse import urlparse

from django.conf import settings
from django.core.files.storage import default_storage
Expand All @@ -20,15 +19,6 @@ class UnknownStorageBackendError(Exception):
pass


def is_gcs_backend():
"""
Determine whether the storage backend is GCS; if not, it is assumed to be minio.
:return: A bool
"""
host = urlparse(settings.AWS_S3_ENDPOINT_URL).netloc
return "storage.googleapis.com" in host


def determine_content_type(filename):
"""
Guesses the content type of a filename. Returns the mimetype of a file.
Expand Down
1 change: 1 addition & 0 deletions deploy/includes/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
This directory contains nginx configuration files that are included in the main configuration file, via the `include` directive. This entire directory is copied to `/etc/nginx/includes` in the image.
18 changes: 18 additions & 0 deletions deploy/includes/content/_proxy.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# location {} settings for /content proxy
# used by files in this directory, via `include` directive

# Restrict the proxied location to read-only HTTP methods; all others denied.
limit_except GET HEAD OPTIONS {
deny all;
}

# Speak HTTP/1.1 to the upstream and send the upstream's own host name as Host.
proxy_http_version 1.1;
proxy_set_header Host $proxy_host;
# Request an uncompressed (identity-encoded) response from the upstream.
proxy_set_header Accept-Encoding Identity;
# Preserve the client IP chain for the upstream's logs.
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# Pass upstream Location/Refresh headers through unmodified.
proxy_redirect off;
# Stream the response to the client: no nginx-side buffering or caching.
proxy_buffering off;
proxy_cache off;
proxy_read_timeout 100s;
# Send SNI when connecting to an HTTPS upstream.
proxy_ssl_server_name on;

# Serve content bytes as-is; do not re-compress responses.
gzip off;
Loading
Loading