From b5f875271b971dfcdc0a6f79fc2f4681e49bfb85 Mon Sep 17 00:00:00 2001 From: Pushyami Gundala Date: Mon, 18 Dec 2023 16:25:38 -0500 Subject: [PATCH] I1544 backend dependencies (#1554) --- Dockerfile | 2 +- config/cron_udp.hjson | 29 +++++----- config/env_sample.hjson | 2 +- dashboard/admin.py | 8 ++- dashboard/common/db_util.py | 2 +- dashboard/cron.py | 77 ++++++++++++++++--------- dashboard/graphql/objects.py | 2 +- dashboard/graphql/query.py | 2 +- dashboard/graphql/view.py | 3 +- dashboard/management/commands/course.py | 6 +- dashboard/management/commands/term.py | 6 +- dashboard/models.py | 4 +- dashboard/settings.py | 7 ++- dashboard/urls.py | 2 +- docs/CONTRIBUTING.md | 4 ++ docs/github_actions.md | 4 +- requirements.txt | 66 +++++++++++---------- 17 files changed, 132 insertions(+), 94 deletions(-) diff --git a/Dockerfile b/Dockerfile index e1f43959d..f9ccad55e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,7 +39,7 @@ WORKDIR /code COPY requirements.txt . RUN apt-get update && \ apt-get install -y --no-install-recommends \ - build-essential curl apt-transport-https libpq-dev netcat-traditional jq python3-dev xmlsec1 cron git && \ + build-essential curl apt-transport-https libpq-dev netcat-traditional default-libmysqlclient-dev pkg-config jq python3-dev xmlsec1 cron git && \ apt-get upgrade -y # Install MariaDB from the mariadb repository rather than using Debians diff --git a/config/cron_udp.hjson b/config/cron_udp.hjson index f63cae7c1..0f6304a9f 100644 --- a/config/cron_udp.hjson +++ b/config/cron_udp.hjson @@ -41,8 +41,8 @@ left join entity.course_grade cg on cse.course_section_id = cg.course_section_id and cse.person_id = cg.person_id where - co.lms_int_id in %(course_ids)s - and cse.role in ('Student', 'TeachingAssistant', 'Teacher') + co.lms_int_id = ANY(%(course_ids)s) + and cse.role = ANY(ARRAY['Student', 'Teacher', 'TeachingAssistant']::text[]) and cse.role_status = 'Enrolled' and cse.enrollment_status = 'Active' order by user_id @@ -56,14 +56,14 @@ la.visibility = 'everyone' and la.status = 'published' and la.course_offering_id = co.id - and co.lms_int_id in %(course_ids)s + and co.lms_int_id = ANY(%(course_ids)s) ), assignment_grp as ( select lg.* from entity.learner_activity_group lg, keymap.course_offering co where lg.status = 'available' and lg.course_offering_id = co.id - and co.lms_int_id in %(course_ids)s + and co.lms_int_id = ANY(%(course_ids)s) ), assign_more as ( select distinct(a.learner_activity_group_id), da.group_points from assignment_details a @@ -125,7 +125,7 @@ la.visibility = 'everyone' and la.status = 'published' and la.course_offering_id = co.id - and co.lms_int_id in %(course_ids)s + and co.lms_int_id = ANY(%(course_ids)s) and la.learner_activity_id = la_km.id and la.learner_activity_group_id = lag_km.id ) @@ -147,7 +147,7 @@ keymap.course_offering co_km where lag.course_offering_id = co_km.id - and co_km.lms_int_id in %(course_ids)s + and co_km.lms_int_id = ANY(%(course_ids)s) group by co_km.lms_int_id ''', "term": @@ -182,7 +182,7 @@ LEFT OUTER JOIN entity.academic_term at1 on (co.academic_term_id = at1.academic_term_id), keymap.course_offering co2, keymap.academic_term at2 - WHERE co2.lms_int_id in %(course_ids)s + WHERE co2.lms_int_id = ANY(%(course_ids)s) and co.course_offering_id = co2.id and at1.academic_term_id = at2.id ''', @@ -196,7 +196,7 @@ where f.course_offering_id = co_km.id and f.file_id = f_km.id - and co_km.lms_int_id in %(course_ids)s + and co_km.lms_int_id = ANY(%(course_ids)s) order by id ''', "submission": @@ -212,7 +212,7 @@ 
left join keymap.course_offering co on cs.le_current_course_offering_id = co.id where - co.lms_int_id in %(course_ids)s + co.lms_int_id = ANY(:course_ids) and cse.role_status ='Enrolled' and cse."role" = 'Student' and cse.enrollment_status = 'Active' @@ -228,13 +228,13 @@ lar.published_score as published_score, lar.response_date as submitted_at, lar.graded_date as graded_at, - timezone(%(time_zone)s, lar.posted_at AT TIME ZONE 'UTC') as grade_posted_local_date, + timezone(:time_zone, lar.posted_at AT TIME ZONE 'UTC') as grade_posted_local_date, lar.grading_status as submission_workflow_state, la.title as title, lar.learner_activity_result_id as learner_activity_result_id, lar.person_id as short_user_id, cast(lar2.lms_int_id as BIGINT) as submission_id, - (cast(%(canvas_data_id_increment)s as bigint) + cast(p.lms_ext_id as bigint)) as canvas_user_id + (cast(:canvas_data_id_increment as bigint) + cast(p.lms_ext_id as bigint)) as canvas_user_id from entity.learner_activity_result lar join enrollment on lar.person_id= enrollment.user_id join enrollment e on lar.person_id = e.user_id @@ -244,7 +244,7 @@ left join keymap.course_offering co on co.id = la.course_offering_id join keymap.person p on p.id = lar.person_id where - co.lms_int_id in %(course_ids)s + co.lms_int_id = ANY(:course_ids) and la.status = 'published' ) select @@ -267,7 +267,10 @@ grade_posted_local_date from submission - ); + ) + ''', + "submission_with_avg_score": + ''' select f.id::bigint, f.assignment_id::bigint assignment_id, diff --git a/config/env_sample.hjson b/config/env_sample.hjson index cf33dbc36..56891edf3 100644 --- a/config/env_sample.hjson +++ b/config/env_sample.hjson @@ -53,7 +53,7 @@ # Enable secure cookies, also set your trusted origin (example of instructure.com) # This needs to be true for deployments or when testing LTI with ngrok or loophole. "CSRF_COOKIE_SECURE": false, - "CSRF_TRUSTED_ORIGINS": ["instructure.com"], + "CSRF_TRUSTED_ORIGINS": ["https://*.instructure.com", "https://*.umich.edu"], # If you have a proxy that sets this header then set this to true. Default is false "USE_X_FORWARDED_HOST": false, # SameSite settings for Session and CSRF (defaults in settings.py should work), if you do want non-string None set to null. 
diff --git a/dashboard/admin.py b/dashboard/admin.py index 9a8d4b771..005706b7c 100644 --- a/dashboard/admin.py +++ b/dashboard/admin.py @@ -68,6 +68,7 @@ def clean(self): return self.cleaned_data +@admin.register(AcademicTerms) class TermAdmin(admin.ModelAdmin): exclude = ('id',) list_display = ('canvas_id', 'name', 'date_start', 'date_end') @@ -77,6 +78,7 @@ def has_add_permission(self, request): return False +@admin.register(Course) class CourseAdmin(admin.ModelAdmin): inlines = [CourseViewOptionInline, ] form = CourseForm @@ -95,11 +97,13 @@ def clear_course_updated_dates(self, request, queryset): self.message_user(request, "All selected last updated values cleared.") # Need this method to correctly display the line breaks + @admin.display( + description="Course View Option(s)" + ) def _courseviewoption(self, obj): return mark_safe(linebreaksbr(obj.courseviewoption)) # Known mypy issue: https://github.com/python/mypy/issues/708 - _courseviewoption.short_description = "Course View Option(s)" # type: ignore[attr-defined] def course_link(self, obj): return format_html('<a href="{}">Link</a>', obj.absolute_url) @@ -160,8 +164,6 @@ def has_change_permission(request, obj=None): def has_delete_permission(request, obj=None): return False -admin.site.register(AcademicTerms, TermAdmin) -admin.site.register(Course, CourseAdmin) # Remove the pinax LogAdmin and add ours admin.site.unregister(Log) diff --git a/dashboard/common/db_util.py b/dashboard/common/db_util.py index e19ff87d1..36d0d2d1a 100644 --- a/dashboard/common/db_util.py +++ b/dashboard/common/db_util.py @@ -37,7 +37,7 @@ def create_sqlalchemy_engine(db_params: DjangoDBParams) -> Engine: if new_db_params['ENGINE'] == (BACKENDS_PATH + 'mysql'): return create_engine(f'mysql+mysqldb://{core_string}?charset=utf8mb4') else: - return create_engine('postgresql://' + core_string) + return create_engine('postgresql+psycopg://' + core_string) def canvas_id_to_incremented_id(canvas_id): diff --git a/dashboard/cron.py b/dashboard/cron.py index 7a49055e8..20f2a9bd9 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -2,10 +2,10 @@ import logging from collections import namedtuple from typing import Any, Dict, List, Union +from zoneinfo import ZoneInfo import hjson import pandas as pd -import pytz import pangres from django.conf import settings @@ -13,10 +13,11 @@ from django.db.models import QuerySet from django_cron import CronJobBase, Schedule from google.cloud import bigquery -from sqlalchemy import types +from sqlalchemy import types, text from sqlalchemy.engine import ResultProxy +from sqlalchemy.orm import sessionmaker -from dashboard.common import db_util, utils +from dashboard.common import db_util from dashboard.models import Course, Resource, AcademicTerms, User @@ -67,17 +68,17 @@ def util_function(sql_string, mysql_table, param_object=None, table_identifier=N # execute database query -def execute_db_query(query: str, params: List = None) -> ResultProxy: - with engine.connect() as connection: +def execute_db_query(query: str, params: Dict = None) -> ResultProxy: + with engine.begin() as connection: connection.detach() if params: - return connection.execute(query, params) + return connection.execute(text(query), params) else: - return connection.execute(query) + return connection.execute(text(query)) # remove all records inside the specified table -def delete_all_records_in_table(table_name: str, where_clause: str = "", where_params: List = None): +def delete_all_records_in_table(table_name: str, where_clause: str = "", where_params: Dict = None): # 
delete all records in the table first, can have an optional where clause result_proxy = execute_db_query(f"delete from {table_name} {where_clause}", where_params) return(f"\n{result_proxy.rowcount} rows deleted from {table_name}\n") @@ -99,7 +100,7 @@ def soft_update_datetime_field( f'Skipped update of {field_name} for {model_name} instance ({model_inst.id}); existing value was found') else: if warehouse_field_value: - warehouse_field_value = warehouse_field_value.replace(tzinfo=pytz.UTC) + warehouse_field_value = warehouse_field_value.replace(tzinfo=ZoneInfo('UTC')) setattr(model_inst, field_name, warehouse_field_value) logger.info(f'Updated {field_name} for {model_name} instance ({model_inst.id})') return [field_name] @@ -124,7 +125,7 @@ def verify_course_ids(self): logger.debug("in checking course") supported_courses = Course.objects.get_supported_courses() course_ids = [str(x) for x in supported_courses.values_list('id', flat=True)] - courses_data = pd.read_sql(queries['course'], data_warehouse_engine, params={'course_ids': tuple(course_ids)}) + courses_data = pd.read_sql(queries['course'], data_warehouse_engine, params={'course_ids': course_ids}) # error out when course id is invalid, otherwise add DataFrame to list for course_id, data_last_updated in supported_courses: if course_id not in list(courses_data['id']): @@ -151,7 +152,7 @@ def update_user(self): # cron status status = "" - logger.debug("in update with data warehouse user") + logger.info("in update with data warehouse user") # delete all records in the table first status += delete_all_records_in_table("user") @@ -160,7 +161,7 @@ def update_user(self): status += util_function( queries['user'], 'user', - {'course_ids': tuple(self.valid_locked_course_ids), + {'course_ids': self.valid_locked_course_ids, 'canvas_data_id_increment': settings.CANVAS_DATA_ID_INCREMENT }) @@ -193,13 +194,13 @@ def update_canvas_resource(self): # cron status status = "" - logger.debug("in update canvas resource") + logger.info("in update canvas resource") # Select all the files for these courses # convert int array to str array df_attach = pd.read_sql(queries['resource'], data_warehouse_engine, - params={'course_ids': tuple(self.valid_locked_course_ids)}) + params={'course_ids': self.valid_locked_course_ids }) logger.debug(df_attach) # Update these back again based on the dataframe # Remove any rows where file_state is not available! 
@@ -217,6 +218,8 @@ def update_resource_access(self): # cron status status = "" + logger.info("in update resource access") + # return string with concatenated SQL insert result return_string = "" @@ -231,7 +234,7 @@ def update_resource_access(self): logger.info(f"Deleting all records in resource_access after {data_last_updated}") - status += delete_all_records_in_table("resource_access", f"WHERE access_time > %s", [data_last_updated, ]) + status += delete_all_records_in_table("resource_access", "WHERE access_time > :data_last_updated", {'data_last_updated': data_last_updated}) # loop through multiple course ids, 20 at a time # (This is set by the CRON_BQ_IN_LIMIT from settings) @@ -393,7 +396,7 @@ def update_resource_access(self): student_enrollment_type = User.EnrollmentType.STUDENT student_enrollment_df = pd.read_sql( 'select user_id, course_id from user where enrollment_type= %s', - engine, params={student_enrollment_type}) + engine, params=[(str(student_enrollment_type),)]) resource_access_df = pd.merge( resource_access_df, student_enrollment_df, on=['user_id', 'course_id'], @@ -437,6 +440,8 @@ def update_groups(self): # cron status status = "" + logger.info("update_groups(): ") + # delete all records in assignment_group table status += delete_all_records_in_table("assignment_groups") @@ -447,7 +452,7 @@ def update_groups(self): # loop through multiple course ids status += util_function(queries['assignment_groups'], 'assignment_groups', - {'course_ids': tuple(self.valid_locked_course_ids)}) + {'course_ids': self.valid_locked_course_ids}) return status @@ -463,7 +468,7 @@ def update_assignment(self): # loop through multiple course ids status += util_function(queries['assignment'], 'assignment', - {'course_ids': tuple(self.valid_locked_course_ids), + {'course_ids': self.valid_locked_course_ids, 'time_zone': settings.TIME_ZONE}) return status @@ -480,14 +485,30 @@ def submission(self): # loop through multiple course ids # filter out not released grades (submission_dim.posted_at date is not null) and partial grades (submission_dim.workflow_state != 'graded') - status += util_function(queries['submission'], - 'submission', - { - 'course_ids': tuple(self.valid_locked_course_ids), - 'canvas_data_id_increment': settings.CANVAS_DATA_ID_INCREMENT, - 'time_zone': settings.TIME_ZONE - }) + query_params = { + 'course_ids': self.valid_locked_course_ids, + 'time_zone': settings.TIME_ZONE, + 'canvas_data_id_increment': settings.CANVAS_DATA_ID_INCREMENT, + } + Session = sessionmaker(bind=data_warehouse_engine) + try: + # Create a session + with Session() as session: + # Execute the first query to create the temporary table + session.execute(text(queries['submission']).bindparams(**query_params)) + + # Execute the second query using the temporary table + result = session.execute(text(queries['submission_with_avg_score'])) + df = pd.DataFrame(result.fetchall(), columns=result.keys()) + df = df.drop_duplicates(keep='first') + df.to_sql(con=engine, name='submission', if_exists='append', index=False) + + except Exception as e: + logger.exception('Error running sql on table submission: %s', e) + raise + # record the row count of the dataframe in the cron status + status += f"{df.shape[0]} submission: {query_params}\n" + return status def weight_consideration(self): # loop through multiple course ids status += util_function(queries['assignment_weight'], 'assignment_weight_consideration', - {'course_ids': tuple(self.valid_locked_course_ids)}, + {'course_ids': 
self.valid_locked_course_ids }, 'weight') logger.debug(status + "\n\n") @@ -543,7 +564,7 @@ def update_course(self, warehouse_courses_data: pd.DataFrame) -> str: Updates course records with data returned from verify_course_ids, only making changes when necessary. """ status: str = '' - logger.debug('update_course()') + logger.info('update_course()') logger.debug(warehouse_courses_data.to_json(orient='records')) courses: QuerySet = Course.objects.filter(id__in=self.valid_locked_course_ids) @@ -588,7 +609,7 @@ def do(self) -> str: status = "" - run_start = datetime.now(pytz.UTC) + run_start = datetime.now(ZoneInfo('UTC')) status += f"Start cron: {str(run_start)} UTC\n" course_verification = self.verify_course_ids() invalid_course_id_list = course_verification.invalid_course_ids diff --git a/dashboard/graphql/objects.py b/dashboard/graphql/objects.py index bbb9be68f..b9f3a897f 100644 --- a/dashboard/graphql/objects.py +++ b/dashboard/graphql/objects.py @@ -6,7 +6,7 @@ from graphql import GraphQLError from dashboard.rules import is_admin_or_instructor_in_course_id -from dashboard.models import Course, User, Assignment, Submission, \ +from dashboard.models import Course, Assignment, Submission, \ AssignmentGroups, UserDefaultSelection, AcademicTerms import logging diff --git a/dashboard/graphql/query.py b/dashboard/graphql/query.py index 780cc5619..68f6ce229 100644 --- a/dashboard/graphql/query.py +++ b/dashboard/graphql/query.py @@ -3,7 +3,7 @@ from dashboard.models import Course from dashboard.graphql.objects import CourseType -from dashboard.rules import is_admin_or_enrolled_in_course, is_admin +from dashboard.rules import is_admin_or_enrolled_in_course from graphql import GraphQLError import logging diff --git a/dashboard/graphql/view.py b/dashboard/graphql/view.py index bf27b2d98..6e238c53b 100644 --- a/dashboard/graphql/view.py +++ b/dashboard/graphql/view.py @@ -1,5 +1,6 @@ from graphene_django.views import GraphQLView from django.contrib.auth.mixins import LoginRequiredMixin +from graphql_core_promise import PromiseExecutionContext from dashboard.common.db_util import canvas_id_to_incremented_id from dashboard.graphql.loaders import AssignmentsByCourseIdLoader, \ SubmissionsByAssignmentIdLoader, SubmissionByAssignmentIdAndUserIdLoader, \ @@ -8,7 +9,6 @@ AssignmentGroupByCourseIdAndIdLoader, AssignmentWeightConsiderationByCourseIdLoader, \ UserDefaultSelectionsByCourseIdAndUserLoader, UserDefaultSelectionByCourseIdAndUserAndViewTypeLoader, \ AcademicTermByIdLoader - from django.db.models import Q from dashboard.models import User from pinax.eventlog.models import log as eventlog @@ -18,6 +18,7 @@ class DashboardGraphQLView(LoginRequiredMixin, GraphQLView): + execution_context_class = PromiseExecutionContext def get_context(self, request): loaders = { 'assignment_weight_consideration_by_course_id_loader': AssignmentWeightConsiderationByCourseIdLoader( diff --git a/dashboard/management/commands/course.py b/dashboard/management/commands/course.py index b983a88f6..881e99740 100644 --- a/dashboard/management/commands/course.py +++ b/dashboard/management/commands/course.py @@ -2,7 +2,7 @@ from dashboard.models import Course, CourseViewOption, AcademicTerms from dashboard.common.db_util import canvas_id_to_incremented_id from datetime import datetime -import pytz +from zoneinfo import ZoneInfo class Command(BaseCommand): def add_arguments(self, parser): @@ -18,10 +18,10 @@ def handle(self, *args, **options): name = options.get('name') date_start = options.get('date_start') if date_start is not 
None: - date_start = datetime.strptime(date_start, '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC) + date_start = datetime.strptime(date_start, '%Y-%m-%d %H:%M:%S').replace(tzinfo=ZoneInfo('UTC')) date_end = options.get('date_end') if date_end is not None: - date_end = datetime.strptime(date_end, '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC) + date_end = datetime.strptime(date_end, '%Y-%m-%d %H:%M:%S').replace(tzinfo=ZoneInfo('UTC')) prefixed_course_id = canvas_id_to_incremented_id(course_id) if term_id is not None: diff --git a/dashboard/management/commands/term.py b/dashboard/management/commands/term.py index cd31f4683..a9e08c648 100644 --- a/dashboard/management/commands/term.py +++ b/dashboard/management/commands/term.py @@ -2,7 +2,7 @@ from dashboard.models import AcademicTerms from dashboard.common.db_util import canvas_id_to_incremented_id from datetime import datetime -import pytz +from zoneinfo import ZoneInfo class Command(BaseCommand): def add_arguments(self, parser): @@ -14,8 +14,8 @@ def add_arguments(self, parser): def handle(self, *args, **options): term_id = options.get('term_id') name = options.get('name') - date_start = datetime.strptime(options.get('date_start'), '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC) - date_end = datetime.strptime(options.get('date_end'), '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC) + date_start = datetime.strptime(options.get('date_start'), '%Y-%m-%d %H:%M:%S').replace(tzinfo=ZoneInfo('UTC')) + date_end = datetime.strptime(options.get('date_end'), '%Y-%m-%d %H:%M:%S').replace(tzinfo=ZoneInfo('UTC')) prefixed_term_id = canvas_id_to_incremented_id(term_id) diff --git a/dashboard/models.py b/dashboard/models.py index f30615088..e669d8769 100644 --- a/dashboard/models.py +++ b/dashboard/models.py @@ -11,7 +11,7 @@ from datetime import datetime, timedelta from typing import Optional, Union -import pytz +from zoneinfo import ZoneInfo from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.db import models @@ -219,7 +219,7 @@ def determine_date_start(self) -> datetime: date_start = self.term.date_start else: logger.info(f"No date_start value was found for course {self.name} ({self.canvas_id}) or term; setting to current date and time") - date_start = datetime.now(pytz.UTC) + date_start = datetime.now(ZoneInfo('UTC')) return date_start def determine_date_end(self, start: Union[datetime, None] = None) -> datetime: diff --git a/dashboard/settings.py b/dashboard/settings.py index 6d8b3cb94..4eb344067 100644 --- a/dashboard/settings.py +++ b/dashboard/settings.py @@ -169,7 +169,11 @@ def apply_env_overrides(env: Dict[str, Any], environ: os._Environ) -> Dict[str, CRON_QUERY_FILE = os.path.join(BASE_DIR, ENV.get('CRON_QUERY_FILE', 'config/cron_udp.hjson')) -STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' +STORAGES = { + "staticfiles": { + "BACKEND": 'whitenoise.storage.CompressedManifestStaticFilesStorage', + }, +} CONTEXT_PROCESSORS = [ 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', @@ -273,7 +277,6 @@ def apply_env_overrides(env: Dict[str, Any], environ: os._Environ) -> Dict[str, USE_I18N = True -USE_L10N = True USE_TZ = True diff --git a/dashboard/urls.py b/dashboard/urls.py index dcc2c0c06..542a2adf7 100644 --- a/dashboard/urls.py +++ b/dashboard/urls.py @@ -19,8 +19,8 @@ from django.contrib.auth.decorators import login_required from django.conf import settings -from django.conf.urls import include from django.conf.urls.static import 
static +from django.urls import include from django.urls import path, re_path from dashboard.graphql.view import DashboardGraphQLView diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 44977680e..f0390fe7c 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -172,3 +172,7 @@ When prompted, specify the password for the root MySQL user. It should be found in the `MYSQL.ROOT_PASSWORD` property of `env.hjson`. [Next: Contributors](../docs/CONTRIBUTORS.md) + +### Dependency Upgrade + +To automate upgrading Django to a future version, use the [django-upgrade](https://github.com/adamchainz/django-upgrade) library. diff --git a/docs/github_actions.md b/docs/github_actions.md index 5f20f813a..e53f95008 100644 --- a/docs/github_actions.md +++ b/docs/github_actions.md @@ -5,6 +5,8 @@ 2. The action is triggered whenever a commit is made to the `master` branch. E.g., when a pull request is merged to `master`. 3. OpenShift projects can periodically pull this image from GHCR. Configure only **_NON-PRODUCTION_** MyLA projects to pull the image… ```sh - oc tag ghcr.io/tl-its-umich-edu/my-learning-analytics:latest my-learning-analytics:latest --scheduled + oc tag ghcr.io/tl-its-umich-edu/my-learning-analytics:latest my-learning-analytics:latest --scheduled --reference-policy=local ``` See the OpenShift documentation "[Managing image streams: Configuring periodic importing of image stream tags](https://docs.openshift.com/container-platform/4.11/openshift_images/image-streams-manage.html#images-imagestream-import_image-streams-managing)" for details. + + `--reference-policy=local`: instructs OpenShift Container Platform to always fetch the tagged image from the [integrated registry](https://docs.openshift.com/container-platform/4.11/openshift_images/managing_images/tagging-images.html#images-add-tags-to-imagestreams_tagging-images) diff --git a/requirements.txt b/requirements.txt index 71c55ad3e..5a84545b7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,42 +1,44 @@ -gunicorn==20.1.0 +gunicorn==21.2.0 -Django==3.2.20 -whitenoise==5.3.0 +Django==4.2.7 +whitenoise==6.6.0 -# No update since 2018 -django-cron==0.6.0 -django-watchman==1.2.0 -django-su==0.9.0 -django-mysql==3.12.0 +django-cron==0.6.0 # deprecated + +django-watchman==1.3.0 +django-su==1.0.0 +django-mysql==4.12.0 # The alternative to this is redis, but we don't use redis yet -django-constance[database]==2.8.0 -django-webpack-loader==1.5.0 -django-csp==3.7 -django-import-export==2.6.0 -django-admin-rangefilter==0.8.1 +django-constance[database]==3.1.0 +django-webpack-loader==2.0.1 + +django-csp==3.7 # no updates +django-import-export==3.3.1 +django-admin-rangefilter==0.11.2 # Django/Python fontawesome library supporting CSS in UI -fontawesomefree==6.1.1 +fontawesomefree==6.4.2 # graphql -graphene-django==2.15.0 -django-filter==2.4.0 +graphene-django==3.1.5 +graphql-core-promise==3.2.3.post1 +django-filter==23.3 # object-level permissions -rules==3.0 +rules==3.3 # These should be okay to update minors -numpy==1.23.3 -pandas==1.4.4 -pangres==4.1.2 - -SQLAlchemy==1.4.22 -psycopg2==2.9.1 -mysqlclient==2.0.3 -google-cloud-bigquery[pandas]==3.3.2 - -debugpy==1.4.1 -jsonschema==3.2.0 -pinax-eventlog==5.1.1 -pycryptodome==3.10.1 -PyLTI1p3==1.12.1 -hjson==3.0.1 +numpy==1.26.1 +pandas==2.1.2 +pangres==4.2.1 + +SQLAlchemy==2.0.23 +psycopg==3.1.12 +mysqlclient==2.2.0 +google-cloud-bigquery[pandas]==3.13.0 + +debugpy==1.8.0 +jsonschema==4.19.2 +pinax-eventlog==5.1.1 # no updates +pycryptodome==3.19.0 
+PyLTI1p3==2.0.0 # no further updates since Nov 2022 +hjson==3.1.0
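
A minimal sketch (not part of the patch) of the parameter-binding pattern the rewritten warehouse queries and `execute_db_query()` rely on: SQLAlchemy 2.0 requires raw SQL to be wrapped in `text()`, and the psycopg 3 driver adapts a Python list to a PostgreSQL array, which is why the old `in %(course_ids)s` tuple binding becomes `= ANY(...)` with a plain list. The DSN here is a placeholder, not the project's real settings.

```python
from sqlalchemy import create_engine, text

# Placeholder DSN; the real one is built from Django settings in db_util.py.
engine = create_engine("postgresql+psycopg://user:pass@localhost:5432/warehouse")

course_ids = ["17700000000000001", "17700000000000002"]

with engine.begin() as conn:
    result = conn.execute(
        # psycopg 3 sends the list as a Postgres array, so ANY(:course_ids)
        # matches any id in the list -- no tuple formatting needed.
        text("select id from keymap.course_offering where lms_int_id = ANY(:course_ids)"),
        {"course_ids": course_ids},
    )
    for row in result:
        print(row.id)
```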
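The `pytz`-to-`zoneinfo` swap in cron.py, course.py, term.py, and models.py follows one idiom throughout; a small sketch, assuming Python 3.9+ where `zoneinfo` is in the standard library and `datetime.replace(tzinfo=...)` is safe with its zones:

```python
from datetime import datetime
from zoneinfo import ZoneInfo

# Aware "now", replacing datetime.now(pytz.UTC).
run_start = datetime.now(ZoneInfo("UTC"))

# Parse a naive timestamp, then attach UTC -- the pattern course.py and
# term.py use for the --date_start/--date_end arguments.
date_start = datetime.strptime("2024-01-10 00:00:00", "%Y-%m-%d %H:%M:%S")
date_start = date_start.replace(tzinfo=ZoneInfo("UTC"))
print(run_start, date_start)
```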
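A rough sketch of the new two-step submission load in `submission()`, with hypothetical DSNs and a stand-in temp-table query: both statements must run in the same `Session` (hence one connection) so the temporary table created by `queries['submission']` stays visible to `queries['submission_with_avg_score']`; the result then flows through pandas into the app database.

```python
import pandas as pd
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

warehouse_engine = create_engine("postgresql+psycopg://user:pass@udw/unizin")  # placeholder
app_engine = create_engine("mysql+mysqldb://user:pass@localhost/myla")         # placeholder

Session = sessionmaker(bind=warehouse_engine)
with Session() as session:
    # Step 1: build the temp table (stands in for queries['submission']).
    session.execute(text(
        "create temporary table all_submissions as "
        "select 1 as id, 100 as assignment_id, 95.0 as score"
    ))
    # Step 2: read it back (stands in for queries['submission_with_avg_score']).
    result = session.execute(text("select * from all_submissions"))
    df = pd.DataFrame(result.fetchall(), columns=result.keys())

# De-duplicate and append into the dashboard's own submission table.
df.drop_duplicates(keep="first").to_sql(
    con=app_engine, name="submission", if_exists="append", index=False)
```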