From 3ebac8f23c1e54b837044d1d881a374990b1f174 Mon Sep 17 00:00:00 2001 From: Lebaud Antoine Date: Sun, 23 Jul 2023 16:15:15 +0200 Subject: [PATCH 1/7] =?UTF-8?q?=E2=AC=86=EF=B8=8F(api)=20bump=20`ralph-mal?= =?UTF-8?q?p`=20to=20v3.8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Upgrade ralph-malp to import a new xAPI verb. Downloaded, from tincanapi is required to add the new endpoint downloads. --- src/api/core/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/api/core/pyproject.toml b/src/api/core/pyproject.toml index 7800be83..3c817845 100644 --- a/src/api/core/pyproject.toml +++ b/src/api/core/pyproject.toml @@ -36,7 +36,7 @@ dependencies = [ "rfc3987==1.3.8", "sentry-sdk[fastapi]==1.15.0", "uvicorn[standard]==0.20.0", - "ralph-malph[backend-lrs]==3.7.0" + "ralph-malph[backend-lrs]==3.8.0" ] dynamic = ["version"] From 6495acd93a29743223def86b8bdab85247ee925f Mon Sep 17 00:00:00 2001 From: Lebaud Antoine Date: Sun, 23 Jul 2023 16:38:10 +0200 Subject: [PATCH 2/7] =?UTF-8?q?=E2=9C=A8(api)=20implement=20video=20new=20?= =?UTF-8?q?`downloads`=20endpoint?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Enhance engagement metrics by implementing a new endpoint to track download events on video resources. This endpoint queries an LRS for statements with the 'Downloaded' xAPI verb (http://id.tincanapi.com/verb/downloaded). The endpoint provides both a total count of events and a daily count for studying download patterns. 
--- CHANGELOG.md | 1 + src/api/plugins/video/warren_video/api.py | 33 ++++++- .../plugins/video/warren_video/indicators.py | 85 ++++++++++++++++++- src/api/plugins/video/warren_video/models.py | 14 +++ 4 files changed, 130 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ebb0bb18..8194dec2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,5 +22,6 @@ and this project adheres to - Add the LTI django application - Rename the API directory to a more descriptive name. - Add a select and date range picker to the web dashboard. +- Implement video downloads endpoint [unreleased]: https://github.com/openfun/warren \ No newline at end of file diff --git a/src/api/plugins/video/warren_video/api.py b/src/api/plugins/video/warren_video/api.py index f51e7c93..e3ebc532 100644 --- a/src/api/plugins/video/warren_video/api.py +++ b/src/api/plugins/video/warren_video/api.py @@ -7,8 +7,12 @@ from warren.fields import IRI from warren.filters import BaseQueryFilters, DatetimeRange from warren.models import Error, Response, StatusEnum -from warren_video.indicators import DailyCompletedVideoViews, DailyVideoViews -from warren_video.models import VideoViews +from warren_video.indicators import ( + DailyCompletedVideoViews, + DailyVideoDownloads, + DailyVideoViews, +) +from warren_video.models import VideoDownloads, VideoViews router = APIRouter( prefix="/video", @@ -46,3 +50,28 @@ async def views( status=StatusEnum.FAILED, content=Error(error_message=str(exception)) ) return response + + +@router.get("/{video_uuid:path}/downloads") +async def downloads( + video_uuid: IRI, + filters: Annotated[BaseQueryFilters, Depends()], + unique: bool = False, +) -> Response[VideoDownloads]: + """Number of downloads for `video_uuid` in the `since` -> `until` date range.""" + indicator = DailyVideoDownloads( + client=lrs_client, + video_uuid=video_uuid, + date_range=DatetimeRange(since=filters.since, until=filters.until), + is_unique=unique, + ) + try: + response = 
Response[VideoDownloads]( + status=StatusEnum.SUCCESS, content=indicator.compute() + ) + except KeyError as exception: + logger.error(exception) + return Response[Error]( + status=StatusEnum.FAILED, content=Error(error_message=str(exception)) + ) + return response diff --git a/src/api/plugins/video/warren_video/indicators.py b/src/api/plugins/video/warren_video/indicators.py index 8f94f973..3da4c7b4 100644 --- a/src/api/plugins/video/warren_video/indicators.py +++ b/src/api/plugins/video/warren_video/indicators.py @@ -5,12 +5,13 @@ from ralph.backends.http.lrs import BaseHTTP, LRSQuery from ralph.models.xapi.concepts.constants.video import RESULT_EXTENSION_TIME from ralph.models.xapi.concepts.verbs.scorm_profile import CompletedVerb +from ralph.models.xapi.concepts.verbs.tincan_vocabulary import DownloadedVerb from ralph.models.xapi.concepts.verbs.video import PlayedVerb from warren.base_indicator import BaseIndicator, pre_process_statements from warren.filters import DatetimeRange from warren.models import XAPI_STATEMENT from warren_video.conf import settings as video_plugin_settings -from warren_video.models import VideoViews +from warren_video.models import VideoDownloads, VideoViews class DailyVideoViews(BaseIndicator): @@ -188,3 +189,85 @@ def compute(self) -> VideoViews: indicator.count_by_date = count_by_date.to_dict("records") return indicator + + +class DailyVideoDownloads(BaseIndicator): + """Daily Video Downloads indicator. + + Defines the daily video downloads indicator. It is, for a given video, and + a date range, the total number of downloads, and the number of downloads per day. + """ + + def __init__( + self, + client: BaseHTTP, + video_uuid: str, + date_range: DatetimeRange, + is_unique: bool, + ): + """Instantiate the indicator with its parameters. + + Args: + client: The LRS backend to query + video_uuid: The UUID of the video on which to compute the metric + date_range: The date range on which to compute the indicator. 
It has + 2 fields, `since` and `until` which are dates or timestamps that must be + in ISO format (YYYY-MM-DD, YYYY-MM-DDThh:mm:ss.sss±hh:mm or + YYYY-MM-DDThh:mm:ss.sssZ") + is_unique: If true, multiple downloads by the same actor are counted as one + """ + self.client = client + self.video_uuid = video_uuid + self.date_range = date_range + self.is_unique = is_unique + + def get_lrs_query(self) -> LRSQuery: + """Returns the LRS query for fetching required statements.""" + return LRSQuery( + query={ + "verb": DownloadedVerb().id, + "activity": self.video_uuid, + "since": self.date_range.since.isoformat(), + "until": self.date_range.until.isoformat(), + } + ) + + def fetch_statements(self) -> List[XAPI_STATEMENT]: + """Executes the LRS query to obtain statements required for this indicator.""" + return list( + self.client.read( + target=self.client.statements_endpoint, query=self.get_lrs_query() + ) + ) + + def compute(self) -> VideoDownloads: + """Fetches statements and computes the current indicator. + + Fetches the statements from the LRS, filters and aggregates them to return the + number of video downloads per day. 
+ """ + indicator = VideoDownloads() + raw_statements = self.fetch_statements() + if not raw_statements: + return indicator + preprocessed_statements = pre_process_statements(raw_statements) + + # If the `is_unique` filter is selected, filter out rows with duplicate + # `actor.account.name` + if self.is_unique: + preprocessed_statements.drop_duplicates(subset="actor.uuid", inplace=True) + + # Group by day and calculate sum of downloads per day + count_by_date = ( + preprocessed_statements.groupby(preprocessed_statements["date"]) + .count() + .reset_index() + .rename(columns={"id": "count"}) + .loc[:, ["date", "count"]] + ) + + # Calculate the total number of downloads + indicator.total_downloads = len(preprocessed_statements.index) + indicator.count_by_date = count_by_date.to_dict("records") + + return indicator diff --git a/src/api/plugins/video/warren_video/models.py b/src/api/plugins/video/warren_video/models.py index 025ec54d..7b96ca0f 100644 --- a/src/api/plugins/video/warren_video/models.py +++ b/src/api/plugins/video/warren_video/models.py @@ -17,3 +17,17 @@ class VideoViews(BaseModel): total_views: int = 0 count_by_date: List[VideoDayViews] = [] + + +class VideoDayDownloads(BaseModel): + """Model to represent video downloads for a date.""" + + date: Date + count: int = 0 + + +class VideoDownloads(BaseModel): + """Model to represent video downloads.""" + + total_downloads: int = 0 + count_by_date: List[VideoDayDownloads] = [] From ebb272f81bec377a7848932b2cc3e1aabfe247a5 Mon Sep 17 00:00:00 2001 From: Lebaud Antoine Date: Sun, 23 Jul 2023 16:44:50 +0200 Subject: [PATCH 3/7] =?UTF-8?q?=E2=99=BB=EF=B8=8F(api)=20avoid=20confusion?= =?UTF-8?q?=20while=20designating=20actor's=20identifier?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit To improve clarity and avoid confusion, we propose unifying the naming of actor.uuid to use uid throughout the codebase. 
This change will provide a consistent and clear representation, emphasizing that the identifier is unique, though not necessarily universal. --- src/api/core/warren/base_indicator.py | 14 +++++------ .../core/warren/tests/test_base_indicator.py | 23 ++++++++++--------- .../plugins/video/warren_video/indicators.py | 15 +++++------- 3 files changed, 25 insertions(+), 27 deletions(-) diff --git a/src/api/core/warren/base_indicator.py b/src/api/core/warren/base_indicator.py index a95db898..2bc9f05f 100644 --- a/src/api/core/warren/base_indicator.py +++ b/src/api/core/warren/base_indicator.py @@ -19,13 +19,13 @@ def parse_raw_statements(raw_statements) -> pd.DataFrame: return flattened -def add_actor_unique_id(statements: pd.DataFrame) -> pd.DataFrame: - """Add a `actor.uuid` column that uniquely identifies the agent. +def add_actor_uid(statements: pd.DataFrame) -> pd.DataFrame: + """Add a `actor.uid` column that uniquely identifies the agent. Depending on the xAPI statements, the actor can be identified in 4 different ways : https://github.com/adlnet/xAPI-Spec/blob/master/xAPI-Data.md#details-4. This - function handles the 4 cases and creates a `unique_actor_id` column that - can be used later without worrying about the 4 IFIs. + function handles the 4 cases and creates a `uid` column that can be used later + without worrying about the 4 IFIs. """ xapi_actor_identifier_columns = settings.XAPI_ACTOR_IDENTIFIER_PATHS.intersection( set(statements.columns) @@ -35,7 +35,7 @@ def add_actor_unique_id(statements: pd.DataFrame) -> pd.DataFrame: raise ValueError( "There is no way of identifying the agent in submitted statements." 
) - statements["actor.uuid"] = statements.apply( + statements["actor.uid"] = statements.apply( lambda row: hashlib.sha256( "-".join(str(row[col]) for col in xapi_actor_identifier_columns).encode() ).hexdigest(), @@ -48,8 +48,8 @@ def add_actor_unique_id(statements: pd.DataFrame) -> pd.DataFrame: def pre_process_statements(statements: List) -> pd.DataFrame: """Denormalize raw statements, and add utility columns.""" parsed = parse_raw_statements(statements) - with_unique_id = add_actor_unique_id(parsed) - return with_unique_id + with_actor_uid = add_actor_uid(parsed) + return with_actor_uid class BaseIndicator(ABC): diff --git a/src/api/core/warren/tests/test_base_indicator.py b/src/api/core/warren/tests/test_base_indicator.py index 931cfaa6..74918510 100644 --- a/src/api/core/warren/tests/test_base_indicator.py +++ b/src/api/core/warren/tests/test_base_indicator.py @@ -1,7 +1,7 @@ """Test the functions from the BaseIndicator class.""" import pandas as pd -from warren.base_indicator import add_actor_unique_id, parse_raw_statements +from warren.base_indicator import add_actor_uid, parse_raw_statements from warren.factories.base import BaseXapiStatementFactory @@ -24,13 +24,13 @@ def test_parse_raw_statements(): assert len(statements) == len(parsed) -def test_add_actor_unique_id(): - """Test the generation of a `actor.uuid` column. +def test_add_actor_uid(): + """Test the generation of a `actor.uid` column. Builds a list of xAPI statements with various identification methods (See the 4 IFIs of the spec https://github.com/adlnet/xAPI-Spec/blob/master/xAPI-Data.md#details-4), and ensure - the UUID is created properly, and is the same for two equal actors. + the UID is created properly, and is the same for two equal actors. 
""" statements = [ BaseXapiStatementFactory.build( @@ -86,10 +86,11 @@ def test_add_actor_unique_id(): ] statements = parse_raw_statements(statements) - statements = add_actor_unique_id(statements) - assert "actor.uuid" in statements.columns - assert statements["actor.uuid"].notna - # Check that 2 identical actors have the same UUID - uuids_john = statements[statements["actor.account.name"] == "John"]["actor.uuid"] - assert len(uuids_john) == 2 - assert len(uuids_john.unique()) == 1 + statements = add_actor_uid(statements) + assert "actor.uid" in statements.columns + assert statements["actor.uid"].notna + # Check that 2 identical actors have the same UID + ids_john = statements[statements["actor.account.name"] == "John"]["actor.uid"] + assert len(ids_john) == 2 + # Make sure these ids are UID. + assert len(ids_john.unique()) == 1 diff --git a/src/api/plugins/video/warren_video/indicators.py b/src/api/plugins/video/warren_video/indicators.py index 3da4c7b4..337f9c2d 100644 --- a/src/api/plugins/video/warren_video/indicators.py +++ b/src/api/plugins/video/warren_video/indicators.py @@ -88,10 +88,9 @@ def filter_view_duration(row): flattened.apply(filter_view_duration, axis=1) ] - # If the `is_unique` filter is selected, filter out rows with duplicate - # `actor.account.name` + # Filter out duplicate 'actor.uid' if 'is_unique' is selected. if self.is_unique: - filtered_view_duration.drop_duplicates(subset="actor.uuid", inplace=True) + filtered_view_duration.drop_duplicates(subset="actor.uid", inplace=True) # Group by day and calculate sum of events per day count_by_date = ( @@ -170,10 +169,9 @@ def compute(self) -> VideoViews: return indicator flattened = pre_process_statements(raw_statements) - # If the `is_unique` filter is selected, filter out rows with duplicate - # `actor.account.name` + # Filter out duplicate 'actor.uid' if 'is_unique' is selected. 
if self.is_unique: - flattened.drop_duplicates(subset="actor.uuid", inplace=True) + flattened.drop_duplicates(subset="actor.uid", inplace=True) # Group by day and calculate sum of events per day count_by_date = ( @@ -252,10 +250,9 @@ def compute(self) -> VideoDownloads: return indicator preprocessed_statements = pre_process_statements(raw_statements) - # If the `is_unique` filter is selected, filter out rows with duplicate - # `actor.account.name` + # Filter out duplicate 'actor.uid' if 'is_unique' is selected. if self.is_unique: - preprocessed_statements.drop_duplicates(subset="actor.uuid", inplace=True) + preprocessed_statements.drop_duplicates(subset="actor.uid", inplace=True) # Group by day and calculate sum of downloads per day count_by_date = ( From ddcca66ec6f63e1fd3ac8a93552bceac74ba9907 Mon Sep 17 00:00:00 2001 From: Lebaud Antoine Date: Mon, 24 Jul 2023 15:13:02 +0200 Subject: [PATCH 4/7] =?UTF-8?q?=E2=99=BB=EF=B8=8F(api)=20merge=20video=20`?= =?UTF-8?q?views`=20and=20`downloads`=20pydantic=20models?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Current specific classes were not adding significant value beyond the proposed base DailyCount and DailyCounts classes. They served as aliases with different names, which led to redundant code. 
--- src/api/core/warren/models.py | 18 ++++++++++- src/api/plugins/video/tests/test_api.py | 15 ++++----- src/api/plugins/video/warren_video/api.py | 11 +++---- .../plugins/video/warren_video/indicators.py | 21 ++++++------ src/api/plugins/video/warren_video/models.py | 32 ------------------- src/frontend/packages/ui/video/types/index.ts | 2 +- 6 files changed, 40 insertions(+), 59 deletions(-) diff --git a/src/api/core/warren/models.py b/src/api/core/warren/models.py index 48d0ace9..2a1a9e1a 100644 --- a/src/api/core/warren/models.py +++ b/src/api/core/warren/models.py @@ -1,9 +1,11 @@ """Warren's core models.""" from enum import Enum -from typing import Any, Dict, Generic, TypeVar +from typing import Any, Dict, Generic, List, TypeVar from pydantic.main import BaseModel +from warren.fields import Date + class StatusEnum(str, Enum): """Enum for status types.""" @@ -29,3 +31,17 @@ class Response(BaseModel, Generic[T]): XAPI_STATEMENT = Dict[str, Any] + + +class DailyCount(BaseModel): + """Base model to represent a count for a date.""" + + date: Date + count: int = 0 + + +class DailyCounts(BaseModel): + """Base model to represent daily counts summary.""" + + total_count: int = 0 + count_by_date: List[DailyCount] = [] diff --git a/src/api/plugins/video/tests/test_api.py b/src/api/plugins/video/tests/test_api.py index 51018303..4a46b63f 100644 --- a/src/api/plugins/video/tests/test_api.py +++ b/src/api/plugins/video/tests/test_api.py @@ -8,9 +8,8 @@ from ralph.models.xapi.concepts.constants.video import RESULT_EXTENSION_TIME from ralph.models.xapi.concepts.verbs.video import PlayedVerb from warren.backends import lrs_client -from warren.models import Response +from warren.models import DailyCounts, Response from warren_video.factories import VideoPlayedFactory -from warren_video.models import VideoViews @pytest.mark.anyio @@ -57,9 +56,9 @@ async def test_views_valid_video_id_path_but_no_matching_video( ) assert response.status_code == 200 - assert 
VideoViews.parse_obj(response.json()) == VideoViews( - total_views=0, - views_count_by_date=[], + assert DailyCounts.parse_obj(response.json()) == DailyCounts( + total_count=0, + count_by_date=[], ) @@ -114,11 +113,11 @@ async def test_views_backend_query(http_client: AsyncClient, httpx_mock: HTTPXMo assert response.status_code == 200 # Parse the response to obtain video views count_by_date - video_views = (Response[VideoViews]).parse_obj(response.json()).content + video_views = (Response[DailyCounts]).parse_obj(response.json()).content # Counting all views is expected expected_video_views = { - "total_views": 3, + "total_count": 3, "count_by_date": [ {"date": "2020-01-01", "count": 2}, {"date": "2020-01-02", "count": 1}, @@ -187,7 +186,7 @@ async def test_unique_views_backend_query( assert response.status_code == 200 # Parse the response to obtain video views count_by_date - video_views = (Response[VideoViews]).parse_obj(response.json()).content + video_views = (Response[DailyCounts]).parse_obj(response.json()).content # Counting only the first view is expected expected_video_views = { diff --git a/src/api/plugins/video/warren_video/api.py b/src/api/plugins/video/warren_video/api.py index e3ebc532..46774194 100644 --- a/src/api/plugins/video/warren_video/api.py +++ b/src/api/plugins/video/warren_video/api.py @@ -6,13 +6,12 @@ from warren.backends import lrs_client from warren.fields import IRI from warren.filters import BaseQueryFilters, DatetimeRange -from warren.models import Error, Response, StatusEnum +from warren.models import DailyCounts, Error, Response, StatusEnum from warren_video.indicators import ( DailyCompletedVideoViews, DailyVideoDownloads, DailyVideoViews, ) -from warren_video.models import VideoDownloads, VideoViews router = APIRouter( prefix="/video", @@ -27,7 +26,7 @@ async def views( filters: Annotated[BaseQueryFilters, Depends()], complete: bool = False, unique: bool = False, -) -> Response[VideoViews]: +) -> Response[DailyCounts]: """Number 
of views for `video_uuid` in the `since` -> `until` date range.""" indicator_kwargs = { "client": lrs_client, @@ -41,7 +40,7 @@ async def views( else: indicator = DailyVideoViews(**indicator_kwargs) try: - response = Response[VideoViews]( + response = Response[DailyCounts]( status=StatusEnum.SUCCESS, content=indicator.compute() ) except KeyError as exception: @@ -57,7 +56,7 @@ async def downloads( video_uuid: IRI, filters: Annotated[BaseQueryFilters, Depends()], unique: bool = False, -) -> Response[VideoDownloads]: +) -> Response[DailyCounts]: """Number of downloads for `video_uuid` in the `since` -> `until` date range.""" indicator = DailyVideoDownloads( client=lrs_client, @@ -66,7 +65,7 @@ async def downloads( is_unique=unique, ) try: - response = Response[VideoDownloads]( + response = Response[DailyCounts]( status=StatusEnum.SUCCESS, content=indicator.compute() ) except KeyError as exception: diff --git a/src/api/plugins/video/warren_video/indicators.py b/src/api/plugins/video/warren_video/indicators.py index 337f9c2d..f1e955ca 100644 --- a/src/api/plugins/video/warren_video/indicators.py +++ b/src/api/plugins/video/warren_video/indicators.py @@ -9,9 +9,8 @@ from ralph.models.xapi.concepts.verbs.video import PlayedVerb from warren.base_indicator import BaseIndicator, pre_process_statements from warren.filters import DatetimeRange -from warren.models import XAPI_STATEMENT +from warren.models import XAPI_STATEMENT, DailyCounts from warren_video.conf import settings as video_plugin_settings -from warren_video.models import VideoDownloads, VideoViews class DailyVideoViews(BaseIndicator): @@ -64,13 +63,13 @@ def fetch_statements(self) -> List[XAPI_STATEMENT]: ) ) - def compute(self) -> VideoViews: + def compute(self) -> DailyCounts: """Fetches statements and computes the current indicator. Fetches the statements from the LRS, filters and aggregates them to return the number of video views per day. 
""" - indicator = VideoViews() + indicator = DailyCounts() raw_statements = self.fetch_statements() if not raw_statements: return indicator @@ -102,7 +101,7 @@ def filter_view_duration(row): ) # Calculate the total number of events - indicator.total_views = len(filtered_view_duration.index) + indicator.total_count = len(filtered_view_duration.index) indicator.count_by_date = count_by_date.to_dict("records") return indicator @@ -157,13 +156,13 @@ def fetch_statements(self) -> List[XAPI_STATEMENT]: ) ) - def compute(self) -> VideoViews: + def compute(self) -> DailyCounts: """Fetches statements and computes the current indicator. Fetches the statements from the LRS, filters and aggregates them to return the number of video views per day. """ - indicator = VideoViews() + indicator = DailyCounts() raw_statements = self.fetch_statements() if not raw_statements: return indicator @@ -183,7 +182,7 @@ def compute(self) -> VideoViews: ) # Calculate the total number of events - indicator.total_views = len(flattened.index) + indicator.total_count = len(flattened.index) indicator.count_by_date = count_by_date.to_dict("records") return indicator @@ -238,13 +237,13 @@ def fetch_statements(self) -> List[XAPI_STATEMENT]: ) ) - def compute(self) -> VideoDownloads: + def compute(self) -> DailyCounts: """Fetches statements and computes the current indicator. Fetches the statements from the LRS, filters and aggregates them to return the number of video downloads per day. 
""" - indicator = VideoDownloads() + indicator = DailyCounts() raw_statements = self.fetch_statements() if not raw_statements: return indicator @@ -264,7 +263,7 @@ def compute(self) -> VideoDownloads: ) # Calculate the total number of downloads - indicator.total_downloads = len(preprocessed_statements.index) + indicator.total_count = len(preprocessed_statements.index) indicator.count_by_date = count_by_date.to_dict("records") return indicator diff --git a/src/api/plugins/video/warren_video/models.py b/src/api/plugins/video/warren_video/models.py index 7b96ca0f..1001df06 100644 --- a/src/api/plugins/video/warren_video/models.py +++ b/src/api/plugins/video/warren_video/models.py @@ -1,33 +1 @@ """Defines the schema of what is returned inside the API responses.""" -from typing import List - -from pydantic.main import BaseModel -from warren.fields import Date - - -class VideoDayViews(BaseModel): - """Model to represent video views for a date.""" - - date: Date - count: int = 0 - - -class VideoViews(BaseModel): - """Model to represent video views.""" - - total_views: int = 0 - count_by_date: List[VideoDayViews] = [] - - -class VideoDayDownloads(BaseModel): - """Model to represent video downloads for a date.""" - - date: Date - count: int = 0 - - -class VideoDownloads(BaseModel): - """Model to represent video downloads.""" - - total_downloads: int = 0 - count_by_date: List[VideoDayDownloads] = [] diff --git a/src/frontend/packages/ui/video/types/index.ts b/src/frontend/packages/ui/video/types/index.ts index 859ffe16..e2f56e2c 100644 --- a/src/frontend/packages/ui/video/types/index.ts +++ b/src/frontend/packages/ui/video/types/index.ts @@ -5,6 +5,6 @@ export type VideoViewsResponseItem = { export type VideoViewsResponse = { id: string; - total_views: number; + total_count: number; count_by_date: Array; }; From a61c44b846646374c3bf5c3b32623d10337ec8f6 Mon Sep 17 00:00:00 2001 From: Lebaud Antoine Date: Mon, 24 Jul 2023 17:19:30 +0200 Subject: [PATCH 5/7] 
=?UTF-8?q?=F0=9F=A9=B9(api)=20fix=20mis-renamed=20end?= =?UTF-8?q?point=20parameter=20in=20tests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit video_id was renamed to video_uuid without thorough attention. Align all existing API tests. --- src/api/plugins/video/tests/test_api.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/api/plugins/video/tests/test_api.py b/src/api/plugins/video/tests/test_api.py index 4a46b63f..bc7ef3f8 100644 --- a/src/api/plugins/video/tests/test_api.py +++ b/src/api/plugins/video/tests/test_api.py @@ -14,17 +14,17 @@ @pytest.mark.anyio @pytest.mark.parametrize( - "video_id", ["foo", "foo/bar", "/foo/bar", "foo%2Fbar", "%2Ffoo%2Fbar"] + "video_uuid", ["foo", "foo/bar", "/foo/bar", "foo%2Fbar", "%2Ffoo%2Fbar"] ) -async def test_views_invalid_video_id(http_client: AsyncClient, video_id: str): - """Test the video views endpoint with an invalid `video_id` path.""" +async def test_views_invalid_video_uuid(http_client: AsyncClient, video_uuid: str): + """Test the video views endpoint with an invalid `video_uuid` path.""" date_query_params = { "since": "2023-01-01", "until": "2023-01-31", } response = await http_client.get( - f"/api/v1/video/{video_id}/views", params=date_query_params + f"/api/v1/video/{video_uuid}/views", params=date_query_params ) assert response.status_code == 422 @@ -32,10 +32,10 @@ async def test_views_invalid_video_id(http_client: AsyncClient, video_id: str): @pytest.mark.anyio -async def test_views_valid_video_id_path_but_no_matching_video( +async def test_views_valid_video_uuid_path_but_no_matching_video( http_client: AsyncClient, httpx_mock: HTTPXMock ): - """Test the video views endpoint with a valid `video_id` but no results.""" + """Test the video views endpoint with a valid `video_uuid` but no results.""" lrs_client.base_url = "http://fake-lrs.com" # Mock the call to the LRS so that it would return no statements, as it @@ -190,7 
+190,7 @@ async def test_unique_views_backend_query( # Counting only the first view is expected expected_video_views = { - "total_views": 1, + "total_count": 1, "count_by_date": [ {"date": "2020-01-01", "count": 1}, ], From 4da99edfe1145ac4117c1eccc10d7982713cbe80 Mon Sep 17 00:00:00 2001 From: Lebaud Antoine Date: Mon, 24 Jul 2023 17:48:39 +0200 Subject: [PATCH 6/7] =?UTF-8?q?=E2=99=BB=EF=B8=8F(api)=20update=20timestam?= =?UTF-8?q?ps=20fixtures=20to=20Ralph's=20format?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit After studying some raw statements from ralph, I noticed that statements' timestamps were stored as date and time string with an offset from UTC, and not as UTC. --- src/api/plugins/video/tests/test_api.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/api/plugins/video/tests/test_api.py b/src/api/plugins/video/tests/test_api.py index bc7ef3f8..16f0cc7a 100644 --- a/src/api/plugins/video/tests/test_api.py +++ b/src/api/plugins/video/tests/test_api.py @@ -68,9 +68,9 @@ async def test_views_backend_query(http_client: AsyncClient, httpx_mock: HTTPXMo # Define 3 video views fixtures video_uuid = "uuid://ba4252ce-d042-43b0-92e8-f033f45612ee" video_views_fixtures = [ - {"timestamp": "2020-01-01T00:00:00.000Z", "time": 100}, - {"timestamp": "2020-01-01T00:00:30.000Z", "time": 200}, - {"timestamp": "2020-01-02T00:00:00.000Z", "time": 300}, + {"timestamp": "2020-01-01T00:00:00.000+00:00", "time": 100}, + {"timestamp": "2020-01-01T00:00:30.000+00:00", "time": 200}, + {"timestamp": "2020-01-02T00:00:00.000+00:00", "time": 300}, ] # Build video statements from fixtures @@ -135,9 +135,9 @@ async def test_unique_views_backend_query( # Define 3 video views fixtures video_uuid = "uuid://ba4252ce-d042-43b0-92e8-f033f45612ee" video_views_fixtures = [ - {"timestamp": "2020-01-01T00:00:00.000Z", "time": 100}, - {"timestamp": "2020-01-01T00:00:30.000Z", "time": 200}, - {"timestamp": 
"2020-01-02T00:00:00.000Z", "time": 300}, + {"timestamp": "2020-01-01T00:00:00.000+00:00", "time": 100}, + {"timestamp": "2020-01-01T00:00:30.000+00:00", "time": 200}, + {"timestamp": "2020-01-02T00:00:00.000+00:00", "time": 300}, ] # Build video statements from fixtures From 97e6e579d5a7c4ab54da6683fe736ac63646ffe5 Mon Sep 17 00:00:00 2001 From: Lebaud Antoine Date: Mon, 24 Jul 2023 18:24:24 +0200 Subject: [PATCH 7/7] =?UTF-8?q?=E2=9C=85(api)=20add=20tests=20on=20video?= =?UTF-8?q?=20new=20`downloads`=20endpoint?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Current tests are following the exact same structure as the `views` ones. Please note that these tests are a basis and should be extended with more advanced ones as soon as the API complexity and usage increase. --- src/api/plugins/video/tests/test_api.py | 188 +++++++++++++++++- .../plugins/video/warren_video/factories.py | 62 +++++- 2 files changed, 248 insertions(+), 2 deletions(-) diff --git a/src/api/plugins/video/tests/test_api.py b/src/api/plugins/video/tests/test_api.py index 16f0cc7a..ab9cbbe6 100644 --- a/src/api/plugins/video/tests/test_api.py +++ b/src/api/plugins/video/tests/test_api.py @@ -6,10 +6,11 @@ from httpx import AsyncClient from pytest_httpx import HTTPXMock from ralph.models.xapi.concepts.constants.video import RESULT_EXTENSION_TIME +from ralph.models.xapi.concepts.verbs.tincan_vocabulary import DownloadedVerb from ralph.models.xapi.concepts.verbs.video import PlayedVerb from warren.backends import lrs_client from warren.models import DailyCounts, Response -from warren_video.factories import VideoPlayedFactory +from warren_video.factories import VideoDownloadedFactory, VideoPlayedFactory @pytest.mark.anyio @@ -197,3 +198,188 @@ async def test_unique_views_backend_query( assert video_views == expected_video_views + + +@pytest.mark.anyio +@pytest.mark.parametrize( + "video_uuid", ["foo", "foo/bar", "/foo/bar", "foo%2Fbar", "%2Ffoo%2Fbar"] +) 
+async def test_downloads_invalid_video_uuid(http_client: AsyncClient, video_uuid: str): + """Test the video downloads endpoint with an invalid `video_uuid` path.""" + date_query_params = { + "since": "2023-01-01", + "until": "2023-01-31", + } + + response = await http_client.get( + f"/api/v1/video/{video_uuid}/downloads", params=date_query_params + ) + + assert response.status_code == 422 + assert "is not a valid 'IRI'." in response.json().get("detail")[0].get("msg") + + +@pytest.mark.anyio +async def test_downloads_valid_video_uuid_path_but_no_matching_video( + http_client: AsyncClient, httpx_mock: HTTPXMock +): + """Test the video downloads endpoint with a valid `video_uuid` but no results.""" + lrs_client.base_url = "http://fake-lrs.com" + + # Mock the call to the LRS so that it would return no statements, as it + # would do with no matching video + httpx_mock.add_response( + url=re.compile(r"^http://fake-lrs\.com/xAPI/statements\?.*$"), + method="GET", + json={"statements": []}, + status_code=200, + ) + + response = await http_client.get( + url="/api/v1/video/uuid://fake-uuid/downloads", + params={ + "since": "2023-01-01", + "until": "2023-01-01", + }, + ) + + assert response.status_code == 200 + assert DailyCounts.parse_obj(response.json()) == DailyCounts( + total_count=0, + count_by_date=[], + ) + + +@pytest.mark.anyio +async def test_downloads_backend_query(http_client: AsyncClient, httpx_mock: HTTPXMock): + """Test the video downloads endpoint backend query results.""" + # Define 3 video downloads fixtures + video_uuid = "uuid://ba4252ce-d042-43b0-92e8-f033f45612ee" + video_download_timestamps = [ + "2020-01-01T00:00:00.000+00:00", + "2020-01-01T00:00:30.000+00:00", + "2020-01-02T00:00:00.000+00:00", + ] + + # Build video statements from fixtures + video_statements = [ + VideoDownloadedFactory.build( + [ + {"object": {"id": video_uuid, "objectType": "Activity"}}, + {"verb": {"id": DownloadedVerb().id}}, + {"timestamp": download_timestamp}, + ] + ) + for 
download_timestamp in video_download_timestamps
+    ]
+
+    # Convert each video statement to a JSON object
+    video_statements_json = [
+        json.loads(statement.json()) for statement in video_statements
+    ]
+
+    # Mock the LRS call so that it returns the fixture statements
+    lrs_client.base_url = "http://fake-lrs.com"
+    httpx_mock.add_response(
+        url=re.compile(r"^http://fake-lrs\.com/xAPI/statements\?.*$"),
+        method="GET",
+        json={"statements": video_statements_json},
+        status_code=200,
+    )
+
+    # Perform the call to warren backend. When fetching the LRS statements, it will
+    # get the above mocked statements
+    response = await http_client.get(
+        url=f"/api/v1/video/{video_uuid}/downloads",
+        params={
+            "since": "2020-01-01",
+            "until": "2020-01-03",
+        },
+    )
+
+    assert response.status_code == 200
+
+    # Parse the response to obtain video downloads count_by_date
+    video_downloads = (Response[DailyCounts]).parse_obj(response.json()).content
+
+    # Counting all downloads is expected
+    expected_video_downloads = {
+        "total_count": 3,
+        "count_by_date": [
+            {"date": "2020-01-01", "count": 2},
+            {"date": "2020-01-02", "count": 1},
+        ],
+    }
+
+    assert video_downloads == expected_video_downloads
+
+
+@pytest.mark.anyio
+async def test_unique_downloads_backend_query(
+    http_client: AsyncClient, httpx_mock: HTTPXMock
+):
+    """Test the video downloads endpoint, with parameter unique=True."""
+    # Define 3 video downloads fixtures
+    video_uuid = "uuid://ba4252ce-d042-43b0-92e8-f033f45612ee"
+    video_download_timestamps = [
+        "2020-01-01T00:00:00.000+00:00",
+        "2020-01-01T00:00:30.000+00:00",
+        "2020-01-02T00:00:00.000+00:00",
+    ]
+
+    # Build video statements from fixtures
+    video_statements = [
+        VideoDownloadedFactory.build(
+            [
+                {
+                    "actor": {
+                        "objectType": "Agent",
+                        "account": {"name": "John", "homePage": "http://fun-mooc.fr"},
+                    }
+                },
+                {"object": {"id": video_uuid, "objectType": "Activity"}},
+                {"verb": {"id": DownloadedVerb().id}},
+                {"timestamp": download_timestamp},
+            ]
+        )
+        
for download_timestamp in video_download_timestamps + ] + + # Convert each video statement to a JSON object + video_statements_json = [ + json.loads(statement.json()) for statement in video_statements + ] + + # Mock the LRS call so that it returns the fixture statements + lrs_client.base_url = "http://fake-lrs.com" + httpx_mock.add_response( + url=re.compile(r"^http://fake-lrs\.com/xAPI/statements\?.*$"), + method="GET", + json={"statements": video_statements_json}, + status_code=200, + ) + + # Perform the call to warren backend. When fetching the LRS statements, it will + # get the above mocked statements + response = await http_client.get( + url=f"/api/v1/video/{video_uuid}/downloads?unique=true", + params={ + "since": "2020-01-01", + "until": "2020-01-03", + }, + ) + + assert response.status_code == 200 + + # Parse the response to obtain video downloads count_by_date + video_downloads = (Response[DailyCounts]).parse_obj(response.json()).content + + # Counting only the first download is expected + expected_video_downloads = { + "total_count": 1, + "count_by_date": [ + {"date": "2020-01-01", "count": 1}, + ], + } + + assert video_downloads == expected_video_downloads diff --git a/src/api/plugins/video/warren_video/factories.py b/src/api/plugins/video/warren_video/factories.py index 73dae76f..42a17c0a 100644 --- a/src/api/plugins/video/warren_video/factories.py +++ b/src/api/plugins/video/warren_video/factories.py @@ -1,6 +1,6 @@ """Factories for video xAPI events.""" from ralph.models.xapi.concepts.constants.video import RESULT_EXTENSION_TIME -from ralph.models.xapi.video.statements import VideoPlayed +from ralph.models.xapi.video.statements import VideoDownloaded, VideoPlayed from warren.factories.base import BaseXapiStatementFactory @@ -61,3 +61,63 @@ class VideoPlayedFactory(BaseXapiStatementFactory): "timestamp": "2021-12-01T08:17:47.150905+00:00", } model: VideoPlayed = VideoPlayed + + +class VideoDownloadedFactory(BaseXapiStatementFactory): + 
"""VideoDownloaded xAPI statement factory.""" + + template: dict = { + "verb": { + "id": "http://id.tincanapi.com/verb/downloaded", + "display": {"en-US": "downloaded"}, + }, + "context": { + "extensions": { + "https://w3id.org/xapi/video/extensions/session-id": ( + "6e952c73-71e3-5c8c-837b-39baea0e73b9" + ), + "https://w3id.org/xapi/video/extensions/quality": 720, + "https://w3id.org/xapi/video/extensions/length": 508, + }, + "contextActivities": { + "category": [ + { + "id": "https://w3id.org/xapi/video", + "definition": { + "id": "uuid://C678149d-956a-483d-8975-c1506de1e1a9", + "definition": { + "type": "http://adlnet.gov/expapi/activities/profile" + }, + }, + } + ], + "parent": [ + { + "id": "course-v1:FUN-MOOC+00001+session01", + "objectType": "Activity", + "definition": { + "type": "http://adlnet.gov/expapi/activities/course" + }, + } + ], + }, + }, + "id": "2140967b-563b-464b-90c0-2e114bd8e133", + "actor": { + "objectType": "Agent", + "account": { + "name": "d5b3733b-ccd9-4ab1-bb29-22e3c2f2e592", + "homePage": "http://lms.example.org", + }, + }, + "object": { + "definition": { + "type": "https://w3id.org/xapi/video/activity-type/video", + "name": {"en-US": "Learning analytics 101"}, + }, + "id": "uuid://dd38149d-956a-483d-8975-c1506de1e1a9", + "objectType": "Activity", + }, + "timestamp": "2021-12-01T08:17:47.150905+00:00", + } + model: VideoPlayed = VideoDownloaded