diff --git a/.buildinfo b/.buildinfo new file mode 100644 index 00000000..6a118b89 --- /dev/null +++ b/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file records the configuration used when building these files. When it is not found, a full rebuild will be done. +config: e26001d7ff39a6e1aa0c173411608fc6 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/.doctrees/api/api.doctree b/.doctrees/api/api.doctree new file mode 100644 index 00000000..8e7a227b Binary files /dev/null and b/.doctrees/api/api.doctree differ diff --git a/.doctrees/api/index.doctree b/.doctrees/api/index.doctree new file mode 100644 index 00000000..06ebf1cd Binary files /dev/null and b/.doctrees/api/index.doctree differ diff --git a/.doctrees/articles/creating-your-own-data-donation-task.doctree b/.doctrees/articles/creating-your-own-data-donation-task.doctree new file mode 100644 index 00000000..d35c84e6 Binary files /dev/null and b/.doctrees/articles/creating-your-own-data-donation-task.doctree differ diff --git a/.doctrees/articles/data-donation-checklist.doctree b/.doctrees/articles/data-donation-checklist.doctree new file mode 100644 index 00000000..d25f8f01 Binary files /dev/null and b/.doctrees/articles/data-donation-checklist.doctree differ diff --git a/.doctrees/articles/deployment.doctree b/.doctrees/articles/deployment.doctree new file mode 100644 index 00000000..0ad18dfb Binary files /dev/null and b/.doctrees/articles/deployment.doctree differ diff --git a/.doctrees/articles/index.doctree b/.doctrees/articles/index.doctree new file mode 100644 index 00000000..b2ea4900 Binary files /dev/null and b/.doctrees/articles/index.doctree differ diff --git a/.doctrees/articles/installation.doctree b/.doctrees/articles/installation.doctree new file mode 100644 index 00000000..3b37f5c2 Binary files /dev/null and b/.doctrees/articles/installation.doctree differ diff --git a/.doctrees/articles/introduction-to-data-donation.doctree 
b/.doctrees/articles/introduction-to-data-donation.doctree new file mode 100644 index 00000000..dc91f023 Binary files /dev/null and b/.doctrees/articles/introduction-to-data-donation.doctree differ diff --git a/.doctrees/articles/next-in-docker.doctree b/.doctrees/articles/next-in-docker.doctree new file mode 100644 index 00000000..0ff4d174 Binary files /dev/null and b/.doctrees/articles/next-in-docker.doctree differ diff --git a/.doctrees/articles/visualizations.doctree b/.doctrees/articles/visualizations.doctree new file mode 100644 index 00000000..5f3c53c7 Binary files /dev/null and b/.doctrees/articles/visualizations.doctree differ diff --git a/.doctrees/environment.pickle b/.doctrees/environment.pickle new file mode 100644 index 00000000..2acf0830 Binary files /dev/null and b/.doctrees/environment.pickle differ diff --git a/.doctrees/index.doctree b/.doctrees/index.doctree new file mode 100644 index 00000000..230907ae Binary files /dev/null and b/.doctrees/index.doctree differ diff --git a/.doctrees/standard_scripts/index.doctree b/.doctrees/standard_scripts/index.doctree new file mode 100644 index 00000000..8f8e6aed Binary files /dev/null and b/.doctrees/standard_scripts/index.doctree differ diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/_sources/api/api.md.txt b/_sources/api/api.md.txt new file mode 100644 index 00000000..46cd77ef --- /dev/null +++ b/_sources/api/api.md.txt @@ -0,0 +1,29 @@ +# API Reference + +## Props + +```{eval-rst} +.. automodule:: port.api.props + :members: +``` + +## Extraction helpers + +```{eval-rst} +.. automodule:: port.helpers.extraction_helpers + :members: +``` + +## Port helpers + +```{eval-rst} +.. automodule:: port.helpers.port_helpers + :members: +``` + +## Validation + +```{eval-rst} +.. 
automodule:: port.helpers.validate + :members: +``` diff --git a/_sources/api/index.rst.txt b/_sources/api/index.rst.txt new file mode 100644 index 00000000..15da24b5 --- /dev/null +++ b/_sources/api/index.rst.txt @@ -0,0 +1,7 @@ +API Reference +============= + +.. toctree:: + :maxdepth: 3 + + api.md diff --git a/_sources/articles/creating-your-own-data-donation-task.md.txt b/_sources/articles/creating-your-own-data-donation-task.md.txt new file mode 100644 index 00000000..ed54f970 --- /dev/null +++ b/_sources/articles/creating-your-own-data-donation-task.md.txt @@ -0,0 +1,413 @@ +# Creating your own donation task + +The donation task is at the core of a data donation study. +It is the step where the participant is actually going to donate their data. + +The data donation task goes as follows: + +1. The participant goes to your data donation task app in a browser +2. The participant is prompted to submit their data download package (DDP) +3. A Python script you wrote, extracts the data you need for your research +4. That data gets presented to the participant on screen +5. The participants decides to donate and you receive the data + +## Using the data donation task in a data donation study + +The data donation task needs to be used in together with Next. Next is the backend for the data donation task. With Next you can configure a data donation study: i.e. configure: + +* Your data donation task +* An information page +* An informed consent page +* A privacy policy +* Instruction manuals for participants +* Configure where the data should go +* and more + +You can find more information on how to deploy a study in the wiki + + +## How does the data donation task work? + +**The idea behind the data donation task** +The data donation task repository is in essence a toolkit with which you can build your own custom data donation task. 
+It is designed as a toolkit because researchers need to tailor the data donation task to their own interests; they are interested in different DDPs and are interested in different data within those DDPs.
+ + +## Start writing your first data donation task script + +After you have forked or cloned and installed this repository (see [instruction](https://github.com/d3i-infra/feldspar/wiki/Installation)) you can start creating your own donation task. + +You can create your own study by changing and/or adapting the code in the following directory `port/src/framework/processing/py/port/` +This directory contains the following files: + +* `script.py`: Contains your donation task logic; which screen the participants will see and in what order +* `api/props.py`: Contains all the UI elements you can use; you can use this file for reference +* `api/commands.py`: Contains the Render and the Donate commands +* `main.py`: The main driver of you donation task, you don't need to touch this file + +### `script.py` + +`script.py` is the most important file and the one we will be working with the most + +Lets look at a full example of a `script.py`. In this example we will be extracting data from a fictional DDP. +Participants are asked to submit a zip file (any zip file will do in this case), and we will extract the file names and some meta data from this zip file. +In a real study you would extract something substantial from the data. + +`script.py` must contain a function called `process` this function determines the whole data donation task from start to finish (Which screens the participant will see and in what order, and what kind of data extraction will take place). +At the time of writing this example is also the default `script.py`. + +In this example process defines the following data donation task: + +1. Ask the participant to submit a zip file +2. Perform validation on the submitted zip file, if not valid return to step 1 +3. Extract the data from the submitted zip file +4. Render the extract data on screen in a table +5. Send the data to the data storage upon consent + +Although these can vary per data donation task, they will be more or less similar. 
+ +Below you can find the annotated example `process` function: + +```python +# script.py +import port.api.props as props +from port.api.commands import (CommandSystemDonate, CommandUIRender, CommandSystemExit) + +import pandas as pd +import zipfile + +def process(session_id: str): + platform = "Platform of interest" + + # Start of the data donation task + while True: + # Ask the participant to submit a file + file_prompt = generate_file_prompt(platform, "application/zip, text/plain") + file_prompt_result = yield render_page(platform, file_prompt) + + # If the participant submitted a file: continue + if file_prompt_result.__type__ == 'PayloadString': + + # Validate the file the participant submitted + # In general this is wise to do + is_data_valid = validate_the_participants_input(file_prompt_result.value) + + # Happy flow (all is well): + # The file the participant submitted is valid + if is_data_valid == True: + + # Extract the data you as a researcher are interested in, and put it in a pandas DataFrame + # Show this data to the participant in a table on screen + # The participant can now decide to donate + extracted_data = extract_the_data_you_are_interested_in(file_prompt_result.value) + consent_prompt = generate_consent_prompt(extracted_data) + consent_prompt_result = yield render_page(platform, consent_prompt) + + # If the participant wants to donate the data gets donated + if consent_prompt_result.__type__ == "PayloadJSON": + yield donate(f"{session_id}-{platform}", consent_prompt_result.value) + + break + + # Sad flow + # The data was not valid, ask the participant to retry + if is_data_valid == False: + retry_prompt = generate_retry_prompt(platform) + retry_prompt_result = yield render_page(platform, retry_prompt) + + # The participant wants to retry: start from the beginning + if retry_prompt_result.__type__ == 'PayloadTrue': + continue + # The participant does not want to retry or pressed skip + else: + break + + # The participant did not submit a file 
and pressed skip + else: + break + + yield exit_port(0, "Success") + yield render_end_page() + +``` + +**The functions used in `process`** + +These are all the functions used in `process` together they make up `script.py`. + +
+extract_the_data_you_are_interested_in + +```python +def extract_the_data_you_are_interested_in(zip_file: str) -> pd.DataFrame: + """ + This function extracts the data the researcher is interested in + + In this case we extract from the zipfile: + * The filesnames + * The compressed file size + * The file size + + You could extract anything here + """ + names = [] + out = pd.DataFrame() + + try: + file = zipfile.ZipFile(zip_file) + data = [] + for name in file.namelist(): + names.append(name) + info = file.getinfo(name) + data.append((name, info.compress_size, info.file_size)) + + out = pd.DataFrame(data, columns=["File name", "Compressed file size", "File size"]) + + except Exception as e: + print(f"Something went wrong: {e}") + + return out +``` + +
+ +
+validate_the_participants_input + +```python +def validate_the_participants_input(zip_file: str) -> bool: + """ + Check if the participant actually submitted a zipfile + Returns True if participant submitted a zipfile, otherwise False + + In reality you need to do a lot more validation. + Some things you could check: + - Check if the the file(s) are the correct format (json, html, binary, etc.) + - If the files are in the correct language + """ + + try: + with zipfile.ZipFile(zip_file) as zf: + return True + except zipfile.BadZipFile: + return False +``` + +
+ +
+render_end_page + +```python +def render_end_page(): + """ + Renders a thank you page + """ + page = props.PropsUIPageEnd() + return CommandUIRender(page) + +``` + +
+ +
+render_page + +```python +def render_page(platform: str, body, progress: int): + """ + Renders the UI components + """ + header = props.PropsUIHeader(props.Translatable({"en": platform, "nl": platform })) + footer = props.PropsUIFooter(progress) + page = props.PropsUIPageDonation(platform, header, body, footer) + return CommandUIRender(page) +``` + +
+ +
+generate_retry_prompt + +```python +def generate_retry_prompt(platform: str) -> props.PropsUIPromptConfirm: + """ + Generates a retry screen, this can be used if validation failed for example. + You can aks the participant to try again, and go back to the submit file prompt + """ + text = props.Translatable({ + "en": f"Unfortunately, we cannot process your {platform} file. Continue, if you are sure that you selected the right file. Try again to select a different file.", + "nl": f"Helaas, kunnen we uw {platform} bestand niet verwerken. Weet u zeker dat u het juiste bestand heeft gekozen? Ga dan verder. Probeer opnieuw als u een ander bestand wilt kiezen." + }) + ok = props.Translatable({ + "en": "Try again", + "nl": "Probeer opnieuw" + }) + cancel = props.Translatable({ + "en": "Continue", + "nl": "Verder" + }) + return props.PropsUIPromptConfirm(text, ok, cancel) +``` + +
+ + +
+generate_file_prompt + +```python +def generate_file_prompt(platform: str) -> props.PropsUIPromptFileInput: + """ + Generates a prompt that asks the participant to input a file + """ + description = props.Translatable({ + "en": f"Please follow the download instructions and choose the file that you stored on your device. Click “Skip” at the right bottom, if you do not have a {platform} file. ", + "nl": f"Volg de download instructies en kies het bestand dat u opgeslagen heeft op uw apparaat. Als u geen {platform} bestand heeft klik dan op “Overslaan” rechts onder." + }) + + return props.PropsUIPromptFileInput(description, "application/zip, text/plain") +``` + +
+ +
+generate_consent_prompt + +```python +def generate_consent_prompt(df: pd.DataFrame) -> props.PropsUIPromptConsentForm: + """ + Generates a prompt screen with table(s) with the extract data for the participant to review + """ + table_title = props.Translatable({ + "en": "Zip file contents", + "nl": "Inhoud zip bestand" + }) + table = props.PropsUIPromptConsentFormTable("zip_contents", table_title, df) + return props.PropsUIPromptConsentForm([table], []) +``` + +
+ +
+donate + +```python +def donate(key, json_string): + """ + Sends data to the backend + """ + return CommandSystemDonate(key, json_string) +``` + +
+ + +
+exit_port + +```python +def exit_port(code, info): + """ + When in Next this function exits the data donation task, and lets the participant return to Next + """ + return CommandSystemExit(code, info) +``` + +
+ + +### Start writing your own `script.py` using the api + +Now that you have seen a full example, you can start to try and create your own data donation task. With the elements from the example you can already build some pretty intricate data donation tasks. +Start creating your own by `script.py` by adapting this example to your own needs, for example, instead of file names you could extract data you would actually like to extract yourself. + +If you want to see which up what UI elements are available to you checkout `api/props.py`. In general you need to construct your own pages (prompts) and render them with `render_page` (dont forget `yield`). + +### The usage of `yield` in `script.py` + +Did you notice `yield` instead of return? `yield` makes sure that whenever the code resumes after a page render, it starts where it left off. +If you render a page you need to use yield instead of return, just like in the example. + +### Install Python packages + +The data donation task runs in the browser of the participant, it is important to understand that when Python is running in your browser it is not using the Python version you have installed on your system. +The data donation task is using [Pyodide](https://pyodide.org/en/stable/) this is Python compiled to web assembly that runs in the browser. +This means that packages you have available on your system install of Python, won't be available in the browser. + +If you want to use external packages they should be available for Pyodide, you can check the list of available packages [here](https://pyodide.org/en/stable/usage/packages-in-pyodide.html). 
+If you have found a package you want to use you can install it by adding it to the array in the `loadPackages` function in `src/framework/processing/py_worker.js` as shown below:
In practice I have found that in most cases it's not really an issue, and you don't have to pay that much attention to the efficiency of your code.
+* The data donation task currently works with text data; nothing keeps us from using other formats in the future (but the constraints on file sizes are still there)
**Validate the DDPs your participants submit and log the results**
+Another way of dealing with it is to provide feedback to the participant whenever you detected they submitted the DDP in a format you did not anticipate. + + +**Make sure your code will not crash** + +A crash in your code causes the data donation task to be stuck. The participant will see an error screen with the stacktrace displayed. You don't want this to happen. Carefull programming can prevent your code from crashing. +A common cause for crashes is trying to access a key value pair in a dict that does not exist or sorting a `pd.DataFrame` column that does not exist. Most crashes will be caused by your expectation that the extraction is in a certain format, while in some cases it won't be. + + +**Make the least amount of assumptions possible about the data in a DDP** + +The more assumptions you make about the data the more opportunities your code has to fail. Some examples: + +* Date parsing: Only parse date when its absolutely required. Date formats can be wildly different between participants, anticipating them all or writing code that can parse all dates you might encounter is less trivial than you might think. +* Files in a zip: You can look for file paths you are interested in, or only file names you are interested in. If the file name is unique, there is no need to check for the full file path. Example: if the folder structure in a zip changes but files remain the same, the code that looks only at file names will still work. +* Nested dictionaries: Sometimes you are interested in a value in a dict that is heavily nested. An approach you can take, instead of doing `dict["key1"]["key2"]["key3"]["key_that_sometimes_is_called_something_different"]["key_which_value_you_want"]`, you can to first denest or flatten the dictionary start looking directly for "key_which_value_you_want". You can find an example [here](https://github.com/d3i-infra/port-vu-pilot/blob/master/src/framework/processing/py/port/helpers.py), look for `dict_denester` and `find_items`. 
+ + +**The researcher is responsible for providing you with DDPs and should be knowledgeable about the data** + +If you are reading this checklist chances are you are going to create a data donation task. It could be the case that you are not the principal investigator of the project but just an engineer or person with some coding experience helping the researcher out. Some researchers expect you to be the one to have knowledge about a certain DDP they are interested in. Some researchers believe that because you are working with data, you also have that data available to you, know what it all means or whether data is present in a DDP. This is of course not always the case. Communicate clearly to the researcher that they responsible for the contents of their study, they should know what the data means and that they should tell you what to extract. In some cases the researcher might not even use the platform they are interested in, if that is the case, tell the researcher to start using the platform they are interested in so they can start instructing you on what to extract. + + +**Test a study carefully before you go live** + +All researchers involved in the research group should test the study before you go live. A data donation study has more points of failure compared to traditional survey research, therefore its crucial that every researcher that is involved will test the complete data donation flow and shares their findings with you. diff --git a/_sources/articles/deployment.md.txt b/_sources/articles/deployment.md.txt new file mode 100644 index 00000000..38472356 --- /dev/null +++ b/_sources/articles/deployment.md.txt @@ -0,0 +1,57 @@ +# Deployment of the data donation task + +This article will discuss the option you have when using the data donation task in a data donation study. +The data donation task is designed to be used with Next. + +## The data donation task with Next + +Next is a software as a service platform developed by [Eyra](https://eyra.co/). 
+As a researcher you can log in to Next and configure a data donation study, this means:
+* Self service community Next on Surf Research Cloud: If you are a researcher at a Dutch university with no budget, this is the best option.
+ +## Installation Guide for the Pre-requisites + +You need to install the following software: + +- Python: Make sure it is at least version 3.10 +- Node.js: Make sure it is at least version 16 +- [Poetry](https://python-poetry.org/): It is a build system for Python packages that the data donation task uses. + +Below you can find more detailed instructions on how to install the required software depending on your operating system. +These instructions are just suggestions, always prefer the official instructions that suit your situation best. + +### Linux + +You can install Python and Node.js from the official repositories of your distribution. Here are the general steps: + +1. Open your terminal +2. Use your package manager to install Python and Node.js +3. Install Poetry using pipx, see the instruction [manual](https://python-poetry.org/docs/) + +### Mac OSX + +If you are using Mac OSX, you can install Python and Node.js using the [HomeBrew](https://brew.sh/) package manager. Follow these steps: + +1. Open your terminal +2. Install HomeBrew following [instructions](https://brew.sh/) if you haven't already +3. Install Python and Node.js by running: `brew install python node` +4. Install Poetry using pipx, see the instruction [manual](https://python-poetry.org/docs/) + + +### Windows + +In order to develop on Windows we recommend using Windows Subsystem for Linux (WSL) in combination with VSCode. +Windows Subsystem for Linux is a convenient way of running Linux on Windows. +This section will contain a bit more context because the steps might be less familiar to Windows-only users. + +If you are already familiar with WSL/Linux, VSCode or both, the installation won't give you too much trouble. +If you are completely new to WSL (or Linux) expect a certain amount of problem solving you have to do. +Key topics to understand are: WSL, and the Ubuntu basics; knowledge on these topics will help you a lot. + +1.
Install WSL, see the official installation [instructions](https://learn.microsoft.com/en-us/windows/wsl/install) +2. Install the default Linux distro (Ubuntu 22.04 at the time of writing) and choose a username and password +3. Download and install VSCode +4. Connect VSCode to WSL, see [instructions](https://code.visualstudio.com/docs/remote/wsl-tutorial) +5. Now you can follow the instructions for Linux. Note that Python will be already installed for you + +In theory these steps should cause no problems but in reality there are a couple of issues you could run into. I will discuss some of them I encountered here: + +* You have to be an administrator of your own device. If you are not an administrator you cannot continue the installation +* In order to install WSL, kernel virtualization needs to be on. You can go into the Windows Task Manager and check whether it is on. If it's not on, you have to turn it on in the BIOS of your motherboard. Check what CPU you have (AMD or Intel) and check what the setting is called. If the setting is not present in the BIOS your CPU might not support virtualization, this means you cannot run WSL +* If you have WSL 1 installed make sure you continue with WSL 2 +* Make sure you don't forget the username and password you chose during the installation +* If you have VSCode open make sure you are connected to WSL, you can check this by looking at the "><" icon in the lower left corner of VSCode +* Remember that if you are connected to WSL with VSCode you are working in Ubuntu. Programs and files are not shared between Windows and Ubuntu, meaning if you have installed a program on Windows, it is not available in Ubuntu and vice versa. +* Remember not to use PowerShell when connected to WSL; use bash +* If you see error messages related to Windows in the terminal (something with cmd.exe for example), you know that Ubuntu is trying to open a program on Windows. This will never work.
This is happening because Windows manipulates the PATH variable on Ubuntu that contains information about where the programs Ubuntu can open are. Example: you want to check which version of node you have `node -v` and you get an error with cmd.exe in the error message. Solutions: uninstall the windows version of the Node.js or manipulate the PATH variable so it does not try to open the Windows version of Nodejs. How to do that is outside the scope of this manual. +* To run port you need version Nodejs version 18 this version is not in the official Ubuntu 22.04 repositories. See for example this [guide](https://www.digitalocean.com/community/tutorials/how-to-install-node-js-on-ubuntu-22-04) on how to get nodejs version 18. If you run into errors you are expected to search for them and to solve them + +#### Don't want to use WSL? + +That's completely fine too, you can change the commands in `package.json` so they work on Windows instead. + +## Installation of the data donation task + +If you have the Pre-requisites installed the installation of the data donation task should be straightforward. + +1. Clone the repository: + +``` +git clone https://github.com/d3i-infra/data-donation-task.git +``` + +2. Install the dependencies by running the following commands: + +``` +cd ./data-donation-task +npm install +``` + +3. 
Start a local web server to serve the data donation app: + +``` +npm run start +``` + +You can now go to the browser: [`http://localhost:3000`](http://localhost:3000) and you should be greeted by a mock data donation task diff --git a/_sources/articles/introduction-to-data-donation.md.txt b/_sources/articles/introduction-to-data-donation.md.txt new file mode 100644 index 00000000..e4b24f63 --- /dev/null +++ b/_sources/articles/introduction-to-data-donation.md.txt @@ -0,0 +1,5 @@ +# Introduction to Data Donation + +We have produced 2 videos detailing the concept of data donation, accessible on our website at [datadonation.eu/data-donation](https://datadonation.eu/data-donation/). + +Additionally, our [website](https://datadonation.eu) provides guidance on how to set up a data donation study; simply refer to the "Prepare a study" tab. diff --git a/_sources/articles/next-in-docker.md.txt b/_sources/articles/next-in-docker.md.txt new file mode 100644 index 00000000..ed0beaff --- /dev/null +++ b/_sources/articles/next-in-docker.md.txt @@ -0,0 +1,198 @@ +# Try out Next with Docker + +This tutorial outlines how you can run Next in a Docker container. + +This is great for trying out the Next platform and will show you the necessary settings so you could use it in production. + +## Prerequisites + +In order for you to try out Next you need to set up some prerequisites. + +### Unsplash + +Configure a developer account at [unsplash](https://unsplash.com/) and get an API key. You can do this for free. + +Unsplash is used as the source for banner images used to customize studies. + + +### Google OIDC + +Configure a Google OIDC application in the [google cloud console](https://console.cloud.google.com/welcome?project=stalwart-yen-241815). For the details check the [official instructions](https://developers.google.com/identity/openid-connect/openid-connect). + +Google OIDC (OpenID Connect) is used to manage user authentication and account sign-ins.
+ + +## Run Next in a Docker container + +In this step, we will create and run the necessary containers using Docker Compose. + +We are going to create a folder with the following structure: + +``` +. +├── docker-compose.yaml +├── proxy +│   ├── certs +│   │   ├── nginx-selfsigned.crt +│   │   └── nginx-selfsigned.key +│   └── conf +│   └── nginx.conf +``` + +In the next step we are going to create the files. + + +### Build the Next Docker image + +Clone or fork [Next](https://github.com/eyra/mono) + +`cd` into `/core` + +and build the image with: + +``` +docker build --build-arg VERSION=1.0.0 --build-arg BUNDLE=self . -t self-d3i:latest +``` + +### Setup certificates for TLS + +Create certificates and put them in `proxy/certs` + +``` +openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout nginx-selfsigned.key -out nginx-selfsigned.crt +``` + +### Nginx configuration + +We are going to use Nginx as [reverse proxy](https://docs.nginx.com/nginx/admin-guide/web-server/reverse-proxy/). + +Nginx will be used to provide TLS for our HTTP connections. 
+ +Paste the following nginx configuration in `proxy/conf`: + +``` +# nginx.conf +events {} +http { + server { + listen 80; + listen [::]:80; + server_name localhost; + + # Redirect all HTTP requests to HTTPS + return 301 https://$server_name$request_uri; + } + + server { + server_name localhost; + + if ($scheme != "https") { + return 301 https://$host$request_uri; + } + + location / { + allow all; + proxy_pass http://app:8000; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-For $remote_addr; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header Host $http_host; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_max_temp_file_size 1m; + } + + listen 443 ssl; + ssl_certificate /etc/nginx/certs/nginx-selfsigned.crt; + ssl_certificate_key /etc/nginx/certs/nginx-selfsigned.key; + } +} +``` + +This Nginx configuration works with websocket connections which Next (Phoenix web application) uses. 
+ + +### Docker compose yaml + +Now create the docker-compose.yaml: + +``` +#docker-compose.yaml +services: + app: + image: self-d3i:latest + container_name: self-d3i + restart: always + environment: + APP_NAME: next + APP_DOMAIN: localhost + APP_MAIL_DOMAIN: "@gmail" + APP_ADMINS: youremail@gmail.com + DB_USER: user + DB_PASS: password + DB_HOST: db + DB_NAME: test_database + SECRET_KEY_BASE: "aUMZobj7oJn58XIlMGVcwTYrCsAllwDCGlwDCGlwDCGwDChdhsjahdghaggdgdGt7MoQYJtJbA=" + STATIC_PATH: "/tmp" + UNSPLASH_ACCESS_KEY: "" + UNSPLASH_APP_NAME: "" + GOOGLE_SIGN_IN_CLIENT_ID: "" + GOOGLE_SIGN_IN_CLIENT_SECRET: "" + STORAGE_SERVICES: "builtin, yoda, azure" + volumes: + - app_data:/tmp + depends_on: + - db + + db: + image: postgres:latest + container_name: db-next + restart: always + environment: + POSTGRES_USER: user + POSTGRES_PASSWORD: password + POSTGRES_DB: test_database + volumes: + - postgres_data:/var/lib/postgresql/data + + proxy: + image: nginx:latest + container_name: nginx + ports: + - 443:443 + volumes: + - ./proxy/conf/nginx.conf:/etc/nginx/nginx.conf + - ./proxy/certs:/etc/nginx/certs + depends_on: + - app + +volumes: + postgres_data: + app_data: +``` + +and replace the following variables with the values you obtained in the previous steps: + +``` +UNSPLASH_ACCESS_KEY: "" +UNSPLASH_APP_NAME: "" +GOOGLE_SIGN_IN_CLIENT_ID: "" +GOOGLE_SIGN_IN_CLIENT_SECRET: "" +``` + +If you want to learn more about the variables you can read the [documentation](https://github.com/eyra/mono/blob/master/SELFHOSTING.md). + +Now you are ready to start the containers with: + +``` +docker compose up +``` + +Go to `https://localhost:80` and if everything went well you should see Next. + +Note: because you self-signed your TLS certificates your browser will complain: accept all the risks and continue. + +## Next steps in Next + +Now you can play around in Next. If you want to login as admin go to `/admin/login`. 
diff --git a/_sources/articles/visualizations.md.txt b/_sources/articles/visualizations.md.txt new file mode 100644 index 00000000..e6012616 --- /dev/null +++ b/_sources/articles/visualizations.md.txt @@ -0,0 +1,195 @@ +# Adding data visualizations + +You can add data visualizations to the consent form page, that will be shown below a data table. These visualizations will dynamically aggregate and visualize the data, responding to search queries and deleted items. + +Good visualizations can help participants to see and explore what data they are about to donate, and thereby support informed consent. Furthermore, it can make the data donation process more educational and enjoyable. + +## Adding visualizations to tables + +Visualizations are always directly connected to a **consent form table**. When in script.py you create a consent form table, you can implement visualizations as follows: + +```python +table_title = props.Translatable({ + "en": "Table title", + "nl": "Tabel titel" +}) + +table = props.PropsUIPromptConsentFormTable( + id = "tableId", + title = table_title, + data_frame = df, + visualizations = []) +``` + +You can now add one or multiple **visualization specifications** to the `visualizations` list. + +## Visualization Specification + +A visualization specification provides instructions for creating a visualization based on the data in the table. This visualization will then be created dynamically, so that when the table is updated (e.g., when participants search the data or remove rows) the visualization is updated as well. + +A specification covers three main components: + +- **Aggregation**: How should the table data be aggregated. e.g., count the number of rows per day +- **Display**: How should the aggregated data be displayed? 
e.g., line chart, bar chart, wordcloud +- **Labels**: Any labels to help along interpretation, optionally with translations (as seen above in the table_title) + +A detailed explanation of the visualization specification is shown below in the **Specification Guide**. But we recommend first having a look at the following examples. + +## Examples + +Say we have data about every time a participant viewed a certain channel, and we also know the channel category (e.g., sports, entertainment) and the exact timestamp. We have put this in a `data_frame` with the columns: **channel**, **category** and **timestamp**. We can then make a number of different visualizations. + +### Categorical variables | Bar chart of views per category + +```python +vis1 = dict( + title = dict(en= "views per category", ...), + type = "bar", + group = dict(column = "category", label = "Category"), + values = [dict(aggregate = "count", label = dict(en = "number of views", ...))] +) +``` + +The **type** determines the chart type, and can in this case be "bar", "line" or "area". The **group** determines how the data should be grouped and aggregated, which in this case is per category. The **values** determines the values to calculate per group, which here is just the count of the rows. + +**!!!** Notice that `values` is a list, and not a single dictionary. Adding multiple value dictionaries will create multiple y-values, for grouped bar charts or multiple lines or areas. + +The **label**s can be either a single _string_ (as in the `group`) or a dictionary with different languages, where keys are country codes, and values are labels (as in the `values`). + +### Date variables | Area chart of views per month + +```python +vis2 = dict( + title = dict(en= "views over time", ...), + type = "area", + group = dict(column = "timestamp", dateFormat = "month", label = "Month"), + values = [dict(aggregate = "count", label = dict(en = "number of views", ...))] +) +``` + +In this area chart (i.e.
a line chart where the area below the line is coloured) we group the data by month, and use the same aggregation values as in the previous example to count the number of views per group. + +The **dateFormat** grouping variable can be set if the column is a date string in ISO format: `YYYY-MM-DD` for date or `YYYY-MM-DD HH:MM:SS` for datetime (You can also use `YYYY-MM-DDTHH:MM:SS`, but that doesn't look nice in the table). + +The following formats are supported: + +- **Fixed interval**: "year", "quarter", "month", "day", "hour" +- **Automatic interval**: "auto" will pick an interval based on the min/max date. Pick this if the min/max date can vary heavily between participants. This also avoids slowing down the application by accidentally generating a huge graph (e.g., a one year period with "hour" interval) +- **cycles / season**: "month_cycle" (January - December), "weekday_cycle" (Monday - Sunday) and "hour_cycle" (1 - 24). + +### Second-level aggregation | Line chart of views over time per category + +Above we mentioned that you can add multiple values to create multiple y-values. But this only works if your data is _wide_. Alternatively, you can also perform a second-level aggregation on _long_ data. + +```python +vis3 = dict( + title = dict(en= "views per category over time", ...), + type = "line", + group = dict(column = "timestamp", dateFormat = "auto", label = "Month"), + values = [dict( + aggregate = "count", + label = dict(en = "number of views", ...), + group_by = "category" + )] +) +``` + +Here we changed three things. First, we changed the type to "line", because that's a bit easier on the eye with multiple y-values. Second, we added `group_by` to the aggregation value, setting it to "category". This will break the values data into groups for categories, and calculate the aggregation statistic per category. This will be visualized as a line chart where the frequency of each category (e.g., sport, entertainment) will be displayed on separate lines.
+ +A third change is that we set the dateFormat to "auto" instead of fixing it to "month". This will automatically pick a suitable time interval based on the range of the column (last date - first date). This could mean that different participants see different intervals, depending on what works best for their own data. + +### Text variables | A wordcloud + +As a final example, we'll look at a different sub-specification for visualizing textual data. We'll make a wordcloud of channels, based on their frequency in the data. + +```python +vis4 = dict( + title = dict(en= "Most viewed channels", ...), + type = "wordcloud", + textColumn = 'channel', + tokenize = False, +) +``` + +This creates a wordcloud of the full channel names. Note that we could also have tokenized the texts, but for channels (e.g., YouTube channels) the full names are probably most informative. + +## Example wrap-up + +Now that we have created visualizations, we can add them to the consent form table. Note that above we assigned our specifications to **vis1** to **vis4**. We can now simply add them to the visualizations list. + +```python +table = props.PropsUIPromptConsentFormTable( + id = "tableId", + title = table_title, + data_frame = df, + visualizations = [vis1, vis2, vis3, vis4]) +``` + +## Specification guide + +This is an overview of the visualization specification. First, there are some **general visualization arguments** that every visualization has. Second, there are specific arguments depending on the visualization **type**. + +### General visualization arguments + +Every visualization has the following arguments + +- **title**: A title for the visualization. This has to be a translation dictionary (see **translation** spec below) +- **type**: The type of the visualization.
The type determines what specification you need to follow + - **Chart visualization**: "line", "bar" or "area" + - **Text visualization**: "wordcloud" +- **height (optional)**: The height of the chart in pixels + +### Chart visualization arguments + +Chart visualizations work by aggregating the data into X, Y and optionally Z axes. It's the basis for most common charts. + +- **type**: "line", "bar" or "area" +- **group**: specifies the column to group and aggregate the data by. The group is visualized on the x-axis. + - **label**: x-axis label. Either a string or translation dictionary (see **translation** spec below) + - **column**: the name of the column + - **dateFormat (optional)**: if column is a date, select how it should be grouped. (see **dateFormat** spec below) + - **levels (optional)**. A list of strings with the specific column values to use. This also makes sure these values are displayed if they are missing in a participant's data (also see **values** -> **addZeroes**) +- **values**: A list (**!!**) of objects. Each object specifies an (aggregate) value to calculate per group. A value is visualized on the y-axis. Multiple values can be given for multiple y-values + - **label**: y-axis label. Either a string or translation dictionary (see **translation** spec below) + - **column (optional)**: the column based on which the value is calculated. Can be empty if just counting rows. + - **aggregate**: The aggregation function. (see **aggregate** spec below) + - **addZeroes**: Boolean. If true, add zeroes for empty groups. If **levels** are specified, participants will explicitly see that they occurred zero times in their data. If **dateFormat** is used, this fills possible gaps (note that this mostly makes sense for row "count" aggregations where absence implies zero) + - **group_by (optional)**: the name of a column to do a second-level aggregation. This will create multiple y-values where the value in the column becomes the label.
+ +### Text visualization arguments + +Text visualizations take a text column as input. + +- **type**: "wordcloud" +- **textColumn**: A text (string) column in the data +- **tokenize (optional)**: Boolean. If true, the text will be tokenized +- **valueColumn (optional)**: By default, every text or token will be given a value based on the number of rows in which it occurs. Alternatively, you can specify a numeric column, in which case (the sum of) the values in this column will be used. +- **extract (optional)**: Normally, all preprocessing of the data should be handled in the import scripts, but for convenience we will provide some common methods for extracting parts of a string. Currently supports: + - "url_domain": If the column contains URLs, extract only the domain. + +### Spec details + +Here are some details for the more complicated spec components. + +#### - translation + +A translation dictionary has country codes as keys and the translations as values: `dict(en = "english label", nl = "dutch label")`. (This is identical to the dictionary used in the `props.Translatable`) + +#### - dateFormat + +If column is a date (`YYYY-MM-DD`, `YYYY-MM-DD HH:MM` or `YYYY-MM-DD HH:MM:SS`), select how the date is grouped. options are: + +- **Fixed interval**: "year", "quarter", "month", "day", "hour" +- **Automatic interval**: "auto" will pick an interval based on the min/max date. Pick this if the min/max date can vary heavily between participants. This also avoids slowing down the application by accidentally generating a huge graph (e.g., a one year period with "hour" interval) +- **cycles / season**: "month_cycle" (January - December), "weekday_cycle" (Monday - Sunday) and "hour_cycle" (1 - 24). + +#### - aggregate + +The function by which to aggregate the column in `values`. The following functions are currently supported + +- "count" just counts the rows +- "mean" and "sum" require the value column to be numeric. 
+- "count_pct" gives the count as a percentage of the total number of rows.\* +- "pct" sums the values of a numeric column and divides by the total sum.\* + +**\*** _If a secondary aggregation is used, percentages are calculated within the primary aggregation group_ diff --git a/_sources/index.rst.txt b/_sources/index.rst.txt new file mode 100644 index 00000000..bb6395e0 --- /dev/null +++ b/_sources/index.rst.txt @@ -0,0 +1,43 @@ +The Data Donation Task Documentation +==================================== + +Welcome to the Data Donation Task Documentation! + +This is the place to learn about the data donation task. + +Getting started +--------------- + +Checkout the following wiki articles to get started: + +.. toctree:: + :maxdepth: 2 + + articles/index + +API Reference +----------------- + +You can find the API documentation here: + +.. toctree:: + :maxdepth: 3 + + api/index + +Standard scripts +---------------- + +We provide standard extraction scripts for a various platforms which you can find here: + +.. toctree:: + :maxdepth: 1 + + standard_scripts/index + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/_sources/standard_scripts/index.rst.txt b/_sources/standard_scripts/index.rst.txt new file mode 100644 index 00000000..f38e98ce --- /dev/null +++ b/_sources/standard_scripts/index.rst.txt @@ -0,0 +1,32 @@ +Platform Documentation +============================= + +For various platforms we provide default extraction scripts, so you do not have to invent the wheel. + +Freel free to use the extraction scripts as you see fit. + +In order to use the scripts open the file `src/framework/processing/py/port/main.py` and change this line: + +.. code-block:: python + + from port.script import process + +to: + +.. 
code-block:: python + + #from port.script import process + + # Change to (in this case the standard script for instagram will be used): + from port.platforms.instagram import process + +Available platforms +------------------- + +.. automodule:: port.platforms.chatgpt + + +Instagram +--------- + +.. automodule:: port.platforms.instagram diff --git a/_static/basic.css b/_static/basic.css new file mode 100644 index 00000000..7ebbd6d0 --- /dev/null +++ b/_static/basic.css @@ -0,0 +1,914 @@ +/* + * Sphinx stylesheet -- basic theme. + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar 
#searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin-top: 10px; +} + +ul.search li { + padding: 5px 0; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + 
max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, 
+aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures 
--------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + 
+ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + 
background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: 
Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/basic_mod.css b/_static/basic_mod.css new file mode 100644 index 00000000..0df77588 --- /dev/null +++ b/_static/basic_mod.css @@ -0,0 +1,1194 @@ +@font-face { + font-family: Roboto; + font-style: normal; + font-weight: 400; + src: local("Roboto"), local("Roboto-Regular"), url(fonts/roboto/roboto.woff2) format("woff2"); +} +@font-face { + font-family: Roboto; + font-style: italic; + font-weight: 400; + src: local("Roboto Italic"), local("Roboto-Italic"), url(fonts/roboto/roboto-italic.woff2) format("woff2"); +} +@font-face { + font-family: 
Roboto; + font-style: normal; + font-weight: 700; + src: local("Roboto Bold"), local("Roboto-Bold"), url(fonts/roboto/roboto-bold.woff2) format("woff2"); +} +@font-face { + font-family: Roboto Mono; + font-style: normal; + font-weight: 400; + src: local("Roboto Mono Regular"), local("RobotoMono-Regular"), url(fonts/roboto-mono/roboto-mono.woff2) format("woff2"); +} +@font-face { + font-family: Roboto Mono; + font-style: italic; + font-weight: 400; + src: local("Roboto Mono Italic"), local("RobotoMono-Italic"), url(fonts/roboto-mono/roboto-mono-italic.woff2) format("woff2"); +} +@font-face { + font-family: Roboto Mono; + font-style: normal; + font-weight: 700; + src: local("Roboto Mono Bold"), local("RobotoMono-Bold"), url(fonts/roboto-mono/roboto-mono-bold.woff2) format("woff2"); +} +@font-face { + font-family: Roboto Mono; + font-style: italic; + font-weight: 700; + src: local("Roboto Mono Bold Italic"), local("RobotoMono-BoldItalic"), url(fonts/roboto-mono/roboto-mono-bold-italic.woff2) format("woff2"); +} +/*****************************************************************************/ +/* Typography */ +:root { + --codeBackgroundColor: #f8f8f8; + --inlineCodeBackgroundColor: #f8f8f8; + --codeBlue: #0000ff; + --codeGreen: #008000; + --dividerColor: rgba(0, 0, 0, 0.08); + --faintFontColor: rgba(0, 0, 0, 0.6); + --fontColor: #252630; + --linkColor: #2980b9; + --mainBackgroundColor: white; + --mainNavColor: #3889ce; + --notificationBannerColor: #176bb0; + --searchHighlightColor: #fff150; + --sidebarColor: white; + --navbarHeight: 4rem; +} +:root[data-mode=darkest] { + --mainBackgroundColor: black; + --sidebarColor: black; + --codeBackgroundColor: rgba(255, 255, 255, 0.1); + --inlineCodeBackgroundColor: rgba(255, 255, 255, 0.1); +} +:root[data-mode=dark] { + --mainBackgroundColor: #242429; + --sidebarColor: #242429; + --codeBackgroundColor: rgba(0, 0, 0, 0.1); + --inlineCodeBackgroundColor: rgba(255, 255, 255, 0.06); +} +:root[data-mode=dark], 
:root[data-mode=darkest] { + --codeBlue: #77baff; + --codeGreen: #38c038; + --dividerColor: rgba(255, 255, 255, 0.1); + --faintFontColor: rgba(255, 255, 255, 0.6); + --fontColor: white; + --linkColor: #319be0; + --searchHighlightColor: #fe8e04; +} + +body { + font-family: Roboto, "OpenSans", sans-serif; + background-color: var(--mainBackgroundColor); + color: var(--fontColor); +} + +h1 { + font-size: 2rem; +} + +h2 { + font-size: 1.5rem; +} + +h3 { + font-size: 1.17rem; +} + +a { + color: var(--linkColor); + text-decoration: none; +} + +/*****************************************************************************/ +html { + height: 100%; + scroll-padding-top: var(--navbarHeight); +} + +html, +body { + padding: 0; + margin: 0; + min-height: 100%; +} + +body { + display: flex; + flex-direction: column; +} + +/*****************************************************************************/ +/* Top nav */ +#searchbox h3#searchlabel { + display: none; +} +#searchbox form.search { + display: flex; + flex-direction: row; +} +#searchbox form.search input { + display: block; + box-sizing: border-box; + padding: 0.3rem; + color: rgba(0, 0, 0, 0.7); + border-radius: 0.2rem; +} +#searchbox form.search input[type=text] { + border: none; + background-color: rgba(255, 255, 255, 0.6); + flex-grow: 1; + margin-right: 0.2rem; +} +#searchbox form.search input[type=text]::placeholder { + color: rgba(0, 0, 0, 0.6); +} +#searchbox form.search input[type=submit] { + cursor: pointer; + color: var(--mainNavColor); + flex-grow: 0; + border: none; + background-color: white; +} + +div#top_nav { + position: fixed; + top: 0; + left: 0; + right: 0; + color: white; + z-index: 100; +} +div#top_nav div#notification_banner { + background-color: var(--notificationBannerColor); + box-sizing: border-box; + padding: 0.1rem 1rem; + display: flex; + flex-direction: row; + align-items: center; + justify-content: right; +} +div#top_nav div#notification_banner a.close { + flex-grow: 0; + flex-shrink: 0; + 
color: rgba(255, 255, 255, 0.85); + text-align: right; + font-size: 0.6rem; + text-transform: uppercase; + display: block; + text-decoration: none; + margin-left: 0.5rem; +} +div#top_nav div#notification_banner a.close:hover { + color: white; +} +div#top_nav div#notification_banner p { + flex-grow: 1; + margin: 0; + text-align: center; + font-size: 0.9rem; + line-height: 1.2; + padding: 0.4rem 0; +} +div#top_nav div#notification_banner p a { + color: white; + text-decoration: underline; +} +div#top_nav nav { + background-color: var(--mainNavColor); + box-sizing: border-box; + padding: 1rem; + display: flex; + flex-direction: row; + align-items: center; +} +div#top_nav nav h1 { + flex-grow: 1; + font-size: 1.2rem; + margin: 0; + padding: 0 0 0 0.8rem; + line-height: 1; +} +div#top_nav nav h1 a { + color: white; +} +div#top_nav nav h1 img { + height: 1.3rem; + width: auto; +} +div#top_nav nav p#toggle_sidebar { + transform: rotate(90deg); + letter-spacing: 0.1rem; + flex-grow: 0; + margin: 0; + padding: 0; +} +div#top_nav nav p#toggle_sidebar a { + color: white; + font-weight: bold; +} +div#top_nav nav a#mode_toggle, div#top_nav nav a#source_link { + margin-right: 1rem; + display: block; + flex-grow: 0; +} +div#top_nav nav a#mode_toggle svg, div#top_nav nav a#source_link svg { + height: 1.3rem; + width: 1.3rem; + vertical-align: middle; +} +div#top_nav nav p.mobile_search_link { + margin: 0; +} +@media (min-width: 50rem) { + div#top_nav nav p.mobile_search_link { + display: none; + } +} +div#top_nav nav p.mobile_search_link a { + color: white; +} +div#top_nav nav p.mobile_search_link a svg { + height: 1rem; + vertical-align: middle; +} +@media (max-width: 50rem) { + div#top_nav nav div.searchbox_wrapper { + display: none; + } +} +div#top_nav nav div.searchbox_wrapper #searchbox { + align-items: center; + display: flex !important; + flex-direction: row-reverse; +} +div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link { + margin: 0 0.5rem 0 0; +} 
+div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link a { + color: rgba(255, 255, 255, 0.8); + font-size: 0.8em; + padding-right: 0.5rem; + text-decoration: underline; +} +div#top_nav nav div.searchbox_wrapper #searchbox p.highlight-link a:hover { + color: white; +} + +/*****************************************************************************/ +/* Main content */ +div.document { + flex-grow: 1; + margin-top: 2rem; + margin-bottom: 5rem; + margin-left: 15rem; + margin-right: 15rem; + padding-top: var(--navbarHeight); + /***************************************************************************/ + /***************************************************************************/ +} +@media (max-width: 50rem) { + div.document { + margin-left: 0px; + margin-right: 0px; + } +} +div.document section, +div.document div.section { + margin: 4rem 0; +} +div.document section:first-child, +div.document div.section:first-child { + margin-top: 0; +} +div.document section > section, +div.document div.section > div.section { + margin: 4rem 0; +} +div.document section > section > section, +div.document div.section > div.section > div.section { + margin: 2rem 0 0 0; +} +div.document section > section > section > section, +div.document div.section > div.section > div.section > div.section { + margin: 1.5rem 0 0 0; +} +div.document h1 + section, +div.document h1 + div.section { + margin-top: 2.5rem !important; +} +div.document h2 + section, +div.document h2 + div.section { + margin-top: 1.5rem !important; +} +div.document img { + max-width: 100%; +} +div.document code { + padding: 2px 4px; + background-color: var(--inlineCodeBackgroundColor); + border-radius: 0.2rem; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9em; +} +div.document div.documentwrapper { + max-width: 45rem; + margin: 0 auto; + flex-grow: 1; + box-sizing: border-box; + padding: 1rem; +} +div.document div.highlight { + color: #252630; + box-sizing: border-box; + 
padding: 0.2rem 1rem; + margin: 0.5rem 0; + border-radius: 0.2rem; + font-size: 0.9rem; +} +div.document div.highlight pre { + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; +} +div.document div[class*=highlight] { + overflow-x: auto; +} +div.document a.headerlink { + font-size: 0.6em; + display: none; + padding-left: 0.5rem; + vertical-align: middle; +} +div.document h1, +div.document h2, +div.document h3, +div.document h4, +div.document h5, +div.document h6, +div.document str, +div.document b { + font-weight: 700; +} +div.document h1 { + margin: 0.8rem 0 0.5rem 0; +} +div.document h2 { + margin: 0.8rem 0 0.5rem 0; +} +div.document h3, div.document h4 { + margin: 1rem 0 0.5rem 0; +} +div.document h1:hover a.headerlink, +div.document h2:hover a.headerlink, +div.document h3:hover a.headerlink, +div.document h4:hover a.headerlink { + display: inline-block; +} +div.document p, +div.document li { + font-size: 1rem; + line-height: 1.5; +} +div.document li p { + margin: 0 0 0.5rem 0; +} +div.document ul, div.document ol { + padding-left: 2rem; +} +div.document ol.loweralpha { + list-style: lower-alpha; +} +div.document ol.arabic { + list-style: decimal; +} +div.document ol.lowerroman { + list-style: lower-roman; +} +div.document ol.upperalpha { + list-style: upper-alpha; +} +div.document ol.upperroman { + list-style: upper-roman; +} +div.document dd { + margin-left: 1.5rem; +} +div.document hr { + border: none; + height: 1px; + background-color: var(--dividerColor); + margin: 2rem 0; +} +div.document table.docutils { + border-collapse: collapse; +} +div.document table.docutils th, div.document table.docutils td { + border: 1px solid var(--dividerColor); + box-sizing: border-box; + padding: 0.5rem 1rem; +} +div.document table.docutils th p, div.document table.docutils th ul, div.document table.docutils td p, div.document table.docutils td ul { + margin: 0.3rem 0; +} +div.document table.docutils th ul, div.document table.docutils td ul { + 
padding-left: 1rem; +} +div.document form input { + padding: 0.5rem; +} +div.document form input[type=submit], div.document form button { + border: none; + background-color: var(--mainNavColor); + color: white; + padding: 0.5rem 1rem; + border-radius: 0.2rem; +} +div.document span.highlighted { + background-color: var(--searchHighlightColor); + padding: 0 0.1em; +} +div.document div#search-results { + padding-top: 2rem; +} +div.document div#search-results p.search-summary { + font-size: 0.8em; +} +div.document div#search-results ul.search { + list-style: none; + padding-left: 0; +} +div.document div#search-results ul.search li { + border-bottom: 1px solid var(--dividerColor); + margin: 0; + padding: 2rem 0; +} +div.document div#search-results ul.search li > a:first-child { + font-size: 1.2rem; +} +div.document dd ul, div.document dd ol { + padding-left: 1rem; +} +div.document dl.py { + margin-bottom: 2rem; +} +div.document dl.py dt.sig { + background-color: var(--codeBackgroundColor); + color: var(--fontColor); + box-sizing: border-box; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9rem; + padding: 1rem; + border-left: 5px solid rgba(0, 0, 0, 0.1); + border-radius: 0.2rem; +} +div.document dl.py em.property { + color: var(--sidebarColor); + font-weight: bold; +} +div.document dl.py span.sig-name { + color: var(--codeBlue); + font-weight: bold; +} +div.document dl.py em.property { + color: var(--codeGreen); +} +div.document dl.py em.sig-param { + margin-left: 2rem; +} +div.document dl.py em.sig-param span.default_value { + color: var(--codeGreen); +} +div.document dl.py span.sig-return span.sig-return-typehint { + color: var(--fontColor); +} +div.document dl.py span.sig-return span.sig-return-typehint pre { + color: var(--fontColor); +} +div.document dl.py em.sig-param > span:first-child { + font-weight: bold; +} +div.document dl.cpp, div.document dl.c { + margin-bottom: 1rem; +} +div.document dl.cpp dt.sig, div.document dl.c 
dt.sig { + background-color: var(--codeBackgroundColor); + color: var(--fontColor); + box-sizing: border-box; + font-family: "Roboto Mono", monospace, Monaco, Consolas, Andale Mono; + font-size: 0.9rem; + padding: 1rem; + border-left: 5px solid rgba(0, 0, 0, 0.1); + border-radius: 0.2rem; + line-height: 1.4; +} +div.document dl.cpp span.sig-name, div.document dl.c span.sig-name { + color: var(--codeBlue); + font-weight: bold; +} +div.document dl.cpp span.sig-indent, div.document dl.c span.sig-indent { + margin-left: 2rem; +} +div.document dl.cpp span.target + span, div.document dl.c span.target + span { + color: var(--codeGreen); +} +div.document dl.cpp span.sig-param > span:first-child, div.document dl.c span.sig-param > span:first-child { + font-weight: bold; +} +div.document div.admonition { + box-shadow: 0px 0px 0px 1px var(--dividerColor); + border-radius: 0.2rem; + margin: 1rem 0; + overflow: hidden; +} +div.document div.admonition p { + box-sizing: border-box; + font-size: 0.9rem; + padding: 0.5rem; + margin: 0; +} +div.document div.admonition p:first-child { + padding-bottom: 0; + margin-bottom: 0; +} +div.document div.admonition p + p { + padding-top: 0.2rem; +} +div.document div.admonition p.admonition-title { + font-weight: bolder; + letter-spacing: 0.01rem; +} +div.document div.admonition.hint, div.document div.admonition.important, div.document div.admonition.tip { + border-left: 5px solid #56b79c; +} +div.document div.admonition.hint p.admonition-title, div.document div.admonition.important p.admonition-title, div.document div.admonition.tip p.admonition-title { + color: #56b79c; +} +div.document div.admonition.note { + border-left: 5px solid #587f9f; +} +div.document div.admonition.note p.admonition-title { + color: #587f9f; +} +div.document div.admonition.danger, div.document div.admonition.error { + border-left: 5px solid #e6a39a; +} +div.document div.admonition.danger p.admonition-title, div.document div.admonition.error p.admonition-title { + 
color: #e6a39a; +} +div.document div.admonition.attention, div.document div.admonition.caution, div.document div.admonition.warning { + border-left: 5px solid #e7b486; +} +div.document div.admonition.attention p.admonition-title, div.document div.admonition.caution p.admonition-title, div.document div.admonition.warning p.admonition-title { + color: #e7b486; +} + +/*****************************************************************************/ +/* Sidebar */ +div.sphinxsidebar { + background-color: var(--sidebarColor); + border-right: 1px solid var(--dividerColor); + position: fixed; + left: 0; + top: 0; + bottom: 0; + width: 15rem; + box-sizing: border-box; + padding: var(--navbarHeight) 1rem 1rem; + z-index: 50; +} +@media (max-width: 50rem) { + div.sphinxsidebar { + display: none; + } +} +div.sphinxsidebar div.sphinxsidebarwrapper { + height: 100%; + overflow-y: auto; +} +div.sphinxsidebar ul { + padding-left: 0rem; + list-style: none; +} +div.sphinxsidebar ul li { + font-size: 0.9rem; + line-height: 1.2; +} +div.sphinxsidebar ul li a { + display: block; + box-sizing: border-box; + padding: 0 0.2rem 0.6rem; + color: var(--fontColor); + text-decoration: none; +} +div.sphinxsidebar ul li a.current { + color: var(--linkColor); +} +div.sphinxsidebar ul li a:hover { + color: var(--linkColor); +} +div.sphinxsidebar ul li > ul { + padding-left: 1rem; +} +div.sphinxsidebar p { + color: var(--faintFontColor); +} + +/*****************************************************************************/ +/* The right sidebar, showing the table of contents for the current page. 
*/ +div#show_right_sidebar { + position: fixed; + right: 0; + top: 0; + z-index: 20; + background-color: var(--sidebarColor); + border-left: 1px solid var(--dividerColor); + border-bottom: 1px solid var(--dividerColor); + padding: var(--navbarHeight) 1rem 0rem; +} +div#show_right_sidebar p { + font-size: 0.9em; +} +div#show_right_sidebar p span { + color: var(--faintFontColor); + vertical-align: middle; +} +div#show_right_sidebar p span.icon { + color: var(--linkColor); + font-size: 0.9em; + padding-right: 0.2rem; +} + +div#right_sidebar { + position: fixed; + right: 0; + top: 0; + z-index: 50; + background-color: var(--sidebarColor); + width: 15rem; + border-left: 1px solid var(--dividerColor); + box-sizing: border-box; + padding: var(--navbarHeight) 1rem 1rem; + height: 100%; + overflow-y: auto; +} +div#right_sidebar p span { + color: var(--faintFontColor); + vertical-align: middle; +} +div#right_sidebar p span.icon { + color: var(--linkColor); + font-size: 0.9em; + padding-right: 0.2rem; +} +div#right_sidebar ul { + padding-left: 0rem; + list-style: none; +} +div#right_sidebar ul li { + font-size: 0.9rem; + line-height: 1.2; +} +div#right_sidebar ul li a { + display: block; + box-sizing: border-box; + padding: 0 0.2rem 0.6rem; + color: var(--fontColor); + text-decoration: none; +} +div#right_sidebar ul li a.current { + color: var(--linkColor); +} +div#right_sidebar ul li a:hover { + color: var(--linkColor); +} +div#right_sidebar ul li > ul { + padding-left: 1rem; +} +div#right_sidebar p { + color: var(--faintFontColor); +} +@media (max-width: 50rem) { + div#right_sidebar { + display: none; + } +} + +/*****************************************************************************/ +/* Footer */ +div.footer { + box-sizing: border-box; + padding-top: 2rem; + font-size: 0.7rem; + text-align: center; + text-transform: uppercase; + color: var(--faintFontColor); +} + +p#theme_credit { + font-size: 0.6rem; + text-transform: uppercase; + text-align: center; + color: 
var(--faintFontColor); +} + +/*****************************************************************************/ +/* Buttons */ +div.button_nav_wrapper { + margin-left: 15rem; + margin-right: 15rem; +} +@media (max-width: 50rem) { + div.button_nav_wrapper { + margin-left: 0px; + margin-right: 0px; + } +} +div.button_nav_wrapper div.button_nav { + max-width: 45rem; + margin: 0 auto; + display: flex; + flex-direction: row; + width: 100%; +} +div.button_nav_wrapper div.button_nav div { + box-sizing: border-box; + padding: 1rem; + flex: 50%; +} +div.button_nav_wrapper div.button_nav div a { + display: block; +} +div.button_nav_wrapper div.button_nav div a span { + vertical-align: middle; +} +div.button_nav_wrapper div.button_nav div a span.icon { + font-weight: bold; + font-size: 0.8em; +} +div.button_nav_wrapper div.button_nav div.left a { + text-align: left; +} +div.button_nav_wrapper div.button_nav div.left a span.icon { + padding-right: 0.4rem; +} +div.button_nav_wrapper div.button_nav div.right a { + text-align: right; +} +div.button_nav_wrapper div.button_nav div.right a span.icon { + padding-left: 0.4rem; +} + +/*****************************************************************************/ +/* Pygments overrides in dark mode */ +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight { + --black: #000000; + --red: #ff9393; + --darkBlue: #6b83fe; + --grey: #a8a8a8; + --pink: #ff99d8; + --torquoise: #68e9e9; + --brown: #d48a00; + --purple: #ce04e9; + --paleYellow: #454534; + background: var(--codeBackgroundColor); + color: var(--fontColor); + /* Comment */ + /* Error */ + /* Keyword */ + /* Operator */ + /* Comment.Hashbang */ + /* Comment.Multiline */ + /* Comment.Preproc */ + /* Comment.PreprocFile */ + /* Comment.Single */ + /* Comment.Special */ + /* Generic.Deleted */ + /* Generic.Emph */ + /* Generic.Error */ + /* Generic.Heading */ + /* Generic.Inserted */ + /* 
Generic.Output */ + /* Generic.Prompt */ + /* Generic.Strong */ + /* Generic.Subheading */ + /* Generic.Traceback */ + /* Keyword.Constant */ + /* Keyword.Declaration */ + /* Keyword.Namespace */ + /* Keyword.Pseudo */ + /* Keyword.Reserved */ + /* Keyword.Type */ + /* Literal.Number */ + /* Literal.String */ + /* Name.Attribute */ + /* Name.Builtin */ + /* Name.Class */ + /* Name.Constant */ + /* Name.Decorator */ + /* Name.Entity */ + /* Name.Exception */ + /* Name.Function */ + /* Name.Label */ + /* Name.Namespace */ + /* Name.Tag */ + /* Name.Variable */ + /* Operator.Word */ + /* Text.Whitespace */ + /* Literal.Number.Bin */ + /* Literal.Number.Float */ + /* Literal.Number.Hex */ + /* Literal.Number.Integer */ + /* Literal.Number.Oct */ + /* Literal.String.Affix */ + /* Literal.String.Backtick */ + /* Literal.String.Char */ + /* Literal.String.Delimiter */ + /* Literal.String.Doc */ + /* Literal.String.Double */ + /* Literal.String.Escape */ + /* Literal.String.Heredoc */ + /* Literal.String.Interpol */ + /* Literal.String.Other */ + /* Literal.String.Regex */ + /* Literal.String.Single */ + /* Literal.String.Symbol */ + /* Name.Builtin.Pseudo */ + /* Name.Function.Magic */ + /* Name.Variable.Class */ + /* Name.Variable.Global */ + /* Name.Variable.Instance */ + /* Name.Variable.Magic */ + /* Literal.Number.Integer.Long */ +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight pre, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight pre { + line-height: 125%; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight td.linenos .normal, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight td.linenos .normal { + color: inherit; + background-color: transparent; + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight span.linenos, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight span.linenos { + 
color: inherit; + background-color: transparent; + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight td.linenos .special, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight td.linenos .special { + color: var(--black); + background-color: var(--paleYellow); + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight span.linenos.special, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight span.linenos.special { + color: var(--black); + background-color: var(--paleYellow); + padding-left: 5px; + padding-right: 5px; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .hll, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .hll { + background-color: var(--paleYellow); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .c, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .c { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .err, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .err { + border: 1px solid var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .k, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .k { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .o, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .o { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ch, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ch { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cm, +:root[data-mode=darkest] 
body[data-dark_mode_code_blocks=true] .highlight .cm { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cp { + color: var(--brown); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cpf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cpf { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .c1, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .c1 { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .cs, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .cs { + color: var(--torquoise); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gd { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ge, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ge { + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gr { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gh { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gi { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .go, +:root[data-mode=darkest] 
body[data-dark_mode_code_blocks=true] .highlight .go { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gp { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gs, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gs { + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gu, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gu { + color: var(--purple); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .gt, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .gt { + color: var(--codeBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kc { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kd { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kn, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kn { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kp { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .kr { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .kt, +:root[data-mode=darkest] 
body[data-dark_mode_code_blocks=true] .highlight .kt { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .m, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .m { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .s { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .na, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .na { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nb { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nc { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .no, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .no { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nd { + color: var(--purple); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ni, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ni { + color: var(--grey); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ne, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ne { + color: var(--red); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nf { + color: var(--codeBlue); +} 
+:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nl, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nl { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nn, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nn { + color: var(--codeBlue); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nt, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nt { + color: var(--codeGreen); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .nv, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .nv { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ow, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ow { + color: var(--pink); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .w, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .w { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mb { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mf, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mf { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mh { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mi { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .mo, 
+:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .mo { + color: var(--grey); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sa, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sa { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sb, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sb { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sc { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .dl, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .dl { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sd, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sd { + color: var(--red); + font-style: italic; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s2, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .s2 { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .se, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .se { + color: var(--brown); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sh, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sh { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .si, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .si { + color: var(--pink); + font-weight: bold; +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sx, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sx { + color: var(--codeGreen); +} 
+:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .sr, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .sr { + color: var(--pink); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .s1, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .s1 { + color: var(--red); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .ss, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .ss { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .bp, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .bp { + color: var(--codeGreen); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .fm, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .fm { + color: var(--codeBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vc, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vc { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vg, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vg { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vi, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vi { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .vm, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .vm { + color: var(--darkBlue); +} +:root[data-mode=dark] body[data-dark_mode_code_blocks=true] .highlight .il, +:root[data-mode=darkest] body[data-dark_mode_code_blocks=true] .highlight .il { + color: var(--grey); +} + +/*# sourceMappingURL=basic_mod.css.map */ diff --git a/_static/basic_mod.css.map b/_static/basic_mod.css.map new file mode 100644 index 
00000000..332d772f --- /dev/null +++ b/_static/basic_mod.css.map @@ -0,0 +1 @@ +{"version":3,"sourceRoot":"","sources":["../../src/sass/basic_mod.scss"],"names":[],"mappings":"AAGA;EACC;EACA;EACA;EACA;;AAED;EACC;EACA;EACA;EACA;;AAED;EACC;EACA;EACA;EACA;;AAID;EACC;EACA;EACA;EACA;;AAED;EACC;EACA;EACA;EACA;;AAED;EACC;EACA;EACA;EACA;;AAED;EACC;EACA;EACA;EACA;;AAaD;AACA;AAEA;EACE;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;AAEA;EACE;EACA;EACA;EACA;;AAGF;EACE;EACA;EACA;EACA;;AAGF;EAEE;EACA;EACA;EACA;EACA;EACA;EACA;;;AAIJ;EACE;EACA;EACA;;;AAGF;EACE;;;AAGF;EACE;;;AAGF;EACE;;;AAGF;EACE;EACA;;;AAGF;AAEA;EACE;EAEA;;;AAGF;AAAA;EAEE;EACA;EACA;;;AAGF;EACE;EACA;;;AAGF;AACA;AAKE;EACE;;AAGF;EACE;EACA;;AAEA;EACE;EACA;EACA;EACA;EACA,eAhHS;;AAmHX;EACE;EACA;EACA;EACA;;AAEA;EACE;;AAIJ;EACE;EACA;EACA;EACA;EACA;;;AAKN;EACE;EACA;EACA;EACA;EACA;EACA;;AAEA;EACE;EACA;EACA;EACA;EACA;EACA;EACA;;AAEA;EACE;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;AAEA;EACE;;AAIJ;EACE;EACA;EACA;EACA;EACA;EACA;;AAEA;EACE;EACA;;AAMN;EACE;EACA;EACA;EACA;EACA;EACA;;AAEA;EACE;EACA;EACA;EACA;EACA;;AAEA;EACE;;AAGF;EACE;EACA;;AAKJ;EACE;EACA;EACA;EACA;EACA;;AAEA;EACE,OA9Na;EA+Nb;;AAKJ;EACE;EACA;EACA;;AAEA;EACE;EACA;EACA;;AAKJ;EACE;;AAEA;EAHF;IAII;;;AAGF;EACE;;AAEA;EACE;EACA;;AAOJ;EADF;IAEI;;;AAKF;EACE;EACA;EACA;;AAEA;EACE;;AAEA;EACE;EACA;EACA;EACA;;AAEA;EACE;;;AASd;AACA;AAEA;EACE;EACA;EACA;EACA,aAnSa;EAoSb,cApSa;EAqSb;AAOA;AAqDA;;AA1DA;EARF;IASI;IACA;;;AAgBF;AAAA;EAEE;;AAGA;AAAA;EACE;;AAOJ;AAAA;EAEE;;AAIF;AAAA;EAEE;;AAIF;AAAA;EAEE;;AAGF;AAAA;EAEE;;AAGF;AAAA;EAEE;;AAKF;EACE;;AAGF;EACE;EACA;EACA,eA7WW;EA8WX,aAhXO;EAiXP;;AAGF;EACE,WAlXW;EAmXX;EACA;EACA;EACA;;AAGF;EACE;EACA;EACA;EACA;EACA,eA/XW;EAgYX;;AAEA;EACE,aArYK;;AA0YT;EACE;;AAGF;EACE;EACA;EACA;EACA;;AAGF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;EAQE;;AAGF;EACE;;AAGF;EACE;;AAGF;EACE;;AAOA;AAAA;AAAA;AAAA;EACE;;AAIJ;AAAA;EAEE;EACA;;AAQA;EACE;;AAIJ;EACE;;AAOA;EACE;;AAGF;EACE;;AAGF;EACE;;AAGF;EACE;;AAGF;EACE;;AAIJ;EACE;;AAGF;EACE;EACA;EACA;EACA;;AAGF;EACE;;AACA
;EACE;EACA;EACA;;AAEA;EACE;;AAEF;EACE;;AAMJ;EACE;;AAGF;EACE;EACA;EACA;EACA;EACA;;AAOJ;EACE;EACA;;AAGF;EACE;;AAEA;EACE;;AAGF;EACE;EACA;;AAEA;EACE;EACA;EACA;;AAEA;EACE;;AASN;EACE;;AAIJ;EACE;;AAEA;EACE;EACA;EACA;EACA,aAzjBK;EA0jBL;EACA;EACA;EACA,eA3jBS;;AA+jBX;EACE;EACA;;AAIF;EACE;EACA;;AAIF;EACE;;AAGF;EACE;;AAEA;EACE;;AAKF;EACE;;AAEA;EACE;;AAMN;EACE;;AAMJ;EACE;;AAEA;EACE;EACA;EACA;EACA,aAlnBK;EAmnBL;EACA;EACA;EACA,eApnBS;EAqnBT;;AAIF;EACE;EACA;;AAIF;EACE;;AAIF;EACE;;AAIF;EACE;;AAMJ;EACE;EACA,eAlpBW;EAmpBX;EACA;;AAEA;EACE;EACA;EACA;EACA;;AAGF;EACE;EACA;;AAGF;EACE;;AAGF;EACE;EACA;;AAGF;EAIE;;AAEA;EACE,OAJM;;AAQV;EAEE;;AAEA;EACE,OAJM;;AAQV;EAGE;;AAEA;EACE,OAJM;;AAQV;EAIE;;AAEA;EACE,OAJM;;;AAUd;AACA;AAwCA;EACE;EACA;EACA;EACA;EACA;EACA;EACA,OAnwBa;EAowBb;EACA;EACA;;AAEA;EAZF;IAaI;;;AAGF;EACE;EACA;;AAvDF;EACE;EACA;;AAEA;EACE;EACA;;AAEA;EACE;EACA;EACA;EACA;EACA;;AAEA;EACE;;AAGF;EACE;;AAKN;EACE;;AAMJ;EACE;;;AA6BJ;AACA;AAiBA;EACE;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;AAEA;EACE;;AAxBF;EACE;EACA;;AAEA;EACE;EACA;EACA;;;AAuBN;EACE;EACA;EACA;EACA;EACA;EACA,OA9zBa;EA+zBb;EACA;EACA;EACA;EACA;;AAzCA;EACE;EACA;;AAEA;EACE;EACA;EACA;;AA1EJ;EACE;EACA;;AAEA;EACE;EACA;;AAEA;EACE;EACA;EACA;EACA;EACA;;AAEA;EACE;;AAGF;EACE;;AAKN;EACE;;AAMJ;EACE;;AAoFF;EApBF;IAqBI;;;;AAIJ;AACA;AAEA;EACE;EACA;EACA;EACA;EACA;EACA;;;AAGF;EACE;EACA;EACA;EACA;;;AAGF;AACA;AAEA;EACE,aAx2Ba;EAy2Bb,cAz2Ba;;AA22Bb;EAJF;IAKI;IACA;;;AAGF;EACE,WAn3BW;EAo3BX;EACA;EACA;EACA;;AAEA;EACE;EACA;EACA;;AAEA;EACE;;AAEA;EACE;;AAGF;EACE;EACA;;AAKF;EACE;;AAEA;EACE;;AAMJ;EACE;;AAEA;EACE;;;AAQZ;AACA;AAOE;AAAA;EACE;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EAEA;EACA;AAoCE;AAGA;AAIA;AAGA;AAIA;AAIA;AAGA;AAIA;AAIA;AAIA;AAGA;AAGA;AAGA;AAIA;AAGA;AAGA;AAIA;AAGA;AAIA;AAGA;AAIA;AAIA;AAIA;AAGA;AAIA;AAGA;AAGA;AAGA;AAGA;AAGA;AAIA;AAGA;AAGA;AAIA;AAIA;AAGA;AAGA;AAIA;AAIA;AAGA;AAIA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAIA;AAGA;AAIA;AAGA;AAIA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;AAGA;;AA9PF;AAAA;EACE;;AAGF;AAAA;EACE;EACA;EACA;EACA;
;AAEF;AAAA;EACE;EACA;EACA;EACA;;AAEF;AAAA;EACE;EACA;EACA;EACA;;AAEF;AAAA;EACE;EACA;EACA;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;EACA;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE;;AAEF;AAAA;EACE","file":"basic_mod.css"} \ No newline at end of file diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 00000000..0398ebb9 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,149 @@ +/* + * Base JavaScript utilities for all Sphinx HTML documentation. + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => 
toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 00000000..89435bb4 --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '1.0.0', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + 
SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 00000000..a858a410 Binary files /dev/null and b/_static/file.png differ diff --git a/_static/fonts/roboto-mono/LICENSE.txt b/_static/fonts/roboto-mono/LICENSE.txt new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/_static/fonts/roboto-mono/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/_static/fonts/roboto-mono/roboto-mono-bold-italic.woff2 b/_static/fonts/roboto-mono/roboto-mono-bold-italic.woff2 new file mode 100644 index 00000000..595f902d Binary files /dev/null and b/_static/fonts/roboto-mono/roboto-mono-bold-italic.woff2 differ diff --git a/_static/fonts/roboto-mono/roboto-mono-bold.woff2 b/_static/fonts/roboto-mono/roboto-mono-bold.woff2 new file mode 100644 index 00000000..eb7eb9d4 Binary files /dev/null and b/_static/fonts/roboto-mono/roboto-mono-bold.woff2 differ diff --git a/_static/fonts/roboto-mono/roboto-mono-italic.woff2 b/_static/fonts/roboto-mono/roboto-mono-italic.woff2 new file mode 100644 index 00000000..8f5146aa Binary files /dev/null and b/_static/fonts/roboto-mono/roboto-mono-italic.woff2 differ diff --git a/_static/fonts/roboto-mono/roboto-mono.woff2 b/_static/fonts/roboto-mono/roboto-mono.woff2 new file mode 100644 index 00000000..9e69f6d1 Binary files /dev/null and b/_static/fonts/roboto-mono/roboto-mono.woff2 differ diff --git a/_static/fonts/roboto/LICENSE.txt b/_static/fonts/roboto/LICENSE.txt new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/_static/fonts/roboto/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/_static/fonts/roboto/roboto-bold.woff2 b/_static/fonts/roboto/roboto-bold.woff2 new file mode 100644 index 00000000..ed8b5520 Binary files /dev/null and b/_static/fonts/roboto/roboto-bold.woff2 differ diff --git a/_static/fonts/roboto/roboto-italic.woff2 b/_static/fonts/roboto/roboto-italic.woff2 new file mode 100644 index 00000000..71997929 Binary files /dev/null and b/_static/fonts/roboto/roboto-italic.woff2 differ diff --git a/_static/fonts/roboto/roboto.woff2 b/_static/fonts/roboto/roboto.woff2 new file mode 100644 index 00000000..39cd5a6f Binary files /dev/null and b/_static/fonts/roboto/roboto.woff2 differ diff --git a/_static/js/petite-vue.js b/_static/js/petite-vue.js new file mode 100644 index 00000000..b2acae46 --- /dev/null +++ b/_static/js/petite-vue.js @@ -0,0 +1 @@ +var pn=Object.defineProperty,hn=(e,t,n)=>t in e?pn(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n,C=(e,t,n)=>(hn(e,"symbol"!=typeof t?t+"":t,n),n),PetiteVue=function(e){"use strict";function t(e){if(a(e)){const n={};for(let s=0;s{if(e){const n=e.split(s);n.length>1&&(t[n[0].trim()]=n[1].trim())}})),t}function i(e){let t="";if(d(e))t=e;else if(a(e))for(let n=0;no(e,t)))}const l=Object.assign,f=Object.prototype.hasOwnProperty,u=(e,t)=>f.call(e,t),a=Array.isArray,p=e=>"[object Map]"===y(e),h=e=>e instanceof Date,d=e=>"string"==typeof e,m=e=>"symbol"==typeof e,g=e=>null!==e&&"object"==typeof e,v=Object.prototype.toString,y=e=>v.call(e),b=e=>d(e)&&"NaN"!==e&&"-"!==e[0]&&""+parseInt(e,10)===e,x=e=>{const t=Object.create(null);return n=>t[n]||(t[n]=e(n))},_=/-(\w)/g,w=x((e=>e.replace(_,((e,t)=>t?t.toUpperCase():"")))),$=/\B([A-Z])/g,k=x((e=>e.replace($,"-$1").toLowerCase())),O=e=>{const t=parseFloat(e);return isNaN(t)?e:t};function S(e,t){(t=t||undefined)&&t.active&&t.effects.push(e)}const E=e=>{const t=new Set(e);return t.w=0,t.n=0,t},j=e=>(e.w&N)>0,A=e=>(e.n&N)>0,P=new WeakMap;let R=0,N=1;const T=[];let M;const B=Symbol(""),L=Symbol("");class 
W{constructor(e,t=null,n){this.fn=e,this.scheduler=t,this.active=!0,this.deps=[],S(this,n)}run(){if(!this.active)return this.fn();if(!T.includes(this))try{return T.push(M=this),F.push(V),V=!0,N=1<<++R,R<=30?(({deps:e})=>{if(e.length)for(let t=0;t{const{deps:t}=e;if(t.length){let n=0;for(let s=0;s0?T[e-1]:void 0}}stop(){this.active&&(I(this),this.onStop&&this.onStop(),this.active=!1)}}function I(e){const{deps:t}=e;if(t.length){for(let n=0;n{("length"===t||t>=s)&&c.push(e)}));else switch(void 0!==n&&c.push(o.get(n)),t){case"add":a(e)?b(n)&&c.push(o.get("length")):(c.push(o.get(B)),p(e)&&c.push(o.get(L)));break;case"delete":a(e)||(c.push(o.get(B)),p(e)&&c.push(o.get(L)));break;case"set":p(e)&&c.push(o.get(B))}if(1===c.length)c[0]&&Z(c[0]);else{const e=[];for(const t of c)t&&e.push(...t);Z(E(e))}}function Z(e,t){for(const n of a(e)?e:[...e])(n!==M||n.allowRecurse)&&(n.scheduler?n.scheduler():n.run())}const q=function(e,t){const n=Object.create(null),s=e.split(",");for(let r=0;r!!n[e.toLowerCase()]:e=>!!n[e]}("__proto__,__v_isRef,__isVue"),D=new Set(Object.getOwnPropertyNames(Symbol).map((e=>Symbol[e])).filter(m)),G=X(),U=X(!0),Q=function(){const e={};return["includes","indexOf","lastIndexOf"].forEach((t=>{e[t]=function(...e){const n=le(this);for(let t=0,r=this.length;t{e[t]=function(...e){F.push(V),V=!1;const n=le(this)[t].apply(this,e);return z(),n}})),e}();function X(e=!1,t=!1){return function(n,s,r){if("__v_isReactive"===s)return!e;if("__v_isReadonly"===s)return e;if("__v_raw"===s&&r===(e?t?re:se:t?ne:te).get(n))return n;const i=a(n);if(!e&&i&&u(Q,s))return Reflect.get(Q,s,r);const o=Reflect.get(n,s,r);return(m(s)?D.has(s):q(s))||(e||H(n,0,s),t)?o:fe(o)?i&&b(s)?o:o.value:g(o)?e?function(e){return ce(e,!0,ee,null,se)}(o):oe(o):o}}const Y={get:G,set:function(e=!1){return function(t,n,s,r){let i=t[n];if(!e&&!function(e){return!(!e||!e.__v_isReadonly)}(s)&&(s=le(s),i=le(i),!a(t)&&fe(i)&&!fe(s)))return i.value=s,!0;const 
o=a(t)&&b(n)?Number(n)!Object.is(e,t))(s,i)&&J(t,"set",n,s):J(t,"add",n,s)),c}}(),deleteProperty:function(e,t){const n=u(e,t);e[t];const s=Reflect.deleteProperty(e,t);return s&&n&&J(e,"delete",t,void 0),s},has:function(e,t){const n=Reflect.has(e,t);return(!m(t)||!D.has(t))&&H(e,0,t),n},ownKeys:function(e){return H(e,0,a(e)?"length":B),Reflect.ownKeys(e)}},ee={get:U,set:(e,t)=>!0,deleteProperty:(e,t)=>!0},te=new WeakMap,ne=new WeakMap,se=new WeakMap,re=new WeakMap;function ie(e){return e.__v_skip||!Object.isExtensible(e)?0:function(e){switch(e){case"Object":case"Array":return 1;case"Map":case"Set":case"WeakMap":case"WeakSet":return 2;default:return 0}}((e=>y(e).slice(8,-1))(e))}function oe(e){return e&&e.__v_isReadonly?e:ce(e,!1,Y,null,te)}function ce(e,t,n,s,r){if(!g(e)||e.__v_raw&&(!t||!e.__v_isReactive))return e;const i=r.get(e);if(i)return i;const o=ie(e);if(0===o)return e;const c=new Proxy(e,2===o?s:n);return r.set(e,c),c}function le(e){const t=e&&e.__v_raw;return t?le(t):e}function fe(e){return Boolean(e&&!0===e.__v_isRef)}Promise.resolve();let ue=!1;const ae=[],pe=Promise.resolve(),he=e=>pe.then(e),de=e=>{ae.includes(e)||ae.push(e),ue||(ue=!0,he(me))},me=()=>{for(const e of ae)e();ae.length=0,ue=!1},ge=/^(spellcheck|draggable|form|list|type)$/,ve=({el:e,get:t,effect:n,arg:s,modifiers:r})=>{let i;"class"===s&&(e._class=e.className),n((()=>{let n=t();if(s)(null==r?void 0:r.camel)&&(s=w(s)),ye(e,s,n,i);else{for(const t in n)ye(e,t,n[t],i&&i[t]);for(const t in i)(!n||!(t in n))&&ye(e,t,null)}i=n}))},ye=(e,n,s,r)=>{if("class"===n)e.setAttribute("class",i(e._class?[e._class,s]:s)||"");else if("style"===n){s=t(s);const{style:n}=e;if(s)if(d(s))s!==r&&(n.cssText=s);else{for(const e in s)xe(n,e,s[e]);if(r&&!d(r))for(const e in r)null==s[e]&&xe(n,e,"")}else e.removeAttribute("style")}else e instanceof SVGElement||!(n in 
e)||ge.test(n)?"true-value"===n?e._trueValue=s:"false-value"===n?e._falseValue=s:null!=s?e.setAttribute(n,s):e.removeAttribute(n):(e[n]=s,"value"===n&&(e._value=s))},be=/\s*!important$/,xe=(e,t,n)=>{a(n)?n.forEach((n=>xe(e,t,n))):t.startsWith("--")?e.setProperty(t,n):be.test(n)?e.setProperty(k(t),n.replace(be,""),"important"):e[t]=n},_e=(e,t)=>{const n=e.getAttribute(t);return null!=n&&e.removeAttribute(t),n},we=(e,t,n,s)=>{e.addEventListener(t,n,s)},$e=/^[A-Za-z_$][\w$]*(?:\.[A-Za-z_$][\w$]*|\['[^']*?']|\["[^"]*?"]|\[\d+]|\[[A-Za-z_$][\w$]*])*$/,ke=["ctrl","shift","alt","meta"],Oe={stop:e=>e.stopPropagation(),prevent:e=>e.preventDefault(),self:e=>e.target!==e.currentTarget,ctrl:e=>!e.ctrlKey,shift:e=>!e.shiftKey,alt:e=>!e.altKey,meta:e=>!e.metaKey,left:e=>"button"in e&&0!==e.button,middle:e=>"button"in e&&1!==e.button,right:e=>"button"in e&&2!==e.button,exact:(e,t)=>ke.some((n=>e[`${n}Key`]&&!t[n]))},Se=({el:e,get:t,exp:n,arg:s,modifiers:r})=>{if(!s)return;let i=$e.test(n)?t(`(e => ${n}(e))`):t(`($event => { ${n} })`);if("vue:mounted"!==s){if("vue:unmounted"===s)return()=>i();if(r){"click"===s&&(r.right&&(s="contextmenu"),r.middle&&(s="mouseup"));const e=i;i=t=>{if(!("key"in t)||k(t.key)in r){for(const e in r){const n=Oe[e];if(n&&n(t,r))return}return e(t)}}}we(e,s,i,r)}else he(i)},Ee=({el:e,get:t,effect:n})=>{n((()=>{e.textContent=Ce(t())}))},Ce=e=>null==e?"":g(e)?JSON.stringify(e,null,2):String(e),je=e=>"_value"in e?e._value:e.value,Ae=(e,t)=>{const n=t?"_trueValue":"_falseValue";return n in e?e[n]:t},Pe=e=>{e.target.composing=!0},Re=e=>{const t=e.target;t.composing&&(t.composing=!1,Ne(t,"input"))},Ne=(e,t)=>{const n=document.createEvent("HTMLEvents");n.initEvent(t,!0,!0),e.dispatchEvent(n)},Te=Object.create(null),Me=(e,t,n)=>Be(e,`return(${t})`,n),Be=(e,t,n)=>{const s=Te[t]||(Te[t]=Le(t));try{return s(e,n)}catch(r){console.error(r)}},Le=e=>{try{return new Function("$data","$el",`with($data){${e}}`)}catch(t){return console.error(`${t.message} in expression: 
${e}`),()=>{}}},We={bind:ve,on:Se,show:({el:e,get:t,effect:n})=>{const s=e.style.display;n((()=>{e.style.display=t()?s:"none"}))},text:Ee,html:({el:e,get:t,effect:n})=>{n((()=>{e.innerHTML=t()}))},model:({el:e,exp:t,get:n,effect:s,modifiers:r})=>{const i=e.type,l=n(`(val) => { ${t} = val }`),{trim:f,number:u="number"===i}=r||{};if("SELECT"===e.tagName){const t=e;we(e,"change",(()=>{const e=Array.prototype.filter.call(t.options,(e=>e.selected)).map((e=>u?O(je(e)):je(e)));l(t.multiple?e:e[0])})),s((()=>{const e=n(),s=t.multiple;for(let n=0,r=t.options.length;n-1:r.selected=e.has(i);else if(o(je(r),e))return void(t.selectedIndex!==n&&(t.selectedIndex=n))}!s&&-1!==t.selectedIndex&&(t.selectedIndex=-1)}))}else if("checkbox"===i){let t;we(e,"change",(()=>{const t=n(),s=e.checked;if(a(t)){const n=je(e),r=c(t,n),i=-1!==r;if(s&&!i)l(t.concat(n));else if(!s&&i){const e=[...t];e.splice(r,1),l(e)}}else l(Ae(e,s))})),s((()=>{const s=n();a(s)?e.checked=c(s,je(e))>-1:s!==t&&(e.checked=o(s,Ae(e,!0))),t=s}))}else if("radio"===i){let t;we(e,"change",(()=>{l(je(e))})),s((()=>{const s=n();s!==t&&(e.checked=o(s,je(e)))}))}else{const t=e=>f?e.trim():u?O(e):e;we(e,"compositionstart",Pe),we(e,"compositionend",Re),we(e,(null==r?void 0:r.lazy)?"change":"input",(()=>{e.composing||l(t(e.value))})),f&&we(e,"change",(()=>{e.value=e.value.trim()})),s((()=>{if(e.composing)return;const s=e.value,r=n();document.activeElement===e&&t(s)===r||s!==r&&(e.value=r)}))}},effect:({el:e,ctx:t,exp:n,effect:s})=>{he((()=>s((()=>Be(t.scope,n,e)))))}},Ie=/([\s\S]*?)\s+(?:in|of)\s+([\s\S]*)/,Ke=/,([^,\}\]]*)(?:,([^,\}\]]*))?$/,Ve=/^\(|\)$/g,Fe=/^[{[]\s*((?:[\w_$]+\s*,?\s*)+)[\]}]$/,ze=(e,t,n)=>{const s=t.match(Ie);if(!s)return;const r=e.nextSibling,i=e.parentElement,o=new Text("");i.insertBefore(o,e),i.removeChild(e);const c=s[2].trim();let 
l,f,u,p,h=s[1].trim().replace(Ve,"").trim(),d=!1,m="key",v=e.getAttribute(m)||e.getAttribute(m=":key")||e.getAttribute(m="v-bind:key");v&&(e.removeAttribute(m),"key"===m&&(v=JSON.stringify(v))),(p=h.match(Ke))&&(h=h.replace(Ke,"").trim(),f=p[1].trim(),p[2]&&(u=p[2].trim())),(p=h.match(Fe))&&(l=p[1].split(",").map((e=>e.trim())),d="["===h[0]);let y,b,x,_=!1;const w=(e,t,s,r)=>{const i={};l?l.forEach(((e,n)=>i[e]=t[d?n:e])):i[h]=t,r?(f&&(i[f]=r),u&&(i[u]=s)):f&&(i[f]=s);const o=et(n,i),c=v?Me(o.scope,v):s;return e.set(c,s),o.key=c,o},$=(t,n)=>{const s=new nt(e,t);return s.key=t.key,s.insert(i,n),s};return n.effect((()=>{const e=Me(n.scope,c),t=x;if([b,x]=(e=>{const t=new Map,n=[];if(a(e))for(let s=0;s$(e,o))),_=!0})),r},He=({el:e,ctx:{scope:{$refs:t}},get:n,effect:s})=>{let r;return s((()=>{const s=n();t[s]=e,r&&s!==r&&delete t[r],r=s})),()=>{r&&delete t[r]}},Je=/^(?:v-|:|@)/,Ze=/\.([\w-]+)/g;let qe=!1;const De=(e,t)=>{const n=e.nodeType;if(1===n){const n=e;if(n.hasAttribute("v-pre"))return;let s;if(_e(n,"v-cloak"),s=_e(n,"v-if"))return((e,t,n)=>{const s=e.parentElement,r=new Comment("v-if");s.insertBefore(r,e);const i=[{exp:t,el:e}];let o,c;for(;(o=e.nextElementSibling)&&(c=null,""===_e(o,"v-else")||(c=_e(o,"v-else-if")));)s.removeChild(o),i.push({exp:c,el:o});const l=e.nextSibling;s.removeChild(e);let f,u=-1;const a=()=>{f&&(s.insertBefore(r,f.el),f.remove(),f=void 0)};return n.effect((()=>{for(let e=0;e{let n=e.firstChild;for(;n;)n=De(n,t)||n.nextSibling},Ue=(e,t,n,s)=>{let r,i,o;if(":"===(t=t.replace(Ze,((e,t)=>((o||(o={}))[t]=!0,""))))[0])r=ve,i=t.slice(1);else if("@"===t[0])r=Se,i=t.slice(1);else{const e=t.indexOf(":"),n=e>0?t.slice(2,e):t.slice(2);r=We[n]||s.dirs[n],i=e>0?t.slice(e+1):void 0}r&&(r===ve&&"ref"===i&&(r=He),Qe(e,r,n,s,i,o),e.removeAttribute(t))},Qe=(e,t,n,s,r,i)=>{const o=t({el:e,get:(t=n)=>Me(s.scope,t,e),effect:s.effect,ctx:s,exp:n,arg:r,modifiers:i});o&&s.cleanups.push(o)},Xe=(e,t)=>{if("#"!==t[0])e.innerHTML=t;else{const 
n=document.querySelector(t);e.appendChild(n.content.cloneNode(!0))}},Ye=e=>{const t={delimiters:["{{","}}"],delimitersRE:/\{\{([^]+?)\}\}/g,...e,scope:e?e.scope:oe({}),dirs:e?e.dirs:{},effects:[],blocks:[],cleanups:[],effect:e=>{if(qe)return de(e),e;const n=function(e,t){e.effect&&(e=e.effect.fn);const n=new W(e);t&&(l(n,t),t.scope&&S(n,t.scope)),(!t||!t.lazy)&&n.run();const s=n.run.bind(n);return s.effect=n,s}(e,{scheduler:()=>de(n)});return t.effects.push(n),n}};return t},et=(e,t={})=>{const n=e.scope,s=Object.create(n);Object.defineProperties(s,Object.getOwnPropertyDescriptors(t)),s.$refs=Object.create(n.$refs);const r=oe(new Proxy(s,{set:(e,t,s,i)=>i!==r||e.hasOwnProperty(t)?Reflect.set(e,t,s,i):Reflect.set(n,t,s)}));return tt(r),{...e,scope:r}},tt=e=>{for(const t of Object.keys(e))"function"==typeof e[t]&&(e[t]=e[t].bind(e))};class nt{constructor(e,t,n=!1){C(this,"template"),C(this,"ctx"),C(this,"key"),C(this,"parentCtx"),C(this,"isFragment"),C(this,"start"),C(this,"end"),this.isFragment=e instanceof HTMLTemplateElement,n?this.template=e:this.isFragment?this.template=e.content.cloneNode(!0):this.template=e.cloneNode(!0),n?this.ctx=t:(this.parentCtx=t,t.blocks.push(this),this.ctx=Ye(t)),De(this.template,this.ctx)}get el(){return this.start||this.template}insert(e,t=null){if(this.isFragment)if(this.start){let n,s=this.start;for(;s&&(n=s.nextSibling,e.insertBefore(s,t),s!==this.end);)s=n}else this.start=new Text(""),this.end=new Text(""),e.insertBefore(this.end,t),e.insertBefore(this.start,this.end),e.insertBefore(this.template,this.end);else e.insertBefore(this.template,t)}remove(){if(this.parentCtx&&((e,t)=>{const n=e.indexOf(t);n>-1&&e.splice(n,1)})(this.parentCtx.blocks,this),this.start){const e=this.start.parentNode;let t,n=this.start;for(;n&&(t=n.nextSibling,e.removeChild(n),n!==this.end);)n=t}else 
this.template.parentNode.removeChild(this.template);this.teardown()}teardown(){this.ctx.blocks.forEach((e=>{e.teardown()})),this.ctx.effects.forEach(K),this.ctx.cleanups.forEach((e=>e()))}}const st=e=>e.replace(/[-.*+?^${}()|[\]\/\\]/g,"\\$&"),rt=e=>{const t=Ye();if(e&&(t.scope=oe(e),tt(t.scope),e.$delimiters)){const[n,s]=t.delimiters=e.$delimiters;t.delimitersRE=new RegExp(st(n)+"([^]+?)"+st(s),"g")}let n;return t.scope.$s=Ce,t.scope.$nextTick=he,t.scope.$refs=Object.create(null),{directive(e,n){return n?(t.dirs[e]=n,this):t.dirs[e]},mount(e){if("string"==typeof e&&!(e=document.querySelector(e)))return;let s;return s=(e=e||document.documentElement).hasAttribute("v-scope")?[e]:[...e.querySelectorAll("[v-scope]")].filter((e=>!e.matches("[v-scope] [v-scope]"))),s.length||(s=[e]),n=s.map((e=>new nt(e,t,!0))),this},unmount(){n.forEach((e=>e.teardown()))}}},it=document.currentScript;return it&&it.hasAttribute("init")&&rt().mount(),e.createApp=rt,e.nextTick=he,e.reactive=oe,Object.defineProperty(e,"__esModule",{value:!0}),e[Symbol.toStringTag]="Module",e}({}); diff --git a/_static/js/theme.js b/_static/js/theme.js new file mode 100644 index 00000000..bf36d744 --- /dev/null +++ b/_static/js/theme.js @@ -0,0 +1,108 @@ + +/** + * We add extra br tags to the autodoc output, so each parameter is shown on + * its own line. 
+ */ +function setupAutodocPy() { + const paramElements = document.querySelectorAll('.py .sig-param') + + Array(...paramElements).forEach((element) => { + let brElement = document.createElement('br') + element.parentNode.insertBefore(brElement, element) + }) + + const lastParamElements = document.querySelectorAll('.py em.sig-param:last-of-type') + + Array(...lastParamElements).forEach((element) => { + let brElement = document.createElement('br') + element.after(brElement) + }) +} + +function setupAutodocCpp() { + const highlightableElements = document.querySelectorAll(".c dt.sig-object, .cpp dt.sig-object") + + Array(...highlightableElements).forEach((element) => { + element.classList.add("highlight"); + }) + + const documentables = document.querySelectorAll("dt.sig-object.c,dt.sig-object.cpp"); + + Array(...documentables).forEach((element) => { + element.classList.add("highlight"); + + var parens = element.querySelectorAll(".sig-paren"); + var commas = Array(...element.childNodes).filter(e => e.textContent == ", ") + + if (parens.length != 2) return; + + commas.forEach(c => { + if (c.compareDocumentPosition(parens[0]) == Node.DOCUMENT_POSITION_PRECEDING && + c.compareDocumentPosition(parens[1]) == Node.DOCUMENT_POSITION_FOLLOWING + ) { + let brElement = document.createElement('br') + let spanElement = document.createElement('span') + spanElement.className = "sig-indent" + c.after(brElement) + brElement.after(spanElement) + } + }); + + if (parens[0].nextSibling != parens[1]) { + // not an empty argument list + let brElement = document.createElement('br') + let spanElement = document.createElement('span') + spanElement.className = "sig-indent" + parens[0].after(brElement) + brElement.after(spanElement) + let brElement1 = document.createElement('br') + parens[1].parentNode.insertBefore(brElement1, parens[1]); + } + }) +} + +function setupSearchSidebar() { + const searchInput = document.querySelector('form.search input[type=text]') + if (searchInput) { + 
searchInput.placeholder = 'Search...' + } + + const searchButton = document.querySelector('form.search input[type=submit]') + if (searchButton) { + searchButton.value = 'Search' + } +} + +function setupSidebarToggle() { + const sidebar = document.querySelector('.sphinxsidebar') + document.querySelector('#toggle_sidebar a').onclick = (event) => { + console.log("Toggling sidebar") + event.preventDefault() + sidebar.style.display = window.getComputedStyle(sidebar, null).display == 'none' ? 'block' : 'none' + } +} + +function setupRightSidebarToggle() { + const sidebar = document.querySelector('#right_sidebar') + + const links = document.querySelectorAll('a.toggle_right_sidebar') + + Array(...links).forEach((element) => { + element.onclick = (event) => { + console.log("Toggling right sidebar") + event.preventDefault() + sidebar.style.display = window.getComputedStyle(sidebar, null).display == 'none' ? 'block' : 'none' + } + }) +} + + +document.addEventListener("DOMContentLoaded", function() { + console.log("custom theme loaded") + + setupAutodocPy() + setupAutodocCpp() + setupSearchSidebar() + setupSidebarToggle() + setupRightSidebarToggle() +}) diff --git a/_static/language_data.js b/_static/language_data.js new file mode 100644 index 00000000..c7fe6c6f --- /dev/null +++ b/_static/language_data.js @@ -0,0 +1,192 @@ +/* + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. 
+ */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, if available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if 
(re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/_static/minus.png b/_static/minus.png new file mode 100644 index 00000000..d96755fd Binary files /dev/null and b/_static/minus.png differ diff --git a/_static/plus.png b/_static/plus.png new file mode 100644 index 00000000..7107cec9 Binary files /dev/null and b/_static/plus.png differ diff --git a/_static/pygments.css b/_static/pygments.css new file mode 100644 index 00000000..84ab3030 --- /dev/null +++ b/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #f8f8f8; } +.highlight .c { color: #3D7B7B; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #008000; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang 
*/ +.highlight .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #9C6500 } /* Comment.Preproc */ +.highlight .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #E40000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #008400 } /* Generic.Inserted */ +.highlight .go { color: #717171 } /* Generic.Output */ +.highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #008000 } /* Keyword.Pseudo */ +.highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #B00040 } /* Keyword.Type */ +.highlight .m { color: #666666 } /* Literal.Number */ +.highlight .s { color: #BA2121 } /* Literal.String */ +.highlight .na { color: #687822 } /* Name.Attribute */ +.highlight .nb { color: #008000 } /* Name.Builtin */ +.highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ +.highlight .no { color: #880000 } /* Name.Constant */ +.highlight .nd { color: #AA22FF } /* Name.Decorator */ +.highlight .ni { color: #717171; font-weight: bold } /* Name.Entity */ +.highlight 
.ne { color: #CB3F38; font-weight: bold } /* Name.Exception */ +.highlight .nf { color: #0000FF } /* Name.Function */ +.highlight .nl { color: #767600 } /* Name.Label */ +.highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #19177C } /* Name.Variable */ +.highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #666666 } /* Literal.Number.Bin */ +.highlight .mf { color: #666666 } /* Literal.Number.Float */ +.highlight .mh { color: #666666 } /* Literal.Number.Hex */ +.highlight .mi { color: #666666 } /* Literal.Number.Integer */ +.highlight .mo { color: #666666 } /* Literal.Number.Oct */ +.highlight .sa { color: #BA2121 } /* Literal.String.Affix */ +.highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ +.highlight .sc { color: #BA2121 } /* Literal.String.Char */ +.highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ +.highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #BA2121 } /* Literal.String.Double */ +.highlight .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ +.highlight .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */ +.highlight .sx { color: #008000 } /* Literal.String.Other */ +.highlight .sr { color: #A45A77 } /* Literal.String.Regex */ +.highlight .s1 { color: #BA2121 } /* Literal.String.Single */ +.highlight .ss { color: #19177C } /* Literal.String.Symbol */ +.highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #0000FF } /* Name.Function.Magic */ +.highlight .vc { color: #19177C } /* Name.Variable.Class */ +.highlight .vg { color: #19177C } /* Name.Variable.Global */ +.highlight .vi { color: #19177C } /* Name.Variable.Instance */ 
+.highlight .vm { color: #19177C } /* Name.Variable.Magic */ +.highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_static/searchtools.js b/_static/searchtools.js new file mode 100644 index 00000000..2c774d17 --- /dev/null +++ b/_static/searchtools.js @@ -0,0 +1,632 @@ +/* + * Sphinx JavaScript utilities for the full-text search. + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename, kind] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. + objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +// Global search result kind enum, used by themes to style search results. 
+class SearchResultKind { + static get index() { return "index"; } + static get object() { return "object"; } + static get text() { return "text"; } + static get title() { return "title"; } +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename, kind] = item; + + let listItem = document.createElement("li"); + // Add a class representing the item's type: + // can be used by a theme's CSS selector for styling + // See SearchResultKind for the class names. 
+ listItem.classList.add(`kind-${kind}`); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms, anchor) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." 
+ ); + else + Search.status.innerText = Documentation.ngettext( + "Search finished, found one page matching the search query.", + "Search finished, found ${resultCount} pages matching the search query.", + resultCount, + ).replace('${resultCount}', resultCount); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; +// Helper function used by query() to order search results. +// Each input is an array of [docname, title, anchor, descr, score, filename, kind]. +// Order the results by score (in opposite order of appearance, since the +// `_displayNextItem` function uses pop() to retrieve items) and then alphabetically. +const _orderResultsByScoreThenName = (a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 1 : -1; +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. 
+ */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString, anchor) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + for (const removalQuery of [".headerlink", "script", "style"]) { + htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() }); + } + if (anchor) { + const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`); + if (anchorContent) return anchorContent.textContent; + + console.warn( + `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.` + ); + } + + // if anchor not specified or not found, fall back to main content + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent) return docContent.textContent; + + console.warn( + "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template." 
+ ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.setAttribute("role", "list"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + 
Search.startPulse(); + + // index already loaded, the browser was quick! + if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + _parseQuery: (query) => { + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + return [query, searchTerms, excludedTerms, highlightTerms, objectTerms]; + }, + + /** + * execute search (requires search index to be loaded) + */ + _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // Collect multiple result groups to be sorted separately and then ordered. + // Each is an array of [docname, title, anchor, descr, score, filename, kind]. 
+ const normalResults = []; + const nonMainIndexResults = []; + + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase().trim(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + const score = Math.round(Scorer.title * queryLower.length / title.length); + const boost = titles[file] === title ? 1 : 0; // add a boost for document titles + normalResults.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score + boost, + filenames[file], + SearchResultKind.title, + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id, isMain] of foundEntries) { + const score = Math.round(100 * queryLower.length / entry.length); + const result = [ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + SearchResultKind.index, + ]; + if (isMain) { + normalResults.push(result); + } else { + nonMainIndexResults.push(result); + } + } + } + } + + // lookup as object + objectTerms.forEach((term) => + normalResults.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) { + normalResults.forEach((item) => (item[4] = Scorer.score(item))); + nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item))); + } + + // Sort each group of results by score and then alphabetically by name. 
+ normalResults.sort(_orderResultsByScoreThenName); + nonMainIndexResults.sort(_orderResultsByScoreThenName); + + // Combine the result groups in (reverse) order. + // Non-main index entries are typically arbitrary cross-references, + // so display them after other results. + let results = [...nonMainIndexResults, ...normalResults]; + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + return results.reverse(); + }, + + query: (query) => { + const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query); + const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + SearchResultKind.object, + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add 
support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + if (!terms.hasOwnProperty(word)) { + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + } + if (!titleTerms.hasOwnProperty(word)) { + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: titleTerms[term], score: Scorer.partialTitle }); + }); + } + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (!fileMap.has(file)) fileMap.set(file, [word]); + else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. 
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + SearchResultKind.text, + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords, anchor) => { + const text = Search.htmlToText(htmlText, anchor); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/_static/sphinx_highlight.js b/_static/sphinx_highlight.js new file mode 100644 index 00000000..8a96c69a --- /dev/null +++ b/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + 
if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/api/api.html b/api/api.html new file mode 100644 index 00000000..52ff07b5 --- /dev/null +++ b/api/api.html @@ -0,0 +1,1946 @@ + + + + + + + + API Reference — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

API Reference

+
+

Props

+
+
+class port.api.props.PropsUIFooter
+

Page footer.

+
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIHeader(title: Translatable)
+

Page header.

+
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPageDonation(platform: str, header: PropsUIHeader, body: PropsUIPromptRadioInput | PropsUIPromptConsentForm | PropsUIPromptFileInput | PropsUIPromptFileInputMultiple | PropsUIPromptConfirm | PropsUIPromptQuestionnaire, footer: PropsUIFooter | None = None)
+

A multi-purpose page that gets shown to the user.

+
+
+platform
+

The platform name the user is currently in the process of donating data from.

+
+
Type:
+

str

+
+
+
+ +
+
+header
+

Page header.

+
+
Type:
+

PropsUIHeader

+
+
+
+ +
+
+body (PropsUIPromptRadioInput | PropsUIPromptConsentForm |
+

PropsUIPromptFileInput | PropsUIPromptFileInputMultiple | +PropsUIPromptConfirm | PropsUIPromptQuestionnaire): +Main body of the page.

+
+ +
+
+footer
+

Optional page footer.

+
+
Type:
+

Optional[PropsUIFooter]

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPageEnd
+

An ending page to show the user they are done.

+
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPromptConfirm(text: Translatable, ok: Translatable, cancel: Translatable)
+

Retry submitting a file page.

+

Prompt the user if they want to submit a new file. +This can be used in case a file could not be processed.

+
+
+text
+

Message to display.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+ok
+

Message to display if the user wants to try again.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+cancel
+

Message to display if the user wants to continue regardless.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPromptConsentForm(id: str, tables: list[PropsUIPromptConsentFormTable], meta_tables: list[PropsUIPromptConsentFormTable], description: Translatable | None = None, donate_question: Translatable | None = None, donate_button: Translatable | None = None)
+

Tables to be shown to the participant prior to donation.

+
+
+id
+

will be used as part of the filename when the data is stored

+
+
Type:
+

str

+
+
+
+ +
+
+tables
+

A list of tables.

+
+
Type:
+

list[PropsUIPromptConsentFormTable]

+
+
+
+ +
+
+meta_tables
+

A list of optional tables, for example for logging data.

+
+
Type:
+

list[PropsUIPromptConsentFormTable]

+
+
+
+ +
+
+description
+

Optional description of the consent form.

+
+
Type:
+

Optional[Translatable]

+
+
+
+ +
+
+donate_question
+

Optional donation question.

+
+
Type:
+

Optional[Translatable]

+
+
+
+ +
+
+donate_button
+

Optional text for the donate button.

+
+
Type:
+

Optional[Translatable]

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+
+translate_meta_tables()
+

Translate the meta tables to a list of dictionaries.

+
+
Returns:
+

A list of dictionaries representing the meta tables.

+
+
Return type:
+

list

+
+
+
+ +
+
+translate_tables()
+

Translate the tables to a list of dictionaries.

+
+
Returns:
+

A list of dictionaries representing the tables.

+
+
Return type:
+

list

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPromptConsentFormTable(id: str, title: Translatable, data_frame: pandas.DataFrame | Dict[str, Dict[str, Any]], description: Translatable | None = None, visualizations: list | None = None, folded: bool | None = False, delete_option: bool | None = True)
+

Table to be shown to the participant prior to donation.

+
+
+id
+

A unique string to identify the table after donation.

+
+
Type:
+

str

+
+
+
+ +
+
+title
+

Title of the table.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+data_frame
+

Table to be shown can be a pandas DataFrame or a dictionary.

+
+
Type:
+

pd.DataFrame | Dict[str, Dict[str, Any]]

+
+
+
+ +
+
+description
+

Optional description of the table.

+
+
Type:
+

Optional[Translatable]

+
+
+
+ +
+
+visualizations
+

Optional visualizations to be shown.

+
+
Type:
+

Optional[list]

+
+
+
+ +
+
+folded
+

Whether the table should be initially folded.

+
+
Type:
+

Optional[bool]

+
+
+
+ +
+
+delete_option
+

Whether to show a delete option for the table.

+
+
Type:
+

Optional[bool]

+
+
+
+ +

Examples:

+
data_frame_df = pd.DataFrame([
+    {"column1": 1, "column2": 4},
+    {"column1": 2, "column2": 5},
+    {"column1": 3, "column2": 6},
+])
+
+example1 = PropsUIPromptConsentFormTable(
+    id="example1",
+    title=Translatable("Table as DataFrame"),
+    data_frame=data_frame_df,
+)
+
+data_frame_dict = {
+    "column1": {"0": 1, "1": 4},
+    "column2": {"0": 2, "1": 5},
+    "column3": {"0": 3, "1": 6},
+}
+
+example2 = PropsUIPromptConsentFormTable(
+    id="example2",
+    title=Translatable("Table as Dictionary"),
+    data_frame=data_frame_dict,
+)
+
+
+
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPromptFileInput(description: Translatable, extensions: str)
+

Prompt the user to submit a file.

+
+
+description
+

Text with an explanation.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+extensions
+

Accepted mime types, example: “application/zip, text/plain”.

+
+
Type:
+

str

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPromptFileInputMultiple(description: Translatable, extensions: str)
+

Prompt the user to submit multiple files.

+
+
+description
+

Text with an explanation.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+extensions
+

Accepted mime types, example: “application/zip, text/plain”.

+
+
Type:
+

str

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPromptProgress(description: Translatable, message: str, percentage: int | None = None)
+

Prompt the user information during the extraction.

+
+
+description
+

Text with an explanation.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+message
+

Can be used to show extraction progress.

+
+
Type:
+

str

+
+
+
+ +
+
+percentage
+

Optional percentage of progress.

+
+
Type:
+

Optional[int]

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPromptQuestionnaire(description: Translatable, questions: list[PropsUIQuestionMultipleChoice | PropsUIQuestionMultipleChoiceCheckbox | PropsUIQuestionOpen])
+

Questionnaire containing multiple questions.

+
+
+description
+

Description of the questionnaire.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+questions
+

List of questions in the questionnaire.

+
+
Type:
+

list[PropsUIQuestionMultipleChoice | PropsUIQuestionMultipleChoiceCheckbox | PropsUIQuestionOpen]

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIPromptRadioInput(title: Translatable, description: Translatable, items: list[RadioItem])
+

Radio group.

+

This radio group can be used to get a multiple choice answer from a user.

+
+
+title
+

Title of the radio group.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+description
+

Short description of the radio group.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+items
+

A list of radio buttons.

+
+
Type:
+

list[RadioItem]

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIQuestionMultipleChoice(id: int, question: Translatable, choices: list[Translatable])
+

Multiple choice question with radio buttons.

+
+
+id
+

Question ID.

+
+
Type:
+

int

+
+
+
+ +
+
+question
+

The question text.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+choices
+

List of choices.

+
+
Type:
+

list[Translatable]

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIQuestionMultipleChoiceCheckbox(id: int, question: Translatable, choices: list[Translatable])
+

Multiple choice question with checkboxes.

+
+
+id
+

Question ID.

+
+
Type:
+

int

+
+
+
+ +
+
+question
+

The question text.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+choices
+

List of choices.

+
+
Type:
+

list[Translatable]

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.PropsUIQuestionOpen(id: int, question: Translatable)
+

Open-ended question.

+
+
+id
+

Question ID.

+
+
Type:
+

int

+
+
+
+ +
+
+question
+

The question text.

+
+
Type:
+

Translatable

+
+
+
+ +
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.RadioItem
+

Radio button.

+
+
+id
+

ID of radio button.

+
+
Type:
+

int

+
+
+
+ +
+
+value
+

Text to be displayed.

+
+
Type:
+

str

+
+
+
+ +
+ +
+
+class port.api.props.Translatable(translations: Translations)
+

Wrapper class for Translations.

+
+
+toDict()
+

Convert the object to a dictionary.

+
+
Returns:
+

A dictionary representation of the object.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class port.api.props.Translations
+

Typed dict containing text that is displayed in a specific language.

+
+ +
+
+

Extraction helpers

+

This module contains helper functions that can be used during the data extraction process

+
+
+exception port.helpers.extraction_helpers.FileNotFoundInZipError
+

The File you are looking for is not present in a zipfile

+
+ +
+
+port.helpers.extraction_helpers.dict_denester(inp: dict[Any, Any] | list[Any], new: dict[Any, Any] | None = None, name: str = '', run_first: bool = True) dict[Any, Any]
+

Denests a dictionary or list, returning a new flattened dictionary.

+
+
Parameters:
+
    +
  • inp (dict[Any, Any] | list[Any]) – The input dictionary or list to be denested.

  • +
  • new (dict[Any, Any] | None, optional) – The dictionary to store denested key-value pairs. Defaults to None.

  • +
  • name (str, optional) – The current key name in the denesting process. Defaults to “”.

  • +
  • run_first (bool, optional) – Flag to indicate if this is the first run of the function. Defaults to True.

  • +
+
+
Returns:
+

A new denested dictionary.

+
+
Return type:
+

dict[Any, Any]

+
+
+

Examples:

+
>>> nested_dict = {"a": {"b": {"c": 1}}, "d": [2, 3]}
+>>> dict_denester(nested_dict)
+{"a-b-c": 1, "d-0": 2, "d-1": 3}
+
+
+
+ +
+
+port.helpers.extraction_helpers.epoch_to_iso(epoch_timestamp: str | int | float) str
+

Convert epoch timestamp to an ISO 8601 string, assuming UTC.

+
+
Parameters:
+

epoch_timestamp (str | int) – The epoch timestamp to convert.

+
+
Returns:
+

The ISO 8601 formatted string, or the original input if conversion fails.

+
+
Return type:
+

str

+
+
Raises:
+

Exception – Logs an error message if conversion fails.

+
+
+

Examples:

+
>>> epoch_to_iso(1632139200)
+"2021-09-20T12:00:00+00:00"
+
+
+
+ +
+
+port.helpers.extraction_helpers.extract_file_from_zip(zfile: str, file_to_extract: str) BytesIO
+

Extracts a specific file from a zipfile and returns it as a BytesIO buffer.

+
+
Parameters:
+
    +
  • zfile (str) – Path to the zip file.

  • +
  • file_to_extract (str) – Name or path of the file to extract from the zip.

  • +
+
+
Returns:
+

+
A BytesIO buffer containing the extracted file’s content of the first file found.

Returns an empty BytesIO if the file is not found or an error occurs.

+
+
+

+
+
Return type:
+

io.BytesIO

+
+
Raises:
+
    +
  • FileNotFoundInZipError – Logs an error if the specified file is not found in the zip.

  • +
  • zipfile.BadZipFile – Logs an error if the zip file is invalid.

  • +
  • Exception – Logs any other unexpected errors.

  • +
+
+
+

Examples:

+
>>> extracted_file = extract_file_from_zip("archive.zip", "data.txt")
+>>> content = extracted_file.getvalue().decode('utf-8')
+
+
+
+ +
+
+port.helpers.extraction_helpers.find_item(d: dict[Any, Any], key_to_match: str) str
+

Finds the least nested value in a denested dictionary whose key contains the given key_to_match.

+
+
Parameters:
+
    +
  • d (dict[Any, Any]) – A denested dictionary to search in.

  • +
  • key_to_match (str) – The substring to match in the keys.

  • +
+
+
Returns:
+

+
The value of the least nested key containing key_to_match.

Returns an empty string if no match is found.

+
+
+

+
+
Return type:
+

str

+
+
Raises:
+

Exception – Logs an error message if an exception occurs during the search.

+
+
+

Examples:

+
>>> d = {"asd-asd-asd": 1, "asd-asd": 2, "qwe": 3}
+>>> find_item(d, "asd")
+"2"
+
+
+
+ +
+
+port.helpers.extraction_helpers.find_items(d: dict[Any, Any], key_to_match: str) list
+

Finds all values in a denested dictionary whose keys contain the given key_to_match.

+
+
Parameters:
+
    +
  • d (dict[Any, Any]) – A denested dictionary to search in.

  • +
  • key_to_match (str) – The substring to match in the keys.

  • +
+
+
Returns:
+

A list of all values whose keys contain key_to_match.

+
+
Return type:
+

list

+
+
Raises:
+

Exception – Logs an error message if an exception occurs during the search.

+
+
+

Examples:

+
>>> d = {"asd-1": "a", "asd-2": "b", "qwe": "c"}
+>>> find_items(d, "asd")
+["a", "b"]
+
+
+
+ +
+
+port.helpers.extraction_helpers.fix_ascii_string(input: str) str
+

Fixes the string encoding by removing non-ASCII characters.

+
+
Parameters:
+

input (str) – The input string that needs to be fixed.

+
+
Returns:
+

The fixed string with only ASCII characters, or the original string if an exception occurs.

+
+
Return type:
+

str

+
+
+

Examples:

+
>>> fix_ascii_string("Hello, 世界!")
+"Hello, !"
+
+
+
+ +
+
+port.helpers.extraction_helpers.fix_latin1_string(input: str) str
+

Fixes the string encoding by attempting to encode it using the ‘latin1’ encoding and then decoding it.

+
+
Parameters:
+

input (str) – The input string that needs to be fixed.

+
+
Returns:
+

The fixed string after encoding and decoding, or the original string if an exception occurs.

+
+
Return type:
+

str

+
+
+

Examples:

+
>>> fix_latin1_string("café")
+"café"
+
+
+
+ +
+
+port.helpers.extraction_helpers.json_dumper(zfile: str) pandas.DataFrame
+

Reads all JSON files in a zip file, flattens them, and combines them into a single DataFrame.

+
+
Parameters:
+

zfile (str) – Path to the zip file containing JSON files.

+
+
Returns:
+

A DataFrame containing flattened data from all JSON files in the zip.

+
+
Return type:
+

pd.DataFrame

+
+
Raises:
+

Exception – Logs an error message if an exception occurs during the process.

+
+
+

Examples:

+
>>> df = json_dumper("data.zip")
+>>> print(df.head())
+
+
+
+ +
+
+port.helpers.extraction_helpers.read_csv_from_bytes(json_bytes: BytesIO) list[dict[Any, Any]]
+

Reads CSV data from a BytesIO buffer and returns it as a list of dictionaries.

+
+
Parameters:
+

json_bytes (io.BytesIO) – A BytesIO buffer containing CSV data.

+
+
Returns:
+

+
A list of dictionaries, where each dictionary represents a row in the CSV.

Returns an empty list if parsing fails.

+
+
+

+
+
Return type:
+

list[dict[Any, Any]]

+
+
+

Examples

+
>>> buffer = io.BytesIO(b'name,age\nAlice,30\nBob,25')
+>>> data = read_csv_from_bytes(buffer)
+>>> print(data)
+[{'name': 'Alice', 'age': '30'}, {'name': 'Bob', 'age': '25'}]
+
+
+
+ +
+
+port.helpers.extraction_helpers.read_csv_from_bytes_to_df(json_bytes: BytesIO) pandas.DataFrame
+

Reads CSV data from a BytesIO buffer and returns it as a pandas DataFrame.

+
+
Parameters:
+

json_bytes (io.BytesIO) – A BytesIO buffer containing CSV data.

+
+
Returns:
+

A pandas DataFrame containing the CSV data.

+
+
Return type:
+

pd.DataFrame

+
+
+

Examples

+
>>> buffer = io.BytesIO(b'name,age\nAlice,30\nBob,25')
+>>> df = read_csv_from_bytes_to_df(buffer)
+>>> print(df)
+   name  age
+0  Alice   30
+1    Bob   25
+
+
+
+ +
+
+port.helpers.extraction_helpers.read_json_from_bytes(json_bytes: BytesIO) dict[Any, Any] | list[Any]
+

Reads JSON data from a BytesIO buffer.

+
+
Parameters:
+

json_bytes (io.BytesIO) – A BytesIO buffer containing JSON data.

+
+
Returns:
+

+
The parsed JSON data as a dictionary or list.

Returns an empty dictionary if parsing fails.

+
+
+

+
+
Return type:
+

dict[Any, Any] | list[Any]

+
+
+

Examples:

+
>>> buffer = io.BytesIO(b'{"key": "value"}')
+>>> data = read_json_from_bytes(buffer)
+>>> print(data)
+{'key': 'value'}
+
+
+
+ +
+
+port.helpers.extraction_helpers.read_json_from_file(json_file: str) dict[Any, Any] | list[Any]
+

Reads JSON data from a file.

+
+
Parameters:
+

json_file (str) – Path to the JSON file.

+
+
Returns:
+

+
The parsed JSON data as a dictionary or list.

Returns an empty dictionary if parsing fails.

+
+
+

+
+
Return type:
+

dict[Any, Any] | list[Any]

+
+
+

Examples:

+
>>> data = read_json_from_file("data.json")
+>>> print(data)
+{'key': 'value'}
+
+
+
+ +
+
+port.helpers.extraction_helpers.replace_months(input_string: str) str
+

Replaces Dutch month abbreviations with English equivalents in the input string.

+
+
Parameters:
+

input_string (str) – The input string containing potential Dutch month abbreviations.

+
+
Returns:
+

The input string with Dutch month abbreviations replaced by English equivalents.

+
+
Return type:
+

str

+
+
+

Examples:

+
>>> replace_months("15 mei 2023")
+"15 may 2023"
+
+
+
+ +
+
+port.helpers.extraction_helpers.sort_isotimestamp_empty_timestamp_last(timestamp_series: pandas.Series) pandas.Series
+

Creates a key for sorting a pandas Series of ISO timestamps, placing empty timestamps last.

+
+
Parameters:
+

timestamp_series (pd.Series) – A pandas Series containing ISO formatted timestamps.

+
+
Returns:
+

A Series of sorting keys, with -timestamp for valid dates and infinity for invalid/empty dates.

+
+
Return type:
+

pd.Series

+
+
+

Examples:

+
>>> df = df.sort_values(by="Date", key=sort_isotimestamp_empty_timestamp_last)
+
+
+
+ +
+
+

Port helpers

+
+
+port.helpers.port_helpers.donate(key: str, json_string: str) CommandSystemDonate
+

Initiates a donation process using the provided key and data.

+

This function triggers the donation process by passing a key and a JSON-formatted string +that contains donation information.

+
+
Parameters:
+
    +
  • key (str) – The key associated with the donation process. The key will be used in the file name.

  • +
  • json_string (str) – A JSON-formatted string containing the donated data.

  • +
+
+
Returns:
+

A system command that initiates the donation process. Must be yielded.

+
+
Return type:
+

CommandSystemDonate

+
+
+
+ +
+
+port.helpers.port_helpers.exit(code: int, info: str) CommandSystemExit
+

Exits Next with the provided exit code and additional information. +This if the code reaches this function, it will return to the task list in Next.

+
+
Parameters:
+
    +
  • code (int) – The exit code representing the type or status of the exit.

  • +
  • info (str) – A string containing additional information about the exit.

  • +
+
+
Returns:
+

A system command that initiates the exit process in Next.

+
+
Return type:
+

CommandSystemExit

+
+
+

Examples:

+
yield exit(0, "Success")
+
+
+
+ +
+
+port.helpers.port_helpers.generate_file_prompt(extensions: str) PropsUIPromptFileInput
+

Generates a file input prompt for selecting a file for a platform.

+

This function creates a bilingual (English and Dutch) file input prompt +that instructs the user to select a file they’ve received from a platform +and stored on their device.

+
+
Parameters:
+
    +
  • extensions (str) – A collection of allowed MIME types.

  • +
  • example (For) – “application/zip, text/plain, application/json”

  • +
+
+
Returns:
+

A file input prompt object containing +the description text and allowed file extensions.

+
+
Return type:
+

props.PropsUIPromptFileInput

+
+
+
+ +
+
+port.helpers.port_helpers.generate_retry_prompt(platform_name: str) PropsUIPromptConfirm
+

Generates a confirmation prompt for retrying file processing.

+

This function creates a bilingual (English and Dutch) confirmation prompt +when a file from a specific platform cannot be processed. It allows the user +to either try again with a different file or continue with the current file.

+
+
Parameters:
+

platform_name (str) – The name of the platform associated with the file +that couldn’t be processed. This is inserted into the prompt text.

+
+
Returns:
+

A confirmation prompt object containing +the message, and labels for the “OK” (try again) and “Cancel” (continue) buttons.

+
+
Return type:
+

props.PropsUIPromptConfirm

+
+
+
+ +
+
+port.helpers.port_helpers.generate_review_data_prompt(id: str, description: Translatable, table_list: list[PropsUIPromptConsentFormTable]) PropsUIPromptConsentForm
+

Generates a data review form with a list of tables and a description, including default donate question and button. +The participant can review these tables before they will be send to the researcher. If the participant consents to sharing the data +the data will be stored at the configured storage location.

+
+
Parameters:
+
    +
  • id (str) – will be used as part of the filename when the data is stored

  • +
  • table_list (list[props.PropsUIPromptConsentFormTable]) – A list of consent form tables to be included in the prompt.

  • +
  • description (props.Translatable) – A translatable description text for the consent prompt.

  • +
+
+
Returns:
+

A structured consent form object containing the provided table list, description, +and default values for donate question and button.

+
+
Return type:
+

props.PropsUIPromptConsentForm

+
+
+
+ +
+
+port.helpers.port_helpers.render_end_page()
+

Renders a thank you page, must be yielded.

+
+ +
+
+port.helpers.port_helpers.render_page(header_text: Translatable, body: PropsUIPromptRadioInput | PropsUIPromptConsentForm | PropsUIPromptFileInput | PropsUIPromptFileInputMultiple | PropsUIPromptConfirm | PropsUIPromptQuestionnaire) CommandUIRender
+

Renders the UI components for a donation page.

+

This function assembles various UI components including a header, body, and footer +to create a complete donation page. It uses the provided header text and body content +to customize the page.

+
+
Parameters:
+
    +
  • header_text (props.Translatable) – The text to be displayed in the header. +This should be a translatable object to support multiple languages.

  • +
  • ( (body) – props.PropsUIPromptRadioInput | +props.PropsUIPromptConsentForm | +props.PropsUIPromptFileInput | +props.PropsUIPromptFileInputMultiple | +props.PropsUIPromptConfirm | +props.PropsUIPromptQuestionnaire

  • +
  • ) – The main content of the page. It must be compatible with props.PropsUIPageDonation.

  • +
+
+
Returns:
+

A render command object containing the fully assembled page. Must be yielded.

+
+
Return type:
+

CommandUIRender

+
+
+
+ +
+
+

Validation

+

Contains classes to deal with input validation of DDPs

+

The idea of this module is to provide a uniform way to assign a validation status to a DDP validation +Which can be used and acted upon

+
+
+class port.helpers.validate.DDPCategory(id: str, ddp_filetype: DDPFiletype, language: Language, known_files: list[str])
+

Represents characteristics that define a DDP (Data Delivery Package) category.

+
+
Parameters:
+
    +
  • id (str) – Unique identifier for the DDP category.

  • +
  • ddp_filetype (DDPFiletype) – The file type of the DDP.

  • +
  • language (Language) – The language of the DDP.

  • +
  • known_files (List[str]) – A list of known files associated with this DDP category.

  • +
+
+
+

Examples

+
>>> category = DDPCategory("cat1", DDPFiletype.JSON, Language.EN, ["file1.json", "file2.json"])
+>>> print(category.id)
+cat1
+>>> print(category.language)
+<Language.EN: 1>
+
+
+
+ +
+
+class port.helpers.validate.DDPFiletype(value)
+

Enumeration of supported DDP file types.

+
+ +
+
+class port.helpers.validate.Language(value)
+

Enumeration of supported languages.

+
+ +
+
+class port.helpers.validate.StatusCode(id: int, description: str)
+

Represents a status code that can be used to set a DDP status.

+
+
Parameters:
+
    +
  • id (int) – The numeric identifier of the status code.

  • +
  • description (str) – A brief description of what the status code represents.

  • +
+
+
+

Examples

+
>>> status = StatusCode(0, "Success")
+>>> print(status.id)
+0
+>>> print(status.description)
+Success
+
+
+
+ +
+
+class port.helpers.validate.ValidateInput(all_status_codes: list[StatusCode], all_ddp_categories: list[DDPCategory], current_status_code: StatusCode | None = None, current_ddp_category: DDPCategory | None = None)
+

A class for validating input data against predefined categories and status codes.

+
+
Parameters:
+
    +
  • all_status_codes (List[StatusCode]) – A list of valid status codes.

  • +
  • all_ddp_categories (List[DDPCategory]) – A list of valid DDP categories.

  • +
  • current_status_code (Optional[StatusCode]) – The current status code. Defaults to None.

  • +
  • current_ddp_category (Optional[DDPCategory]) – The current DDP category. Defaults to None.

  • +
+
+
+
+
+ddp_categories_lookup
+

A lookup dictionary for DDP categories.

+
+
Type:
+

Dict[str, DDPCategory]

+
+
+
+ +
+
+status_codes_lookup
+

A lookup dictionary for status codes.

+
+
Type:
+

Dict[int, StatusCode]

+
+
+
+ +

Examples

+
>>> status_codes = [StatusCode(id=0, description="Success"), StatusCode(id=1, description="Error")]
+>>> ddp_categories = [DDPCategory(id="cat1", ddp_filetype=DDPFiletype.JSON, language=Language.EN, known_files=["file1.txt", "file2.txt"])]
+>>> validator = ValidateInput(all_status_codes=status_codes, all_ddp_categories=ddp_categories)
+
+
+
+
+get_status_code_id() int
+

Return the current assigned status code ID. Note: zero is always used for OK. +Non-zero otherwise.

+
+
Returns:
+

The ID of the current status code, or 1 if no status code is set.

+
+
Return type:
+

int

+
+
+

Examples

+
>>> validator.get_status_code_id()
+
+
+
+ +
+
+infer_ddp_category(file_list_input: list[str]) bool
+

Compares a list of files to a list of known files and infers the DDPCategory.

+
+
Parameters:
+

file_list_input (List[str]) – A list of input files to compare against known files.

+
+
Returns:
+

True if a valid DDP category is inferred, False otherwise. It sets the current_status_code +and current_ddp_category to either the DDP catogory match, or to an unknown category.

+
+
Return type:
+

bool

+
+
+

Examples

+
>>> validator.infer_ddp_category(["file1.txt", "file2.txt"])
+
+
+
+ +
+
+set_current_status_code_by_id(id: int) None
+

Set the status code based on the provided ID.

+
+
Parameters:
+

id (int) – The ID of the status code to set.

+
+
+

Examples

+
>>> validator.set_current_status_code_by_id(0)
+
+
+
+ +
+ +
+
+port.helpers.validate.validate_zip(ddp_categories: list[DDPCategory], path_to_zip: str) ValidateInput
+

Validates a DDP zip file against a list of DDP categories.

+

This function attempts to open and read the contents of a zip file, then uses +the ValidateInput class to infer the DDP category based on the files in the zip. +If the zip file is invalid or cannot be read, it sets an error status code (an integer greather than 0).

+
+
Parameters:
+
    +
  • ddp_categories (List[DDPCategory]) – A list of valid DDP categories to compare against.

  • +
  • path_to_zip (str) – The file path to the zip file to be validated.

  • +
+
+
Returns:
+

An instance of ValidateInput containing the validation results.

+
+
Return type:
+

ValidateInput

+
+
Raises:
+

zipfile.BadZipFile – This exception is caught internally and results in an error status code.

+
+
+

Examples

+
>>> categories = [DDPCategory(id="cat1", ddp_filetype=DDPFiletype.JSON, language=Language.EN, known_files=["file1.txt", "file2.txt"])]
+>>> result = validate_zip(categories, "path/to/valid.zip")
+>>> result.get_status_code_id()
+0
+
+
+
>>> result = validate_zip(categories, "path/to/invalid.zip")
+>>> result.get_status_code_id()
+1
+
+
+
+ +
+
+ + +
+
+
+
+ + + + + + +
+
+
+ +
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/api/index.html b/api/index.html new file mode 100644 index 00000000..1a9a6045 --- /dev/null +++ b/api/index.html @@ -0,0 +1,246 @@ + + + + + + + + API Reference — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + + +
+ +
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/articles/creating-your-own-data-donation-task.html b/articles/creating-your-own-data-donation-task.html new file mode 100644 index 00000000..f19e7413 --- /dev/null +++ b/articles/creating-your-own-data-donation-task.html @@ -0,0 +1,573 @@ + + + + + + + + Creating your own donation task — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

Creating your own donation task

+

The donation task is at the core of a data donation study. +It is the step where the participant is actually going to donate their data.

+

The data donation task goes as follows:

+
    +
  1. The participant goes to your data donation task app in a browser

  2. +
  3. The participant is prompted to submit their data download package (DDP)

  4. +
  5. A Python script you wrote, extracts the data you need for your research

  6. +
  7. That data gets presented to the participant on screen

  8. +
  9. The participants decides to donate and you receive the data

  10. +
+
+

Using the data donation task in a data donation study

+

The data donation task needs to be used together with Next. Next is the backend for the data donation task. With Next you can configure a data donation study, i.e. configure:

+
    +
  • Your data donation task

  • +
  • An information page

  • +
  • An informed consent page

  • +
  • A privacy policy

  • +
  • Instruction manuals for participants

  • +
  • Configure where the data should go

  • +
  • and more

  • +
+

You can find more information on how to deploy a study in the wiki

+
+
+

How does the data donation task work?

+

The idea behind the data donation task +The data donation task repository is in essence a toolkit with which you can build your own custom data donation task. +It is designed as a toolkit because researchers need to tailor the data donation task to their own interests; they are interested in different DDPs and in different data within those DDPs.

+

This toolkit approach has several benefits:

+
    +
  1. It allows for a bespoke experience for the participant

  2. +
  3. You can only extract (and possibly aggregate) the data you need for your study, which we think is important in order to preserve the privacy of the participant and is often a requirement from your data steward/ethical review board

  4. +
+

These benefits come at the cost of added complexity compared to a one size fits all approach that extracts most or all data from the participant.

+

The design of the data donation task +The data donation task contains reusable components (such as: a screen that prompts the participant to submit their DDP and a screen with tables that the participants need to review prior to donation) that you can use and combine in creative ways to make your own study. +These components are combined in a Python script that is created by the researcher.

+

On a high level the script works as follows:

+
    +
  1. The Python script determines which user interface (UI) component needs to be shown to the participant

  2. +
  3. Whenever the participant is done interacting with the UI component, the result of that interaction is returned to the script

  4. +
  5. The script handles the return result and determines the next UI component that the participant needs to see or interact with; go back to step 1 until the end of the donation task

  6. +
+

The architecture of the data donation task +The data donation task is a web application (built with React and Pyodide) that completely runs in the browser of the participant. +The Python script and the UI components will run completely in the browser of the participant. +Data is only sent to the server upon the participant clicking a consent button.

+
+
+

Start writing your first data donation task script

+

After you have forked or cloned and installed this repository (see instruction) you can start creating your own donation task.

+

You can create your own study by changing and/or adapting the code in the following directory port/src/framework/processing/py/port/ +This directory contains the following files:

+
    +
  • script.py: Contains your donation task logic; which screen the participants will see and in what order

  • +
  • api/props.py: Contains all the UI elements you can use; you can use this file for reference

  • +
  • api/commands.py: Contains the Render and the Donate commands

  • +
  • main.py: The main driver of your donation task, you don’t need to touch this file

  • +
+
+

script.py

+

script.py is the most important file and the one we will be working with the most

+

Lets look at a full example of a script.py. In this example we will be extracting data from a fictional DDP. +Participants are asked to submit a zip file (any zip file will do in this case), and we will extract the file names and some meta data from this zip file. +In a real study you would extract something substantial from the data.

+

script.py must contain a function called process; this function determines the whole data donation task from start to finish (which screens the participant will see and in what order, and what kind of data extraction will take place). +At the time of writing this example is also the default script.py.

+

In this example process defines the following data donation task:

+
    +
  1. Ask the participant to submit a zip file

  2. +
  3. Perform validation on the submitted zip file, if not valid return to step 1

  4. +
  5. Extract the data from the submitted zip file

  6. +
  7. Render the extract data on screen in a table

  8. +
  9. Send the data to the data storage upon consent

  10. +
+

Although these can vary per data donation task, they will be more or less similar.

+

Below you can find the annotated example process function:

+
# script.py
+import port.api.props as props
+from port.api.commands import (CommandSystemDonate, CommandUIRender, CommandSystemExit)
+
+import pandas as pd
+import zipfile
+
+def process(session_id: str):
+    platform = "Platform of interest"
+
+    # Start of the data donation task
+    while True:
+        # Ask the participant to submit a file
+        file_prompt = generate_file_prompt(platform, "application/zip, text/plain")
+        file_prompt_result = yield render_page(platform, file_prompt)
+
+        # If the participant submitted a file: continue
+        if file_prompt_result.__type__ == 'PayloadString':
+
+            # Validate the file the participant submitted
+            # In general this is wise to do 
+            is_data_valid = validate_the_participants_input(file_prompt_result.value)
+
+            # Happy flow (all is well):
+            # The file the participant submitted is valid
+            if is_data_valid == True:
+
+                # Extract the data you as a researcher are interested in, and put it in a pandas DataFrame
+                # Show this data to the participant in a table on screen
+                # The participant can now decide to donate
+                extracted_data = extract_the_data_you_are_interested_in(file_prompt_result.value)
+                consent_prompt = generate_consent_prompt(extracted_data)
+                consent_prompt_result = yield render_page(platform, consent_prompt)
+
+                # If the participant wants to donate the data gets donated
+                if consent_prompt_result.__type__ == "PayloadJSON":
+                    yield donate(f"{session_id}-{platform}", consent_prompt_result.value)
+
+                break
+
+            # Sad flow
+            # The data was not valid, ask the participant to retry
+            if is_data_valid == False:
+                retry_prompt = generate_retry_prompt(platform)
+                retry_prompt_result = yield render_page(platform, retry_prompt)
+
+                # The participant wants to retry: start from the beginning
+                if retry_prompt_result.__type__ == 'PayloadTrue':
+                    continue
+                # The participant does not want to retry or pressed skip
+                else:
+                    break
+
+        # The participant did not submit a file and pressed skip
+        else:
+            break
+
+    yield exit_port(0, "Success")
+    yield render_end_page()
+
+
+
+

The functions used in process

+

These are all the functions used in process together they make up script.py.

+
+extract_the_data_you_are_interested_in +
def extract_the_data_you_are_interested_in(zip_file: str) -> pd.DataFrame:
+    """
+    This function extracts the data the researcher is interested in
+
+    In this case we extract from the zipfile:
+    * The filesnames
+    * The compressed file size
+    * The file size
+
+    You could extract anything here
+    """
+    names = []
+    out = pd.DataFrame()
+
+    try:
+        file = zipfile.ZipFile(zip_file)
+        data = []
+        for name in file.namelist():
+            names.append(name)
+            info = file.getinfo(name)
+            data.append((name, info.compress_size, info.file_size))
+
+        out = pd.DataFrame(data, columns=["File name", "Compressed file size", "File size"])
+
+    except Exception as e:
+        print(f"Something went wrong: {e}")
+
+    return out
+
+
+
+
+validate_the_participants_input +
def validate_the_participants_input(zip_file: str) -> bool:
+    """
+    Check if the participant actually submitted a zipfile
+    Returns True if participant submitted a zipfile, otherwise False
+
+    In reality you need to do a lot more validation.
+    Some things you could check:
+    - Check if the the file(s) are the correct format (json, html, binary, etc.)
+    - If the files are in the correct language
+    """
+
+    try:
+        with zipfile.ZipFile(zip_file) as zf:
+            return True
+    except zipfile.BadZipFile:
+        return False
+
+
+
+
+render_end_page +
def render_end_page():
+    """
+    Renders a thank you page
+    """
+    page = props.PropsUIPageEnd()
+    return CommandUIRender(page)
+
+
+
+
+
+render_page +
def render_page(platform: str, body, progress: int):
+    """
+    Renders the UI components
+    """
+    header = props.PropsUIHeader(props.Translatable({"en": platform, "nl": platform }))
+    footer = props.PropsUIFooter(progress)
+    page = props.PropsUIPageDonation(platform, header, body, footer)
+    return CommandUIRender(page)
+
+
+
+
+generate_retry_prompt +
def generate_retry_prompt(platform: str) -> props.PropsUIPromptConfirm:
+    """
+    Generates a retry screen, this can be used if validation failed for example.
+    You can aks the participant to try again, and go back to the submit file prompt
+    """
+    text = props.Translatable({
+        "en": f"Unfortunately, we cannot process your {platform} file. Continue, if you are sure that you selected the right file. Try again to select a different file.",
+        "nl": f"Helaas, kunnen we uw {platform} bestand niet verwerken. Weet u zeker dat u het juiste bestand heeft gekozen? Ga dan verder. Probeer opnieuw als u een ander bestand wilt kiezen."
+    })
+    ok = props.Translatable({
+        "en": "Try again",
+        "nl": "Probeer opnieuw"
+    })
+    cancel = props.Translatable({
+        "en": "Continue",
+        "nl": "Verder"
+    })
+    return props.PropsUIPromptConfirm(text, ok, cancel)
+
+
+
+
+generate_file_prompt +
def generate_file_prompt(platform: str) -> props.PropsUIPromptFileInput:
+    """
+    Generates a prompt that asks the participant to input a file
+    """
+    description = props.Translatable({
+        "en": f"Please follow the download instructions and choose the file that you stored on your device. Click “Skip” at the right bottom, if you do not have a {platform} file. ",
+        "nl": f"Volg de download instructies en kies het bestand dat u opgeslagen heeft op uw apparaat. Als u geen {platform} bestand heeft klik dan op “Overslaan” rechts onder."
+    })
+ 
+    return props.PropsUIPromptFileInput(description, "application/zip, text/plain")
+
+
+
+
+generate_consent_prompt +
def generate_consent_prompt(df: pd.DataFrame) -> props.PropsUIPromptConsentForm:
+    """
+    Generates a prompt screen with table(s) with the extract data for the participant to review
+    """
+    table_title = props.Translatable({
+        "en": "Zip file contents",
+        "nl": "Inhoud zip bestand"
+    })
+    table = props.PropsUIPromptConsentFormTable("zip_contents", table_title, df)
+    return props.PropsUIPromptConsentForm([table], [])
+
+
+
+
+donate +
def donate(key, json_string):
+    """
+    Sends data to the backend
+    """
+    return CommandSystemDonate(key, json_string)
+
+
+
+
+exit_port +
def exit_port(code, info):
+    """
+    When in Next this function exits the data donation task, and lets the participant return to Next
+    """
+    return CommandSystemExit(code, info)
+
+
+
+
+
+

Start writing your own script.py using the api

+

Now that you have seen a full example, you can start to try and create your own data donation task. With the elements from the example you can already build some pretty intricate data donation tasks. +Start creating your own script.py by adapting this example to your own needs, for example, instead of file names you could extract data you would actually like to extract yourself.

+

If you want to see what UI elements are available to you, check out api/props.py. In general you need to construct your own pages (prompts) and render them with render_page (don’t forget yield).

+
+
+

The usage of yield in script.py

+

Did you notice yield instead of return? yield makes sure that whenever the code resumes after a page render, it starts where it left off. +If you render a page you need to use yield instead of return, just like in the example.

+
+
+

Install Python packages

+

The data donation task runs in the browser of the participant, it is important to understand that when Python is running in your browser it is not using the Python version you have installed on your system. +The data donation task is using Pyodide this is Python compiled to web assembly that runs in the browser. +This means that packages you have available on your system install of Python, won’t be available in the browser.

+

If you want to use external packages they should be available for Pyodide, you can check the list of available packages here. +If you have found a package you want to use you can install it by adding it to the array in the loadPackages function in src/framework/processing/py_worker.js as shown below:

+
// src/framework/processing/py_worker.js
+function loadPackages() {
+  console.log('[ProcessingWorker] loading packages')
+  // These packages are now installed and usable: micropip, numpy, pandas, and lxml
+  return self.pyodide.loadPackage(['micropip', 'numpy', 'pandas', 'lxml'])
+}
+
+
+

You can now import the packages as you would normally do in Python.

+
+
+

Try the donation task from the perspective of the participant

+

If you want to try out the above example, follow the installation instructions and start the server with npm run start.

+
+
+

Tips when writing your own script.py

+

Split the extraction logic from the data donation task logic +You can define your own modules where you create your data extraction, you can import those modules in script.py

+

Develop in separate script +You are better off engineering your extraction logic in different scripts and put them in script.py whenever you are finished developing. Only do small tweaks in script.py

+

Use the console in your browser +In case of errors they will show up in the browser console. You can use print in the Python script and it will show up in the browser console.

+

Take the diverse nature of DDPs into account +At least check a couple of DDPs to make sure they are reflective of the population you are interested in. Things you can check are: data formats (html, json, plain text, csv, etc.), language settings (they sometimes lead to json keys being in a different language or file names other than English).

+

Keep your code efficient +If your code is not efficient the extraction will take longer, which can result in a bad experience for the participant. In practice I have found that in most cases it’s not really an issue, and you don’t have to pay that much attention to the efficiency of your code. +Where efficiency really matters is when you have to parse huge html files, beautifulsoup4 is a library that is commonly used to do this, this library is too slow however. As an alternative you can use lxml which is fast enough.

+

Don’t let your code crash +You cannot have your script crash, if your Python script crashes the task stops as well. This is not a good experience for your participant. +For example in the code you do the following: value_i_want_to_extract = extracted_data_in_a_dictionary["interesting key"] if the key "interesting key" does not exist, because it does not occur in the data of the participant, the script crashes and the participant cannot continue the data donation task.

+

Data donation checklist +Creating a good data donation task can be hard due to the variety of DDPs you will encounter. +Check out the following wiki article

+
+
+
+

Limits of the data donation task

+

Currently the data donation task has the following limitations:

+
    +
  • The data donation task is a frontend, you need to package this together with Next to deploy it. If you want to use it with your own backend you have to make the data donation task compatible with it yourself. A tutorial on how to do this might be added in the future.

  • +
  • The data donation task is running in the browser of the participant, which brings limitations, such as constraints on the files participants can submit. The limit is around 2GiB; that’s what Pyodide can handle. But less is better. So keep that in mind whenever you, for example, want to collect data from YouTube: your participants should exclude their own personal videos from their DDP (including these would result in a huge number of separate DDPs of around 2GiB).

  • +
  • The data donation currently works with text data, nothing keeps us from using other formats in the future (but the constraints on file sizes are still there)

  • +
  • The data donation task should run fine on mobile, however its not optimized for it, you might need to do some tweaking to the UI yourself

  • +
+
+
+ + +
+
+
+
+ + + + + + +
+
+
+ +
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/articles/data-donation-checklist.html b/articles/data-donation-checklist.html new file mode 100644 index 00000000..e4c35c04 --- /dev/null +++ b/articles/data-donation-checklist.html @@ -0,0 +1,220 @@ + + + + + + + + Data donation checklist — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

Data donation checklist

+

Writing data donation scripts can be challenging due to the diverse nature of data download packages (DDPs) your participants will try to submit. +If your participants try to submit a DDP that you did not anticipate your extraction might fail, or your script might crash, in that case you will end up with a non-response and a frustrated participant.

+

In order to minimize the number of problems you encounter, we developed a list of points you can pay attention to when developing scripts or performing your own study.

+

Inspect at least 5 DDPs from persons in the population you are going to sample from

+

Because DDPs will be different for different people, you need to inspect a couple of them (preferably from people from your target population) to learn what those differences are. +You need to verify that the extraction you have in mind works for all DDPs you might encounter.

+

DDP formats will change over time

+

DDP formats will change over time. Organizations providing you with a DDP are under no obligation to keep the format of their DDP the same. The formats could change during data collection, be mindful of this fact. +Before going live with a study request a fresh package of your own and check whether your extraction still works as expected, and in the worst case scenario be ready to change your script during your data donation study.

+

Validate the DDPs your participant submit and log the results

+

This is crucial to do for two reasons:

+
    +
  1. If there are multiple versions of a DDP, you need to know which version the participant submitted so you can handle it appropriately. As an example consider the Youtube DDP: depending on the language setting files in the DDP are named differently. Another example is for the Instagram DDPs, keys in json files can be different depending on the language.

  2. +
  3. You probably want to give participants an incentive whenever they did a serious attempt of donating their data. In order to know whether they did a serious attempt you need to validate their DDP to see whether they submitted a package you expect. Example: a participant wants to participate in your study, but finds out it’s way too much work to request and download a DDP. They submit a random zipfile containing garbage, and they claim they completed the process successfully. The only way for you to verify whether the participant gave it a fair shot is to check what they submitted and why that did not result in you receiving data from that participant (if you had received data it would be easy to verify they participated).

  4. +
+

Write your extraction so it matches the DDP request defaults

+

Often when requesting a DDP participants have a choice, for example you can request a portion of the DDP in html format or json format. The most human readable format (html) is often the default. It will be worthwhile to tailor your extraction script to the request defaults because participants will not read your instructions where you tell them to get the DDP in json format instead of html. This goes wrong quite often.

+

Our way of dealing with this is to bite the bullet and deal with the default DDP formats. This prevents mistakes and keeps the request instructions for the participant as simple as possible. +Another way of dealing with it is to provide feedback to the participant whenever you detect they submitted the DDP in a format you did not anticipate.

+

Make sure your code will not crash

+

A crash in your code causes the data donation task to be stuck. The participant will see an error screen with the stacktrace displayed. You don’t want this to happen. Careful programming can prevent your code from crashing. +A common cause for crashes is trying to access a key value pair in a dict that does not exist or sorting a pd.DataFrame column that does not exist. Most crashes will be caused by your expectation that the extraction is in a certain format, while in some cases it won’t be.

+

Make the least amount of assumptions possible about the data in a DDP

+

The more assumptions you make about the data the more opportunities your code has to fail. Some examples:

+
    +
  • Date parsing: Only parse date when its absolutely required. Date formats can be wildly different between participants, anticipating them all or writing code that can parse all dates you might encounter is less trivial than you might think.

  • +
  • Files in a zip: You can look for file paths you are interested in, or only file names you are interested in. If the file name is unique, there is no need to check for the full file path. Example: if the folder structure in a zip changes but files remain the same, the code that looks only at file names will still work.

  • +
  • Nested dictionaries: Sometimes you are interested in a value in a dict that is heavily nested. An approach you can take, instead of doing dict["key1"]["key2"]["key3"]["key_that_sometimes_is_called_something_different"]["key_which_value_you_want"], is to first denest or flatten the dictionary and start looking directly for “key_which_value_you_want”. You can find an example here, look for dict_denester and find_items.

  • +
+

The researcher is responsible for providing you with DDPs and should be knowledgeable about the data

+

If you are reading this checklist chances are you are going to create a data donation task. It could be the case that you are not the principal investigator of the project but just an engineer or person with some coding experience helping the researcher out. Some researchers expect you to be the one to have knowledge about a certain DDP they are interested in. Some researchers believe that because you are working with data, you also have that data available to you, know what it all means or whether data is present in a DDP. This is of course not always the case. Communicate clearly to the researcher that they are responsible for the contents of their study, they should know what the data means and that they should tell you what to extract. In some cases the researcher might not even use the platform they are interested in, if that is the case, tell the researcher to start using the platform they are interested in so they can start instructing you on what to extract.

+

Test a study carefully before you go live

+

All researchers involved in the research group should test the study before you go live. A data donation study has more points of failure compared to traditional survey research, therefore it’s crucial that every researcher that is involved will test the complete data donation flow and shares their findings with you.

+
+ + +
+
+
+
+ + +
+
+ + + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/articles/deployment.html b/articles/deployment.html new file mode 100644 index 00000000..cbf99dbb --- /dev/null +++ b/articles/deployment.html @@ -0,0 +1,262 @@ + + + + + + + + Deployment of the data donation task — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

Deployment of the data donation task

+

This article will discuss the options you have when using the data donation task in a data donation study. +The data donation task is designed to be used with Next.

+
+

The data donation task with Next

+

Next is a software as a service platform developed by Eyra. +As a researcher you can log in to Next and configure a data donation study, this means:

+
    +
  1. Configuring a landing zone for your participants: I.e. a Consent form page, an information page, a privacy policy

  2. +
  3. Configure a task list for your participants to complete: After the landing zone participants see a list of tasks they have to complete, typically these tasks are: viewing instructions on how to request and download data from a specific platform and administering the data donation task that you developed using this repository

  4. +
  5. Configuring where the donated data should be stored. Next has solutions for: AWS, Azure and Yoda.

  6. +
+

After configuration participants can be sent to Next with a unique id in the url. This unique id will be used when storing the data, so you know who donated their data

+
+

Next as a paid service

+

You can use Next as a paid service provided by Eyra. +Please contact Eyra if this is something you are interested in.

+
+
+

Self service Next (community version) on Surf Research Cloud

+

Next is available as an offering on Surf Research Cloud available for Researchers at Dutch universities and universities of applied sciences. +Dutch researchers can apply for an EINFRA grant and get access to Research cloud this way. You can apply for an EINFRA grant here and click “Straight to the request portal”.

+

This offering comes with no service or warranties. Contact datadonation.eu if you are interested in setting this up.

+
+
+

Self service Next (community version)

+

Next is a free and open source tool and you could host it yourself. You can find Next here

+
+
+

Which option should I choose?

+
    +
  • Next as a paid service: If you have research budget; want to be unburdened and get your data donation study done, this is the best option.

  • +
  • Self service community Next on Surf Research Cloud: You are a researcher at a Dutch university with no budget this is the best option. When choosing this option you have to realize that it comes with no service or warranties, you have to know what you are doing.

  • +
  • Self service community Next: If you want to provide Next as a service to your organization.

  • +
+
+
+

Add data donation task to your data donation study on Next

+

After you have created your data donation task with this repository, you can use this task directly in Next. You can do this as follows:

+
    +
  1. In the data donation task run the command npm run release, this creates a zip file named release.zip

  2. +
  3. In Next when configuring your data donation study, go to work flow and create a new item task list item called data donation task

  4. +
  5. In the newly created task list item select the release.zip you have just created

  6. +
+

Your data donation task list item is configured!

+
+
+
+

Use the data donation task without Next

+

The data donation task can be adapted so it works with your own bespoke back end. A tutorial on how to do this might be added in the future.

+
+
+ + +
+
+
+
+ + + + + + +
+
+ + + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/articles/index.html b/articles/index.html new file mode 100644 index 00000000..b73beb20 --- /dev/null +++ b/articles/index.html @@ -0,0 +1,224 @@ + + + + + + + + Articles — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + + + + + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/articles/installation.html b/articles/installation.html new file mode 100644 index 00000000..12d25a9f --- /dev/null +++ b/articles/installation.html @@ -0,0 +1,300 @@ + + + + + + + + Installation — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

Installation

+

This guide covers the installation of the data donation task locally, so you can start creating your own data donation study!

+
+

Installation Guide for the Pre-requisites

+

You need to install the following software:

+
    +
  • Python: Make sure it is at least version 3.10

  • +
  • Node.js: Make sure it is at least version 16

  • +
  • Poetry: It is a build system for Python packages that the data donation task uses.

  • +
+

Below you can find more detailed instructions on how to install the required software depending on your operating system. +These instructions are just suggestions, always prefer the official instructions that suit your situation best.

+
+

Linux

+

You can install Python and Node.js from the official repositories of your distribution. Here are the general steps:

+
    +
  1. Open your terminal

  2. +
  3. Use your package manager to install Python and Node.js

  4. +
  5. Install Poetry using pipx, see the instruction manual

  6. +
+
+
+

Mac OSX

+

If you are using a Mac OSX, you can install Python and Node.js using the HomeBrew package manager. Follow these steps:

+
    +
  1. Open your terminal

  2. +
  3. Install HomeBrew following instructions if you haven’t already

  4. +
  5. Install Python and Node.js by running: brew install python node

  6. +
  7. Install Poetry using pipx, see the instruction manual

  8. +
+
+
+

Windows

+

In order to develop on Windows we recommend using Windows Subsystem for Linux (WSL) in combination with VSCode. +Windows subsystem for Linux is a convenient way of running Linux on Windows. +This section will contain a bit more context because the steps might be less familiar to Windows only users.

+

If you are already familiar with WSL/Linux, VSCode or both, the installation won’t give you too much trouble. +If you are completely new to WSL (or Linux) expect a certain amount of problem solving you have to do. +Key topics to understand are: WSL, and the Ubuntu basics; knowledge on these topics will help you a lot.

+
    +
  1. Install WSL, see the official installation instructions

  2. +
  3. Install the default Linux distro (Ubuntu 22.04 at the time of writing) and choose a username and password

  4. +
  5. Download and install VSCode

  6. +
  7. Connect VSCode to WSL, see instructions

  8. +
  9. Now you can follow the instructions for Linux. Note that Python will be already installed for you

  10. +
+

In theory these steps should cause no problems, but in reality there are a couple of issues you could run into. I will discuss some of the ones I encountered here:

+
    +
  • You have to be an administrator of your own device. If you are not an administrator you cannot continue the installation

  • +
  • In order to install WSL, kernel virtualization needs to be on. You can go into the Windows Task Manager and check whether it is on. If it's not on, you have to turn it on in the BIOS of your motherboard. Check what CPU you have (AMD or Intel) and check what the setting is called. If the setting is not present in the BIOS your CPU might not support virtualization, which means you cannot run WSL

  • +
  • If you have WSL 1 installed make sure you continue with WSL 2

  • +
  • Make sure you don’t forget the username and password you chose during the installation

  • +
  • If you have VSCode open make sure you are connected to WSL, you can check this by looking at the “><” icon in the lower left corner of VSCode

  • +
  • Remember that if you are connected to WSL with VSCode you are working in Ubuntu. Programs and files are not shared between Windows and Ubuntu, meaning a program installed on Windows is not available on Ubuntu and vice versa.

  • +
  • Remember not to use PowerShell when connected to WSL; use bash instead

  • +
  • If you see error messages related to Windows in the terminal (something with cmd.exe for example), you know that Ubuntu is trying to open a program on Windows. This will never work. This is happening because Windows manipulates the PATH variable on Ubuntu that contains information about where the programs Ubuntu can open are. Example: you want to check which version of node you have node -v and you get an error with cmd.exe in the error message. Solutions: uninstall the windows version of the Node.js or manipulate the PATH variable so it does not try to open the Windows version of Nodejs. How to do that is outside the scope of this manual.

  • +
  • To run Port you need Node.js version 18; this version is not in the official Ubuntu 22.04 repositories. See for example this guide on how to get Node.js version 18. If you run into errors, you are expected to search for them and solve them

  • +
+
+

Don’t want to use WSL?

+

That’s completely fine too, you can change the commands in package.json so they work on Windows instead.

+
+
+
+
+

Installation of the data donation task

+

If you have the Pre-requisites installed the installation of the data donation task should be straightforward.

+
    +
  1. Clone the repository:

  2. +
+
git clone https://github.com/d3i-infra/data-donation-task.git
+
+
+
    +
  1. Install the dependencies by running the following commands:

  2. +
+
cd ./data-donation-task
+npm install
+
+
+
    +
  1. Start a local web server to serve the data donation app:

  2. +
+
npm run start
+
+
+

You can now go to the browser: http://localhost:3000 and you should be greeted by a mock data donation task

+
+
+ + +
+
+
+
+ + + + + + +
+
+ + + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/articles/introduction-to-data-donation.html b/articles/introduction-to-data-donation.html new file mode 100644 index 00000000..3ac9c3a7 --- /dev/null +++ b/articles/introduction-to-data-donation.html @@ -0,0 +1,189 @@ + + + + + + + + Introduction to Data Donation — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

Introduction to Data Donation

+

We have produced 2 videos detailing the concept of data donation, accessible on our website at datadonation.eu/data-donation.

+

Additionally, our website provides guidance on how to set up a data donation study; simply refer to the “Prepare a study” tab.

+
+ + +
+
+
+
+ + +
+
+
+
+
+ + + <Articles + +
+ +
+ + Installation> + +
+
+
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/articles/next-in-docker.html b/articles/next-in-docker.html new file mode 100644 index 00000000..9eadaf47 --- /dev/null +++ b/articles/next-in-docker.html @@ -0,0 +1,386 @@ + + + + + + + + Try out Next with Docker — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

Try out Next with Docker

+

This tutorial outlines how you can run Next in a docker container.

+

This is great for trying out the Next platform and will show you the necessary settings so you could use it in production.

+
+

Prerequisites

+

In order for you to try out Next you need to set up some prerequisites.

+
+

Unsplash

+

Configure a developer account at unsplash and get an API key. You can do this for free.

+

Unsplash is used as the source for banner images used to customize studies.

+
+
+

Google OIDC

+

Configure a google OIDC connect application in the google cloud console. For the details check the official instructions.

+

Google OIDC (OpenID Connect) is used to manage user authentication and account sign-ins.

+
+
+
+

Run Next in a Docker container

+

In this step, we will create and run the necessary containers using Docker Compose.

+

We are going to create a folder with the following structure:

+
.
+├── docker-compose.yaml
+├── proxy
+│   ├── certs
+│   │   ├── nginx-selfsigned.crt
+│   │   └── nginx-selfsigned.key
+│   └── conf
+│       └── nginx.conf
+
+
+

In the next step we are going to create the files.

+
+

Build the Next Docker image

+

Clone or fork Next

+

cd into /core

+

and build the image with:

+
docker build  --build-arg VERSION=1.0.0 --build-arg BUNDLE=self . -t self-d3i:latest
+
+
+
+
+

Setup certificates for TLS

+

Create certificates and put them in proxy/certs

+
openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout nginx-selfsigned.key -out nginx-selfsigned.crt
+
+
+
+
+

Nginx configuration

+

We are going to use Nginx as reverse proxy.

+

Nginx will be used to provide TLS for our HTTP connections.

+

Paste the following nginx configuration in proxy/conf:

+
# nginx.conf
+events {}
+http {
+    server {
+        listen 80;
+        listen [::]:80;
+        server_name localhost;
+        
+        # Redirect all HTTP requests to HTTPS
+        return 301 https://$server_name$request_uri;
+    }
+
+    server {
+        server_name localhost;
+        
+        if ($scheme != "https") {
+            return 301 https://$host$request_uri;
+        }
+        
+        location / {
+          allow all;
+          proxy_pass                http://app:8000;
+          proxy_set_header          X-Forwarded-Proto $scheme;
+          proxy_set_header          X-Forwarded-For $remote_addr;
+          proxy_set_header          X-Real-IP $remote_addr;
+          proxy_set_header          Host $http_host;
+          proxy_http_version        1.1;
+          proxy_set_header          Upgrade $http_upgrade;
+          proxy_set_header          Connection "upgrade";
+          proxy_max_temp_file_size  1m;
+        }
+        
+        listen 443 ssl;
+        ssl_certificate /etc/nginx/certs/nginx-selfsigned.crt;
+        ssl_certificate_key /etc/nginx/certs/nginx-selfsigned.key;
+    }
+}
+
+
+

This Nginx configuration works with websocket connections which Next (Phoenix web application) uses.

+
+
+

Docker compose yaml

+

Now create the docker-compose.yaml:

+
#docker-compose.yaml
+services:
+  app:
+    image: self-d3i:latest
+    container_name: self-d3i
+    restart: always
+    environment:
+      APP_NAME: next
+      APP_DOMAIN: localhost
+      APP_MAIL_DOMAIN: "@gmail"
+      APP_ADMINS: youremail@gmail.com
+      DB_USER: user
+      DB_PASS: password
+      DB_HOST: db
+      DB_NAME: test_database
+      SECRET_KEY_BASE: "aUMZobj7oJn58XIlMGVcwTYrCsAllwDCGlwDCGlwDCGwDChdhsjahdghaggdgdGt7MoQYJtJbA="
+      STATIC_PATH: "/tmp"
+      UNSPLASH_ACCESS_KEY: "<your-unsplash-api-key>"
+      UNSPLASH_APP_NAME: "<your-unsplash-app-name>"
+      GOOGLE_SIGN_IN_CLIENT_ID: "<your-google-oidc-client-id>"
+      GOOGLE_SIGN_IN_CLIENT_SECRET: "<your-google-oidc-client-secret>"
+      STORAGE_SERVICES: "builtin, yoda, azure"
+    volumes:
+      - app_data:/tmp
+    depends_on:
+      - db
+
+  db:
+    image: postgres:latest
+    container_name: db-next
+    restart: always
+    environment:
+      POSTGRES_USER: user
+      POSTGRES_PASSWORD: password
+      POSTGRES_DB: test_database
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+
+  proxy:
+    image: nginx:latest
+    container_name: nginx
+    ports:
+      - 443:443
+    volumes:
+      - ./proxy/conf/nginx.conf:/etc/nginx/nginx.conf
+      - ./proxy/certs:/etc/nginx/certs
+    depends_on:
+      - app
+
+volumes:
+  postgres_data:
+  app_data:
+
+
+

and replace the following variables with the values you obtained in the previous steps:

+
UNSPLASH_ACCESS_KEY: "<your-unsplash-api-key>"
+UNSPLASH_APP_NAME: "<your-unsplash-app-name>"
+GOOGLE_SIGN_IN_CLIENT_ID: "<your-google-oidc-client-id>"
+GOOGLE_SIGN_IN_CLIENT_SECRET: "<your-google-oidc-client-secret>"
+
+
+

If you want to learn more about the variables you can read the documentation.

+

Now you are ready to start the containers with:

+
docker compose up
+
+
+

Go to https://localhost and if everything went well you should see Next.

+

Note: because you self-signed your TLS certificates your browser will complain: accept all the risks and continue.

+
+
+
+

Next steps in Next

+

Now you can play around in Next. If you want to login as admin go to /admin/login.

+
+
+ + +
+
+
+
+ + + + + + +
+
+ + + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/articles/visualizations.html b/articles/visualizations.html new file mode 100644 index 00000000..e3fc260e --- /dev/null +++ b/articles/visualizations.html @@ -0,0 +1,419 @@ + + + + + + + + Adding data visualizations — Port 1.0.0 documentation + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

Adding data visualizations

+

You can add data visualizations to the consent form page, that will be shown below a data table. These visualizations will dynamically aggregate and visualize the data, responding to search queries and deleted items.

+

Good visualizations can help participants to see and explore what data they are about to donate, and thereby support informed consent. Furthermore, it can make the data donation process more educational and enjoyable.

+
+

Adding visualizations to tables

+

Visualizations are always directly connected to a consent form table. When in script.py you create a consent form table, you can implement visualizations as follows:

+
table_title = props.Translatable({
+    "en": "Table title",
+    "nl": "Tabel titel"
+})
+
+table = props.PropsUIPromptConsentFormTable(
+    id = "tableId",
+    title = table_title,
+    data_frame = df,
+    visualizations = [])
+
+
+

You can now add one or multiple visualization specifications to the visualizations list.

+
+
+

Visualization Specification

+

A visualization specification provides instructions for creating a visualization based on the data in the table. This visualization will then be created dynamically, so that when the table is updated (e.g., when participants search the data or remove rows) the visualization is updated as well.

+

A specification covers three main components:

+
    +
  • Aggregation: How should the table data be aggregated. e.g., count the number of rows per day

  • +
  • Display: How should the aggregated data be displayed? e.g., line chart, bar chart, wordcloud

  • +
  • Labels: Any labels to help along interpretation, optionally with translations (as seen above in the table_title)

  • +
+

A detailed explanation of the visualization specification is shown below in the Specification Guide. But we recommend first having a look at the following examples.

+
+
+

Examples

+

Say we have data about every time a participant viewed a certain channel, and we also know the channel category (e.g., sports, entertainment) and the exact timestamp. We have put this in a data_frame with the columns: channel, category and timestamp. We can then make a number of different visualizations.

+
+

Categorical variables | Bar chart of views per category

+
vis1 = dict(
+    title = dict(en= "views per category", ...),
+    type = "bar",
+    group = dict(column = "category", label = "Category")
+    values = [dict(aggregate = "count", label = dict(en = "number of views", ...))]
+)
+
+
+

The type determines the chart type, and can in this case be “bar”,”line” or “area”. The group determines how the data should be grouped and aggregated, which in this case is per category. The values determines the values to calculate per group, which here is just the count of the rows.

+

!!! Notice that values is a list, and not a single dictionary. Adding multiple value dictionaries will create multiple y-values, for grouped barcharts or multiple lines or areas.

+

The label’s can be either a single string (as in the group) or a dictionary with different languages, where keys are country codes, and values are labels (as in the values).

+
+
+

Date variables | Area chart of views per month

+
vis2 = dict(
+    title = dict(en= "views over time", ...),
+    type = "area",
+    group = dict(column = "timestamp", dateFormat = "month", label = "Month")
+    values = [dict(aggregate = "count", label = dict(en = "number of views", ...))]
+)
+
+
+

In this area chart (i.e. a line chart where the area below the line is coloured) we group the data by month, and use the same aggregation values as in the previous example to count the number of views per group.

+

The dateFormat grouping variable can be set if the column is a date string in ISO format: YYYY-MM-DD for date or YYYY-MM-DD HH:MM:SS for datetime (you can also use YYYY-MM-DDTHH:MM:SS, but that doesn't look nice in the table).

+

The following formats are supported:

+
    +
  • Fixed interval: “year”, “quarter”, “month”, “day”, “hour”

  • +
  • Automatic interval: “auto” will pick an interval based on the min/max date. Pick this if the min/max date can vary heavily between participants. This also avoids slowing down the application by accidentally generating a huge graph (e.g., a one year period with “hour” interval)

  • +
  • cycles / season: “month_cycle” (January - December), “weekday_cycle” (Monday - Sunday) and “hour_cycle” (1 - 24).

  • +
+
+
+

Second-level aggregation | Line chart of views over time per category

+

Above we mentioned that you can add multiple values to create multiple y-values. But this only works if your data is wide. Alternatively, you can also perform a second-level aggregation on long data.

+
vis3 = dict(
+    title = dict(en= "views per category over time", ...),
+    type = "line",
+    group = dict(column = "timestamp", dateFormat = "auto", label = "Month")
+    values = [dict(
+        aggregate = "count",
+        label = dict(en = "number of views", ...),
+        group_by = "category"
+    )]
+)
+
+
+

Here we changed three things. First, we changed the type to “line”, because that’s a bit easier on the eye with multiple y-values. Second, we added group_by to the aggregation value, setting it to “category”. This will break the values data into groups for categories, and calculate the aggregation statistic per category. This will be visualized as a line chart where the frequency of each category (e.g., sport, entertainment) will be displayed on separate lines.

+

A third change is that we set the dateFormat to “auto” instead of fixing it to “month”. This will automatically pick a suitable time interval based on the range of column (last date - first date). This could mean that different participants see different intervals, depending on what works best for their own data.

+
+
+

Text variables | A wordcloud

+

As a final example, we’ll look at a different sub-specification for visualizing textual data. We’ll make a wordcloud of channels, based on their frequency in the data.

+
vis4 = dict(
+    title = dict(en= "Most viewed channels", ...),
+    type = "wordcloud",
+    textColumn = 'channel',
+    tokenize = False,
+)
+
+
+

This creates a wordcloud of the full channel names. Note that we could also have tokenized the texts, but for channels (e.g., YouTube channels) the full names are probably most informative.

+
+
+
+

Example wrap-up

+

Now that we have created visualizations, we can add them to the consent form table. Note that above we assigned our specifications to vis1 to vis4. We can now simply add them to the visualizations list.

+
table = props.PropsUIPromptConsentFormTable(
+    id = "tableId",
+    title = table_title,
+    data_frame = df,
+    visualizations = [vis1, vis2, vis3, vis4])
+
+
+
+
+

Specification guide

+

This is an overview of the visualization specification. First, there are some general visualization arguments that every visualization has. Second, there are specific arguments depending on the visualization type.

+
+

General visualization arguments

+

Every visualization has the following arguments

+
    +
  • title: A title for the visualization. This has to be a translation dictionary (see translation spec below)

  • +
  • type: The type of the visualization. The type determines what specification you need to follow

    +
      +
    • Chart visualiation: “line”, “bar” or “area”

    • +
    • Text visualization: “wordcloud”

    • +
    +
  • +
  • height (optional): The height of the chart in pixels

  • +
+
+
+

Chart visualization arguments

+

Chart visualizations work by aggregating the data into X, Y and optionally Z axes. It’s the basis for most common charts.

+
    +
  • type: “line”, “bar” or “area”

  • +
  • group: specifies the column to group and aggregate the data by. The group is visualized on the x-axis.

    +
      +
    • label: x-axis label. Either a string or translation dictionary (see translation spec below)

    • +
    • column: the name of the column

    • +
    • dateFormat (optional): if column is a date, select how it should be grouped. (see dateFormat spec below)

    • +
    • levels (optional). A list of strings with the specific column values to use. This also makes sure these values are displayed if they are missing in a participants data (also see values -> addZeroes)

    • +
    +
  • +
  • values: A list (!!) of objects. Each object specifies an (aggregate) value to calculate per group. A value is visualized on the y-axis. Multiple values can be given for multiple y-values

    +
      +
    • label: y-axis label. Either a string or translation dictionary (see translation spec below)

    • +
    • column (optional): the column based on which the value is calculated. Can be empty if just counting rows.

    • +
    • aggregate: The aggregation function. (see aggregate spec below)

    • +
    • addZeroes: Boolean. If true, add zeroes for empty groups. If levels are specified, participants will explicitly see that they occurred zero times in their data. If dateFormat is used, this fills possible gaps (note that this mostly makes sense for row "count" aggregations where absence implies zero)

    • +
    • group_by (optional): the name of a column to do a second-level aggregation. This will create multiple y-values where the value in the column becomes the label.

    • +
    +
  • +
+
+
+

Text visualization arguments

+

Text visualizations take a text column as input.

+
    +
  • type: “wordcloud”

  • +
  • textColumn: A text (string) column in the data

  • +
  • tokenize (optional): Boolean. If true, the text will be tokenized

  • +
  • valueColumn (optional): By default, every text or token will be given a value based on the number of rows in which it occurs. Alternatively, you can specify a numeric column, in which case (the sum of) the values in this column will be used.

  • +
  • extract (optional): Normally, all preprocessing of the data should be handled in the import scripts, but for convenience we will provide some common methods for extracting parts of a string. Currently supports:

    +
      +
    • “url_domain”: If the column contains URLs, extract only the domain.

    • +
    +
  • +
+
+
+

Spec details

+

Here are some details for the more complicated spec components.

+
+

- translation

+

A translation dictionary has country codes as keys and the translations as values: dict(en = "english label", nl = "dutch label"). (This is identical to the dictionary used in the props.Translatable)

+
+
+

- dateFormat

+

If column is a date (YYYY-MM-DD, YYYY-MM-DD HH:MM or YYYY-MM-DD HH:MM:SS), select how the date is grouped. options are:

+
    +
  • Fixed interval: “year”, “quarter”, “month”, “day”, “hour”

  • +
  • Automatic interval: “auto” will pick an interval based on the min/max date. Pick this if the min/max date can vary heavily between participants. This also avoids slowing down the application by accidentally generating a huge graph (e.g., a one year period with “hour” interval)

  • +
  • cycles / season: “month_cycle” (January - December), “weekday_cycle” (Monday - Sunday) and “hour_cycle” (1 - 24).

  • +
+
+
+

- aggregate

+

The function by which to aggregate the column in values. The following functions are currently supported

+
    +
  • “count” just counts the rows

  • +
  • “mean” and “sum” require the value column to be numeric.

  • +
  • “count_pct” gives the count as a percentage of the total number of rows.*

  • +
  • “pct” sums the values of a numeric column and divides by the total sum.*

  • +
+

* If a secondary aggregation is used, percentages are calculated within the primary aggregation group

+
+
+
+
+ + +
+
+
+
+ + + + + + +
+
+ + + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/genindex.html b/genindex.html new file mode 100644 index 00000000..4c9923bf --- /dev/null +++ b/genindex.html @@ -0,0 +1,609 @@ + + + + + + + Index — Port 1.0.0 documentation + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ + +

Index

+ +
+ C + | D + | E + | F + | G + | H + | I + | J + | L + | M + | O + | P + | Q + | R + | S + | T + | V + +
+

C

+ + + +
+ +

D

+ + + +
+ +

E

+ + + +
+ +

F

+ + + +
+ +

G

+ + + +
+ +

H

+ + +
+ +

I

+ + + +
+ +

J

+ + +
+ +

L

+ + +
+ +

M

+ + +
+ +

O

+ + +
+ +

P

+ + + +
+ +

Q

+ + + +
+ +

R

+ + + +
+ +

S

+ + + +
+ +

T

+ + + +
+ +

V

+ + + +
+ + + +
+
+
+
+ + +
+
+
+
+
+ +
+ +
+ +
+
+
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 00000000..2d19fb71 --- /dev/null +++ b/index.html @@ -0,0 +1,249 @@ + + + + + + + + The Data Donation Task Documentation — Port 1.0.0 documentation + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

The Data Donation Task Documentation

+

Welcome to the Data Donation Task Documentation!

+

This is the place to learn about the data donation task.

+
+

Getting started

+

Checkout the following wiki articles to get started:

+ +
+
+

API Reference

+

You can find the API documentation here:

+ +
+
+

Standard scripts

+

We provide standard extraction scripts for a various platforms which you can find here:

+ +
+
+
+

Indices and tables

+ +
+ + +
+
+
+
+ + + + + + +
+
+
+
+
+ +
+ +
+ + Articles> + +
+
+
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/objects.inv b/objects.inv new file mode 100644 index 00000000..f1fdf333 Binary files /dev/null and b/objects.inv differ diff --git a/py-modindex.html b/py-modindex.html new file mode 100644 index 00000000..a7cd888b --- /dev/null +++ b/py-modindex.html @@ -0,0 +1,218 @@ + + + + + + + Python Module Index — Port 1.0.0 documentation + + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ + +

Python Module Index

+ +
+ p +
+ + + + + + + + + + + + + + + + + + + + + + + + + +
 
+ p
+ port +
    + port.api.props +
    + port.helpers.extraction_helpers +
    + port.helpers.port_helpers +
    + port.helpers.validate +
    + port.platforms.chatgpt +
    + port.platforms.instagram +
+ + +
+
+
+
+ + +
+
+
+
+
+ +
+ +
+ +
+
+
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/search.html b/search.html new file mode 100644 index 00000000..cbae4290 --- /dev/null +++ b/search.html @@ -0,0 +1,182 @@ + + + + + + + Search — Port 1.0.0 documentation + + + + + + + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +

Search

+ + + + +

+ Searching for multiple words only shows matches that contain + all words. +

+ + +
+ + + +
+ + +
+ + +
+
+
+
+ + +
+
+
+
+
+ +
+ +
+ +
+
+
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file diff --git a/searchindex.js b/searchindex.js new file mode 100644 index 00000000..7a544a98 --- /dev/null +++ b/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"alltitles": {"- aggregate": [[9, "aggregate"]], "- dateFormat": [[9, "dateformat"]], "- translation": [[9, "translation"]], "API Reference": [[0, null], [1, null], [10, "api-reference"]], "Add data donation task to your data donation study on Next": [[4, "add-data-donation-task-to-your-data-donation-study-on-next"]], "Adding data visualizations": [[9, null]], "Adding visualizations to tables": [[9, "adding-visualizations-to-tables"]], "Articles": [[5, null]], "Available platforms": [[11, "module-port.platforms.chatgpt"]], "Build the Next Docker image": [[8, "build-the-next-docker-image"]], "Categorical variables | Bar chart of views per category": [[9, "categorical-variables-bar-chart-of-views-per-category"]], "Chart visualization arguments": [[9, "chart-visualization-arguments"]], "Creating your own donation task": [[2, null]], "Data donation checklist": [[3, null]], "Date variables | Area chart of views per month": [[9, "date-variables-area-chart-of-views-per-month"]], "Deployment of the data donation task": [[4, null]], "Docker compose yaml": [[8, "docker-compose-yaml"]], "Don\u2019t want to use WSL?": [[6, "don-t-want-to-use-wsl"]], "Example wrap-up": [[9, "example-wrap-up"]], "Examples": [[9, "examples"]], "Extraction helpers": [[0, "module-port.helpers.extraction_helpers"]], "General visualization arguments": [[9, "general-visualization-arguments"]], "Getting started": [[10, "getting-started"]], "Google OIDC": [[8, "google-oidc"]], "How does the data donation task work?": [[2, "how-does-the-data-donation-task-work"]], "Indices and tables": [[10, "indices-and-tables"]], "Instagram": [[11, "module-port.platforms.instagram"]], "Install Python packages": [[2, "install-python-packages"]], "Installation": [[6, null]], "Installation Guide for the Pre-requisites": [[6, 
"installation-guide-for-the-pre-requisites"]], "Installation of the data donation task": [[6, "installation-of-the-data-donation-task"]], "Introduction to Data Donation": [[7, null]], "Limits of the data donation task": [[2, "limits-of-the-data-donation-task"]], "Linux": [[6, "linux"]], "Mac OSX": [[6, "mac-osx"]], "Next as a paid service": [[4, "next-as-a-paid-service"]], "Next steps in Next": [[8, "next-steps-in-next"]], "Nginx configuration": [[8, "nginx-configuration"]], "Platform Documentation": [[11, null]], "Port helpers": [[0, "module-port.helpers.port_helpers"]], "Prerequisites": [[8, "prerequisites"]], "Props": [[0, "module-port.api.props"]], "Run Next in a Docker container": [[8, "run-next-in-a-docker-container"]], "Second-level aggregation | Line chart of views over time per category": [[9, "second-level-aggregation-line-chart-of-views-over-time-per-category"]], "Self service Next (community version)": [[4, "self-service-next-community-version"]], "Self service Next (community version) on Surf Research Cloud": [[4, "self-service-next-community-version-on-surf-research-cloud"]], "Setup certificates for TLS": [[8, "setup-certificates-for-tls"]], "Spec details": [[9, "spec-details"]], "Specification guide": [[9, "specification-guide"]], "Standard scripts": [[10, "standard-scripts"]], "Start writing your first data donation task script": [[2, "start-writing-your-first-data-donation-task-script"]], "Start writing your own script.py using the api": [[2, "start-writing-your-own-script-py-using-the-api"]], "Text variables | A wordcloud": [[9, "text-variables-a-wordcloud"]], "Text visualization arguments": [[9, "text-visualization-arguments"]], "The Data Donation Task Documentation": [[10, null]], "The data donation task with Next": [[4, "the-data-donation-task-with-next"]], "The usage of yield in script.py": [[2, "the-usage-of-yield-in-script-py"]], "Tips when writing your own script.py": [[2, "tips-when-writing-your-own-script-py"]], "Try out Next with 
Docker": [[8, null]], "Try the donation task from the perspective of the participant": [[2, "try-the-donation-task-from-the-perspective-of-the-participant"]], "Unsplash": [[8, "unsplash"]], "Use the data donation task without Next": [[4, "use-the-data-donation-task-without-next"]], "Using the data donation task in a data donation study": [[2, "using-the-data-donation-task-in-a-data-donation-study"]], "Validation": [[0, "module-port.helpers.validate"]], "Visualization Specification": [[9, "visualization-specification"]], "Which option should I choose?": [[4, "which-option-should-i-choose"]], "Windows": [[6, "windows"]], "script.py": [[2, "script-py"]]}, "docnames": ["api/api", "api/index", "articles/creating-your-own-data-donation-task", "articles/data-donation-checklist", "articles/deployment", "articles/index", "articles/installation", "articles/introduction-to-data-donation", "articles/next-in-docker", "articles/visualizations", "index", "standard_scripts/index"], "envversion": {"sphinx": 64, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2}, "filenames": ["api/api.md", "api/index.rst", "articles/creating-your-own-data-donation-task.md", "articles/data-donation-checklist.md", "articles/deployment.md", "articles/index.rst", "articles/installation.md", "articles/introduction-to-data-donation.md", "articles/next-in-docker.md", "articles/visualizations.md", "index.rst", "standard_scripts/index.rst"], "indexentries": {"cancel (port.api.props.propsuipromptconfirm attribute)": [[0, "port.api.props.PropsUIPromptConfirm.cancel", false]], "choices (port.api.props.propsuiquestionmultiplechoice attribute)": [[0, "port.api.props.PropsUIQuestionMultipleChoice.choices", false]], "choices (port.api.props.propsuiquestionmultiplechoicecheckbox attribute)": [[0, 
"port.api.props.PropsUIQuestionMultipleChoiceCheckbox.choices", false]], "data_frame (port.api.props.propsuipromptconsentformtable attribute)": [[0, "port.api.props.PropsUIPromptConsentFormTable.data_frame", false]], "ddp_categories_lookup (port.helpers.validate.validateinput attribute)": [[0, "port.helpers.validate.ValidateInput.ddp_categories_lookup", false]], "ddpcategory (class in port.helpers.validate)": [[0, "port.helpers.validate.DDPCategory", false]], "ddpfiletype (class in port.helpers.validate)": [[0, "port.helpers.validate.DDPFiletype", false]], "delete_option (port.api.props.propsuipromptconsentformtable attribute)": [[0, "port.api.props.PropsUIPromptConsentFormTable.delete_option", false]], "description (port.api.props.propsuipromptconsentform attribute)": [[0, "port.api.props.PropsUIPromptConsentForm.description", false]], "description (port.api.props.propsuipromptconsentformtable attribute)": [[0, "port.api.props.PropsUIPromptConsentFormTable.description", false]], "description (port.api.props.propsuipromptfileinput attribute)": [[0, "port.api.props.PropsUIPromptFileInput.description", false]], "description (port.api.props.propsuipromptfileinputmultiple attribute)": [[0, "port.api.props.PropsUIPromptFileInputMultiple.description", false]], "description (port.api.props.propsuipromptprogress attribute)": [[0, "port.api.props.PropsUIPromptProgress.description", false]], "description (port.api.props.propsuipromptquestionnaire attribute)": [[0, "port.api.props.PropsUIPromptQuestionnaire.description", false]], "description (port.api.props.propsuipromptradioinput attribute)": [[0, "port.api.props.PropsUIPromptRadioInput.description", false]], "dict_denester() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.dict_denester", false]], "donate() (in module port.helpers.port_helpers)": [[0, "port.helpers.port_helpers.donate", false]], "donate_button (port.api.props.propsuipromptconsentform attribute)": [[0, 
"port.api.props.PropsUIPromptConsentForm.donate_button", false]], "donate_question (port.api.props.propsuipromptconsentform attribute)": [[0, "port.api.props.PropsUIPromptConsentForm.donate_question", false]], "epoch_to_iso() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.epoch_to_iso", false]], "exit() (in module port.helpers.port_helpers)": [[0, "port.helpers.port_helpers.exit", false]], "extensions (port.api.props.propsuipromptfileinput attribute)": [[0, "port.api.props.PropsUIPromptFileInput.extensions", false]], "extensions (port.api.props.propsuipromptfileinputmultiple attribute)": [[0, "port.api.props.PropsUIPromptFileInputMultiple.extensions", false]], "extract_file_from_zip() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.extract_file_from_zip", false]], "filenotfoundinziperror": [[0, "port.helpers.extraction_helpers.FileNotFoundInZipError", false]], "find_item() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.find_item", false]], "find_items() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.find_items", false]], "fix_ascii_string() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.fix_ascii_string", false]], "fix_latin1_string() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.fix_latin1_string", false]], "folded (port.api.props.propsuipromptconsentformtable attribute)": [[0, "port.api.props.PropsUIPromptConsentFormTable.folded", false]], "footer (port.api.props.propsuipagedonation attribute)": [[0, "port.api.props.PropsUIPageDonation.footer", false]], "generate_file_prompt() (in module port.helpers.port_helpers)": [[0, "port.helpers.port_helpers.generate_file_prompt", false]], "generate_retry_prompt() (in module port.helpers.port_helpers)": [[0, "port.helpers.port_helpers.generate_retry_prompt", false]], "generate_review_data_prompt() (in 
module port.helpers.port_helpers)": [[0, "port.helpers.port_helpers.generate_review_data_prompt", false]], "get_status_code_id() (port.helpers.validate.validateinput method)": [[0, "port.helpers.validate.ValidateInput.get_status_code_id", false]], "header (port.api.props.propsuipagedonation attribute)": [[0, "port.api.props.PropsUIPageDonation.header", false]], "id (port.api.props.propsuipromptconsentform attribute)": [[0, "port.api.props.PropsUIPromptConsentForm.id", false]], "id (port.api.props.propsuipromptconsentformtable attribute)": [[0, "port.api.props.PropsUIPromptConsentFormTable.id", false]], "id (port.api.props.propsuiquestionmultiplechoice attribute)": [[0, "port.api.props.PropsUIQuestionMultipleChoice.id", false]], "id (port.api.props.propsuiquestionmultiplechoicecheckbox attribute)": [[0, "port.api.props.PropsUIQuestionMultipleChoiceCheckbox.id", false]], "id (port.api.props.propsuiquestionopen attribute)": [[0, "port.api.props.PropsUIQuestionOpen.id", false]], "id (port.api.props.radioitem attribute)": [[0, "port.api.props.RadioItem.id", false]], "infer_ddp_category() (port.helpers.validate.validateinput method)": [[0, "port.helpers.validate.ValidateInput.infer_ddp_category", false]], "items (port.api.props.propsuipromptradioinput attribute)": [[0, "port.api.props.PropsUIPromptRadioInput.items", false]], "json_dumper() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.json_dumper", false]], "language (class in port.helpers.validate)": [[0, "port.helpers.validate.Language", false]], "message (port.api.props.propsuipromptprogress attribute)": [[0, "port.api.props.PropsUIPromptProgress.message", false]], "meta_tables (port.api.props.propsuipromptconsentform attribute)": [[0, "port.api.props.PropsUIPromptConsentForm.meta_tables", false]], "module": [[0, "module-port.api.props", false], [0, "module-port.helpers.extraction_helpers", false], [0, "module-port.helpers.port_helpers", false], [0, "module-port.helpers.validate", 
false], [11, "module-port.platforms.chatgpt", false], [11, "module-port.platforms.instagram", false]], "ok (port.api.props.propsuipromptconfirm attribute)": [[0, "port.api.props.PropsUIPromptConfirm.ok", false]], "percentage (port.api.props.propsuipromptprogress attribute)": [[0, "port.api.props.PropsUIPromptProgress.percentage", false]], "platform (port.api.props.propsuipagedonation attribute)": [[0, "port.api.props.PropsUIPageDonation.platform", false]], "port.api.props": [[0, "module-port.api.props", false]], "port.helpers.extraction_helpers": [[0, "module-port.helpers.extraction_helpers", false]], "port.helpers.port_helpers": [[0, "module-port.helpers.port_helpers", false]], "port.helpers.validate": [[0, "module-port.helpers.validate", false]], "port.platforms.chatgpt": [[11, "module-port.platforms.chatgpt", false]], "port.platforms.instagram": [[11, "module-port.platforms.instagram", false]], "propsuifooter (class in port.api.props)": [[0, "port.api.props.PropsUIFooter", false]], "propsuiheader (class in port.api.props)": [[0, "port.api.props.PropsUIHeader", false]], "propsuipagedonation (class in port.api.props)": [[0, "port.api.props.PropsUIPageDonation", false]], "propsuipageend (class in port.api.props)": [[0, "port.api.props.PropsUIPageEnd", false]], "propsuipromptconfirm (class in port.api.props)": [[0, "port.api.props.PropsUIPromptConfirm", false]], "propsuipromptconsentform (class in port.api.props)": [[0, "port.api.props.PropsUIPromptConsentForm", false]], "propsuipromptconsentformtable (class in port.api.props)": [[0, "port.api.props.PropsUIPromptConsentFormTable", false]], "propsuipromptfileinput (class in port.api.props)": [[0, "port.api.props.PropsUIPromptFileInput", false]], "propsuipromptfileinputmultiple (class in port.api.props)": [[0, "port.api.props.PropsUIPromptFileInputMultiple", false]], "propsuipromptprogress (class in port.api.props)": [[0, "port.api.props.PropsUIPromptProgress", false]], "propsuipromptquestionnaire (class in 
port.api.props)": [[0, "port.api.props.PropsUIPromptQuestionnaire", false]], "propsuipromptradioinput (class in port.api.props)": [[0, "port.api.props.PropsUIPromptRadioInput", false]], "propsuiquestionmultiplechoice (class in port.api.props)": [[0, "port.api.props.PropsUIQuestionMultipleChoice", false]], "propsuiquestionmultiplechoicecheckbox (class in port.api.props)": [[0, "port.api.props.PropsUIQuestionMultipleChoiceCheckbox", false]], "propsuiquestionopen (class in port.api.props)": [[0, "port.api.props.PropsUIQuestionOpen", false]], "question (port.api.props.propsuiquestionmultiplechoice attribute)": [[0, "port.api.props.PropsUIQuestionMultipleChoice.question", false]], "question (port.api.props.propsuiquestionmultiplechoicecheckbox attribute)": [[0, "port.api.props.PropsUIQuestionMultipleChoiceCheckbox.question", false]], "question (port.api.props.propsuiquestionopen attribute)": [[0, "port.api.props.PropsUIQuestionOpen.question", false]], "questions (port.api.props.propsuipromptquestionnaire attribute)": [[0, "port.api.props.PropsUIPromptQuestionnaire.questions", false]], "radioitem (class in port.api.props)": [[0, "port.api.props.RadioItem", false]], "read_csv_from_bytes() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.read_csv_from_bytes", false]], "read_csv_from_bytes_to_df() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.read_csv_from_bytes_to_df", false]], "read_json_from_bytes() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.read_json_from_bytes", false]], "read_json_from_file() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.read_json_from_file", false]], "render_end_page() (in module port.helpers.port_helpers)": [[0, "port.helpers.port_helpers.render_end_page", false]], "render_page() (in module port.helpers.port_helpers)": [[0, "port.helpers.port_helpers.render_page", false]], "replace_months() (in 
module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.replace_months", false]], "set_current_status_code_by_id() (port.helpers.validate.validateinput method)": [[0, "port.helpers.validate.ValidateInput.set_current_status_code_by_id", false]], "sort_isotimestamp_empty_timestamp_last() (in module port.helpers.extraction_helpers)": [[0, "port.helpers.extraction_helpers.sort_isotimestamp_empty_timestamp_last", false]], "status_codes_lookup (port.helpers.validate.validateinput attribute)": [[0, "port.helpers.validate.ValidateInput.status_codes_lookup", false]], "statuscode (class in port.helpers.validate)": [[0, "port.helpers.validate.StatusCode", false]], "tables (port.api.props.propsuipromptconsentform attribute)": [[0, "port.api.props.PropsUIPromptConsentForm.tables", false]], "text (port.api.props.propsuipromptconfirm attribute)": [[0, "port.api.props.PropsUIPromptConfirm.text", false]], "title (port.api.props.propsuipromptconsentformtable attribute)": [[0, "port.api.props.PropsUIPromptConsentFormTable.title", false]], "title (port.api.props.propsuipromptradioinput attribute)": [[0, "port.api.props.PropsUIPromptRadioInput.title", false]], "todict() (port.api.props.propsuifooter method)": [[0, "port.api.props.PropsUIFooter.toDict", false]], "todict() (port.api.props.propsuiheader method)": [[0, "port.api.props.PropsUIHeader.toDict", false]], "todict() (port.api.props.propsuipagedonation method)": [[0, "port.api.props.PropsUIPageDonation.toDict", false]], "todict() (port.api.props.propsuipageend method)": [[0, "port.api.props.PropsUIPageEnd.toDict", false]], "todict() (port.api.props.propsuipromptconfirm method)": [[0, "port.api.props.PropsUIPromptConfirm.toDict", false]], "todict() (port.api.props.propsuipromptconsentform method)": [[0, "port.api.props.PropsUIPromptConsentForm.toDict", false]], "todict() (port.api.props.propsuipromptconsentformtable method)": [[0, "port.api.props.PropsUIPromptConsentFormTable.toDict", false]], "todict() 
(port.api.props.propsuipromptfileinput method)": [[0, "port.api.props.PropsUIPromptFileInput.toDict", false]], "todict() (port.api.props.propsuipromptfileinputmultiple method)": [[0, "port.api.props.PropsUIPromptFileInputMultiple.toDict", false]], "todict() (port.api.props.propsuipromptprogress method)": [[0, "port.api.props.PropsUIPromptProgress.toDict", false]], "todict() (port.api.props.propsuipromptquestionnaire method)": [[0, "port.api.props.PropsUIPromptQuestionnaire.toDict", false]], "todict() (port.api.props.propsuipromptradioinput method)": [[0, "port.api.props.PropsUIPromptRadioInput.toDict", false]], "todict() (port.api.props.propsuiquestionmultiplechoice method)": [[0, "port.api.props.PropsUIQuestionMultipleChoice.toDict", false]], "todict() (port.api.props.propsuiquestionmultiplechoicecheckbox method)": [[0, "port.api.props.PropsUIQuestionMultipleChoiceCheckbox.toDict", false]], "todict() (port.api.props.propsuiquestionopen method)": [[0, "port.api.props.PropsUIQuestionOpen.toDict", false]], "todict() (port.api.props.translatable method)": [[0, "port.api.props.Translatable.toDict", false]], "translatable (class in port.api.props)": [[0, "port.api.props.Translatable", false]], "translate_meta_tables() (port.api.props.propsuipromptconsentform method)": [[0, "port.api.props.PropsUIPromptConsentForm.translate_meta_tables", false]], "translate_tables() (port.api.props.propsuipromptconsentform method)": [[0, "port.api.props.PropsUIPromptConsentForm.translate_tables", false]], "translations (class in port.api.props)": [[0, "port.api.props.Translations", false]], "validate_zip() (in module port.helpers.validate)": [[0, "port.helpers.validate.validate_zip", false]], "validateinput (class in port.helpers.validate)": [[0, "port.helpers.validate.ValidateInput", false]], "value (port.api.props.radioitem attribute)": [[0, "port.api.props.RadioItem.value", false]], "visualizations (port.api.props.propsuipromptconsentformtable attribute)": [[0, 
"port.api.props.PropsUIPromptConsentFormTable.visualizations", false]]}, "objects": {"port.api": [[0, 0, 0, "-", "props"]], "port.api.props": [[0, 1, 1, "", "PropsUIFooter"], [0, 1, 1, "", "PropsUIHeader"], [0, 1, 1, "", "PropsUIPageDonation"], [0, 1, 1, "", "PropsUIPageEnd"], [0, 1, 1, "", "PropsUIPromptConfirm"], [0, 1, 1, "", "PropsUIPromptConsentForm"], [0, 1, 1, "", "PropsUIPromptConsentFormTable"], [0, 1, 1, "", "PropsUIPromptFileInput"], [0, 1, 1, "", "PropsUIPromptFileInputMultiple"], [0, 1, 1, "", "PropsUIPromptProgress"], [0, 1, 1, "", "PropsUIPromptQuestionnaire"], [0, 1, 1, "", "PropsUIPromptRadioInput"], [0, 1, 1, "", "PropsUIQuestionMultipleChoice"], [0, 1, 1, "", "PropsUIQuestionMultipleChoiceCheckbox"], [0, 1, 1, "", "PropsUIQuestionOpen"], [0, 1, 1, "", "RadioItem"], [0, 1, 1, "", "Translatable"], [0, 1, 1, "", "Translations"]], "port.api.props.PropsUIFooter": [[0, 2, 1, "", "toDict"]], "port.api.props.PropsUIHeader": [[0, 2, 1, "", "toDict"]], "port.api.props.PropsUIPageDonation": [[0, 3, 1, "", "footer"], [0, 3, 1, "", "header"], [0, 3, 1, "", "platform"], [0, 2, 1, "", "toDict"]], "port.api.props.PropsUIPageEnd": [[0, 2, 1, "", "toDict"]], "port.api.props.PropsUIPromptConfirm": [[0, 3, 1, "", "cancel"], [0, 3, 1, "", "ok"], [0, 3, 1, "", "text"], [0, 2, 1, "", "toDict"]], "port.api.props.PropsUIPromptConsentForm": [[0, 3, 1, "", "description"], [0, 3, 1, "", "donate_button"], [0, 3, 1, "", "donate_question"], [0, 3, 1, "", "id"], [0, 3, 1, "", "meta_tables"], [0, 3, 1, "", "tables"], [0, 2, 1, "", "toDict"], [0, 2, 1, "", "translate_meta_tables"], [0, 2, 1, "", "translate_tables"]], "port.api.props.PropsUIPromptConsentFormTable": [[0, 3, 1, "", "data_frame"], [0, 3, 1, "", "delete_option"], [0, 3, 1, "", "description"], [0, 3, 1, "", "folded"], [0, 3, 1, "", "id"], [0, 3, 1, "", "title"], [0, 2, 1, "", "toDict"], [0, 3, 1, "", "visualizations"]], "port.api.props.PropsUIPromptFileInput": [[0, 3, 1, "", "description"], [0, 3, 1, "", "extensions"], 
[0, 2, 1, "", "toDict"]], "port.api.props.PropsUIPromptFileInputMultiple": [[0, 3, 1, "", "description"], [0, 3, 1, "", "extensions"], [0, 2, 1, "", "toDict"]], "port.api.props.PropsUIPromptProgress": [[0, 3, 1, "", "description"], [0, 3, 1, "", "message"], [0, 3, 1, "", "percentage"], [0, 2, 1, "", "toDict"]], "port.api.props.PropsUIPromptQuestionnaire": [[0, 3, 1, "", "description"], [0, 3, 1, "", "questions"], [0, 2, 1, "", "toDict"]], "port.api.props.PropsUIPromptRadioInput": [[0, 3, 1, "", "description"], [0, 3, 1, "", "items"], [0, 3, 1, "", "title"], [0, 2, 1, "", "toDict"]], "port.api.props.PropsUIQuestionMultipleChoice": [[0, 3, 1, "", "choices"], [0, 3, 1, "", "id"], [0, 3, 1, "", "question"], [0, 2, 1, "", "toDict"]], "port.api.props.PropsUIQuestionMultipleChoiceCheckbox": [[0, 3, 1, "", "choices"], [0, 3, 1, "", "id"], [0, 3, 1, "", "question"], [0, 2, 1, "", "toDict"]], "port.api.props.PropsUIQuestionOpen": [[0, 3, 1, "", "id"], [0, 3, 1, "", "question"], [0, 2, 1, "", "toDict"]], "port.api.props.RadioItem": [[0, 3, 1, "", "id"], [0, 3, 1, "", "value"]], "port.api.props.Translatable": [[0, 2, 1, "", "toDict"]], "port.helpers": [[0, 0, 0, "-", "extraction_helpers"], [0, 0, 0, "-", "port_helpers"], [0, 0, 0, "-", "validate"]], "port.helpers.extraction_helpers": [[0, 4, 1, "", "FileNotFoundInZipError"], [0, 5, 1, "", "dict_denester"], [0, 5, 1, "", "epoch_to_iso"], [0, 5, 1, "", "extract_file_from_zip"], [0, 5, 1, "", "find_item"], [0, 5, 1, "", "find_items"], [0, 5, 1, "", "fix_ascii_string"], [0, 5, 1, "", "fix_latin1_string"], [0, 5, 1, "", "json_dumper"], [0, 5, 1, "", "read_csv_from_bytes"], [0, 5, 1, "", "read_csv_from_bytes_to_df"], [0, 5, 1, "", "read_json_from_bytes"], [0, 5, 1, "", "read_json_from_file"], [0, 5, 1, "", "replace_months"], [0, 5, 1, "", "sort_isotimestamp_empty_timestamp_last"]], "port.helpers.port_helpers": [[0, 5, 1, "", "donate"], [0, 5, 1, "", "exit"], [0, 5, 1, "", "generate_file_prompt"], [0, 5, 1, "", 
"generate_retry_prompt"], [0, 5, 1, "", "generate_review_data_prompt"], [0, 5, 1, "", "render_end_page"], [0, 5, 1, "", "render_page"]], "port.helpers.validate": [[0, 1, 1, "", "DDPCategory"], [0, 1, 1, "", "DDPFiletype"], [0, 1, 1, "", "Language"], [0, 1, 1, "", "StatusCode"], [0, 1, 1, "", "ValidateInput"], [0, 5, 1, "", "validate_zip"]], "port.helpers.validate.ValidateInput": [[0, 3, 1, "", "ddp_categories_lookup"], [0, 2, 1, "", "get_status_code_id"], [0, 2, 1, "", "infer_ddp_category"], [0, 2, 1, "", "set_current_status_code_by_id"], [0, 3, 1, "", "status_codes_lookup"]], "port.platforms": [[11, 0, 0, "-", "chatgpt"], [11, 0, 0, "-", "instagram"]]}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "method", "Python method"], "3": ["py", "attribute", "Python attribute"], "4": ["py", "exception", "Python exception"], "5": ["py", "function", "Python function"]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:method", "3": "py:attribute", "4": "py:exception", "5": "py:function"}, "terms": {"": [0, 2, 6, 9], "0": [0, 2, 8], "00": 0, "04": 6, "09": 0, "1": [0, 2, 6, 8, 9], "10": 6, "15": 0, "16": 6, "1632139200": 0, "18": 6, "1m": 8, "2": [0, 6, 7], "2021": 0, "2023": 0, "2048": 8, "20t12": 0, "22": 6, "24": 9, "25": 0, "2gib": 2, "3": [0, 6], "30": 0, "3000": 6, "301": 8, "365": 8, "4": 0, "443": 8, "5": [0, 3], "6": 0, "8": 0, "80": 8, "8000": 8, "8601": 0, "A": [0, 2, 3, 4], "As": [2, 3, 4, 9], "At": 2, "But": [2, 9], "By": 9, "For": [0, 2, 8, 11], "If": [0, 2, 3, 4, 6, 8, 9], "In": [2, 3, 4, 6, 8, 9, 11], "It": [0, 2, 3, 6, 9], "On": 2, "That": [2, 6], "The": [0, 3, 5, 9], "These": [2, 6, 9], "To": 6, "With": 2, "__type__": 2, "abbrevi": 0, "about": [0, 3, 6, 8, 9, 10], "abov": [2, 9], "absens": 9, "absolut": 3, "accept": [0, 8], "access": [3, 4, 7], "accident": 9, "account": [2, 8], "act": 0, "actual": 2, "ad": [2, 4, 5, 10], "adapt": [2, 4], "add": 9, "addit": 0, "addition": 7, "addzero": 9, 
"admin": 8, "administ": 4, "administr": 6, "after": [0, 2, 4], "ag": 0, "again": [0, 2], "against": 0, "aggreg": 2, "ak": 2, "al": 2, "alic": 0, "all": [0, 2, 3, 8, 9], "all_ddp_categori": 0, "all_status_cod": 0, "allow": [0, 2, 8], "along": 9, "alreadi": [2, 6], "also": [2, 3, 9], "altern": [2, 9], "although": 2, "alwai": [0, 3, 6, 8, 9], "amd": 6, "amount": [3, 6], "an": [0, 2, 3, 4, 6, 8, 9, 11], "ander": 2, "ani": [0, 2, 9], "annot": 2, "anoth": 3, "answer": 0, "anticip": 3, "anyth": 2, "api": 8, "app": [2, 6, 8], "app_admin": 8, "app_data": 8, "app_domain": 8, "app_mail_domain": 8, "app_nam": 8, "apparaat": 2, "append": 2, "appli": 4, "applic": [0, 2, 8, 9], "approach": [2, 3], "appropri": 3, "ar": [0, 2, 3, 4, 6, 8, 9], "architectur": 2, "archiv": 0, "arg": 8, "around": [2, 8], "arrai": 2, "articl": [2, 4, 10], "ascii": 0, "asd": 0, "ask": 2, "assembl": 0, "assembli": 2, "assign": [0, 9], "associ": 0, "assum": 0, "assumpt": 3, "attempt": [0, 3], "attent": [2, 3], "aumzobj7ojn58xilmgvcwtyrcsallwdcglwdcglwdcgwdchdhsjahdghaggdgdgt7moqyjtjba": 8, "authent": 8, "auto": 9, "automat": 9, "avail": [2, 3, 4, 6], "avoid": 9, "aw": 4, "ax": 9, "axi": 9, "azur": [4, 8], "b": 0, "back": [2, 4], "backend": 2, "bad": 2, "badzipfil": [0, 2], "banner": 8, "barchart": 9, "base": [0, 9], "bash": 6, "basi": 9, "basic": 6, "beautifulsoup4": 2, "becaus": [2, 3, 6, 8, 9], "becom": 9, "befor": [0, 3], "begin": 2, "behind": 2, "being": 2, "believ": 3, "below": [2, 6, 9], "benefit": 2, "bespok": [2, 4], "best": [4, 6, 9], "bestand": 2, "better": 2, "between": [3, 6, 9], "bilingu": 0, "binari": 2, "bio": 6, "bit": [6, 9], "bite": 3, "board": 2, "bob": 0, "bodi": [0, 2], "bool": [0, 2], "boolean": 9, "both": 6, "bottom": 2, "break": [2, 9], "brew": 6, "brief": 0, "bring": 2, "browser": [2, 6, 8], "budget": 4, "buffer": 0, "build": [2, 6], "builtin": 8, "bullet": 3, "bundl": 8, "button": [0, 2], "bytesio": 0, "c": 0, "caf\u00e9": 0, "calcul": 9, "call": [2, 4, 6], "can": [0, 2, 3, 4, 6, 
8, 9, 10], "cancel": [0, 2], "cannot": [0, 2, 6], "careful": 3, "carefulli": 3, "case": [0, 2, 3, 9, 11], "cat1": 0, "categori": 0, "catogori": 0, "caught": 0, "caus": [3, 6], "cd": [6, 8], "cert": 8, "certain": [3, 6, 9], "challeng": 3, "chanc": 3, "chang": [2, 3, 6, 9, 11], "channel": 9, "charact": 0, "characterist": 0, "chatgpt": 11, "check": [2, 3, 6, 8], "checkbox": 0, "checklist": [2, 5, 10], "checkout": [2, 10], "choic": [0, 3], "choos": [2, 6], "chose": 6, "claim": 3, "class": 0, "clearli": 3, "click": [2, 4], "client": 8, "clone": [2, 6, 8], "cloud": 8, "cmd": 6, "code": [0, 2, 3, 9], "collect": [0, 2, 3], "colour": 9, "column": [2, 3, 9], "column1": 0, "column2": 0, "column3": 0, "com": [6, 8], "combin": [0, 2, 6], "come": [2, 4], "command": [0, 2, 4, 6], "commandsystemdon": [0, 2], "commandsystemexit": [0, 2], "commanduirend": [0, 2], "common": [3, 9], "commonli": 2, "commun": 3, "compar": [0, 2, 3], "compat": [0, 2], "compil": 2, "complain": 8, "complet": [0, 2, 3, 4, 6], "complex": 2, "complic": 9, "compon": [0, 2, 9], "compress": 2, "compress_s": 2, "concept": 7, "conf": 8, "configur": [0, 2, 4], "confirm": 0, "connect": [6, 8, 9], "consent": [0, 2, 4, 9], "consent_prompt": 2, "consent_prompt_result": 2, "consid": 3, "consol": [2, 8], "constraint": 2, "construct": 2, "contact": 4, "contain": [0, 2, 3, 5, 6, 9, 11], "container_nam": 8, "content": [0, 2, 3], "context": 6, "continu": [0, 2, 6, 8], "conveni": [6, 9], "convers": 0, "convert": 0, "core": [2, 8], "corner": 6, "correct": 2, "cost": 2, "could": [0, 2, 3, 4, 6, 8, 9], "couldn": 0, "count": 9, "count_pct": 9, "countri": 9, "coupl": [2, 3, 6], "cours": 3, "cover": [6, 9], "cpu": 6, "crash": [2, 3], "creat": [0, 3, 4, 5, 6, 8, 9, 10], "creativ": 2, "crt": 8, "crucial": 3, "csv": [0, 2], "current": [0, 2, 9], "current_ddp_categori": 0, "current_status_cod": 0, "custom": [0, 2, 8], "cycl": 9, "d": 0, "d3i": [6, 8], "dai": [8, 9], "dan": 2, "dat": 2, "data": [0, 5, 8, 11], "data_fram": [0, 9], 
"data_frame_df": 0, "data_frame_dict": 0, "datadon": [4, 7], "datafram": [0, 2, 3], "date": [0, 3], "datetim": 9, "db": 8, "db_host": 8, "db_name": 8, "db_pass": 8, "db_user": 8, "dd": 9, "ddp": [0, 2, 3], "ddp_categori": 0, "ddp_categories_lookup": 0, "ddp_filetyp": 0, "ddpcategori": [0, 1], "ddpfiletyp": [0, 1], "ddthh": 9, "de": 2, "deal": [0, 3], "decemb": 9, "decid": 2, "decod": 0, "def": 2, "default": [0, 2, 3, 6, 9, 11], "defin": [0, 2], "delet": [0, 9], "delete_opt": 0, "deliveri": 0, "denest": [0, 3], "depend": [3, 6, 9], "depends_on": 8, "deploi": 2, "deploy": [5, 10], "descript": [0, 2], "design": [2, 4], "detail": [6, 7, 8], "detect": 3, "determin": [2, 9], "develop": [2, 3, 4, 6, 8], "devic": [0, 2, 6], "df": [0, 2, 9], "dict": [0, 3, 9], "dict_denest": [0, 1, 3], "dictionari": [0, 3, 9], "did": [2, 3], "differ": [0, 2, 3, 9], "directli": [3, 4, 9], "directori": 2, "discuss": [4, 6], "displai": [0, 3, 9], "distribut": 6, "distro": 6, "divers": [2, 3], "divid": 9, "do": [2, 3, 4, 6, 8, 9, 11], "docker": [5, 10], "document": 8, "doe": [3, 5, 6], "doesn": 9, "domain": 9, "don": [2, 3], "donat": [0, 1, 5, 9, 11], "donate_button": 0, "donate_quest": 0, "done": [0, 2, 4], "dont": 2, "down": 9, "download": [2, 3, 4, 6], "driver": 2, "due": [2, 3], "dure": [0, 3, 6], "dutch": [0, 4, 9], "dynam": 9, "e": [2, 4, 9], "each": [0, 9], "easi": 3, "easier": 9, "educ": 9, "een": 2, "effici": 2, "einfra": 4, "either": [0, 9], "element": 2, "els": 2, "empti": [0, 9], "en": [0, 2, 9], "encod": 0, "encount": [2, 3, 6], "end": [0, 2, 3, 4], "engin": [2, 3], "english": [0, 2, 9], "enjoy": 9, "enough": 2, "entertain": 9, "enumer": 0, "environ": 8, "epoch": 0, "epoch_timestamp": 0, "epoch_to_iso": [0, 1], "equival": 0, "error": [0, 2, 3, 6], "essenc": 2, "etc": [2, 8], "ethic": 2, "eu": [4, 7], "even": 3, "event": 8, "everi": [3, 9], "everyth": 8, "ex": 6, "exact": 9, "exampl": [0, 2, 3, 5, 6, 11], "example1": 0, "example2": 0, "except": [0, 2], "exclud": 2, "exist": [2, 3], 
"exit": [0, 1, 2], "exit_port": 2, "expect": [3, 6], "experi": [2, 3], "explan": [0, 9], "explicitli": 9, "explor": 9, "extens": 0, "extern": 2, "extract": [1, 2, 3, 9, 10, 11], "extract_file_from_zip": [0, 1], "extract_the_data_you_are_interested_in": 2, "extracted_data": 2, "extracted_data_in_a_dictionari": 2, "extracted_fil": 0, "extraction_help": 0, "ey": 9, "eyra": 4, "f": 2, "fact": 3, "fail": [0, 2, 3], "failur": 3, "fair": 3, "fals": [0, 2, 9], "familiar": 6, "fast": 2, "feedback": 3, "fiction": 2, "file": [0, 2, 3, 4, 6, 8, 11], "file1": 0, "file2": 0, "file_list_input": 0, "file_prompt": 2, "file_prompt_result": 2, "file_s": 2, "file_to_extract": 0, "filenam": 0, "filenotfoundinziperror": [0, 1], "filesnam": 2, "fill": 9, "final": 9, "find": [0, 2, 3, 4, 6, 10], "find_item": [0, 1, 3], "fine": [2, 6], "finish": 2, "first": [0, 3, 5, 9], "fit": [2, 11], "fix": [0, 9], "fix_ascii_str": [0, 1], "fix_latin1_str": [0, 1], "flag": 0, "flatten": [0, 3], "float": 0, "flow": [2, 3, 4, 11], "fold": 0, "folder": [3, 8], "follow": [2, 4, 6, 8, 9, 10], "footer": [0, 2], "forget": [2, 6], "fork": [2, 8], "form": [0, 4, 9], "format": [0, 2, 3, 9], "forward": 8, "found": [0, 2], "framework": [2, 11], "free": [4, 8, 11], "freel": 11, "frequenc": 9, "fresh": 3, "from": [0, 3, 4, 6, 11], "frontend": 2, "frustrat": 3, "full": [2, 3, 9], "fulli": 0, "function": [0, 2, 9], "furthermor": 9, "futur": [2, 4], "g": 9, "ga": 2, "gap": 9, "garbag": 3, "gave": 3, "geen": 2, "gekozen": 2, "gener": [0, 2, 6], "generate_consent_prompt": 2, "generate_file_prompt": [0, 1, 2], "generate_retry_prompt": [0, 1, 2], "generate_review_data_prompt": [0, 1], "get": [0, 2, 3, 4, 6, 8], "get_status_code_id": 0, "getinfo": 2, "getvalu": 0, "git": 6, "github": 6, "give": [3, 6, 9], "given": [0, 9], "gmail": 8, "go": [2, 3, 4, 6, 8], "goe": [2, 3], "good": [2, 9], "google_sign_in_client_id": 8, "google_sign_in_client_secret": 8, "grant": 4, "graph": 9, "great": 8, "greather": 0, "greet": 6, "group": 
[0, 3, 9], "group_bi": 9, "guid": 5, "guidanc": 7, "ha": [2, 3, 4, 9], "handl": [2, 3, 9], "happen": [3, 6], "happi": 2, "hard": 2, "have": [2, 3, 4, 6, 7, 9, 11], "haven": 6, "head": 0, "header": [0, 2], "header_text": 0, "heavili": [3, 9], "heeft": 2, "height": 9, "helaa": 2, "hello": 0, "help": [3, 6, 9], "helper": [1, 10], "here": [2, 3, 4, 6, 9, 10], "het": 2, "hh": 9, "high": 2, "homebrew": 6, "host": [4, 8], "hour": 9, "hour_cycl": 9, "how": [4, 5, 6, 7, 8, 9], "howev": 2, "html": [2, 3], "http": [6, 8], "http_host": 8, "http_upgrad": 8, "huge": [2, 9], "human": 3, "i": [0, 2, 3, 6, 8, 9, 10], "icon": 6, "id": [0, 4, 8, 9], "idea": [0, 2], "ident": 9, "identifi": 0, "implement": 9, "impli": 9, "import": [2, 9, 11], "incent": 3, "includ": [0, 2], "index": 10, "indic": 0, "infer": 0, "infer_ddp_categori": 0, "infin": 0, "info": [0, 2], "inform": [0, 2, 4, 6, 9], "infra": 6, "inhoud": 2, "initi": 0, "inp": 0, "input": [0, 2, 9], "input_str": 0, "ins": 8, "insert": 0, "inspect": 3, "instagram": 3, "instal": [5, 10], "instanc": 0, "instead": [2, 3, 6, 9], "instruct": [0, 2, 3, 4, 6, 8, 9], "instructi": 2, "int": [0, 2], "integ": 0, "intel": 6, "interact": 2, "interes": 2, "interest": [2, 3, 4], "interfac": 2, "intern": 0, "interpret": 9, "interv": 9, "intric": 2, "introduct": [5, 10], "invalid": 0, "invent": 11, "investig": 3, "involv": 3, "io": 0, "ip": 8, "is_data_valid": 2, "iso": [0, 9], "issu": [2, 6], "item": [0, 4, 9], "its": [2, 3, 6], "j": [2, 6], "januari": 9, "json": [0, 2, 3, 6], "json_byt": 0, "json_dump": [0, 1], "json_fil": 0, "json_str": [0, 2], "juist": 2, "just": [2, 3, 4, 6, 9], "keep": [2, 3], "kei": [0, 2, 3, 4, 6, 8, 9], "kernel": 6, "key1": 3, "key2": 3, "key3": 3, "key_that_sometimes_is_called_something_differ": 3, "key_to_match": 0, "key_which_value_you_w": 3, "keyout": 8, "ki": 2, "kiezen": 2, "kind": 2, "klik": 2, "know": [3, 4, 6, 9], "knowledg": [3, 6], "known": 0, "known_fil": 0, "kunnen": 2, "label": [0, 9], "land": 4, "languag": 
[0, 1, 2, 3, 9], "last": [0, 9], "latest": 8, "latin1": 0, "lead": 2, "learn": [3, 8, 10], "least": [0, 2, 3, 6], "left": [2, 6], "less": [2, 3, 6], "let": 2, "level": 2, "lib": 8, "librari": 2, "like": 2, "limit": 5, "line": 11, "list": [0, 2, 3, 4, 9], "listen": 8, "live": 3, "ll": 9, "load": 2, "loadpackag": 2, "local": 6, "localhost": [6, 8], "locat": [0, 8], "log": [0, 2, 3, 4], "logic": 2, "login": 8, "long": 9, "longer": 2, "look": [0, 2, 3, 6, 9], "lookup": 0, "lot": [2, 6], "lower": 6, "lxml": 2, "mai": 0, "main": [0, 2, 9, 11], "make": [2, 3, 6, 9], "manag": [6, 8], "manipul": 6, "manual": [2, 6], "match": [0, 3], "matter": 2, "max": 9, "mean": [2, 3, 4, 6, 9], "mei": 0, "mention": 9, "messag": [0, 6], "meta": [0, 2], "meta_t": 0, "method": 9, "micropip": 2, "might": [2, 3, 4, 6], "mime": 0, "min": 9, "mind": [2, 3], "minim": 3, "miss": 9, "mistak": 3, "mm": 9, "mobil": 2, "mock": 6, "modul": [0, 2, 10, 11], "mondai": 9, "month": 0, "month_cycl": 9, "more": [2, 3, 6, 8, 9], "most": [2, 3, 9], "mostli": 9, "motherboard": 6, "much": [2, 3, 6], "multi": 0, "multipl": [0, 3, 9], "must": [0, 2], "nalic": 0, "name": [0, 2, 3, 4, 8, 9], "namelist": 2, "natur": [2, 3], "nbob": 0, "necessari": 8, "need": [0, 2, 3, 6, 8, 9], "nest": [0, 3], "nested_dict": 0, "never": 6, "new": [0, 4, 6], "newkei": 8, "newli": 4, "next": [0, 2, 5, 10], "nice": 9, "niet": 2, "nl": [2, 9], "node": [6, 8], "nodej": 6, "non": [0, 3], "none": 0, "normal": [2, 9], "note": [0, 6, 8, 9], "noth": 2, "notic": [2, 9], "now": [2, 6, 8, 9], "npm": [2, 4, 6], "number": [2, 3, 9], "numer": [0, 9], "numpi": 2, "object": [0, 9], "oblig": 3, "obtain": 8, "occur": [0, 2, 9], "off": 2, "offer": 4, "offici": [6, 8], "often": [2, 3], "ok": [0, 2], "onder": 2, "one": [2, 3, 9], "onli": [0, 2, 3, 6, 9], "op": 2, "open": [0, 4, 6, 11], "openid": 8, "openssl": 8, "oper": 6, "opgeslagen": 2, "opnieuw": 2, "opportun": 3, "optim": 2, "option": [0, 9], "order": [2, 3, 6, 8, 11], "organ": [3, 4], "origin": 0, 
"other": [0, 2], "otherwis": [0, 2], "our": [3, 7, 8, 9], "out": [2, 3, 5, 10], "outlin": 8, "outsid": 6, "over": 3, "overslaan": 2, "overview": 9, "own": [3, 4, 5, 6, 9, 10], "packag": [0, 3, 6], "page": [0, 2, 4, 9, 10], "pai": [2, 3], "pair": [0, 3], "panda": [0, 2], "paramet": 0, "pars": [0, 2, 3], "part": [0, 9], "particip": [0, 3, 4, 9], "pass": 0, "password": [6, 8], "past": 8, "path": [0, 3, 6], "path_to_zip": 0, "payloadjson": 2, "payloadstr": 2, "payloadtru": 2, "pct": 9, "pd": [0, 2, 3], "peopl": 3, "per": 2, "percentag": [0, 9], "perform": [2, 3, 9], "period": 9, "person": [2, 3], "phoenix": 8, "pick": 9, "pipx": 6, "pixel": 9, "place": [0, 2, 10], "plai": 8, "plain": [0, 2], "platform": [0, 2, 3, 4, 8, 10], "platform_nam": 0, "pleas": [2, 4], "poetri": 6, "point": 3, "polici": [2, 4], "popul": [2, 3], "port": [1, 2, 6, 8, 10, 11], "port_help": 0, "portal": 4, "portion": 3, "possibl": [3, 9], "possibli": 2, "postgr": 8, "postgres_data": 8, "postgres_db": 8, "postgres_password": 8, "postgres_us": 8, "postgresql": 8, "potenti": 0, "powershel": 6, "practic": 2, "pre": 5, "predefin": 0, "prefer": [3, 6], "prepar": 7, "preprocess": 9, "prerequisit": 5, "present": [0, 2, 3, 6], "preserv": 2, "press": 2, "pretti": 2, "prevent": 3, "previou": [8, 9], "primari": 9, "princip": 3, "print": [0, 2], "prior": [0, 2], "privaci": [2, 4], "probabl": [3, 9], "probeer": 2, "problem": [3, 6], "process": [0, 2, 3, 9, 11], "processingwork": 2, "produc": 7, "product": 8, "program": [3, 6], "progress": [0, 2], "project": 3, "prompt": [0, 2], "prop": [1, 2, 9, 10], "propsuifoot": [0, 1, 2], "propsuihead": [0, 1, 2], "propsuipagedon": [0, 1, 2], "propsuipageend": [0, 1, 2], "propsuipromptconfirm": [0, 1, 2], "propsuipromptconsentform": [0, 1, 2], "propsuipromptconsentformt": [0, 1, 2, 9], "propsuipromptfileinput": [0, 1, 2], "propsuipromptfileinputmultipl": [0, 1], "propsuipromptprogress": [0, 1], "propsuipromptquestionnair": [0, 1], "propsuipromptradioinput": [0, 1], 
"propsuiquestionmultiplechoic": [0, 1], "propsuiquestionmultiplechoicecheckbox": [0, 1], "propsuiquestionopen": [0, 1], "proto": 8, "provid": [0, 3, 4, 7, 8, 9, 10, 11], "proxi": 8, "proxy_http_vers": 8, "proxy_max_temp_file_s": 8, "proxy_pass": 8, "proxy_set_head": 8, "purpos": 0, "put": [2, 8, 9], "py": [9, 11], "py_work": 2, "pyodid": 2, "python": 6, "quarter": 9, "queri": 9, "question": 0, "questionnair": 0, "quit": 3, "qwe": 0, "radio": 0, "radioitem": [0, 1], "rais": 0, "random": 3, "rang": 9, "reach": 0, "react": 2, "read": [0, 3, 8], "read_csv_from_byt": [0, 1], "read_csv_from_bytes_to_df": [0, 1], "read_json_from_byt": [0, 1], "read_json_from_fil": [0, 1], "readi": [3, 8], "readibl": 3, "real": [2, 8], "realiti": [2, 6], "realiz": 4, "realli": 2, "reason": 3, "receiv": [0, 2, 3], "recht": 2, "recommend": [6, 9], "redirect": 8, "refer": [2, 7], "reflect": 2, "regardless": 0, "relat": 6, "releas": 4, "remain": 3, "rememb": 6, "remote_addr": 8, "remov": [0, 9], "render": [0, 2], "render_end_pag": [0, 1, 2], "render_pag": [0, 1, 2], "replac": [0, 8], "replace_month": [0, 1], "repositori": [2, 4, 6], "repres": 0, "represent": 0, "req": 8, "request": [3, 4, 8], "request_uri": 8, "requir": [2, 3, 6, 9], "requisit": 5, "research": [0, 2, 3], "respond": 9, "respons": 3, "restart": 8, "result": [0, 2, 3], "resum": 2, "retri": [0, 2], "retry_prompt": 2, "retry_prompt_result": 2, "return": [0, 2, 8], "reusabl": 2, "revers": 8, "review": [0, 2], "right": 2, "risk": 8, "row": [0, 9], "rsa": 8, "run": [0, 2, 4, 5, 6], "run_first": 0, "runn": 6, "sad": 2, "sai": 9, "same": [3, 9], "sampl": 3, "scenario": 3, "scheme": 8, "scienc": 4, "scope": 6, "screen": [2, 3], "script": [3, 5, 9, 11], "search": [0, 6, 9, 10], "season": 9, "secondari": 9, "secret": 8, "secret_key_bas": 8, "section": 6, "see": [2, 3, 4, 6, 8, 9, 11], "seen": [2, 9], "select": [0, 2, 4, 9], "self": [2, 8], "selfsign": 8, "send": [0, 2], "sens": 9, "sent": [2, 4], "separ": [2, 9], "seri": 0, "seriou": 3, 
"server": [2, 6, 8], "server_nam": 8, "servic": 8, "session_id": 2, "set": [0, 2, 3, 4, 6, 7, 8, 9], "set_current_status_code_by_id": 0, "sever": 2, "share": [0, 3, 6], "short": 0, "shot": 3, "should": [0, 2, 3, 6, 8, 9], "show": [0, 2, 8], "shown": [0, 2, 9], "sign": 8, "similar": 2, "simpl": 3, "simpli": [7, 9], "singl": [0, 9], "situat": 6, "size": 2, "skip": 2, "slow": [2, 9], "small": 2, "so": [2, 3, 4, 6, 8, 9, 11], "softwar": [4, 6], "solut": [4, 6], "solv": 6, "some": [2, 3, 6, 8, 9], "someth": [2, 4, 6], "somethin": 2, "sometim": 3, "sort": [0, 3], "sort_isotimestamp_empty_timestamp_last": [0, 1], "sort_valu": 0, "sourc": [4, 8], "specif": [0, 4, 5], "specifi": [0, 9], "split": 2, "sport": 9, "src": [2, 11], "ss": 9, "ssl": 8, "ssl_certif": 8, "ssl_certificate_kei": 8, "stacktrac": 3, "standard": 11, "start": [3, 5, 6, 8], "static_path": 8, "statist": 9, "statu": 0, "status_cod": 0, "status_codes_lookup": 0, "statuscod": [0, 1], "step": [2, 5, 6], "steward": 2, "still": [2, 3], "stop": 2, "storag": [0, 2], "storage_servic": 8, "store": [0, 2, 4], "str": [0, 2], "straight": 4, "straightforward": 6, "string": [0, 9], "structur": [0, 3, 8], "stuck": 3, "studi": [3, 5, 6, 7, 8, 11], "sub": 9, "submit": [0, 2, 3], "substanti": 2, "substr": 0, "subsystem": 6, "succesfulli": 3, "success": [0, 2], "suggest": 6, "suit": 6, "suitabl": 9, "sum": 9, "sundai": 9, "support": [0, 6, 9], "sure": [2, 3, 6, 9], "survei": 3, "system": [0, 2, 6], "t": [0, 2, 3, 8, 9], "tab": 7, "tabel": 9, "tabl": [0, 2, 5], "table_list": 0, "table_titl": [2, 9], "tableid": 9, "tailor": [2, 3], "take": [2, 3, 9], "target": 3, "task": [0, 3, 5], "tell": 3, "termin": 6, "test": 3, "test_databas": 8, "text": [0, 2], "textcolumn": 9, "textual": 9, "than": [0, 2, 3], "thank": [0, 2], "thats": 2, "thei": [0, 2, 3, 4, 6, 9], "them": [0, 2, 3, 6, 8, 9], "theori": 6, "therebi": 9, "therefor": 3, "thi": [0, 2, 3, 4, 6, 8, 9, 10, 11], "thing": [2, 9], "think": [2, 3], "third": 9, "those": [2, 3], 
"three": 9, "time": [2, 3, 6], "timestamp": [0, 9], "timestamp_seri": 0, "titel": 9, "titl": [0, 9], "tmp": 8, "todict": 0, "togeth": 2, "token": 9, "too": [2, 3, 6], "tool": 4, "toolkit": 2, "topic": 6, "total": 9, "touch": 2, "tradit": 3, "translat": [0, 1, 2], "translate_meta_t": 0, "translate_t": 0, "trigger": 0, "trivial": 3, "troubl": 6, "true": [0, 2, 9], "try": [0, 3, 5, 6, 10], "turn": 6, "tutori": [2, 4, 8], "tweak": 2, "two": 3, "txt": 0, "type": [0, 9], "typic": 4, "u": 2, "ubuntu": 6, "ui": [0, 2], "unburden": 4, "under": 3, "understand": [2, 6], "unexpect": 0, "unfortun": 2, "uniform": 0, "uninstal": 6, "uniqu": [0, 3, 4], "univers": 4, "unknown": 0, "unsplash_access_kei": 8, "unsplash_app_nam": 8, "until": 2, "up": [2, 3, 4, 5, 7, 8], "updat": 9, "upgrad": 8, "upon": [0, 2], "url": [4, 9], "url_domain": 9, "us": [0, 3, 5, 8, 9, 11], "usabl": 2, "user": [0, 2, 6, 8], "usernam": 6, "utc": 0, "utf": 0, "uw": 2, "v": 6, "valid": [1, 2, 3, 10], "validate_the_participants_input": 2, "validate_zip": [0, 1], "validateinput": [0, 1], "valu": [0, 2, 3, 8, 9], "value_i_want_to_extract": 2, "valuecolumn": 9, "var": 8, "vari": [2, 9], "variabl": [6, 8], "varieti": 2, "variou": [0, 10, 11], "ve": 0, "verder": 2, "verifi": 3, "versa": 6, "version": [2, 3, 6, 8], "verwerken": 2, "vice": 6, "video": [2, 7], "view": 4, "virtual": 6, "vis1": 9, "vis2": 9, "vis3": 9, "vis4": 9, "visual": [0, 5, 10], "visuali": 9, "visualizatoin": 9, "volg": 2, "volum": 8, "vscode": 6, "wa": 2, "wai": [0, 2, 3, 4, 6], "want": [0, 2, 3, 4, 8], "warranti": 4, "we": [2, 3, 6, 7, 8, 9, 10, 11], "web": [2, 6, 8], "websit": 7, "websocket": 8, "weekday_cycl": 9, "weet": 2, "welcom": 10, "well": [2, 8, 9], "went": [2, 8], "what": [0, 2, 3, 4, 6, 9], "wheel": 11, "when": [0, 3, 4, 6, 9], "whenev": [2, 3], "where": [0, 2, 3, 4, 6, 9], "whether": [0, 3, 6], "which": [0, 2, 3, 6, 8, 9, 10], "while": [2, 3], "who": 4, "whole": 2, "whose": 0, "why": 3, "wide": 9, "wiki": [2, 10], "wildli": 3, "wilt": 
2, "wise": 2, "within": [2, 9], "without": 5, "won": [2, 3, 6], "work": [3, 4, 5, 6, 8, 9], "worst": 3, "worth": 3, "would": [2, 3], "wrap": 5, "wrapper": 0, "write": [3, 5, 6], "wrong": [2, 3], "wrote": 2, "x": [8, 9], "x509": 8, "y": 9, "year": 9, "yield": 0, "yoda": [4, 8], "you": [0, 2, 3, 4, 6, 8, 9, 10, 11], "your": [3, 5, 6, 8, 9, 10], "youremail": 8, "yourself": [2, 4], "youtub": [2, 3, 9], "yyyi": 9, "z": 9, "zeker": 2, "zero": [0, 9], "zf": 2, "zfile": 0, "zip": [0, 2, 3, 4], "zip_cont": 2, "zip_fil": 2, "zipfil": [0, 2, 3], "zone": 4, "\u4e16\u754c": 0}, "titles": ["API Reference", "API Reference", "Creating your own donation task", "Data donation checklist", "Deployment of the data donation task", "Articles", "Installation", "Introduction to Data Donation", "Try out Next with Docker", "Adding data visualizations", "The Data Donation Task Documentation", "Platform Documentation"], "titleterms": {"A": 9, "The": [2, 4, 10], "ad": 9, "add": 4, "aggreg": 9, "api": [0, 1, 2, 10], "area": 9, "argument": 9, "articl": 5, "avail": 11, "bar": 9, "build": 8, "categor": 9, "categori": 9, "certif": 8, "chart": 9, "checklist": 3, "choos": 4, "cloud": 4, "commun": 4, "compos": 8, "configur": 8, "contain": 8, "creat": 2, "data": [2, 3, 4, 6, 7, 9, 10], "date": 9, "dateformat": 9, "deploy": 4, "detail": 9, "docker": 8, "document": [10, 11], "doe": 2, "don": 6, "donat": [2, 3, 4, 6, 7, 10], "exampl": 9, "extract": 0, "first": 2, "from": 2, "gener": 9, "get": 10, "googl": 8, "guid": [6, 9], "helper": 0, "how": 2, "i": 4, "imag": 8, "indic": 10, "instagram": 11, "instal": [2, 6], "introduct": 7, "level": 9, "limit": 2, "line": 9, "linux": 6, "mac": 6, "month": 9, "next": [4, 8], "nginx": 8, "oidc": 8, "option": 4, "osx": 6, "out": 8, "over": 9, "own": 2, "packag": 2, "paid": 4, "particip": 2, "per": 9, "perspect": 2, "platform": 11, "port": 0, "pre": 6, "prerequisit": 8, "prop": 0, "py": 2, "python": 2, "refer": [0, 1, 10], "requisit": 6, "research": 4, "run": 8, "script": 
[2, 10], "second": 9, "self": 4, "servic": 4, "setup": 8, "should": 4, "spec": 9, "specif": 9, "standard": 10, "start": [2, 10], "step": 8, "studi": [2, 4], "surf": 4, "t": 6, "tabl": [9, 10], "task": [2, 4, 6, 10], "text": 9, "time": 9, "tip": 2, "tl": 8, "translat": 9, "try": [2, 8], "unsplash": 8, "up": 9, "us": [2, 4, 6], "usag": 2, "valid": 0, "variabl": 9, "version": 4, "view": 9, "visual": 9, "want": 6, "when": 2, "which": 4, "window": 6, "without": 4, "wordcloud": 9, "work": 2, "wrap": 9, "write": 2, "wsl": 6, "yaml": 8, "yield": 2, "your": [2, 4]}}) \ No newline at end of file diff --git a/standard_scripts/index.html b/standard_scripts/index.html new file mode 100644 index 00000000..7fd8f25f --- /dev/null +++ b/standard_scripts/index.html @@ -0,0 +1,216 @@ + + + + + + + + Platform Documentation — Port 1.0.0 documentation + + + + + + + + + + + + +
+ + + +
+ + + + + +
+
+
+
+ +
+

Platform Documentation

+

For various platforms we provide default extraction scripts, so you do not have to reinvent the wheel.

+

Feel free to use the extraction scripts as you see fit.

+

In order to use the scripts open the file src/framework/processing/py/port/main.py and change this line:

+
from port.script import process
+
+
+

to:

+
#from port.script import process
+
+# Change to (in this case the standard script for instagram will be used):
+from port.platforms.instagram import process
+
+
+
+

Available platforms

+

ChatGPT

+

This module contains an example flow of a ChatGPT data donation study

+
+
+

Instagram

+

Instagram

+

This module contains an example flow of an Instagram data donation study

+
+
+ + +
+
+
+
+ + + + + + +
+
+
+
+ + +
+ +
+
+
+ + + + +

Styled using the Piccolo Theme

+ + \ No newline at end of file