diff --git a/.vscode/settings.example.json b/.vscode/settings.example.json
index 1989c4133..eacc15168 100644
--- a/.vscode/settings.example.json
+++ b/.vscode/settings.example.json
@@ -11,6 +11,8 @@
     }
   },
   "eslint.workingDirectories": ["frontend"],
+  "editor.defaultFormatter": "esbenp.prettier-vscode",
+  "editor.formatOnSave": true,
   "flake8.args": ["--config=.flake8"],
   // NOTE: Uncomment following line and fix "optinist_dev" to your conda env name
   // "flake8.path": ["conda", "run", "-n", "optinist_dev", "python", "-m", "flake8"],
diff --git a/docker-compose.dev.multiuser.yml b/docker-compose.dev.multiuser.yml
new file mode 100644
index 000000000..3ae033212
--- /dev/null
+++ b/docker-compose.dev.multiuser.yml
@@ -0,0 +1,58 @@
+version: "3"
+
+services:
+  db:
+    image: mysql:8.4
+    ports:
+      - "127.0.0.1:13306:3306"
+    env_file:
+      - studio/config/.env
+    volumes:
+      - db_data:/var/lib/mysql
+    environment:
+      TZ: Asia/Tokyo
+    healthcheck:
+      test: ["CMD", "mysqladmin", "ping", "-h", "127.0.0.1"]
+      interval: 10s
+      timeout: 5s
+      retries: 3
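+      # NOTE: the backend service below (studio-dev-be) waits for this
+      # healthcheck via "depends_on: db: condition: service_healthy".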
Select "Sign-in method" tab. @@ -35,7 +37,7 @@ Follow the steps below to setup `multiuser` mode. 5. Click "Email/Password" and enable it. 6. Click "Save". -### Create admin user for the project +#### Create admin user for the project 1. Select "Authentication" from the left menu. 2. Select "Users" tab. 3. Click "Add user" button. @@ -45,7 +47,7 @@ Follow the steps below to setup `multiuser` mode. - Created user's "User UID" is required later. -### Get Firebase tokens +#### Get Firebase tokens 1. Click setting icon(besides Project Overview), then select "Project settings" from the left menu. 2. Select "General" tab. 3. Select "web app" in "Your apps" section. @@ -62,90 +64,115 @@ Follow the steps below to setup `multiuser` mode. - (keep databaseURL blank) 7. Select "Service accounts" tab. 8. Click "Generate new private key" in "Firebase Admin SDK" section. -9. Put the downloaded file to `studio/config/auth/firebase_private.json`. - -### Setup mysql(or mariadb) -1. Install mysql(or mariadb) server. -2. Connect mysql server. +9. Save the downloaded file to `studio/config/auth/firebase_private.json`. + +### Setup Database +- Set up your own mysql (or mariadb) server or use docker compose mysql +- Below are the instructions for using mysql with docker compose. + +1. Edit configs. + - Edit studio/config/.env + - Set `MYSQL_SERVER` to db server host or ip + - Format: `{DB_HOST}:{DB_PORT}` + - \*For docker platform, the fixed value `db:3306` is fine. + - Set `MYSQL_ROOT_PASSWORD` to database root password, which you have decided. + - Set `MYSQL_DATABASE` to `{YOUR_DATABASE_NAME}`, which you have decided. + - Set `MYSQL_USER` to `{DB_USER_NAME}`, which you have decided. + - Set `MYSQL_PASSWORD` to `{DB_USER_PASSWORD}`, which you have decided. +2. Install & run mysql server. ```bash - mysql -u root -p - ``` -3. Create database for your project. - ```sql - CREATE DATABASE YOUR_DATABASE_NAME; - ``` -4. Create user for your project. - ```sql - CREATE USER 'DB_USER_NAME'@'localhost' IDENTIFIED BY 'DB_USER_PASSWORD'; - ``` -5. Grant all privileges to the user for the database. - ```sql - GRANT ALL PRIVILEGES ON YOUR_DATABASE_NAME.* TO 'DB_USER_NAME'@'localhost'; + docker compose -f docker-compose.dev.multiuser.yml up db -d ``` - -### Set OptiNiSt config -1. Edit `studio/config/.env` + - The database and db_user are automatically generated based on the .env settings. +3. Check connection to mysql server. + - Connecting via docker command + ```bash + docker exec -it {DB_CONTAINER_NAME} mysql -u {DB_USER_NAME} -p {YOUR_DATABASE_NAME} + mysql> exit + ``` + - Note: `{DB_CONTAINER_NAME}` is the container name or container ID of the database docker container. (Can be confirmed with `docker ps`) + - Connect via mysql command (requires mysql-client) + ```bash + mysql -h {DB_HOST} --port={DB_PORT} -u {DB_USER_NAME} -p {YOUR_DATABASE_NAME} + mysql> exit + ``` + - If a connection to the database server is available, the setup was successful. + +### Setup & Run OptiNiSt + +#### For Docker Platform + +To use multiuser mode with Docker, perform the following steps. + +##### Setup Backend + +###### 1. Set OptiNiSt config +- Edit `studio/config/.env` - Change `SECRET_KEY` to any random string. - Change `USE_FIREBASE_TOKEN` to `True`. - Change `IS_STANDALONE` to `False` - - Set `MYSQL_SERVER` to your host - - Set `MYSQL_DATABASE` to {YOUR_DATABASE_NAME}, which you created in the previous step. - - Set `MYSQL_USER` to {DB_USER_NAME}, which you created in the previous step. 
+2. Install & run mysql server.
    ```bash
-   mysql -u root -p
-   ```
-3. Create database for your project.
-   ```sql
-   CREATE DATABASE YOUR_DATABASE_NAME;
-   ```
-4. Create user for your project.
-   ```sql
-   CREATE USER 'DB_USER_NAME'@'localhost' IDENTIFIED BY 'DB_USER_PASSWORD';
-   ```
-5. Grant all privileges to the user for the database.
-   ```sql
-   GRANT ALL PRIVILEGES ON YOUR_DATABASE_NAME.* TO 'DB_USER_NAME'@'localhost';
+   docker compose -f docker-compose.dev.multiuser.yml up db -d
    ```
-
-### Set OptiNiSt config
-1. Edit `studio/config/.env`
+   - The database and DB user are created automatically based on the `.env` settings.
+3. Check connection to mysql server.
+   - Connect via docker command
+     ```bash
+     docker exec -it {DB_CONTAINER_NAME} mysql -u {DB_USER_NAME} -p {YOUR_DATABASE_NAME}
+     mysql> exit
+     ```
+     - Note: `{DB_CONTAINER_NAME}` is the container name or container ID of the database docker container. (Can be confirmed with `docker ps`)
+   - Connect via mysql command (requires mysql-client)
+     ```bash
+     mysql -h {DB_HOST} --port={DB_PORT} -u {DB_USER_NAME} -p {YOUR_DATABASE_NAME}
+     mysql> exit
+     ```
+   - If a connection to the database server is available, the setup was successful.
+
+### Setup & Run OptiNiSt
+
+#### For Docker Platform
+
+To use multiuser mode with Docker, perform the following steps.
+
+##### Setup Backend
+
+###### 1. Set OptiNiSt config
+- Edit `studio/config/.env`
   - Change `SECRET_KEY` to any random string.
   - Change `USE_FIREBASE_TOKEN` to `True`.
   - Change `IS_STANDALONE` to `False`
-  - Set `MYSQL_SERVER` to your host
-  - Set `MYSQL_DATABASE` to {YOUR_DATABASE_NAME}, which you created in the previous step.
-  - Set `MYSQL_USER` to {DB_USER_NAME}, which you created in the previous step.
-  - Set `MYSQL_PASSWORD` to {DB_USER_PASSWORD}, which you created in the previous step.
-  - `MYSQL_ROOT_PASSWORD` can be left commented.
-
-### Setup Frontend
-1. Install node.js(version 20)
-   - https://nodejs.org
-2. Install yarn
-   ```bash
-   npm install -g yarn
-   ```
-3. Install frontend requirements
-   ```bash
-   cd frontend
-   yarn install
-   ```
-4. Build frontend
-   ```bash
-   yarn build
-   ```
-### Setup Backend
-- See OptiNiSt installation guide.
-- After create and activate conda environment for the project, run following commands
+###### 2. Start backend (the database is set up on startup)
+```bash
+docker compose -f docker-compose.dev.multiuser.yml up studio-dev-be -d
+```

-1. Install backend requirements
-   ```bash
-   cd studio
-   pip install .
-   ```
-2. Setup database
-   ```bash
-   alembic upgrade head
-   ```
-3. Insert initial data
+###### 3. Insert initial data
+```bash
+docker exec -it {DB_CONTAINER_NAME} mysql -u {DB_USER_NAME} -p {YOUR_DATABASE_NAME}
+```
+```sql
+INSERT INTO organization (name) VALUES ('{YOUR_ORGANIZATION_NAME}');
+INSERT INTO roles (id, role) VALUES (1, 'admin'), (20, 'operator');
+INSERT INTO users (uid, organization_id, name, email, active) VALUES ('{FIREBASE_USER_UID}', 1, '{YOUR_NAME}', '{YOUR_EMAIL}', true);
+INSERT INTO user_roles (user_id, role_id) VALUES (1, 1);
+```
+- Note on Variables
+  - `{FIREBASE_USER_UID}` ... The user uid you created in the previous step ([Create admin user for the project](#create-admin-user-for-the-project)).
+  - `{YOUR_ORGANIZATION_NAME}` ... Display name on system (Any text)
+  - `{YOUR_NAME}` ... Display name on system (Any text)
+  - `{YOUR_EMAIL}` ... Email address corresponding to `{FIREBASE_USER_UID}`
+
+- About Roles
+  - Only 2 roles, "admin" and "operator", are supported for now.
+    - "admin"
+      - can manage other users
+    - "operator"
+      - general user
+  - More information is [here](usage.md).
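+
+- To verify the inserted rows (optional; a quick sanity check, assuming the default auto-increment IDs):
+  ```sql
+  SELECT u.id, u.name, r.role FROM users u
+  JOIN user_roles ur ON ur.user_id = u.id
+  JOIN roles r ON r.id = ur.role_id;
+  ```
+  - A single row showing your admin user with the role 'admin' is expected.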
+
+##### Run OptiNiSt
+```bash
+docker compose -f docker-compose.dev.multiuser.yml up -d
+```
-   ```bash
-   mysql -u DB_USER_NAME -p
-   ```
-   ```sql
-   USE YOUR_DATABASE_NAME;
-   INSERT INTO organization (name) VALUES ('YOUR_ORGANIZATION_NAME');
-   INSERT INTO roles (id, role) VALUES (1, 'admin'), (20, 'operator');
-   INSERT INTO users (uid, organization_id, name, email, active, ) VALUES ('USER_UID', 1, 'YOUR_EMAIL', 'YOUR_PASSWORD', 1);
-   INSERT INTO user_roles (user_id, role_id) VALUES (1, 1);
-   ```
-   - USER_UID is the user uid you created in the previous step ([Create admin user for the project](#create-admin-user-for-the-project)).
-   - Only 2 roles, "admin" and "operator" are supported for now.
-     - "admin"
-       - can manage other users
-     - "operator"
-       - general user
-
-### Run OptiNiSt
+1. Access `http://{YOUR_HOST}:8000` from your browser.
+2. Confirm that you can sign in with your Firebase Authentication account.
+
+#### For Non-Docker Platforms
+
+Below are the steps for non-Docker platforms (Windows, Mac, Linux).
+
+##### Setup Backend
+- See [OptiNiSt installation guide](../installation/index.rst).
+- After creating and activating a conda environment for the project, run the following commands.
+
+###### 1. Set OptiNiSt config
+- Same as [Set OptiNiSt config](#set-optinist-config) procedure.
+
+###### 2. Setup database
+```bash
+cd {OPTINIST_ROOT_PATH}  # root path of the cloned repository
+alembic upgrade head
+```
+
+###### 3. Insert initial data
+- Same as [Insert initial data](#insert-initial-data) procedure.
+
+##### Run OptiNiSt
 ```bash
 python main.py
 ```
-- Access to `http://{YOUR_HOST}:8000` from your browser.
+1. Access `http://{YOUR_HOST}:8000` from your browser.
+2. Confirm that you can sign in with your Firebase Authentication account.
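+3. As a quick smoke test, the `/is_standalone` endpoint (defined in `studio/__main_unit__.py`) should return `false` while `multiuser` mode is active:
+   ```bash
+   curl http://{YOUR_HOST}:8000/is_standalone
+   ```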
diff --git a/docs/index.rst b/docs/index.rst
index a99d40fd2..33a56812e 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -36,4 +36,5 @@ Main Features
    gui/index
    host_for_multiuser/index
    utils/index
+   utils/nwb_file
    for_developers/index
diff --git a/docs/installation/docker_for_developer.md b/docs/installation/docker_for_developer.md
index 5d48856f0..33e12281f 100644
--- a/docs/installation/docker_for_developer.md
+++ b/docs/installation/docker_for_developer.md
@@ -24,6 +24,12 @@ git clone https://github.com/oist/optinist.git
 cd ./optinist
 ```

+- copy config files
+  ```
+  cp studio/config/.env.example studio/config/.env
+  cp frontend/.env.example frontend/.env
+  ```
+
 ### Start docker container

 ```
diff --git a/docs/installation/each_platforms_for_developer.md b/docs/installation/each_platforms_for_developer.md
index 7c1d3e85f..daedd9873 100644
--- a/docs/installation/each_platforms_for_developer.md
+++ b/docs/installation/each_platforms_for_developer.md
@@ -34,7 +34,7 @@ Get node with version 20
 You can also install node via [nvm](https://github.com/nvm-sh/nvm)

 After install node, install yarn.
-```bash
+```
 npm install -g yarn
 ```

@@ -45,6 +45,12 @@ git clone https://github.com/oist/optinist.git
 cd ./optinist
 ```

+- copy config files
+  ```
+  cp studio/config/.env.example studio/config/.env
+  cp frontend/.env.example frontend/.env
+  ```
+
 ### Create anaconda environment

 ```
diff --git a/docs/installation/mac.md b/docs/installation/mac.md
index ff8f3667c..9b2de6330 100644
--- a/docs/installation/mac.md
+++ b/docs/installation/mac.md
@@ -21,15 +21,44 @@ Please follow instructions below.

 #### Install Anaconda

-- Download and install package.
-  - https://repo.anaconda.com/archive/
-  - Anaconda3-\*.\*-MacOSX-x86_64.pkg
-  - *The latest version of the module is ok.
+- Download and install the package:
+  - [Anaconda Archive](https://repo.anaconda.com/archive/)
+  - Download the latest version: `Anaconda3-*-MacOSX-x86_64.pkg`
+  - *The latest version of the module is fine.*

 ```{eval-rst}
 .. caution::
-   Even if you're using arm64 (Apple Sillicon, M1, M2...) architecture's Mac, x86_64 version is required.
-   Some modules cannot be installed by conda install or pip install in arm64 version.
+   Even if your Mac uses the arm64 (Apple Silicon, M1, M2...) architecture, the x86_64 version is required.
+   Some modules cannot be installed by conda install or pip install in the arm64 version.
+   Installing the x86_64 version of conda can be done using Rosetta.
+
+   1. Install Rosetta using the terminal:
+
+      .. code-block:: bash
+
+         /usr/sbin/softwareupdate --install-rosetta --agree-to-license
+
+   2. Open a Terminal session in Rosetta:
+      - Open your existing Terminal (which is running natively on ARM).
+      - Start a new Terminal session that emulates the x86_64 architecture using the following command:
+
+      .. code-block:: bash
+
+         arch -x86_64 /usr/bin/env bash
+
+   3. Download and install Miniconda:
+
+      .. code-block:: bash
+
+         curl -O https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
+
+         bash Miniconda3-latest-MacOSX-x86_64.sh
+
+         /Users/MYUSERNAME/miniconda3/bin/conda init
+
+         conda activate
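+
+   4. Optionally, confirm that the new shell really runs as x86_64 (a quick check; the expected output is ``x86_64``):
+
+      .. code-block:: bash
+
+         uname -m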
+
+   Now continue creating the optinist environment using conda.
 ```

 ### Create anaconda environment
diff --git a/studio/__main_unit__.py b/studio/__main_unit__.py
index 860058c2e..50b4ca7ac 100644
--- a/studio/__main_unit__.py
+++ b/studio/__main_unit__.py
@@ -1,5 +1,4 @@
 import argparse
-import logging

 import uvicorn
 from fastapi import Depends, FastAPI, Request
@@ -12,6 +11,7 @@
     get_admin_user,
     get_current_user,
 )
+from studio.app.common.core.logger import AppLogger
 from studio.app.common.core.mode import MODE
 from studio.app.common.core.workspace.workspace_dependencies import (
     is_workspace_available,
@@ -91,7 +91,8 @@ def skip_dependencies():
 @app.on_event("startup")
 async def startup_event():
     mode = "standalone" if MODE.IS_STANDALONE else "multiuser"
-    logging.info(f'"Studio" application startup complete. [mode: {mode}]')
+    logger = AppLogger.get_logger()
+    logger.info(f'"Studio" application startup complete. [mode: {mode}]')


 @app.get("/is_standalone", response_model=bool, tags=["others"])
diff --git a/studio/app/Snakefile b/studio/app/Snakefile
index bf3a37748..a94bc9183 100644
--- a/studio/app/Snakefile
+++ b/studio/app/Snakefile
@@ -1,9 +1,9 @@
 from studio.app.common.core.snakemake.smk_utils import SmkUtils
 from studio.app.common.core.utils.filepath_creater import join_filepath
 from studio.app.optinist.core.edit_ROI import EditRoiUtils
-from studio.app.const import FILETYPE
 from studio.app.dir_path import DIRPATH
+from studio.app.common.core.workflow.workflow import NodeType, NodeTypeUtil

 if config.get('type') == "EDIT_ROI":
     rule edit_ROI:
@@ -17,15 +17,10 @@

     for rule_name, details in config["rules"].items():
-        if details["type"] in [
-            FILETYPE.IMAGE,
-            FILETYPE.CSV,
-            FILETYPE.BEHAVIOR,
-            FILETYPE.HDF5,
-            FILETYPE.MATLAB,
-            FILETYPE.MICROSCOPE,
-        ]:
+        if NodeTypeUtil.check_nodetype_from_filetype(details["type"]) == NodeType.DATA:
             rule:
+                name:
+                    rule_name
                 input:
                     SmkUtils.input(details)
                 output:
@@ -36,6 +31,8 @@
                     f"{DIRPATH.APP_DIR}/common/core/rules/data.py"
         else:
             rule:
+                name:
+                    rule_name
                 input:
                     SmkUtils.input(details)
                 output:
diff --git a/studio/app/common/core/experiment/experiment_reader.py b/studio/app/common/core/experiment/experiment_reader.py
index d092a77fb..6789d4ba4 100644
--- a/studio/app/common/core/experiment/experiment_reader.py
+++ b/studio/app/common/core/experiment/experiment_reader.py
@@ -54,25 +54,3 @@ def read_output_paths(cls, config) -> Dict[str, OutputPath]:
             }
         else:
             return None
-
-    @classmethod
-    def rename(cls, filepath, new_name: str) -> ExptConfig:
-        with open(filepath, "r") as f:
-            config = yaml.safe_load(f)
-            config["name"] = new_name
-
-        with open(filepath, "w") as f:
-            yaml.dump(config, f, sort_keys=False)
-
-        return ExptConfig(
-            workspace_id=config["workspace_id"],
-            unique_id=config["unique_id"],
-            name=config["name"],
-            started_at=config.get("started_at"),
-            finished_at=config.get("finished_at"),
-            success=config.get("success", "running"),
-            hasNWB=config["hasNWB"],
-            function=cls.read_function(config["function"]),
-            nwb=config.get("nwb"),
-            snakemake=config.get("snakemake"),
-        )
diff --git a/studio/app/common/core/experiment/experiment_writer.py b/studio/app/common/core/experiment/experiment_writer.py
index 4d49a1c67..b8b87d6ae 100644
--- a/studio/app/common/core/experiment/experiment_writer.py
+++ b/studio/app/common/core/experiment/experiment_writer.py
@@ -1,8 +1,11 @@
 import os
+import shutil
 from dataclasses import asdict
 from datetime import datetime
 from typing import Dict

+import yaml
+
 from studio.app.common.core.experiment.experiment import ExptConfig, ExptFunction
 from studio.app.common.core.experiment.experiment_builder import ExptConfigBuilder
 from studio.app.common.core.experiment.experiment_reader import ExptConfigReader
@@ -98,3 +101,48 @@ def function_from_nodeDict(self) -> ExptConfig:
                 func_dict[node.id].success = "success"

         return self.builder.set_function(func_dict).build()
+
+
+class ExptDataWriter:
+    def __init__(
+        self,
+        workspace_id: str,
+        unique_id: str,
+    ):
+        self.workspace_id = workspace_id
+        self.unique_id = unique_id
+
+    def delete_data(self) -> bool:
+        # remove the whole output directory for this experiment
+        shutil.rmtree(
+            join_filepath([DIRPATH.OUTPUT_DIR, self.workspace_id, self.unique_id])
+        )
+        return True
+
+    def rename(self, new_name: str) -> ExptConfig:
+        filepath = join_filepath(
+            [
+                DIRPATH.OUTPUT_DIR,
+                self.workspace_id,
+                self.unique_id,
+                DIRPATH.EXPERIMENT_YML,
+            ]
+        )
+
+        with open(filepath, "r+") as f:
+            config = yaml.safe_load(f)
+            config["name"] = new_name
+            f.seek(0)  # requires seek(0) before write.
+            yaml.dump(config, f, sort_keys=False)
+            f.truncate()  # drop leftover bytes in case the new YAML is shorter.
+
+        return ExptConfig(
+            workspace_id=config["workspace_id"],
+            unique_id=config["unique_id"],
+            name=config["name"],
+            started_at=config.get("started_at"),
+            finished_at=config.get("finished_at"),
+            success=config.get("success", "running"),
+            hasNWB=config["hasNWB"],
+            function=ExptConfigReader.read_function(config["function"]),
+            nwb=config.get("nwb"),
+            snakemake=config.get("snakemake"),
+        )
diff --git a/studio/app/common/core/logger.py b/studio/app/common/core/logger.py
index 6b6a1b00c..6fba22f5c 100644
--- a/studio/app/common/core/logger.py
+++ b/studio/app/common/core/logger.py
@@ -1,5 +1,6 @@
 import logging
 import logging.config
+import os

 import yaml

@@ -12,7 +13,7 @@ class AppLogger:
     Generic Application Logger
     """

-    LOGGER_NAME = None  # Note: use root logger (empty name)
+    LOGGER_NAME = "optinist"
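+    # NOTE: named child loggers (e.g. logging.getLogger("optinist.workflow"))
+    # propagate records to this logger's handlers by default.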

     @staticmethod
     def init_logger():
@@ -39,7 +40,18 @@ def init_logger():
         )

         with open(log_config_file) as file:
-            logging.config.dictConfig(yaml.load(file.read(), yaml.FullLoader))
+            log_config = yaml.load(file.read(), yaml.FullLoader)
+
+            # create log output directory (if none exists)
+            log_file = (
+                log_config.get("handlers", {}).get("rotating_file", {}).get("filename")
+            )
+            if log_file:
+                log_dir = os.path.dirname(log_file)
+                if not os.path.isdir(log_dir):
+                    os.makedirs(log_dir)
+
+            logging.config.dictConfig(log_config)

     @staticmethod
     def get_logger():
diff --git a/studio/app/common/core/rules/data.py b/studio/app/common/core/rules/data.py
index c8c3da363..686defe07 100644
--- a/studio/app/common/core/rules/data.py
+++ b/studio/app/common/core/rules/data.py
@@ -12,37 +12,36 @@
 from studio.app.common.core.rules.file_writer import FileWriter
 from studio.app.common.core.snakemake.snakemake_reader import RuleConfigReader
 from studio.app.common.core.utils.pickle_handler import PickleWriter
+from studio.app.common.core.workflow.workflow import NodeType, NodeTypeUtil
 from studio.app.const import FILETYPE

 if __name__ == "__main__":
     last_output = snakemake.config["last_output"]

     rule_config = RuleConfigReader.read(snakemake.params.name)
-    if rule_config.type in [FILETYPE.IMAGE]:
-        rule_config.input = snakemake.input
-    elif rule_config.type in [
-        FILETYPE.CSV,
-        FILETYPE.BEHAVIOR,
-        FILETYPE.HDF5,
-        FILETYPE.MATLAB,
-        FILETYPE.MICROSCOPE,
-    ]:
-        rule_config.input = snakemake.input[0]
+
+    if NodeTypeUtil.check_nodetype_from_filetype(rule_config.type) == NodeType.DATA:
+        if rule_config.type in [FILETYPE.IMAGE]:
+            rule_config.input = snakemake.input
+        else:
+            rule_config.input = snakemake.input[0]
+    else:
+        assert False, f"Invalid rule type: {rule_config.type}"

     rule_config.output = snakemake.output[0]

+    outputfile = None
     if rule_config.type in [FILETYPE.CSV, FILETYPE.BEHAVIOR]:
         outputfile = FileWriter.csv(rule_config, rule_config.type)
-        PickleWriter.write(rule_config.output, outputfile)
     elif rule_config.type == FILETYPE.IMAGE:
         outputfile = FileWriter.image(rule_config)
-        PickleWriter.write(rule_config.output, outputfile)
     elif rule_config.type == FILETYPE.HDF5:
         outputfile = FileWriter.hdf5(rule_config)
-        PickleWriter.write(rule_config.output, outputfile)
     elif rule_config.type == FILETYPE.MATLAB:
         outputfile = FileWriter.mat(rule_config)
-        PickleWriter.write(rule_config.output, outputfile)
     elif rule_config.type == FILETYPE.MICROSCOPE:
         outputfile = FileWriter.microscope(rule_config)
-        PickleWriter.write(rule_config.output, outputfile)
+    else:
+        assert False, f"Invalid file type: {rule_config.type}"
+
+    PickleWriter.write(rule_config.output, outputfile)
diff --git a/studio/app/common/core/rules/runner.py b/studio/app/common/core/rules/runner.py
index 6961ef004..cb30d4c9b 100644
--- a/studio/app/common/core/rules/runner.py
+++ b/studio/app/common/core/rules/runner.py
@@ -150,6 +150,12 @@ def read_input_info(cls, input_files):
         input_info = {}
         for filepath in input_files:
             load_data = PickleReader.read(filepath)
+
+            # validate load_data content
+            assert (
+                type(load_data) is dict
+            ), f"Invalid node input data content. [{filepath}]"
+
             merged_nwb = cls.deep_merge(
                 load_data.pop("nwbfile", {}), input_info.pop("nwbfile", {})
             )
diff --git a/studio/app/common/core/snakemake/smk_utils.py b/studio/app/common/core/snakemake/smk_utils.py
index c7484f9fe..c26bc2415 100644
--- a/studio/app/common/core/snakemake/smk_utils.py
+++ b/studio/app/common/core/snakemake/smk_utils.py
@@ -2,6 +2,7 @@

 from studio.app.common.core.utils.filepath_creater import join_filepath
 from studio.app.common.core.utils.filepath_finder import find_condaenv_filepath
+from studio.app.common.core.workflow.workflow import NodeType, NodeTypeUtil
 from studio.app.const import FILETYPE
 from studio.app.dir_path import DIRPATH
 from studio.app.wrappers import wrapper_dict
@@ -10,16 +11,11 @@
 class SmkUtils:
     @classmethod
     def input(cls, details):
-        if details["type"] in [FILETYPE.IMAGE]:
-            return [join_filepath([DIRPATH.INPUT_DIR, x]) for x in details["input"]]
-        elif details["type"] in [
-            FILETYPE.CSV,
-            FILETYPE.BEHAVIOR,
-            FILETYPE.HDF5,
-            FILETYPE.MATLAB,
-            FILETYPE.MICROSCOPE,
-        ]:
-            return join_filepath([DIRPATH.INPUT_DIR, details["input"]])
+        if NodeTypeUtil.check_nodetype_from_filetype(details["type"]) == NodeType.DATA:
+            if details["type"] in [FILETYPE.IMAGE]:
+                return [join_filepath([DIRPATH.INPUT_DIR, x]) for x in details["input"]]
+            else:
+                return join_filepath([DIRPATH.INPUT_DIR, details["input"]])
         else:
             return [join_filepath([DIRPATH.OUTPUT_DIR, x]) for x in details["input"]]

@@ -29,14 +25,7 @@ def output(cls, details):

     @classmethod
     def conda(cls, details):
-        if details["type"] in [
-            FILETYPE.IMAGE,
-            FILETYPE.CSV,
-            FILETYPE.BEHAVIOR,
-            FILETYPE.HDF5,
-            FILETYPE.MATLAB,
-            FILETYPE.MICROSCOPE,
-        ]:
+        if NodeTypeUtil.check_nodetype_from_filetype(details["type"]) == NodeType.DATA:
             return None

         wrapper = cls.dict2leaf(wrapper_dict, details["path"].split("/"))
diff --git 
a/studio/app/common/core/snakemake/snakemake_rule.py b/studio/app/common/core/snakemake/snakemake_rule.py
index 5aff39666..e09217afc 100644
--- a/studio/app/common/core/snakemake/snakemake_rule.py
+++ b/studio/app/common/core/snakemake/snakemake_rule.py
@@ -44,14 +44,16 @@ def __init__(
         )

     def image(self) -> Rule:
-        return self.builder.set_type("image").build()
+        return self.builder.set_type(FILETYPE.IMAGE).build()

-    def csv(self, nodeType="csv") -> Rule:
+    def csv(self, nodeType=FILETYPE.CSV) -> Rule:
         return self.builder.set_type(nodeType).build()

     def hdf5(self) -> Rule:
         return (
-            self.builder.set_type("hdf5").set_hdf5Path(self._node.data.hdf5Path).build()
+            self.builder.set_type(FILETYPE.HDF5)
+            .set_hdf5Path(self._node.data.hdf5Path)
+            .build()
         )

     def mat(self) -> Rule:
diff --git a/studio/app/common/core/workflow/workflow.py b/studio/app/common/core/workflow/workflow.py
index e7efd7979..df310ed3c 100644
--- a/studio/app/common/core/workflow/workflow.py
+++ b/studio/app/common/core/workflow/workflow.py
@@ -4,20 +4,66 @@
 from pydantic import BaseModel

 from studio.app.common.core.snakemake.smk import ForceRun
+from studio.app.const import FILETYPE


 @dataclass
 class NodeType:
+    # Data Types
     IMAGE: str = "ImageFileNode"
     CSV: str = "CsvFileNode"
     FLUO: str = "FluoFileNode"
     BEHAVIOR: str = "BehaviorFileNode"
     HDF5: str = "HDF5FileNode"
-    MAT: str = "MatlabFileNode"
+    MATLAB: str = "MatlabFileNode"
     MICROSCOPE: str = "MicroscopeFileNode"
+
+    # Data Type (Includes above DataType Nodes)
+    DATA: str = "DataNode"
+
+    # Algo Type
     ALGO: str = "AlgorithmNode"


+class NodeTypeUtil:
+    @staticmethod
+    def check_nodetype(node_type: str) -> str:
+        """
+        Check NodeType (DATA or ALGO) from detailed node type
+        """
+        if node_type in [
+            NodeType.IMAGE,
+            NodeType.CSV,
+            NodeType.FLUO,
+            NodeType.BEHAVIOR,
+            NodeType.HDF5,
+            NodeType.MATLAB,
+            NodeType.MICROSCOPE,
+        ]:
+            return NodeType.DATA
+        elif node_type == NodeType.ALGO:
+            return NodeType.ALGO
+        else:
+            return None
+
+    @staticmethod
+    def check_nodetype_from_filetype(file_type: str) -> str:
+        """
+        Check NodeType (DATA or ALGO) from file type
+        """
+        if file_type in [
+            FILETYPE.IMAGE,
+            FILETYPE.CSV,
+            FILETYPE.BEHAVIOR,
+            FILETYPE.HDF5,
+            FILETYPE.MATLAB,
+            FILETYPE.MICROSCOPE,
+        ]:
+            return NodeType.DATA
+        else:
+            return None
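+
+# Usage sketch: callers branch on the coarse node type, e.g.
+#   NodeTypeUtil.check_nodetype(node.type) == NodeType.DATA
+# (see workflow_runner.py and smk_utils.py in this changeset).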
+
+
 @dataclass
 class OutputType:
     IMAGE: str = "images"
diff --git a/studio/app/common/core/workflow/workflow_runner.py b/studio/app/common/core/workflow/workflow_runner.py
index c11116cfd..bde1fc107 100644
--- a/studio/app/common/core/workflow/workflow_runner.py
+++ b/studio/app/common/core/workflow/workflow_runner.py
@@ -10,7 +10,7 @@
 from studio.app.common.core.snakemake.snakemake_reader import SmkParamReader
 from studio.app.common.core.snakemake.snakemake_rule import SmkRule
 from studio.app.common.core.snakemake.snakemake_writer import SmkConfigWriter
-from studio.app.common.core.workflow.workflow import NodeType, RunItem
+from studio.app.common.core.workflow.workflow import NodeType, NodeTypeUtil, RunItem
 from studio.app.common.core.workflow.workflow_params import get_typecheck_params
 from studio.app.common.core.workflow.workflow_reader import WorkflowConfigReader
 from studio.app.common.core.workflow.workflow_writer import WorkflowConfigWriter
@@ -78,76 +78,47 @@ def rulefile(self):
         last_outputs = []

         for node in self.nodeDict.values():
-            if node.type == NodeType.IMAGE:
-                rule_dict[node.id] = SmkRule(
+            if NodeTypeUtil.check_nodetype(node.type) == NodeType.DATA:
+                data_common_rule = SmkRule(
                     workspace_id=self.workspace_id,
                     unique_id=self.unique_id,
                     node=node,
                     edgeDict=self.edgeDict,
                     nwbfile=nwbfile,
-                ).image()
-            elif node.type == NodeType.CSV:
-                rule_dict[node.id] = SmkRule(
-                    workspace_id=self.workspace_id,
-                    unique_id=self.unique_id,
-                    node=node,
-                    edgeDict=self.edgeDict,
-                    nwbfile=nwbfile,
-                ).csv()
-            elif node.type == NodeType.FLUO:
-                rule_dict[node.id] = SmkRule(
-                    workspace_id=self.workspace_id,
-                    unique_id=self.unique_id,
-                    node=node,
-                    edgeDict=self.edgeDict,
-                    nwbfile=nwbfile,
-                ).csv()
-            elif node.type == NodeType.BEHAVIOR:
-                rule_dict[node.id] = SmkRule(
-                    workspace_id=self.workspace_id,
-                    unique_id=self.unique_id,
-                    node=node,
-                    edgeDict=self.edgeDict,
-                    nwbfile=nwbfile,
-                ).csv(nodeType="behavior")
-            elif node.type == NodeType.HDF5:
-                rule_dict[node.id] = SmkRule(
-                    workspace_id=self.workspace_id,
-                    unique_id=self.unique_id,
-                    node=node,
-                    edgeDict=self.edgeDict,
-                    nwbfile=nwbfile,
-                ).hdf5()
-            elif node.type == NodeType.MAT:
-                rule_dict[node.id] = SmkRule(
-                    workspace_id=self.workspace_id,
-                    unique_id=self.unique_id,
-                    node=node,
-                    edgeDict=self.edgeDict,
-                    nwbfile=nwbfile,
-                ).mat()
-            elif node.type == NodeType.MICROSCOPE:
-                rule_dict[node.id] = SmkRule(
-                    workspace_id=self.workspace_id,
-                    unique_id=self.unique_id,
-                    node=node,
-                    edgeDict=self.edgeDict,
-                    nwbfile=nwbfile,
-                ).microscope()
-            elif node.type == NodeType.ALGO:
-                rule = SmkRule(
+                )
+                data_rule = None
+
+                if node.type == NodeType.IMAGE:
+                    data_rule = data_common_rule.image()
+                elif node.type == NodeType.CSV:
+                    data_rule = data_common_rule.csv()
+                elif node.type == NodeType.FLUO:
+                    data_rule = data_common_rule.csv()
+                elif node.type == NodeType.BEHAVIOR:
+                    data_rule = data_common_rule.csv(nodeType="behavior")
+                elif node.type == NodeType.HDF5:
+                    data_rule = data_common_rule.hdf5()
+                elif node.type == NodeType.MATLAB:
+                    data_rule = data_common_rule.mat()
+                elif node.type == NodeType.MICROSCOPE:
+                    data_rule = data_common_rule.microscope()
+
+                rule_dict[node.id] = data_rule
+
+            elif NodeTypeUtil.check_nodetype(node.type) == NodeType.ALGO:
+                algo_rule = SmkRule(
                     workspace_id=self.workspace_id,
                     unique_id=self.unique_id,
                     node=node,
                     edgeDict=self.edgeDict,
                 ).algo(nodeDict=self.nodeDict)
-                rule_dict[node.id] = rule
+                rule_dict[node.id] = algo_rule

                 if node.id in endNodeList:
-                    last_outputs.append(rule.output)
+                    last_outputs.append(algo_rule.output)
             else:
-                assert False, "NodeType doesn't exists"
+                assert False, f"NodeType does not exist: {node.type}"

         return rule_dict, last_outputs
diff --git a/studio/app/common/routers/experiment.py b/studio/app/common/routers/experiment.py
index c8b8a6738..04d4e9518 100644
--- a/studio/app/common/routers/experiment.py
+++ b/studio/app/common/routers/experiment.py
@@ -1,5 +1,4 @@
 import os
-import shutil
 from glob import glob
 from typing import Dict

@@ -8,6 +7,7 @@

 from studio.app.common.core.experiment.experiment import ExptConfig
 from studio.app.common.core.experiment.experiment_reader import ExptConfigReader
+from studio.app.common.core.experiment.experiment_writer import ExptDataWriter
 from studio.app.common.core.utils.filepath_creater import join_filepath
 from studio.app.common.core.workspace.workspace_dependencies import (
     is_workspace_available,
@@ -45,12 +45,10 @@ async def get_experiments(workspace_id: str):
     dependencies=[Depends(is_workspace_owner)],
 )
 async def rename_experiment(workspace_id: str, unique_id: str, item: RenameItem):
-    config = ExptConfigReader.rename(
-        join_filepath(
-            [DIRPATH.OUTPUT_DIR, workspace_id, unique_id, DIRPATH.EXPERIMENT_YML]
-        ),
-        new_name=item.new_name,
-    )
+    config = ExptDataWriter(
+        workspace_id,
+        unique_id,
+    ).rename(item.new_name)
     config.nodeDict = []
     config.edgeDict = []

@@ -64,7 +62,10 @@ async def rename_experiment(workspace_id: str, unique_id: str, item: RenameItem)
 )
 async def delete_experiment(workspace_id: str, unique_id: str):
     try:
-        shutil.rmtree(join_filepath([DIRPATH.OUTPUT_DIR, workspace_id, unique_id]))
+        ExptDataWriter(
+            workspace_id,
+            unique_id,
+        ).delete_data()
         return True
     except Exception:
         return False
@@ -77,10 +78,11 @@ async def delete_experiment(workspace_id: str, unique_id: str):
 )
 async def delete_experiment_list(workspace_id: str, deleteItem: DeleteItem):
     try:
-        [
-            shutil.rmtree(join_filepath([DIRPATH.OUTPUT_DIR, workspace_id, uid]))
-            for uid in deleteItem.uidList
-        ]
+        for unique_id in deleteItem.uidList:
+            ExptDataWriter(
+                workspace_id,
+                unique_id,
+            ).delete_data()
         return True
     except Exception:
         return False
diff --git a/studio/app/common/routers/files.py b/studio/app/common/routers/files.py
index 87cc459c9..6ed572b22 100644
--- a/studio/app/common/routers/files.py
+++ b/studio/app/common/routers/files.py
@@ -27,14 +27,7 @@
     FilePath,
     TreeNode,
 )
-from studio.app.const import (
-    ACCEPT_CSV_EXT,
-    ACCEPT_HDF5_EXT,
-    ACCEPT_MATLAB_EXT,
-    ACCEPT_MICROSCOPE_EXT,
-    ACCEPT_TIFF_EXT,
-    FILETYPE,
-)
+from studio.app.const import ACCEPT_FILE_EXT, FILETYPE
 from studio.app.dir_path import DIRPATH

 router = APIRouter(prefix="/files", tags=["files"])
@@ -61,7 +54,9 @@ def get_tree(
     )

     IMAGE_SHAPE_DICT = (
-        get_image_shape_dict(workspace_id) if file_types == ACCEPT_TIFF_EXT else {}
+        get_image_shape_dict(workspace_id)
+        if file_types == ACCEPT_FILE_EXT.TIFF_EXT.value
+        else {}
     )

     for node_name in sorted_listdir:
@@ -74,7 +69,7 @@
         if os.path.isfile(search_dirpath) and node_name.endswith(tuple(file_types)):
             shape = IMAGE_SHAPE_DICT.get(relative_path, {}).get("shape")
-            if shape is None and file_types == ACCEPT_TIFF_EXT:
+            if shape is None and file_types == ACCEPT_FILE_EXT.TIFF_EXT.value:
                 shape = update_image_shape(workspace_id, relative_path)
             nodes.append(
                 TreeNode(
@@ -152,15 +147,17 @@ def update_image_shape(workspace_id, relative_file_path):
 )
 async def get_files(workspace_id: str, file_type: str = None):
     if file_type == FILETYPE.IMAGE:
-        return DirTreeGetter.get_tree(workspace_id, ACCEPT_TIFF_EXT)
+        return DirTreeGetter.get_tree(workspace_id, ACCEPT_FILE_EXT.TIFF_EXT.value)
     elif file_type == FILETYPE.CSV:
-        return DirTreeGetter.get_tree(workspace_id, ACCEPT_CSV_EXT)
+        return DirTreeGetter.get_tree(workspace_id, ACCEPT_FILE_EXT.CSV_EXT.value)
     elif file_type == FILETYPE.HDF5:
-        return DirTreeGetter.get_tree(workspace_id, ACCEPT_HDF5_EXT)
+        return DirTreeGetter.get_tree(workspace_id, ACCEPT_FILE_EXT.HDF5_EXT.value)
     elif file_type == FILETYPE.MICROSCOPE:
-        return DirTreeGetter.get_tree(workspace_id, ACCEPT_MICROSCOPE_EXT)
+        return DirTreeGetter.get_tree(
+            workspace_id, ACCEPT_FILE_EXT.MICROSCOPE_EXT.value
+        )
     elif file_type == FILETYPE.MATLAB:
-        return DirTreeGetter.get_tree(workspace_id, ACCEPT_MATLAB_EXT)
+        return DirTreeGetter.get_tree(workspace_id, ACCEPT_FILE_EXT.MATLAB_EXT.value)
     else:
         return []

@@ -222,13 +219,7 @@ async def download_file(
     background_tasks: BackgroundTasks,
 ):
     path = PurePath(urlparse(file.url).path)
-    if path.suffix not in {
-        *ACCEPT_CSV_EXT,
-        *ACCEPT_HDF5_EXT,
-        *ACCEPT_TIFF_EXT,
-        *ACCEPT_MATLAB_EXT,
-        *ACCEPT_MICROSCOPE_EXT,
-    }:
+    if path.suffix not in ACCEPT_FILE_EXT.ALL_EXT.value:
         raise HTTPException(status_code=400, detail="Invalid url")
     create_directory(join_filepath([DIRPATH.INPUT_DIR, workspace_id]))
diff --git a/studio/app/common/routers/outputs.py b/studio/app/common/routers/outputs.py
index 850ed4d4c..d82587363 100644
--- a/studio/app/common/routers/outputs.py
+++ b/studio/app/common/routers/outputs.py
@@ -12,7 +12,7 @@
 )
 from studio.app.common.core.utils.json_writer import JsonWriter, save_tiff2json
 from studio.app.common.schemas.outputs import JsonTimeSeriesData, OutputData
-from studio.app.const import ACCEPT_TIFF_EXT
+from studio.app.const import ACCEPT_FILE_EXT
 from studio.app.dir_path import DIRPATH

 router = APIRouter(prefix="/outputs", tags=["outputs"])
@@ -121,7 +121,7 @@ async def get_image(
     end_index: Optional[int] = 10,
 ):
     filename, ext = os.path.splitext(os.path.basename(filepath))
-    if ext in ACCEPT_TIFF_EXT:
+    if ext in ACCEPT_FILE_EXT.TIFF_EXT.value:
         if not filepath.startswith(join_filepath([DIRPATH.OUTPUT_DIR, workspace_id])):
             filepath = join_filepath([DIRPATH.INPUT_DIR, workspace_id, filepath])

diff --git a/studio/app/const.py b/studio/app/const.py
index dab8efa67..90fdb4631 100644
--- a/studio/app/const.py
+++ b/studio/app/const.py
@@ -1,4 +1,5 @@
 from dataclasses import dataclass
+from enum import Enum


 @dataclass
@@ -11,11 +12,15 @@ class FILETYPE:
     MICROSCOPE: str = "microscope"


-ACCEPT_TIFF_EXT = [".tif", ".tiff", ".TIF", ".TIFF"]
-ACCEPT_CSV_EXT = [".csv"]
-ACCEPT_HDF5_EXT = [".hdf5", ".nwb", ".HDF5", ".NWB"]
-ACCEPT_MATLAB_EXT = [".mat"]
-ACCEPT_MICROSCOPE_EXT = [".nd2", ".oir", ".isxd", ".thor.zip"]
+class ACCEPT_FILE_EXT(Enum):
+    TIFF_EXT = [".tif", ".tiff", ".TIF", ".TIFF"]
+    CSV_EXT = [".csv"]
+    HDF5_EXT = [".hdf5", ".nwb", ".HDF5", ".NWB"]
+    MATLAB_EXT = [".mat"]
+    MICROSCOPE_EXT = [".nd2", ".oir", ".isxd", ".thor.zip"]
+
+    ALL_EXT = TIFF_EXT + CSV_EXT + HDF5_EXT + MATLAB_EXT + MICROSCOPE_EXT
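+    # NOTE: Enum members wrap list values, so callers compare against
+    # `.value`, e.g. `path.suffix in ACCEPT_FILE_EXT.ALL_EXT.value`
+    # (see routers/files.py above).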
--color=auto"' >> /root/.bashrc && \ # setup optinist COPY pyproject.toml poetry.lock ./ RUN pip3 install --no-cache-dir --upgrade pip && \ - pip3 install poetry + pip3 install poetry && \ + poetry config virtualenvs.create false RUN poetry install --no-root --with dev EXPOSE 8000 diff --git a/studio/config/docker/Dockerfile.test b/studio/config/docker/Dockerfile.test index c04459d19..2a727ab0f 100644 --- a/studio/config/docker/Dockerfile.test +++ b/studio/config/docker/Dockerfile.test @@ -6,8 +6,8 @@ RUN apt-get --allow-releaseinfo-change update && \ apt-get install --no-install-recommends -y git gcc g++ libgl1 && \ apt-get purge git -y && apt-get autoremove -y && apt-get clean - RUN pip3 install --no-cache-dir --upgrade pip && \ - pip3 install poetry + pip3 install poetry && \ + poetry config virtualenvs.create false COPY pyproject.toml poetry.lock ./ diff --git a/studio/config/logging.multiuser.yaml b/studio/config/logging.multiuser.yaml index eb8025bb8..5341f1b41 100644 --- a/studio/config/logging.multiuser.yaml +++ b/studio/config/logging.multiuser.yaml @@ -3,10 +3,10 @@ disable_existing_loggers: false formatters: default: (): "uvicorn.logging.DefaultFormatter" - fmt: "%(asctime)s %(levelprefix)s %(funcName)s():%(lineno)d - %(message)s" + fmt: "%(asctime)s %(levelprefix)s [%(name)s] %(funcName)s():%(lineno)d - %(message)s" access: (): "uvicorn.logging.AccessFormatter" - fmt: "%(asctime)s %(levelprefix)s %(funcName)s():%(lineno)d - %(message)s" + fmt: "%(asctime)s %(levelprefix)s [%(name)s] %(funcName)s():%(lineno)d - %(message)s" handlers: console: class: logging.StreamHandler @@ -21,6 +21,8 @@ handlers: interval: 1 backupCount: 365 loggers: + optinist: + level: DEBUG snakemake: level: INFO handlers: [rotating_file] diff --git a/studio/config/logging.yaml b/studio/config/logging.yaml index fef5dbc37..0e5113d74 100644 --- a/studio/config/logging.yaml +++ b/studio/config/logging.yaml @@ -3,16 +3,18 @@ disable_existing_loggers: false formatters: default: (): "uvicorn.logging.DefaultFormatter" - fmt: "%(asctime)s %(levelprefix)s %(funcName)s():%(lineno)d - %(message)s" + fmt: "%(asctime)s %(levelprefix)s [%(name)s] %(funcName)s():%(lineno)d - %(message)s" access: (): "uvicorn.logging.AccessFormatter" - fmt: "%(asctime)s %(levelprefix)s %(funcName)s():%(lineno)d - %(message)s" + fmt: "%(asctime)s %(levelprefix)s [%(name)s] %(funcName)s():%(lineno)d - %(message)s" handlers: console: class: logging.StreamHandler level: DEBUG formatter: default loggers: + optinist: + level: DEBUG snakemake: level: INFO handlers: [console]