diff --git a/.env.example b/.env.example index 921f0225..311a47a3 100644 --- a/.env.example +++ b/.env.example @@ -6,6 +6,8 @@ THOTH_EXPORT_API=http://localhost:8181 THOTH_DOMAIN=localhost # Full postgres URL DATABASE_URL=postgres://thoth:thoth@localhost/thoth +# Full redis URL +REDIS_URL=redis://localhost:6379 # Authentication cookie secret key SECRET_KEY=an_up_to_255_bytes_random_key # Logging level diff --git a/.github/workflows/build_test_and_check.yml b/.github/workflows/build_test_and_check.yml index 91c8aff1..4aa9cbf3 100644 --- a/.github/workflows/build_test_and_check.yml +++ b/.github/workflows/build_test_and_check.yml @@ -27,6 +27,7 @@ env: CARGO_TERM_COLOR: always THOTH_GRAPHQL_API: https://api.thoth.pub THOTH_EXPORT_API: https://export.thoth.pub + TEST_REDIS_URL: redis://localhost:6379 jobs: build: @@ -46,6 +47,16 @@ jobs: run: cargo build -vv test: runs-on: ubuntu-latest + services: + redis: + image: redis:alpine + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 steps: - uses: actions/checkout@v4 - uses: actions/cache@v4 diff --git a/.github/workflows/docker_build_and_push_to_dockerhub.yml b/.github/workflows/docker_build_and_push_to_dockerhub.yml index e63dcb78..8faa4cf6 100644 --- a/.github/workflows/docker_build_and_push_to_dockerhub.yml +++ b/.github/workflows/docker_build_and_push_to_dockerhub.yml @@ -4,6 +4,9 @@ on: pull_request: workflow_dispatch: +env: + REGISTRY: ghcr.io + jobs: build_and_push_staging_docker_image: runs-on: ubuntu-latest @@ -16,7 +19,7 @@ jobs: with: # list of Docker images to use as base name for tags images: | - openbookpublishers/thoth + ${{ env.REGISTRY }}/thoth-pub/thoth # generate Docker tags based on the following events/attributes tags: | type=ref,event=pr,prefix=staging-pr- @@ -24,11 +27,12 @@ jobs: uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - - name: Login to DockerHub + - 
name: Login to Container registry uses: docker/login-action@v3 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push id: docker_build uses: docker/build-push-action@v5 @@ -54,7 +58,7 @@ jobs: uses: docker/build-push-action@v5 with: push: false - tags: openbookpublishers/thoth:latest + tags: thoth-pub/thoth:latest file: Dockerfile.dev build-args: | THOTH_GRAPHQL_API=https://api.thoth.pub diff --git a/.github/workflows/docker_build_and_push_to_dockerhub_release.yml b/.github/workflows/docker_build_and_push_to_dockerhub_release.yml index 7ea8f4e9..ad848fd0 100644 --- a/.github/workflows/docker_build_and_push_to_dockerhub_release.yml +++ b/.github/workflows/docker_build_and_push_to_dockerhub_release.yml @@ -4,6 +4,9 @@ on: release: types: [published] +env: + REGISTRY: ghcr.io + jobs: build_and_push_docker_image: runs-on: ubuntu-latest @@ -16,7 +19,7 @@ jobs: with: # list of Docker images to use as base name for tags images: | - openbookpublishers/thoth + ${{ env.REGISTRY }}/thoth-pub/thoth # generate Docker tags based on the following events/attributes tags: | type=semver,pattern={{version}} @@ -26,11 +29,12 @@ jobs: uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - - name: Login to DockerHub + - name: Login to Container registry uses: docker/login-action@v3 with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push id: docker_build uses: docker/build-push-action@v5 diff --git a/CHANGELOG.md b/CHANGELOG.md index 5bda8212..2be5c975 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] ### 
Added -- [648](https://github.com/thoth-pub/thoth/issues/648) - Added new `LocationPlatform`, `THOTH`, for Locations where file is hosted directly by Thoth on S3. + - [651](https://github.com/thoth-pub/thoth/pull/651) - Implement Redis connection pools using `deadpool-redis` + - [651](https://github.com/thoth-pub/thoth/pull/651) - Implement Redis caching in export API + - [651](https://github.com/thoth-pub/thoth/pull/651) - Added `WorkLastUpdatedQuery` and `WorksLastUpdatedQuery` queries to thoth-client + - [651](https://github.com/thoth-pub/thoth/pull/651) - Implement `Copy`, `Ord` and `PartialOrd` traits for `Timestamp` + - [651](https://github.com/thoth-pub/thoth/pull/651) - Implement parsing from and to RFC 3339 strings for `Timestamp` + - [651](https://github.com/thoth-pub/thoth/pull/651) - Implement `Copy` trait for `WorkType`, `WorkStatus`, `PublicationType`, `CountryCode`, `LanguageRelation`, `LanguageCode`, `LocationPlatform`, `LengthUnit`, `WeightUnit`, `CurrencyCode`, and `SeriesType` + - [651](https://github.com/thoth-pub/thoth/pull/651) - Allow supplying `DATABASE_URL` as binary argument + - [648](https://github.com/thoth-pub/thoth/issues/648) - Added new `LocationPlatform`, `THOTH`, for Locations where file is hosted directly by Thoth on S3. 
+ +### Changed + - [651](https://github.com/thoth-pub/thoth/pull/651) - Use GitHub Container Registry instead of DockerHub ### Fixed - [631](https://github.com/thoth-pub/thoth/issues/631) - Fix slow loading of Contributor dropdown in Contribution form diff --git a/Cargo.lock b/Cargo.lock index 5a57063b..1898156e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -407,6 +407,12 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d301b3b94cb4b2f23d7917810addbbaff90738e0ca2be692bd027e70d7e0330c" +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + [[package]] name = "argon2rs" version = "0.2.5" @@ -718,6 +724,20 @@ dependencies = [ "unreachable", ] +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + [[package]] name = "console" version = "0.15.8" @@ -881,6 +901,36 @@ dependencies = [ "syn 2.0.76", ] +[[package]] +name = "deadpool" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed" +dependencies = [ + "deadpool-runtime", + "num_cpus", + "tokio", +] + +[[package]] +name = "deadpool-redis" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfae6799b68a735270e4344ee3e834365f707c72da09c9a8bb89b45cc3351395" +dependencies = [ + "deadpool", + "redis", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" +dependencies = [ + "tokio", 
+] + [[package]] name = "deranged" version = "0.3.11" @@ -1463,7 +1513,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2ebc8013b4426d5b81a4364c419a95ed0b404af2b82e2457de52d9348f0e474" dependencies = [ - "combine", + "combine 3.8.1", "thiserror", ] @@ -2116,6 +2166,16 @@ dependencies = [ "autocfg", ] +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + [[package]] name = "object" version = "0.36.3" @@ -2540,6 +2600,27 @@ dependencies = [ "getrandom", ] +[[package]] +name = "redis" +version = "0.27.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cccf17a692ce51b86564334614d72dcae1def0fd5ecebc9f02956da74352b5" +dependencies = [ + "arc-swap", + "async-trait", + "bytes", + "combine 4.6.7", + "futures-util", + "itoa", + "num-bigint", + "percent-encoding", + "pin-project-lite", + "ryu", + "tokio", + "tokio-util", + "url", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -3201,8 +3282,8 @@ version = "0.12.14" dependencies = [ "actix-web", "argon2rs", - "cargo-husky", "chrono", + "deadpool-redis", "diesel", "diesel-derive-enum", "diesel-derive-newtype", @@ -3221,6 +3302,7 @@ dependencies = [ "serde_json", "strum 0.26.3", "thoth-errors", + "tokio", "uuid", ] @@ -3300,7 +3382,9 @@ name = "thoth-errors" version = "0.12.14" dependencies = [ "actix-web", + "chrono", "csv", + "deadpool-redis", "dialoguer", "diesel", "juniper", @@ -3390,9 +3474,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.39.3" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9babc99b9923bfa4804bd74722ff02c0381021eafa4db9949217e3be8e84fff5" +checksum = 
"22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" dependencies = [ "backtrace", "bytes", @@ -3402,9 +3486,21 @@ dependencies = [ "pin-project-lite", "signal-hook-registry", "socket2", + "tokio-macros", "windows-sys 0.52.0", ] +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.76", +] + [[package]] name = "tokio-native-tls" version = "0.3.1" diff --git a/Cargo.toml b/Cargo.toml index 5917c59d..a360f6c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "thoth" version = "0.12.14" -authors = ["Javier Arias ", "Ross Higman "] +authors = ["Javier Arias ", "Ross Higman "] edition = "2021" license = "Apache-2.0" description = "GraphQL API for bibliographic data" @@ -9,7 +9,6 @@ repository = "https://github.com/thoth-pub/thoth" readme = "README.md" [badges] -travis-ci = { repository = "openbookpublishers/thoth" } maintenance = { status = "actively-developed" } [workspace] diff --git a/LICENSE b/LICENSE index 30291ef4..5194de71 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 Open Book Publishers + Copyright 2020 Thoth Open Metadata Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/Makefile b/Makefile index a5158315..6b0bf3bc 100644 --- a/Makefile +++ b/Makefile @@ -10,6 +10,7 @@ docker-dev-build \ docker-dev-run \ docker-dev-db \ + docker-dev-redis \ build \ test \ clippy \ @@ -44,6 +45,9 @@ docker-dev-run: docker-dev-db: docker compose -f docker-compose.dev.yml up db +docker-dev-redis: + docker compose -f docker-compose.dev.yml up redis + build: cargo build -vv diff --git a/README.md b/README.md index 14cde8da..00834719 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@
- +

Thoth

@@ -127,7 +127,7 @@ The wasm APP needs to know the endpoint the API will be running at compile time, docker build \ --build-arg THOTH_GRAPHQL_API=https://api.thoth.pub \ --build-arg THOTH_EXPORT_API=https://export.thoth.pub \ - . -t openbookpublishers/thoth + . -t thoth-pub/thoth ``` ## Acknowledgements diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 22314dd3..79df39f5 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -11,6 +11,12 @@ services: env_file: - .env + redis: + image: redis:alpine + container_name: "thoth_redis" + ports: + - "6379:6379" + graphql-api: build: context: . diff --git a/docker-compose.yml b/docker-compose.yml index 0cb1ec36..9f0f1891 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,8 +11,13 @@ services: env_file: - .env + redis: + image: redis:alpine + container_name: "thoth_redis" + restart: unless-stopped + graphql-api: - image: openbookpublishers/thoth + image: ghcr.io/thoth-pub/thoth container_name: "thoth_graphql_api" restart: unless-stopped env_file: @@ -21,7 +26,7 @@ services: - db export-api: - image: openbookpublishers/thoth + image: ghcr.io/thoth-pub/thoth container_name: "thoth_export_api" restart: unless-stopped command: ["start", "export-api"] @@ -31,7 +36,7 @@ services: - graphql-api app: - image: openbookpublishers/thoth + image: ghcr.io/thoth-pub/thoth container_name: "thoth_app" restart: unless-stopped command: ["start", "app"] diff --git a/src/bin/thoth.rs b/src/bin/thoth.rs index ac3cf990..c89731b8 100644 --- a/src/bin/thoth.rs +++ b/src/bin/thoth.rs @@ -4,12 +4,32 @@ use dotenv::dotenv; use std::env; use thoth::api::account::model::{AccountData, LinkedPublisher}; use thoth::api::account::service::{all_emails, all_publishers, register, update_password}; -use thoth::api::db::{establish_connection, revert_migrations, run_migrations}; +use thoth::api::db::{init_pool, revert_migrations, run_migrations}; use thoth::api_server; use thoth::app_server; use thoth::export_server; use 
thoth_errors::ThothResult; +fn database_argument() -> Arg { + Arg::new("db") + .short('D') + .long("database-url") + .value_name("DATABASE_URL") + .env("DATABASE_URL") + .help("Full postgres database url, e.g. postgres://thoth:thoth@localhost/thoth") + .num_args(1) +} + +fn redis_argument() -> Arg { + Arg::new("redis") + .short('R') + .long("redis-url") + .value_name("REDIS_URL") + .env("REDIS_URL") + .help("Full redis url, e.g. redis://localhost:6379") + .num_args(1) +} + fn host_argument(env_value: &'static str) -> Arg { Arg::new("host") .short('H') @@ -132,6 +152,7 @@ fn thoth_commands() -> Command { .subcommand( Command::new("migrate") .about("Run the database migrations") + .arg(database_argument()) .arg( Arg::new("revert") .long("revert") @@ -147,6 +168,7 @@ fn thoth_commands() -> Command { .subcommand( Command::new("graphql-api") .about("Start the thoth GraphQL API server") + .arg(database_argument()) .arg(host_argument("GRAPHQL_API_HOST")) .arg(port_argument("8000", "GRAPHQL_API_PORT")) .arg(threads_argument("GRAPHQL_API_THREADS")) @@ -167,6 +189,7 @@ fn thoth_commands() -> Command { .subcommand( Command::new("export-api") .about("Start the thoth metadata export API") + .arg(redis_argument()) .arg(host_argument("EXPORT_API_HOST")) .arg(port_argument("8181", "EXPORT_API_PORT")) .arg(threads_argument("EXPORT_API_THREADS")) @@ -178,6 +201,7 @@ fn thoth_commands() -> Command { .subcommand( Command::new("init") .about("Run the database migrations and start the thoth API server") + .arg(database_argument()) .arg(host_argument("GRAPHQL_API_HOST")) .arg(port_argument("8000", "GRAPHQL_API_PORT")) .arg(threads_argument("GRAPHQL_API_THREADS")) @@ -190,6 +214,7 @@ fn thoth_commands() -> Command { .subcommand( Command::new("account") .about("Manage user accounts") + .arg(database_argument()) .subcommand_required(true) .arg_required_else_help(true) .subcommand(Command::new("register").about("Create a new user account")) @@ -204,6 +229,7 @@ fn main() -> ThothResult<()> { 
match thoth_commands().get_matches().subcommand() { Some(("start", start_matches)) => match start_matches.subcommand() { Some(("graphql-api", api_matches)) => { + let database_url = api_matches.get_one::("db").unwrap().to_owned(); let host = api_matches.get_one::("host").unwrap().to_owned(); let port = api_matches.get_one::("port").unwrap().to_owned(); let threads = *api_matches.get_one::("threads").unwrap(); @@ -213,6 +239,7 @@ fn main() -> ThothResult<()> { let secret_str = api_matches.get_one::("key").unwrap().to_owned(); let session_duration = *api_matches.get_one::("duration").unwrap(); api_server( + database_url, host, port, threads, @@ -232,6 +259,10 @@ fn main() -> ThothResult<()> { app_server(host, port, threads, keep_alive).map_err(|e| e.into()) } Some(("export-api", client_matches)) => { + let redis_url = client_matches + .get_one::("redis") + .unwrap() + .to_owned(); let host = client_matches.get_one::("host").unwrap().to_owned(); let port = client_matches.get_one::("port").unwrap().to_owned(); let threads = *client_matches.get_one::("threads").unwrap(); @@ -244,16 +275,28 @@ fn main() -> ThothResult<()> { .get_one::("gql-endpoint") .unwrap() .to_owned(); - export_server(host, port, threads, keep_alive, url, gql_endpoint) - .map_err(|e| e.into()) + export_server( + redis_url, + host, + port, + threads, + keep_alive, + url, + gql_endpoint, + ) + .map_err(|e| e.into()) } _ => unreachable!(), }, - Some(("migrate", migrate_matches)) => match migrate_matches.get_flag("revert") { - true => revert_migrations(), - false => run_migrations(), - }, + Some(("migrate", migrate_matches)) => { + let database_url = migrate_matches.get_one::("db").unwrap(); + match migrate_matches.get_flag("revert") { + true => revert_migrations(database_url), + false => run_migrations(database_url), + } + } Some(("init", init_matches)) => { + let database_url = init_matches.get_one::("db").unwrap().to_owned(); let host = init_matches.get_one::("host").unwrap().to_owned(); let port = 
init_matches.get_one::("port").unwrap().to_owned(); let threads = *init_matches.get_one::("threads").unwrap(); @@ -265,8 +308,9 @@ fn main() -> ThothResult<()> { let domain = init_matches.get_one::("domain").unwrap().to_owned(); let secret_str = init_matches.get_one::("key").unwrap().to_owned(); let session_duration = *init_matches.get_one::("duration").unwrap(); - run_migrations()?; + run_migrations(&database_url)?; api_server( + database_url, host, port, threads, @@ -278,82 +322,86 @@ fn main() -> ThothResult<()> { ) .map_err(|e| e.into()) } - Some(("account", account_matches)) => match account_matches.subcommand() { - Some(("register", _)) => { - let pool = establish_connection(); + Some(("account", account_matches)) => { + let database_url = account_matches.get_one::("db").unwrap(); + match account_matches.subcommand() { + Some(("register", _)) => { + let pool = init_pool(database_url); - let name = Input::new() - .with_prompt("Enter given name") - .interact_on(&Term::stdout())?; - let surname = Input::new() - .with_prompt("Enter family name") - .interact_on(&Term::stdout())?; - let email = Input::new() - .with_prompt("Enter email address") - .interact_on(&Term::stdout())?; - let password = Password::new() - .with_prompt("Enter password") - .with_confirmation("Confirm password", "Passwords do not match") - .interact_on(&Term::stdout())?; - let is_superuser: bool = Input::new() - .with_prompt("Is this a superuser account") - .default(false) - .interact_on(&Term::stdout())?; - let is_bot: bool = Input::new() - .with_prompt("Is this a bot account") - .default(false) - .interact_on(&Term::stdout())?; - - let mut linked_publishers = vec![]; - if let Ok(publishers) = all_publishers(&pool) { - let chosen: Vec = MultiSelect::new() - .items(&publishers) - .with_prompt("Select publishers to link this account to") + let name = Input::new() + .with_prompt("Enter given name") + .interact_on(&Term::stdout())?; + let surname = Input::new() + .with_prompt("Enter family name") 
+ .interact_on(&Term::stdout())?; + let email = Input::new() + .with_prompt("Enter email address") + .interact_on(&Term::stdout())?; + let password = Password::new() + .with_prompt("Enter password") + .with_confirmation("Confirm password", "Passwords do not match") + .interact_on(&Term::stdout())?; + let is_superuser: bool = Input::new() + .with_prompt("Is this a superuser account") + .default(false) + .interact_on(&Term::stdout())?; + let is_bot: bool = Input::new() + .with_prompt("Is this a bot account") + .default(false) .interact_on(&Term::stdout())?; - for index in chosen { - let publisher = publishers.get(index).unwrap(); - let is_admin: bool = Input::new() - .with_prompt(format!( - "Make user an admin of '{}'?", - publisher.publisher_name - )) - .default(false) + + let mut linked_publishers = vec![]; + if let Ok(publishers) = all_publishers(&pool) { + let chosen: Vec = MultiSelect::new() + .items(&publishers) + .with_prompt("Select publishers to link this account to") .interact_on(&Term::stdout())?; - let linked_publisher = LinkedPublisher { - publisher_id: publisher.publisher_id, - is_admin, - }; - linked_publishers.push(linked_publisher); + for index in chosen { + let publisher = publishers.get(index).unwrap(); + let is_admin: bool = Input::new() + .with_prompt(format!( + "Make user an admin of '{}'?", + publisher.publisher_name + )) + .default(false) + .interact_on(&Term::stdout())?; + let linked_publisher = LinkedPublisher { + publisher_id: publisher.publisher_id, + is_admin, + }; + linked_publishers.push(linked_publisher); + } } + let account_data = AccountData { + name, + surname, + email, + password, + is_superuser, + is_bot, + }; + register(account_data, linked_publishers, &pool).map(|_| ()) } - let account_data = AccountData { - name, - surname, - email, - password, - is_superuser, - is_bot, - }; - register(account_data, linked_publishers, &pool).map(|_| ()) - } - Some(("password", _)) => { - let pool = establish_connection(); - let all_emails = 
all_emails(&pool).expect("No user accounts present in database."); - let email_selection = Select::with_theme(&ColorfulTheme::default()) - .items(&all_emails) - .default(0) - .with_prompt("Select a user account") - .interact_on(&Term::stdout())?; - let password = Password::new() - .with_prompt("Enter new password") - .with_confirmation("Confirm password", "Passwords do not match") - .interact_on(&Term::stdout())?; - let email = all_emails.get(email_selection).unwrap(); + Some(("password", _)) => { + let pool = init_pool(database_url); + let all_emails = + all_emails(&pool).expect("No user accounts present in database."); + let email_selection = Select::with_theme(&ColorfulTheme::default()) + .items(&all_emails) + .default(0) + .with_prompt("Select a user account") + .interact_on(&Term::stdout())?; + let password = Password::new() + .with_prompt("Enter new password") + .with_confirmation("Confirm password", "Passwords do not match") + .interact_on(&Term::stdout())?; + let email = all_emails.get(email_selection).unwrap(); - update_password(email, &password, &pool).map(|_| ()) + update_password(email, &password, &pool).map(|_| ()) + } + _ => unreachable!(), } - _ => unreachable!(), - }, + } _ => unreachable!(), } } diff --git a/thoth-api-server/Cargo.toml b/thoth-api-server/Cargo.toml index 06fb4fcb..63e51fa2 100644 --- a/thoth-api-server/Cargo.toml +++ b/thoth-api-server/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "thoth-api-server" version = "0.12.14" -authors = ["Javier Arias ", "Ross Higman "] +authors = ["Javier Arias ", "Ross Higman "] edition = "2021" license = "Apache-2.0" description = "Actix instance serving Thoth's GraphQL endpoints" diff --git a/thoth-api-server/LICENSE b/thoth-api-server/LICENSE index 30291ef4..5194de71 100644 --- a/thoth-api-server/LICENSE +++ b/thoth-api-server/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. 
- Copyright 2020 Open Book Publishers + Copyright 2020 Thoth Open Metadata Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/thoth-api-server/README.md b/thoth-api-server/README.md index e05e98d4..417fd432 100644 --- a/thoth-api-server/README.md +++ b/thoth-api-server/README.md @@ -1,5 +1,5 @@
- +

Thoth Client

diff --git a/thoth-api-server/src/lib.rs b/thoth-api-server/src/lib.rs index a6c2d174..37bade6c 100644 --- a/thoth-api-server/src/lib.rs +++ b/thoth-api-server/src/lib.rs @@ -9,8 +9,8 @@ use actix_session::config::PersistentSession; use actix_session::{storage::CookieSessionStore, SessionMiddleware}; use actix_web::{ cookie::time::Duration as CookieDuration, cookie::Key, error, get, http::header, - middleware::Logger, post, web::Data, web::Json, web::ServiceConfig, App, Error, HttpMessage, - HttpRequest, HttpResponse, HttpServer, Result, + middleware::Logger, post, web::Data, web::Json, App, Error, HttpMessage, HttpRequest, + HttpResponse, HttpServer, Result, }; use juniper::http::GraphQLRequest; use serde::Serialize; @@ -21,7 +21,7 @@ use thoth_api::{ account::service::get_account, account::service::get_account_details, account::service::login, - db::establish_connection, + db::init_pool, db::PgPool, graphql::model::Context, graphql::model::{create_schema, Schema}, @@ -184,25 +184,10 @@ async fn account_details( .map_err(error::ErrorUnauthorized) } -fn config(cfg: &mut ServiceConfig) { - let pool = establish_connection(); - let schema = Arc::new(create_schema()); - - cfg.app_data(Data::new(schema.clone())); - cfg.app_data(Data::new(pool)); - cfg.service(index); - cfg.service(graphql_index); - cfg.service(graphql); - cfg.service(graphiql_interface); - cfg.service(login_credentials); - cfg.service(login_session); - cfg.service(account_details); - cfg.service(graphql_schema); -} - #[allow(clippy::too_many_arguments)] #[actix_web::main] pub async fn start_server( + database_url: String, host: String, port: String, threads: usize, @@ -242,7 +227,16 @@ pub async fn start_server( .supports_credentials(), ) .app_data(Data::new(ApiConfig::new(public_url.clone()))) - .configure(config) + .app_data(Data::new(init_pool(&database_url))) + .app_data(Data::new(Arc::new(create_schema()))) + .service(index) + .service(graphql_index) + .service(graphql) + 
.service(graphiql_interface) + .service(login_credentials) + .service(login_session) + .service(account_details) + .service(graphql_schema) }) .workers(threads) .keep_alive(Duration::from_secs(keep_alive)) diff --git a/thoth-api/Cargo.toml b/thoth-api/Cargo.toml index d3703fbf..1c2bf051 100644 --- a/thoth-api/Cargo.toml +++ b/thoth-api/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "thoth-api" version = "0.12.14" -authors = ["Javier Arias ", "Ross Higman "] +authors = ["Javier Arias ", "Ross Higman "] edition = "2021" license = "Apache-2.0" description = "GraphQL API for bibliographic data" @@ -9,11 +9,10 @@ repository = "https://github.com/thoth-pub/thoth" readme = "README.md" [badges] -travis-ci = { repository = "openbookpublishers/thoth" } maintenance = { status = "actively-developed" } [features] -backend = ["diesel", "diesel-derive-enum", "diesel_migrations", "futures", "actix-web", "jsonwebtoken"] +backend = ["diesel", "diesel-derive-enum", "diesel_migrations", "futures", "actix-web", "jsonwebtoken", "deadpool-redis"] [dependencies] thoth-errors = { version = "=0.12.14", path = "../thoth-errors" } @@ -21,6 +20,7 @@ actix-web = { version = "4.8", optional = true } argon2rs = "0.2.5" isbn2 = "0.4.0" chrono = { version = "0.4.31", features = ["serde"] } +deadpool-redis = { version = "0.18.0", optional = true } diesel = { version = "2.2.3", features = ["postgres", "uuid", "chrono", "r2d2", "64-column-tables", "serde_json"], optional = true } diesel-derive-enum = { version = "2.1.0", features = ["postgres"], optional = true } diesel-derive-newtype = "2.1.2" @@ -40,4 +40,4 @@ strum = { version = "0.26.3", features = ["derive"] } uuid = { version = "1.10.0", features = ["serde", "v4"] } [dev-dependencies] -cargo-husky = { version = "1.5.0", default-features = false, features = ["prepush-hook", "run-cargo-check", "run-cargo-test", "run-cargo-clippy", "run-cargo-fmt"] } +tokio = { version = "1.41", features = ["macros"] } diff --git a/thoth-api/LICENSE 
b/thoth-api/LICENSE index 30291ef4..5194de71 100644 --- a/thoth-api/LICENSE +++ b/thoth-api/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 Open Book Publishers + Copyright 2020 Thoth Open Metadata Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/thoth-api/README.md b/thoth-api/README.md index 9c376ed7..d570c955 100644 --- a/thoth-api/README.md +++ b/thoth-api/README.md @@ -1,5 +1,5 @@
- +

Thoth API

diff --git a/thoth-api/src/account/service.rs b/thoth-api/src/account/service.rs index e8643456..71d31a49 100644 --- a/thoth-api/src/account/service.rs +++ b/thoth-api/src/account/service.rs @@ -120,13 +120,10 @@ pub fn update_password(email: &str, password: &str, pool: &PgPool) -> ThothResul let account_obj = dsl::account .filter(dsl::email.eq(email)) .first::(&mut connection) - .map_err(ThothError::from)?; + .map_err(Into::::into)?; - match diesel::update(dsl::account.find(&account_obj.account_id)) + diesel::update(dsl::account.find(&account_obj.account_id)) .set(&new_password) .get_result(&mut connection) - { - Ok(c) => Ok(c), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } diff --git a/thoth-api/src/db.rs b/thoth-api/src/db.rs index cc981931..11f689dc 100644 --- a/thoth-api/src/db.rs +++ b/thoth-api/src/db.rs @@ -1,49 +1,33 @@ -use std::env; - use diesel::pg::PgConnection; use diesel::r2d2::{ConnectionManager, Pool}; +use diesel::Connection; use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness}; -use dotenv::dotenv; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; pub type PgPool = Pool>; pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!(); -fn init_pool(database_url: &str) -> PgPool { +pub fn init_pool(database_url: &str) -> PgPool { let manager = ConnectionManager::::new(database_url); Pool::builder() .build(manager) .expect("Failed to create database pool.") } -fn get_database_url() -> String { - dotenv().ok(); - if cfg!(test) { - env::var("TEST_DATABASE_URL").expect("TEST_DATABASE_URL must be set") - } else { - env::var("DATABASE_URL").expect("DATABASE_URL must be set") - } -} - -pub fn establish_connection() -> PgPool { - let database_url = get_database_url(); - init_pool(&database_url) -} - -pub fn run_migrations() -> ThothResult<()> { - let mut connection = establish_connection().get().unwrap(); - match connection.run_pending_migrations(MIGRATIONS) { - Ok(_) => Ok(()), - 
Err(error) => Err(ThothError::DatabaseError(error.to_string())), - } +pub fn run_migrations(database_url: &str) -> ThothResult<()> { + let mut connection = PgConnection::establish(database_url)?; + connection + .run_pending_migrations(MIGRATIONS) + .map(|_| ()) + .map_err(Into::into) } -pub fn revert_migrations() -> ThothResult<()> { - let mut connection = establish_connection().get().unwrap(); - match connection.revert_all_migrations(MIGRATIONS) { - Ok(_) => Ok(()), - Err(error) => Err(ThothError::DatabaseError(error.to_string())), - } +pub fn revert_migrations(database_url: &str) -> ThothResult<()> { + let mut connection = PgConnection::establish(database_url)?; + connection + .revert_all_migrations(MIGRATIONS) + .map(|_| ()) + .map_err(Into::into) } diff --git a/thoth-api/src/graphql/model.rs b/thoth-api/src/graphql/model.rs index 830cb905..cec8b994 100644 --- a/thoth-api/src/graphql/model.rs +++ b/thoth-api/src/graphql/model.rs @@ -1752,7 +1752,7 @@ impl MutationRoot { let mut data: PatchWork = child.clone().into(); data.publication_date = w.publication_date; data.withdrawn_date = w.withdrawn_date; - data.work_status = w.work_status.clone(); + data.work_status = w.work_status; child.update(&context.db, &data, &account_id)?; } } @@ -2560,12 +2560,12 @@ impl Work { #[graphql(description = "Date and time at which the work record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the work record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Page number on which the work begins (only applicable to chapters)")] @@ -2589,7 +2589,7 @@ impl Work { description = "Date and time at which the work record or any of its linked records was last updated" )] pub fn updated_at_with_relations(&self) -> Timestamp { - self.updated_at_with_relations.clone() + self.updated_at_with_relations } 
#[graphql(description = "Get this work's imprint")] @@ -2900,12 +2900,12 @@ impl Publication { #[graphql(description = "Date and time at which the publication record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the publication record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql( @@ -3081,12 +3081,12 @@ impl Publisher { #[graphql(description = "Date and time at which the publisher record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the publisher record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get imprints linked to this publisher")] @@ -3160,12 +3160,12 @@ impl Imprint { #[graphql(description = "Date and time at which the imprint record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the imprint record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the publisher to which this imprint belongs")] @@ -3267,12 +3267,12 @@ impl Contributor { #[graphql(description = "Date and time at which the contributor record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the contributor record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get contributions linked to this contributor")] @@ -3345,12 +3345,12 @@ impl Contribution { #[graphql(description = "Date and time at which the contribution record was created")] pub fn created_at(&self) -> Timestamp { 
- self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the contribution record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql( @@ -3473,12 +3473,12 @@ impl Series { #[graphql(description = "Date and time at which the series record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the series record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the imprint linked to this series")] @@ -3541,12 +3541,12 @@ impl Issue { #[graphql(description = "Date and time at which the issue record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the issue record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the series to which the issue belongs")] @@ -3591,12 +3591,12 @@ impl Language { #[graphql(description = "Date and time at which the language record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the language record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the work which has this language")] @@ -3641,12 +3641,12 @@ impl Location { #[graphql(description = "Date and time at which the location record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the location record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the publication 
linked to this location")] @@ -3681,12 +3681,12 @@ impl Price { #[graphql(description = "Date and time at which the price record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the price record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the publication linked to this price")] @@ -3726,12 +3726,12 @@ impl Subject { #[graphql(description = "Date and time at which the subject record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the subject record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the work to which the subject is linked")] @@ -3775,12 +3775,12 @@ impl Institution { #[graphql(description = "Date and time at which the institution record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the institution record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get fundings linked to this institution")] @@ -3884,12 +3884,12 @@ impl Funding { #[graphql(description = "Date and time at which the funding record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the funding record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the funded work")] @@ -3936,12 +3936,12 @@ impl Affiliation { #[graphql(description = "Date and time at which the affiliation record was created")] pub fn created_at(&self) -> Timestamp { - 
self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the affiliation record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the institution linked to this affiliation")] @@ -3986,12 +3986,12 @@ impl WorkRelation { #[graphql(description = "Date and time at which the work relation record was created")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Date and time at which the work relation record was last updated")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "Get the other work in the relationship")] @@ -4136,12 +4136,12 @@ impl Reference { #[graphql(description = "Timestamp of the creation of this record within Thoth.")] pub fn created_at(&self) -> Timestamp { - self.created_at.clone() + self.created_at } #[graphql(description = "Timestamp of the last update to this record within Thoth.")] pub fn updated_at(&self) -> Timestamp { - self.updated_at.clone() + self.updated_at } #[graphql(description = "The citing work.")] diff --git a/thoth-api/src/lib.rs b/thoth-api/src/lib.rs index 8b40d443..8495057b 100644 --- a/thoth-api/src/lib.rs +++ b/thoth-api/src/lib.rs @@ -21,6 +21,8 @@ pub mod graphql; #[macro_use] pub mod model; #[cfg(feature = "backend")] +pub mod redis; +#[cfg(feature = "backend")] mod schema; macro_rules! 
apis { diff --git a/thoth-api/src/model/affiliation/crud.rs b/thoth-api/src/model/affiliation/crud.rs index 82e60f07..3aee12fb 100644 --- a/thoth-api/src/model/affiliation/crud.rs +++ b/thoth-api/src/model/affiliation/crud.rs @@ -7,7 +7,7 @@ use crate::model::{Crud, DbInsert, HistoryEntry}; use crate::schema::{affiliation, affiliation_history}; use crate::{crud_methods, db_insert}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl}; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Affiliation { @@ -84,14 +84,11 @@ impl Crud for Affiliation { if let Some(pid) = parent_id_2 { query = query.filter(contribution_id.eq(pid)); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -109,10 +106,11 @@ impl Crud for Affiliation { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should institution until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match affiliation.count().get_result::<i64>(&mut connection) { - Ok(t) => Ok(t.to_string().parse::<i32>().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + affiliation + .count() + .get_result::<i64>(&mut connection) + .map(|t| t.to_string().parse::<i32>().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult<Uuid> { diff --git a/thoth-api/src/model/contribution/crud.rs b/thoth-api/src/model/contribution/crud.rs index 3de72b9f..4f40e7e8 100644 --- a/thoth-api/src/model/contribution/crud.rs +++ b/thoth-api/src/model/contribution/crud.rs @@ -8,7 +8,7 @@ use crate::model::{Crud, DbInsert, HistoryEntry}; use crate::schema::{contribution, contribution_history}; use crate::{crud_methods, db_insert}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl}; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Contribution { @@ -105,14 +105,11 @@ if !contribution_types.is_empty() { query = query.filter(contribution_type.eq_any(contribution_types)); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::<Contribution>(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -134,10 +131,11 @@ // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/contributor/crud.rs b/thoth-api/src/model/contributor/crud.rs index 9080389e..d3c96037 100644 --- a/thoth-api/src/model/contributor/crud.rs +++ b/thoth-api/src/model/contributor/crud.rs @@ -83,14 +83,11 @@ impl Crud for Contributor { .or(orcid.ilike(format!("%{filter}%"))), ); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -117,10 +114,11 @@ impl Crud for Contributor { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, _db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/funding/crud.rs b/thoth-api/src/model/funding/crud.rs index e61e5d44..0b14cfc9 100644 --- a/thoth-api/src/model/funding/crud.rs +++ b/thoth-api/src/model/funding/crud.rs @@ -5,7 +5,7 @@ use crate::model::{Crud, DbInsert, HistoryEntry}; use crate::schema::{funding, funding_history}; use crate::{crud_methods, db_insert}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl}; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Funding { @@ -91,14 +91,11 @@ impl Crud for Funding { if let Some(pid) = parent_id_2 { query = query.filter(institution_id.eq(pid)); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -116,10 +113,11 @@ impl Crud for Funding { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match funding.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + funding + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/imprint/crud.rs b/thoth-api/src/model/imprint/crud.rs index 65f788ff..49816b10 100644 --- a/thoth-api/src/model/imprint/crud.rs +++ b/thoth-api/src/model/imprint/crud.rs @@ -9,7 +9,7 @@ use crate::{crud_methods, db_insert}; use diesel::{ BoolExpressionMethods, ExpressionMethods, PgTextExpressionMethods, QueryDsl, RunQueryDsl, }; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Imprint { @@ -80,14 +80,11 @@ impl Crud for Imprint { .or(imprint_url.ilike(format!("%{filter}%"))), ); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -116,10 +113,11 @@ impl Crud for Imprint { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, _db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/institution/crud.rs b/thoth-api/src/model/institution/crud.rs index d5ddbe49..1b0a6a06 100644 --- a/thoth-api/src/model/institution/crud.rs +++ b/thoth-api/src/model/institution/crud.rs @@ -79,14 +79,11 @@ impl Crud for Institution { .or(institution_doi.ilike(format!("%{filter}%"))), ); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -113,10 +110,11 @@ impl Crud for Institution { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, _db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/institution/mod.rs b/thoth-api/src/model/institution/mod.rs index c4c78c96..ad47910a 100644 --- a/thoth-api/src/model/institution/mod.rs +++ b/thoth-api/src/model/institution/mod.rs @@ -83,7 +83,7 @@ pub struct PatchInstitution { graphql(description = "Three-letter ISO 3166-1 code representing a country"), ExistingTypePath = "crate::schema::sql_types::CountryCode" )] -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum CountryCode { #[cfg_attr(feature = "backend", graphql(description = "Afghanistan"))] diff --git a/thoth-api/src/model/issue/crud.rs b/thoth-api/src/model/issue/crud.rs index 2fee3320..e502c1b7 100644 --- a/thoth-api/src/model/issue/crud.rs +++ b/thoth-api/src/model/issue/crud.rs @@ -75,14 +75,11 @@ impl Crud for Issue { if let Some(pid) = parent_id_2 { query = query.filter(series_id.eq(pid)); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -100,10 +97,11 @@ impl Crud for Issue { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match issue.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + issue + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/language/crud.rs b/thoth-api/src/model/language/crud.rs index fe0d27d5..66f7a7ed 100644 --- a/thoth-api/src/model/language/crud.rs +++ b/thoth-api/src/model/language/crud.rs @@ -8,7 +8,7 @@ use crate::model::{Crud, DbInsert, HistoryEntry}; use crate::schema::{language, language_history}; use crate::{crud_methods, db_insert}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl}; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Language { @@ -85,14 +85,11 @@ impl Crud for Language { if !language_relations.is_empty() { query = query.filter(dsl::language_relation.eq_any(language_relations)); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -116,10 +113,11 @@ impl Crud for Language { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/language/mod.rs b/thoth-api/src/model/language/mod.rs index cea64f7a..f81259da 100644 --- a/thoth-api/src/model/language/mod.rs +++ b/thoth-api/src/model/language/mod.rs @@ -17,7 +17,9 @@ use crate::schema::language_history; ), ExistingTypePath = "crate::schema::sql_types::LanguageRelation" )] -#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[strum(serialize_all = "title_case")] pub enum LanguageRelation { @@ -102,7 +104,9 @@ pub struct PatchLanguage { graphql(description = "Three-letter ISO 639 code representing a language"), ExistingTypePath = "crate::schema::sql_types::LanguageCode" )] -#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[strum(serialize_all = "UPPERCASE")] pub enum LanguageCode { diff --git a/thoth-api/src/model/location/crud.rs b/thoth-api/src/model/location/crud.rs index 878e3530..39739c9b 100644 --- a/thoth-api/src/model/location/crud.rs +++ b/thoth-api/src/model/location/crud.rs @@ -88,14 +88,11 @@ impl Crud for Location { if !location_platforms.is_empty() { query = query.filter(location_platform.eq_any(location_platforms)); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + 
.map_err(Into::into) } fn count( @@ -116,10 +113,11 @@ impl Crud for Location { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! - match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/location/mod.rs b/thoth-api/src/model/location/mod.rs index 8e7c8784..c96a26f3 100644 --- a/thoth-api/src/model/location/mod.rs +++ b/thoth-api/src/model/location/mod.rs @@ -16,7 +16,9 @@ use crate::schema::location_history; graphql(description = "Platform where a publication is hosted or can be acquired"), ExistingTypePath = "crate::schema::sql_types::LocationPlatform" )] -#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum LocationPlatform { #[cfg_attr( diff --git a/thoth-api/src/model/mod.rs b/thoth-api/src/model/mod.rs index 42fb26c4..b8a08747 100644 --- a/thoth-api/src/model/mod.rs +++ b/thoth-api/src/model/mod.rs @@ -18,7 +18,9 @@ pub const ROR_DOMAIN: &str = "https://ror.org/"; derive(juniper::GraphQLEnum), graphql(description = "Unit of measurement for physical Work dimensions (mm, cm or in)") )] -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, Serialize, Deserialize, PartialEq, Eq, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] 
#[strum(serialize_all = "lowercase")] pub enum LengthUnit { @@ -36,7 +38,9 @@ derive(juniper::GraphQLEnum), graphql(description = "Unit of measurement for physical Work weight (grams or ounces)") )] -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, Serialize, Deserialize, PartialEq, Eq, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[strum(serialize_all = "lowercase")] pub enum WeightUnit { @@ -99,9 +103,20 @@ pub struct Ror(String); description = "RFC 3339 combined date and time in UTC time zone (e.g. \"1999-12-31T23:59:00Z\")" ) )] -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd)] pub struct Timestamp(DateTime<Utc>); +impl Timestamp { + pub fn to_rfc3339(&self) -> String { + self.0.to_rfc3339() + } + + pub fn parse_from_rfc3339(input: &str) -> ThothResult<Timestamp> { + let timestamp = DateTime::parse_from_rfc3339(input)?.with_timezone(&Utc); + Ok(Timestamp(timestamp)) + } +} + impl Default for Timestamp { fn default() -> Timestamp { Timestamp(TimeZone::timestamp_opt(&Utc, 0, 0).unwrap()) @@ -426,16 +441,16 @@ macro_rules! crud_methods { let mut connection = db.get()?; connection.transaction(|connection| { - match diesel::update($entity_dsl.find(&self.pk())) + diesel::update($entity_dsl.find(&self.pk())) .set(data) .get_result(connection) - { - Ok(c) => match self.new_history_entry(&account_id).insert(connection) { - Ok(_) => Ok(c), - Err(e) => Err(e), - }, - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) + .and_then(|c| { + self.new_history_entry(&account_id) + .insert(connection) + .map(|_| c) + }) + .map_err(Into::into) }) } @@ -443,10 +458,10 @@ macro_rules! 
crud_methods { use diesel::{QueryDsl, RunQueryDsl}; let mut connection = db.get()?; - match diesel::delete($entity_dsl.find(&self.pk())).execute(&mut connection) { - Ok(_) => Ok(self), - Err(e) => Err(ThothError::from(e)), - } + diesel::delete($entity_dsl.find(&self.pk())) + .execute(&mut connection) + .map(|_| self) + .map_err(Into::into) } }; } @@ -569,453 +584,500 @@ impl IdentifierWithDomain for Doi {} impl IdentifierWithDomain for Orcid {} impl IdentifierWithDomain for Ror {} -#[test] -fn test_doi_default() { - let doi: Doi = Default::default(); - assert_eq!(doi, Doi("".to_string())); -} +#[cfg(test)] +mod tests { + use super::*; -#[test] -fn test_isbn_default() { - let isbn: Isbn = Default::default(); - assert_eq!(isbn, Isbn("".to_string())); -} + #[test] + fn test_doi_default() { + let doi: Doi = Default::default(); + assert_eq!(doi, Doi("".to_string())); + } -#[test] -fn test_orcid_default() { - let orcid: Orcid = Default::default(); - assert_eq!(orcid, Orcid("".to_string())); -} + #[test] + fn test_isbn_default() { + let isbn: Isbn = Default::default(); + assert_eq!(isbn, Isbn("".to_string())); + } -#[test] -fn test_ror_default() { - let ror: Ror = Default::default(); - assert_eq!(ror, Ror("".to_string())); -} + #[test] + fn test_orcid_default() { + let orcid: Orcid = Default::default(); + assert_eq!(orcid, Orcid("".to_string())); + } -#[test] -fn test_timestamp_default() { - let stamp: Timestamp = Default::default(); - assert_eq!( - stamp, - Timestamp(TimeZone::timestamp_opt(&Utc, 0, 0).unwrap()) - ); -} + #[test] + fn test_ror_default() { + let ror: Ror = Default::default(); + assert_eq!(ror, Ror("".to_string())); + } -#[test] -fn test_doi_display() { - let doi = Doi("https://doi.org/10.12345/Test-Suffix.01".to_string()); - assert_eq!(format!("{doi}"), "10.12345/Test-Suffix.01"); -} + #[test] + fn test_timestamp_default() { + let stamp: Timestamp = Default::default(); + assert_eq!( + stamp, + Timestamp(TimeZone::timestamp_opt(&Utc, 0, 0).unwrap()) + ); + 
} -#[test] -fn test_isbn_display() { - let isbn = Isbn("978-3-16-148410-0".to_string()); - assert_eq!(format!("{isbn}"), "978-3-16-148410-0"); -} + #[test] + fn test_doi_display() { + let doi = Doi("https://doi.org/10.12345/Test-Suffix.01".to_string()); + assert_eq!(format!("{doi}"), "10.12345/Test-Suffix.01"); + } -#[test] -fn test_orcid_display() { - let orcid = Orcid("https://orcid.org/0000-0002-1234-5678".to_string()); - assert_eq!(format!("{orcid}"), "0000-0002-1234-5678"); -} + #[test] + fn test_isbn_display() { + let isbn = Isbn("978-3-16-148410-0".to_string()); + assert_eq!(format!("{isbn}"), "978-3-16-148410-0"); + } -#[test] -fn test_ror_display() { - let ror = Ror("https://ror.org/0abcdef12".to_string()); - assert_eq!(format!("{ror}"), "0abcdef12"); -} + #[test] + fn test_orcid_display() { + let orcid = Orcid("https://orcid.org/0000-0002-1234-5678".to_string()); + assert_eq!(format!("{orcid}"), "0000-0002-1234-5678"); + } -#[test] -fn test_timestamp_display() { - let stamp: Timestamp = Default::default(); - assert_eq!(format!("{stamp}"), "1970-01-01 00:00:00"); -} + #[test] + fn test_ror_display() { + let ror = Ror("https://ror.org/0abcdef12".to_string()); + assert_eq!(format!("{ror}"), "0abcdef12"); + } -#[test] -fn test_doi_fromstr() { - let standardised = Doi("https://doi.org/10.12345/Test-Suffix.01".to_string()); - assert_eq!( - Doi::from_str("https://doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("http://doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("HTTPS://DOI.ORG/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("Https://DOI.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - 
Doi::from_str("https://www.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("http://www.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("www.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("https://dx.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("http://dx.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("dx.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("https://www.dx.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("http://www.dx.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert_eq!( - Doi::from_str("www.dx.doi.org/10.12345/Test-Suffix.01").unwrap(), - standardised - ); - assert!(Doi::from_str("htts://doi.org/10.12345/Test-Suffix.01").is_err()); - assert!(Doi::from_str("https://10.12345/Test-Suffix.01").is_err()); - assert!(Doi::from_str("https://test.org/10.12345/Test-Suffix.01").is_err()); - assert!(Doi::from_str("http://test.org/10.12345/Test-Suffix.01").is_err()); - assert!(Doi::from_str("test.org/10.12345/Test-Suffix.01").is_err()); - assert!(Doi::from_str("//doi.org/10.12345/Test-Suffix.01").is_err()); - assert!(Doi::from_str("https://doi-org/10.12345/Test-Suffix.01").is_err()); - assert!(Doi::from_str("10.https://doi.org/12345/Test-Suffix.01").is_err()); - assert!(Doi::from_str("http://dx.doi.org/10.2990/1471-5457(2005)24[2:tmpwac]2.0.co;2").is_ok()); - assert!(Doi::from_str( - "https://doi.org/10.1002/(SICI)1098-2736(199908)36:6<637::AID-TEA4>3.0.CO;2-9" - ) - .is_ok()); - assert!(Doi::from_str( - "https://doi.org/10.1002/(sici)1096-8644(1996)23+<91::aid-ajpa4>3.0.co;2-c" - ) - .is_ok()); -} + #[test] + fn test_timestamp_display() { + let stamp: Timestamp = Default::default(); + assert_eq!(format!("{stamp}"), 
"1970-01-01 00:00:00"); + } -#[test] -fn test_isbn_fromstr() { - // Note the `isbn2` crate contains tests of valid/invalid ISBN values - - // this focuses on testing that a valid ISBN in any format is standardised - let standardised = Isbn("978-3-16-148410-0".to_string()); - assert_eq!(Isbn::from_str("978-3-16-148410-0").unwrap(), standardised); - assert_eq!(Isbn::from_str("9783161484100").unwrap(), standardised); - assert_eq!(Isbn::from_str("978 3 16 148410 0").unwrap(), standardised); - assert_eq!(Isbn::from_str("978 3 16-148410-0").unwrap(), standardised); - assert_eq!(Isbn::from_str("9-7-831614-8-4-100").unwrap(), standardised); - assert_eq!( - Isbn::from_str(" 97831 614 84 100 ").unwrap(), - standardised - ); - assert_eq!( - Isbn::from_str("---97--831614----8-4100--").unwrap(), - standardised - ); - assert!(Isbn::from_str("978-3-16-148410-1").is_err()); - assert!(Isbn::from_str("1234567890123").is_err()); - assert!(Isbn::from_str("0-684-84328-5").is_err()); - assert!(Isbn::from_str("abcdef").is_err()); -} + #[test] + fn test_doi_fromstr() { + let standardised = Doi("https://doi.org/10.12345/Test-Suffix.01".to_string()); + assert_eq!( + Doi::from_str("https://doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("http://doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("HTTPS://DOI.ORG/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("Https://DOI.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("https://www.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("http://www.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + 
Doi::from_str("www.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("https://dx.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("http://dx.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("dx.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("https://www.dx.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("http://www.dx.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert_eq!( + Doi::from_str("www.dx.doi.org/10.12345/Test-Suffix.01").unwrap(), + standardised + ); + assert!(Doi::from_str("htts://doi.org/10.12345/Test-Suffix.01").is_err()); + assert!(Doi::from_str("https://10.12345/Test-Suffix.01").is_err()); + assert!(Doi::from_str("https://test.org/10.12345/Test-Suffix.01").is_err()); + assert!(Doi::from_str("http://test.org/10.12345/Test-Suffix.01").is_err()); + assert!(Doi::from_str("test.org/10.12345/Test-Suffix.01").is_err()); + assert!(Doi::from_str("//doi.org/10.12345/Test-Suffix.01").is_err()); + assert!(Doi::from_str("https://doi-org/10.12345/Test-Suffix.01").is_err()); + assert!(Doi::from_str("10.https://doi.org/12345/Test-Suffix.01").is_err()); + assert!( + Doi::from_str("http://dx.doi.org/10.2990/1471-5457(2005)24[2:tmpwac]2.0.co;2").is_ok() + ); + assert!(Doi::from_str( + "https://doi.org/10.1002/(SICI)1098-2736(199908)36:6<637::AID-TEA4>3.0.CO;2-9" + ) + .is_ok()); + assert!(Doi::from_str( + "https://doi.org/10.1002/(sici)1096-8644(1996)23+<91::aid-ajpa4>3.0.co;2-c" + ) + .is_ok()); + } -#[test] -fn test_orcid_fromstr() { - let standardised = Orcid("https://orcid.org/0000-0002-1234-5678".to_string()); - assert_eq!( - Orcid::from_str("https://orcid.org/0000-0002-1234-5678").unwrap(), - standardised - ); - assert_eq!( - Orcid::from_str("http://orcid.org/0000-0002-1234-5678").unwrap(), - standardised - ); - 
assert_eq!( - Orcid::from_str("orcid.org/0000-0002-1234-5678").unwrap(), - standardised - ); - assert_eq!( - Orcid::from_str("0000-0002-1234-5678").unwrap(), - standardised - ); - assert_eq!( - Orcid::from_str("HTTPS://ORCID.ORG/0000-0002-1234-5678").unwrap(), - standardised - ); - assert_eq!( - Orcid::from_str("Https://ORCiD.org/0000-0002-1234-5678").unwrap(), - standardised - ); - assert_eq!( - Orcid::from_str("https://www.orcid.org/0000-0002-1234-5678").unwrap(), - standardised - ); - assert_eq!( - Orcid::from_str("http://www.orcid.org/0000-0002-1234-5678").unwrap(), - standardised - ); - assert_eq!( - Orcid::from_str("www.orcid.org/0000-0002-1234-5678").unwrap(), - standardised - ); - assert!(Orcid::from_str("htts://orcid.org/0000-0002-1234-5678").is_err()); - assert!(Orcid::from_str("https://0000-0002-1234-5678").is_err()); - assert!(Orcid::from_str("https://test.org/0000-0002-1234-5678").is_err()); - assert!(Orcid::from_str("http://test.org/0000-0002-1234-5678").is_err()); - assert!(Orcid::from_str("test.org/0000-0002-1234-5678").is_err()); - assert!(Orcid::from_str("//orcid.org/0000-0002-1234-5678").is_err()); - assert!(Orcid::from_str("https://orcid-org/0000-0002-1234-5678").is_err()); - assert!(Orcid::from_str("0000-0002-1234-5678https://orcid.org/").is_err()); - assert!(Orcid::from_str("0009-0002-1234-567X").is_ok()); -} + #[test] + fn test_isbn_fromstr() { + // Note the `isbn2` crate contains tests of valid/invalid ISBN values - + // this focuses on testing that a valid ISBN in any format is standardised + let standardised = Isbn("978-3-16-148410-0".to_string()); + assert_eq!(Isbn::from_str("978-3-16-148410-0").unwrap(), standardised); + assert_eq!(Isbn::from_str("9783161484100").unwrap(), standardised); + assert_eq!(Isbn::from_str("978 3 16 148410 0").unwrap(), standardised); + assert_eq!(Isbn::from_str("978 3 16-148410-0").unwrap(), standardised); + assert_eq!(Isbn::from_str("9-7-831614-8-4-100").unwrap(), standardised); + assert_eq!( + 
Isbn::from_str(" 97831 614 84 100 ").unwrap(), + standardised + ); + assert_eq!( + Isbn::from_str("---97--831614----8-4100--").unwrap(), + standardised + ); + assert!(Isbn::from_str("978-3-16-148410-1").is_err()); + assert!(Isbn::from_str("1234567890123").is_err()); + assert!(Isbn::from_str("0-684-84328-5").is_err()); + assert!(Isbn::from_str("abcdef").is_err()); + } -#[test] -fn test_ror_fromstr() { - let standardised = Ror("https://ror.org/0abcdef12".to_string()); - assert_eq!( - Ror::from_str("https://ror.org/0abcdef12").unwrap(), - standardised - ); - assert_eq!( - Ror::from_str("http://ror.org/0abcdef12").unwrap(), - standardised - ); - assert_eq!(Ror::from_str("ror.org/0abcdef12").unwrap(), standardised); - assert_eq!(Ror::from_str("0abcdef12").unwrap(), standardised); - assert_eq!( - Ror::from_str("HTTPS://ROR.ORG/0abcdef12").unwrap(), - standardised - ); - assert_eq!( - Ror::from_str("Https://Ror.org/0abcdef12").unwrap(), - standardised - ); - assert_eq!( - Ror::from_str("https://www.ror.org/0abcdef12").unwrap(), - standardised - ); - // Testing shows that while leading http://ror and https://www.ror - // resolve successfully, leading www.ror and http://www.ror do not. 
- assert!(Ror::from_str("http://www.ror.org/0abcdef12").is_err()); - assert!(Ror::from_str("www.ror.org/0abcdef12").is_err()); - assert!(Ror::from_str("htts://ror.org/0abcdef12").is_err()); - assert!(Ror::from_str("https://0abcdef12").is_err()); - assert!(Ror::from_str("https://test.org/0abcdef12").is_err()); - assert!(Ror::from_str("http://test.org/0abcdef12").is_err()); - assert!(Ror::from_str("test.org/0abcdef12").is_err()); - assert!(Ror::from_str("//ror.org/0abcdef12").is_err()); - assert!(Ror::from_str("https://ror-org/0abcdef12").is_err()); - assert!(Ror::from_str("0abcdef12https://ror.org/").is_err()); -} + #[test] + fn test_orcid_fromstr() { + let standardised = Orcid("https://orcid.org/0000-0002-1234-5678".to_string()); + assert_eq!( + Orcid::from_str("https://orcid.org/0000-0002-1234-5678").unwrap(), + standardised + ); + assert_eq!( + Orcid::from_str("http://orcid.org/0000-0002-1234-5678").unwrap(), + standardised + ); + assert_eq!( + Orcid::from_str("orcid.org/0000-0002-1234-5678").unwrap(), + standardised + ); + assert_eq!( + Orcid::from_str("0000-0002-1234-5678").unwrap(), + standardised + ); + assert_eq!( + Orcid::from_str("HTTPS://ORCID.ORG/0000-0002-1234-5678").unwrap(), + standardised + ); + assert_eq!( + Orcid::from_str("Https://ORCiD.org/0000-0002-1234-5678").unwrap(), + standardised + ); + assert_eq!( + Orcid::from_str("https://www.orcid.org/0000-0002-1234-5678").unwrap(), + standardised + ); + assert_eq!( + Orcid::from_str("http://www.orcid.org/0000-0002-1234-5678").unwrap(), + standardised + ); + assert_eq!( + Orcid::from_str("www.orcid.org/0000-0002-1234-5678").unwrap(), + standardised + ); + assert!(Orcid::from_str("htts://orcid.org/0000-0002-1234-5678").is_err()); + assert!(Orcid::from_str("https://0000-0002-1234-5678").is_err()); + assert!(Orcid::from_str("https://test.org/0000-0002-1234-5678").is_err()); + assert!(Orcid::from_str("http://test.org/0000-0002-1234-5678").is_err()); + 
assert!(Orcid::from_str("test.org/0000-0002-1234-5678").is_err()); + assert!(Orcid::from_str("//orcid.org/0000-0002-1234-5678").is_err()); + assert!(Orcid::from_str("https://orcid-org/0000-0002-1234-5678").is_err()); + assert!(Orcid::from_str("0000-0002-1234-5678https://orcid.org/").is_err()); + assert!(Orcid::from_str("0009-0002-1234-567X").is_ok()); + } -#[test] -fn test_isbn_to_hyphenless_string() { - let hyphenless_isbn = Isbn("978-3-16-148410-0".to_string()).to_hyphenless_string(); - assert_eq!(hyphenless_isbn, "9783161484100"); -} + #[test] + fn test_ror_fromstr() { + let standardised = Ror("https://ror.org/0abcdef12".to_string()); + assert_eq!( + Ror::from_str("https://ror.org/0abcdef12").unwrap(), + standardised + ); + assert_eq!( + Ror::from_str("http://ror.org/0abcdef12").unwrap(), + standardised + ); + assert_eq!(Ror::from_str("ror.org/0abcdef12").unwrap(), standardised); + assert_eq!(Ror::from_str("0abcdef12").unwrap(), standardised); + assert_eq!( + Ror::from_str("HTTPS://ROR.ORG/0abcdef12").unwrap(), + standardised + ); + assert_eq!( + Ror::from_str("Https://Ror.org/0abcdef12").unwrap(), + standardised + ); + assert_eq!( + Ror::from_str("https://www.ror.org/0abcdef12").unwrap(), + standardised + ); + // Testing shows that while leading http://ror and https://www.ror + // resolve successfully, leading www.ror and http://www.ror do not. 
+ assert!(Ror::from_str("http://www.ror.org/0abcdef12").is_err()); + assert!(Ror::from_str("www.ror.org/0abcdef12").is_err()); + assert!(Ror::from_str("htts://ror.org/0abcdef12").is_err()); + assert!(Ror::from_str("https://0abcdef12").is_err()); + assert!(Ror::from_str("https://test.org/0abcdef12").is_err()); + assert!(Ror::from_str("http://test.org/0abcdef12").is_err()); + assert!(Ror::from_str("test.org/0abcdef12").is_err()); + assert!(Ror::from_str("//ror.org/0abcdef12").is_err()); + assert!(Ror::from_str("https://ror-org/0abcdef12").is_err()); + assert!(Ror::from_str("0abcdef12https://ror.org/").is_err()); + } -#[test] -// Float equality comparison is fine here because the floats -// have already been rounded by the functions under test -#[allow(clippy::float_cmp)] -fn test_convert_length_from_to() { - use LengthUnit::*; - assert_eq!(123.456.convert_length_from_to(&Mm, &Cm), 12.3); - assert_eq!(123.456.convert_length_from_to(&Mm, &In), 4.86); - assert_eq!(123.456.convert_length_from_to(&Cm, &Mm), 1235.0); - assert_eq!(123.456.convert_length_from_to(&In, &Mm), 3136.0); - // Test some standard print sizes - assert_eq!(4.25.convert_length_from_to(&In, &Mm), 108.0); - assert_eq!(108.0.convert_length_from_to(&Mm, &In), 4.25); - assert_eq!(6.0.convert_length_from_to(&In, &Mm), 152.0); - assert_eq!(152.0.convert_length_from_to(&Mm, &In), 5.98); - assert_eq!(8.5.convert_length_from_to(&In, &Mm), 216.0); - assert_eq!(216.0.convert_length_from_to(&Mm, &In), 8.5); - // Test that converting and then converting back again - // returns a value within a reasonable margin of error - assert_eq!( - 5.06.convert_length_from_to(&In, &Mm) - .convert_length_from_to(&Mm, &In), - 5.08 - ); - assert_eq!( - 6.5.convert_length_from_to(&In, &Mm) - .convert_length_from_to(&Mm, &In), - 6.5 - ); - assert_eq!( - 7.44.convert_length_from_to(&In, &Mm) - .convert_length_from_to(&Mm, &In), - 7.44 - ); - assert_eq!( - 8.27.convert_length_from_to(&In, &Mm) - .convert_length_from_to(&Mm, &In), - 
8.27 - ); - assert_eq!( - 9.0.convert_length_from_to(&In, &Mm) - .convert_length_from_to(&Mm, &In), - 9.02 - ); - assert_eq!( - 10.88 - .convert_length_from_to(&In, &Mm) - .convert_length_from_to(&Mm, &In), - 10.87 - ); - assert_eq!( - 102.0 - .convert_length_from_to(&Mm, &In) - .convert_length_from_to(&In, &Mm), - 102.0 - ); - assert_eq!( - 120.0 - .convert_length_from_to(&Mm, &In) - .convert_length_from_to(&In, &Mm), - 120.0 - ); - assert_eq!( - 168.0 - .convert_length_from_to(&Mm, &In) - .convert_length_from_to(&In, &Mm), - 168.0 - ); - assert_eq!( - 190.0 - .convert_length_from_to(&Mm, &In) - .convert_length_from_to(&In, &Mm), - 190.0 - ); -} + #[test] + fn test_isbn_to_hyphenless_string() { + let hyphenless_isbn = Isbn("978-3-16-148410-0".to_string()).to_hyphenless_string(); + assert_eq!(hyphenless_isbn, "9783161484100"); + } -#[test] -// Float equality comparison is fine here because the floats -// have already been rounded by the functions under test -#[allow(clippy::float_cmp)] -fn test_convert_weight_from_to() { - use WeightUnit::*; - assert_eq!(123.456.convert_weight_from_to(&G, &Oz), 4.3548); - assert_eq!(123.456.convert_weight_from_to(&Oz, &G), 3500.0); - assert_eq!(4.25.convert_weight_from_to(&Oz, &G), 120.0); - assert_eq!(108.0.convert_weight_from_to(&G, &Oz), 3.8096); - assert_eq!(6.0.convert_weight_from_to(&Oz, &G), 170.0); - assert_eq!(152.0.convert_weight_from_to(&G, &Oz), 5.3616); - assert_eq!(8.5.convert_weight_from_to(&Oz, &G), 241.0); - assert_eq!(216.0.convert_weight_from_to(&G, &Oz), 7.6192); - // Test that converting and then converting back again - // returns a value within a reasonable margin of error - assert_eq!( - 5.0.convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 5.0089 - ); - assert_eq!( - 5.125 - .convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 5.1147 - ); - assert_eq!( - 6.5.convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 6.4904 - ); - assert_eq!( - 
7.25.convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 7.2664 - ); - assert_eq!( - 7.44.convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 7.4428 - ); - assert_eq!( - 8.0625 - .convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 8.0777 - ); - assert_eq!( - 9.0.convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 8.9949 - ); - assert_eq!( - 10.75 - .convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 10.7586 - ); - assert_eq!( - 10.88 - .convert_weight_from_to(&Oz, &G) - .convert_weight_from_to(&G, &Oz), - 10.8644 - ); - assert_eq!( - 102.0 - .convert_weight_from_to(&G, &Oz) - .convert_weight_from_to(&Oz, &G), - 102.0 - ); - assert_eq!( - 120.0 - .convert_weight_from_to(&G, &Oz) - .convert_weight_from_to(&Oz, &G), - 120.0 - ); - assert_eq!( - 168.0 - .convert_weight_from_to(&G, &Oz) - .convert_weight_from_to(&Oz, &G), - 168.0 - ); - assert_eq!( - 190.0 - .convert_weight_from_to(&G, &Oz) - .convert_weight_from_to(&Oz, &G), - 190.0 - ); -} + #[test] + // Float equality comparison is fine here because the floats + // have already been rounded by the functions under test + #[allow(clippy::float_cmp)] + fn test_convert_length_from_to() { + use LengthUnit::*; + assert_eq!(123.456.convert_length_from_to(&Mm, &Cm), 12.3); + assert_eq!(123.456.convert_length_from_to(&Mm, &In), 4.86); + assert_eq!(123.456.convert_length_from_to(&Cm, &Mm), 1235.0); + assert_eq!(123.456.convert_length_from_to(&In, &Mm), 3136.0); + // Test some standard print sizes + assert_eq!(4.25.convert_length_from_to(&In, &Mm), 108.0); + assert_eq!(108.0.convert_length_from_to(&Mm, &In), 4.25); + assert_eq!(6.0.convert_length_from_to(&In, &Mm), 152.0); + assert_eq!(152.0.convert_length_from_to(&Mm, &In), 5.98); + assert_eq!(8.5.convert_length_from_to(&In, &Mm), 216.0); + assert_eq!(216.0.convert_length_from_to(&Mm, &In), 8.5); + // Test that converting and then converting back again + // returns a value within a 
reasonable margin of error + assert_eq!( + 5.06.convert_length_from_to(&In, &Mm) + .convert_length_from_to(&Mm, &In), + 5.08 + ); + assert_eq!( + 6.5.convert_length_from_to(&In, &Mm) + .convert_length_from_to(&Mm, &In), + 6.5 + ); + assert_eq!( + 7.44.convert_length_from_to(&In, &Mm) + .convert_length_from_to(&Mm, &In), + 7.44 + ); + assert_eq!( + 8.27.convert_length_from_to(&In, &Mm) + .convert_length_from_to(&Mm, &In), + 8.27 + ); + assert_eq!( + 9.0.convert_length_from_to(&In, &Mm) + .convert_length_from_to(&Mm, &In), + 9.02 + ); + assert_eq!( + 10.88 + .convert_length_from_to(&In, &Mm) + .convert_length_from_to(&Mm, &In), + 10.87 + ); + assert_eq!( + 102.0 + .convert_length_from_to(&Mm, &In) + .convert_length_from_to(&In, &Mm), + 102.0 + ); + assert_eq!( + 120.0 + .convert_length_from_to(&Mm, &In) + .convert_length_from_to(&In, &Mm), + 120.0 + ); + assert_eq!( + 168.0 + .convert_length_from_to(&Mm, &In) + .convert_length_from_to(&In, &Mm), + 168.0 + ); + assert_eq!( + 190.0 + .convert_length_from_to(&Mm, &In) + .convert_length_from_to(&In, &Mm), + 190.0 + ); + } -#[test] -fn test_doi_with_domain() { - let doi = "https://doi.org/10.12345/Test-Suffix.01"; - assert_eq!(format!("{}", Doi(doi.to_string()).with_domain()), doi); -} + #[test] + // Float equality comparison is fine here because the floats + // have already been rounded by the functions under test + #[allow(clippy::float_cmp)] + fn test_convert_weight_from_to() { + use WeightUnit::*; + assert_eq!(123.456.convert_weight_from_to(&G, &Oz), 4.3548); + assert_eq!(123.456.convert_weight_from_to(&Oz, &G), 3500.0); + assert_eq!(4.25.convert_weight_from_to(&Oz, &G), 120.0); + assert_eq!(108.0.convert_weight_from_to(&G, &Oz), 3.8096); + assert_eq!(6.0.convert_weight_from_to(&Oz, &G), 170.0); + assert_eq!(152.0.convert_weight_from_to(&G, &Oz), 5.3616); + assert_eq!(8.5.convert_weight_from_to(&Oz, &G), 241.0); + assert_eq!(216.0.convert_weight_from_to(&G, &Oz), 7.6192); + // Test that converting and then converting 
back again + // returns a value within a reasonable margin of error + assert_eq!( + 5.0.convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 5.0089 + ); + assert_eq!( + 5.125 + .convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 5.1147 + ); + assert_eq!( + 6.5.convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 6.4904 + ); + assert_eq!( + 7.25.convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 7.2664 + ); + assert_eq!( + 7.44.convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 7.4428 + ); + assert_eq!( + 8.0625 + .convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 8.0777 + ); + assert_eq!( + 9.0.convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 8.9949 + ); + assert_eq!( + 10.75 + .convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 10.7586 + ); + assert_eq!( + 10.88 + .convert_weight_from_to(&Oz, &G) + .convert_weight_from_to(&G, &Oz), + 10.8644 + ); + assert_eq!( + 102.0 + .convert_weight_from_to(&G, &Oz) + .convert_weight_from_to(&Oz, &G), + 102.0 + ); + assert_eq!( + 120.0 + .convert_weight_from_to(&G, &Oz) + .convert_weight_from_to(&Oz, &G), + 120.0 + ); + assert_eq!( + 168.0 + .convert_weight_from_to(&G, &Oz) + .convert_weight_from_to(&Oz, &G), + 168.0 + ); + assert_eq!( + 190.0 + .convert_weight_from_to(&G, &Oz) + .convert_weight_from_to(&Oz, &G), + 190.0 + ); + } -#[test] -fn test_orcid_with_domain() { - let orcid = "https://orcid.org/0000-0002-1234-5678"; - assert_eq!(format!("{}", Orcid(orcid.to_string()).with_domain()), orcid); -} + #[test] + fn test_doi_with_domain() { + let doi = "https://doi.org/10.12345/Test-Suffix.01"; + assert_eq!(format!("{}", Doi(doi.to_string()).with_domain()), doi); + } + + #[test] + fn test_orcid_with_domain() { + let orcid = "https://orcid.org/0000-0002-1234-5678"; + assert_eq!(format!("{}", Orcid(orcid.to_string()).with_domain()), orcid); + } + + #[test] + fn 
test_ror_with_domain() { + let ror = "https://ror.org/0abcdef12"; + assert_eq!(format!("{}", Ror(ror.to_string()).with_domain()), ror); + } + + #[test] + fn test_timestamp_parse_from_rfc3339_valid() { + let input = "1999-12-31T23:59:00Z"; + let timestamp = Timestamp::parse_from_rfc3339(input); + assert!(timestamp.is_ok()); + + let expected = Timestamp(Utc.with_ymd_and_hms(1999, 12, 31, 23, 59, 0).unwrap()); + assert_eq!(timestamp.unwrap(), expected); + } -#[test] -fn test_ror_with_domain() { - let ror = "https://ror.org/0abcdef12"; - assert_eq!(format!("{}", Ror(ror.to_string()).with_domain()), ror); + #[test] + fn test_timestamp_parse_from_rfc3339_invalid_format() { + let input = "1999-12-31 23:59:00"; // Missing 'T' and 'Z' + let timestamp = Timestamp::parse_from_rfc3339(input); + assert!(timestamp.is_err()); + } + + #[test] + fn test_timestamp_parse_from_rfc3339_invalid_date() { + let input = "1999-02-30T23:59:00Z"; // Invalid date + let timestamp = Timestamp::parse_from_rfc3339(input); + assert!(timestamp.is_err()); + } + + #[test] + fn test_timestamp_to_rfc3339() { + let timestamp = Timestamp(Utc.with_ymd_and_hms(1999, 12, 31, 23, 59, 0).unwrap()); + assert_eq!(timestamp.to_rfc3339(), "1999-12-31T23:59:00+00:00"); + } + + #[test] + fn test_timestamp_round_trip_rfc3339_conversion() { + let original_string = "2023-11-13T12:34:56Z"; + let timestamp = Timestamp::parse_from_rfc3339(original_string).unwrap(); + let converted_string = timestamp.to_rfc3339(); + + let round_trip_timestamp = Timestamp::parse_from_rfc3339(&converted_string).unwrap(); + assert_eq!(timestamp, round_trip_timestamp); + } } pub mod affiliation; diff --git a/thoth-api/src/model/price/crud.rs b/thoth-api/src/model/price/crud.rs index c33006af..b213b081 100644 --- a/thoth-api/src/model/price/crud.rs +++ b/thoth-api/src/model/price/crud.rs @@ -5,7 +5,7 @@ use crate::model::{Crud, DbInsert, HistoryEntry}; use crate::schema::{price, price_history}; use crate::{crud_methods, db_insert}; use 
diesel::{ExpressionMethods, QueryDsl, RunQueryDsl}; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Price { @@ -78,14 +78,11 @@ impl Crud for Price { if !currency_codes.is_empty() { query = query.filter(currency_code.eq_any(currency_codes)); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -106,10 +103,11 @@ impl Crud for Price { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! - match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/price/mod.rs b/thoth-api/src/model/price/mod.rs index 9c152287..cccf672f 100644 --- a/thoth-api/src/model/price/mod.rs +++ b/thoth-api/src/model/price/mod.rs @@ -66,7 +66,9 @@ pub struct PatchPrice { graphql(description = "Three-letter ISO 4217 code representing a currency"), ExistingTypePath = "crate::schema::sql_types::CurrencyCode" )] -#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[strum(serialize_all = "UPPERCASE")] pub enum CurrencyCode { diff --git a/thoth-api/src/model/publication/crud.rs b/thoth-api/src/model/publication/crud.rs index 894fc2b8..e38e06b8 100644 --- a/thoth-api/src/model/publication/crud.rs 
+++ b/thoth-api/src/model/publication/crud.rs @@ -116,14 +116,11 @@ impl Crud for Publication { query = query.filter(isbn.ilike(format!("%{filter}%"))); } } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -156,10 +153,11 @@ impl Crud for Publication { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! - match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/publication/mod.rs b/thoth-api/src/model/publication/mod.rs index 4c061a10..5cf4ae2f 100644 --- a/thoth-api/src/model/publication/mod.rs +++ b/thoth-api/src/model/publication/mod.rs @@ -21,7 +21,9 @@ use crate::schema::publication_history; graphql(description = "Format of a publication"), ExistingTypePath = "crate::schema::sql_types::PublicationType" )] -#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum PublicationType { #[cfg_attr( diff --git a/thoth-api/src/model/publisher/crud.rs b/thoth-api/src/model/publisher/crud.rs index 012822ea..b2776f01 100644 --- a/thoth-api/src/model/publisher/crud.rs +++ b/thoth-api/src/model/publisher/crud.rs @@ -9,7 +9,7 @@ use crate::{crud_methods, db_insert}; use diesel::{ BoolExpressionMethods, ExpressionMethods, 
PgTextExpressionMethods, QueryDsl, RunQueryDsl, }; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Publisher { @@ -77,14 +77,11 @@ impl Crud for Publisher { .or(publisher_shortname.ilike(format!("%{filter}%"))), ); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -113,10 +110,11 @@ impl Crud for Publisher { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! - match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, _db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/reference/crud.rs b/thoth-api/src/model/reference/crud.rs index f1a9e325..960aabca 100644 --- a/thoth-api/src/model/reference/crud.rs +++ b/thoth-api/src/model/reference/crud.rs @@ -9,7 +9,7 @@ use crate::{crud_methods, db_insert}; use diesel::{ BoolExpressionMethods, ExpressionMethods, PgTextExpressionMethods, QueryDsl, RunQueryDsl, }; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Reference { @@ -172,14 +172,11 @@ impl Crud for Reference { ); } } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -223,10 +220,11 @@ impl Crud for Reference { // not implement i64 yet, only i32. 
The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! - match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/series/crud.rs b/thoth-api/src/model/series/crud.rs index 29a987d5..610486fc 100644 --- a/thoth-api/src/model/series/crud.rs +++ b/thoth-api/src/model/series/crud.rs @@ -9,7 +9,7 @@ use crate::{crud_methods, db_insert}; use diesel::{ BoolExpressionMethods, ExpressionMethods, PgTextExpressionMethods, QueryDsl, RunQueryDsl, }; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Series { @@ -102,14 +102,11 @@ impl Crud for Series { .or(series_description.ilike(format!("%{filter}%"))), ); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -146,10 +143,11 @@ impl Crud for Series { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/series/mod.rs b/thoth-api/src/model/series/mod.rs index ec1da46b..c41fc655 100644 --- a/thoth-api/src/model/series/mod.rs +++ b/thoth-api/src/model/series/mod.rs @@ -18,7 +18,9 @@ use crate::schema::series_history; graphql(description = "Type of a series"), ExistingTypePath = "crate::schema::sql_types::SeriesType" )] -#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[strum(serialize_all = "title_case")] pub enum SeriesType { diff --git a/thoth-api/src/model/subject/crud.rs b/thoth-api/src/model/subject/crud.rs index b8504bda..9c63fc98 100644 --- a/thoth-api/src/model/subject/crud.rs +++ b/thoth-api/src/model/subject/crud.rs @@ -7,7 +7,7 @@ use crate::model::{Crud, DbInsert, HistoryEntry}; use crate::schema::{subject, subject_history}; use crate::{crud_methods, db_insert}; use diesel::{ExpressionMethods, PgTextExpressionMethods, QueryDsl, RunQueryDsl}; -use thoth_errors::{ThothError, ThothResult}; +use thoth_errors::ThothResult; use uuid::Uuid; impl Crud for Subject { @@ -84,15 +84,12 @@ impl Crud for Subject { if let Some(filter) = filter { query = query.filter(subject_code.ilike(format!("%{filter}%"))); } - match query + query .then_order_by(subject_code.asc()) .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -116,10 +113,11 @@ impl Crud for Subject { // not implement i64 yet, only i32. 
The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! - match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/subject/mod.rs b/thoth-api/src/model/subject/mod.rs index 16571b29..bef66516 100644 --- a/thoth-api/src/model/subject/mod.rs +++ b/thoth-api/src/model/subject/mod.rs @@ -21,6 +21,7 @@ use thoth_errors::ThothResult; )] #[derive( Debug, + Copy, Clone, Default, PartialEq, diff --git a/thoth-api/src/model/work/crud.rs b/thoth-api/src/model/work/crud.rs index be44ea49..c9254e39 100644 --- a/thoth-api/src/model/work/crud.rs +++ b/thoth-api/src/model/work/crud.rs @@ -314,15 +314,12 @@ impl Crud for Work { .or(dsl::landing_page.ilike(format!("%{filter}%"))), ); } - match query + query .then_order_by(dsl::work_id) .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -373,10 +370,11 @@ impl Crud for Work { // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
- match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { diff --git a/thoth-api/src/model/work/mod.rs b/thoth-api/src/model/work/mod.rs index 0f47fc00..eb2ab755 100644 --- a/thoth-api/src/model/work/mod.rs +++ b/thoth-api/src/model/work/mod.rs @@ -29,7 +29,9 @@ use crate::schema::work_history; graphql(description = "Type of a work"), ExistingTypePath = "crate::schema::sql_types::WorkType" )] -#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[strum(serialize_all = "title_case")] pub enum WorkType { @@ -80,7 +82,9 @@ pub enum WorkType { ), ExistingTypePath = "crate::schema::sql_types::WorkStatus" )] -#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display)] +#[derive( + Debug, Copy, Clone, Default, PartialEq, Eq, Deserialize, Serialize, EnumString, Display, +)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[strum(serialize_all = "title_case")] pub enum WorkStatus { diff --git a/thoth-api/src/model/work_relation/crud.rs b/thoth-api/src/model/work_relation/crud.rs index 1e7fbfdd..b0278843 100644 --- a/thoth-api/src/model/work_relation/crud.rs +++ b/thoth-api/src/model/work_relation/crud.rs @@ -78,14 +78,11 @@ impl Crud for WorkRelation { if !relation_types.is_empty() { query = query.filter(relation_type.eq_any(relation_types)); } - match query + query .limit(limit.into()) .offset(offset.into()) .load::(&mut connection) - { - Ok(t) => Ok(t), - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) } fn count( @@ -107,10 +104,11 @@ impl Crud for WorkRelation { // 
not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this // is converting i64 to string and then parsing it as i32. This should work until we reach // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! - match query.count().get_result::(&mut connection) { - Ok(t) => Ok(t.to_string().parse::().unwrap()), - Err(e) => Err(ThothError::from(e)), - } + query + .count() + .get_result::(&mut connection) + .map(|t| t.to_string().parse::().unwrap()) + .map_err(Into::into) } // `crud_methods!` cannot be used for create(), update() or delete() @@ -185,18 +183,18 @@ impl Crud for WorkRelation { diesel::update(work_relation::table.find(inverse_work_relation.work_relation_id)) .set(inverse_data) .execute(connection)?; - match diesel::update(work_relation::table.find(&self.pk())) + diesel::update(work_relation::table.find(&self.pk())) .set(data) .get_result::(connection) - { - // On success, create a new history table entry. - // Only record the original update, not the automatic inverse update. - Ok(t) => match self.new_history_entry(account_id).insert(connection) { - Ok(_) => Ok(t), - Err(e) => Err(e), - }, - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) + .and_then(|t| { + // On success, create a new history table entry. + // Only record the original update, not the automatic inverse update. 
+ self.new_history_entry(account_id) + .insert(connection) + .map(|_| t) + }) + .map_err(Into::into) }) } @@ -210,10 +208,10 @@ impl Crud for WorkRelation { connection.transaction(|connection| { diesel::delete(work_relation::table.find(inverse_work_relation.work_relation_id)) .execute(connection)?; - match diesel::delete(work_relation::table.find(self.pk())).execute(connection) { - Ok(_) => Ok(self), - Err(e) => Err(ThothError::from(e)), - } + diesel::delete(work_relation::table.find(self.pk())) + .execute(connection) + .map(|_| self) + .map_err(Into::into) }) } @@ -246,24 +244,27 @@ impl WorkRelation { pub fn get_inverse(&self, db: &crate::db::PgPool) -> ThothResult { // Every WorkRelation record must be accompanied by an 'inverse' record, // which represents the relation from the perspective of the related work. - match work_relation::table + work_relation::table .filter( work_relation::relator_work_id .eq(self.related_work_id) .and(work_relation::related_work_id.eq(self.relator_work_id)), ) .first::(&mut db.get()?) - { - // The inverse record should have the inverse relation_type, - // but this cannot be enforced by the database. Test for data integrity. - Ok(r) => match r.relation_type == self.relation_type.convert_to_inverse() { - true => Ok(r), - false => Err(ThothError::InternalError( - "Found mismatched relation types for paired Work Relation objects".to_string(), - )), - }, - Err(e) => Err(ThothError::from(e)), - } + .map_err(Into::into) + .and_then(|r| { + // The inverse record should have the inverse relation_type, + // but this cannot be enforced by the database. Test for data integrity. 
+ if r.relation_type == self.relation_type.convert_to_inverse() { + Ok(r) + } else { + Err(ThothError::InternalError( + "Found mismatched relation types for paired Work Relation objects" + .to_string(), + )) + } + }) + .map_err(Into::into) } } diff --git a/thoth-api/src/redis.rs b/thoth-api/src/redis.rs new file mode 100644 index 00000000..3413e1a3 --- /dev/null +++ b/thoth-api/src/redis.rs @@ -0,0 +1,71 @@ +use deadpool_redis::{redis::AsyncCommands, Config, Connection, Pool}; +use thoth_errors::ThothResult; + +pub type RedisPool = Pool; +type RedisConnection = Connection; + +pub fn init_pool(redis_url: &str) -> RedisPool { + Config::from_url(redis_url) + .builder() + .expect("Failed to create redis pool.") + .build() + .expect("Failed to build redis pool.") +} + +async fn create_connection(pool: &RedisPool) -> ThothResult { + pool.get().await.map_err(Into::into) +} + +pub async fn set(pool: &RedisPool, key: &str, value: &str) -> ThothResult<()> { + let mut con = create_connection(pool).await?; + con.set(key, value).await.map_err(Into::into) +} + +pub async fn get(pool: &RedisPool, key: &str) -> ThothResult { + let mut con = create_connection(pool).await?; + con.get(key).await.map_err(Into::into) +} + +#[cfg(test)] +mod tests { + use super::*; + use dotenv::dotenv; + use std::env; + + async fn get_pool() -> RedisPool { + dotenv().ok(); + let redis_url = env::var("TEST_REDIS_URL").expect("TEST_REDIS_URL must be set"); + init_pool(&redis_url) + } + + #[tokio::test] + async fn test_init_pool() { + // Ensure that the pool initializes successfully + let pool = get_pool().await; + assert!(pool.get().await.is_ok()); + } + + #[tokio::test] + async fn test_set_and_get() { + let pool = get_pool().await; + + let test_key = "test_key"; + let test_value = "test_value"; + + let set_result = set(&pool, test_key, test_value).await; + assert!(set_result.is_ok()); + + let get_result = get(&pool, test_key).await; + assert!(get_result.is_ok()); + assert_eq!(get_result.unwrap(), 
test_value); + } + + #[tokio::test] + async fn test_get_nonexistent_key() { + let pool = get_pool().await; + + let test_key = "nonexistent_key"; + let get_result = get(&pool, test_key).await; + assert!(get_result.is_err()); + } +} diff --git a/thoth-app-server/Cargo.toml b/thoth-app-server/Cargo.toml index 7ace5432..37449ab7 100644 --- a/thoth-app-server/Cargo.toml +++ b/thoth-app-server/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "thoth-app-server" version = "0.12.14" -authors = ["Javier Arias ", "Ross Higman "] +authors = ["Javier Arias ", "Ross Higman "] edition = "2021" license = "Apache-2.0" description = "Actix instance serving Thoth's WASM GUI statically" diff --git a/thoth-app-server/LICENSE b/thoth-app-server/LICENSE index 30291ef4..5194de71 100644 --- a/thoth-app-server/LICENSE +++ b/thoth-app-server/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 Open Book Publishers + Copyright 2020 Thoth Open Metadata Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/thoth-app-server/README.md b/thoth-app-server/README.md index 59632a63..c00c66e8 100644 --- a/thoth-app-server/README.md +++ b/thoth-app-server/README.md @@ -1,5 +1,5 @@
- +

Thoth Client

diff --git a/thoth-app/Cargo.toml b/thoth-app/Cargo.toml index 0119b6c9..0e951f29 100644 --- a/thoth-app/Cargo.toml +++ b/thoth-app/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "thoth-app" version = "0.12.14" -authors = ["Javier Arias ", "Ross Higman "] +authors = ["Javier Arias ", "Ross Higman "] edition = "2021" license = "Apache-2.0" description = "WASM APP for bibliographic data" @@ -10,7 +10,6 @@ readme = "README.md" build = "build.rs" [badges] -travis-ci = { repository = "openbookpublishers/thoth" } maintenance = { status = "actively-developed" } [dependencies] diff --git a/thoth-app/LICENSE b/thoth-app/LICENSE index 30291ef4..5194de71 100644 --- a/thoth-app/LICENSE +++ b/thoth-app/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 Open Book Publishers + Copyright 2020 Thoth Open Metadata Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/thoth-app/README.md b/thoth-app/README.md index e0921fa3..737ab6d8 100644 --- a/thoth-app/README.md +++ b/thoth-app/README.md @@ -1,5 +1,5 @@
- +

Thoth APP

diff --git a/thoth-app/index.html b/thoth-app/index.html index 557d0a5f..025e6cc2 100644 --- a/thoth-app/index.html +++ b/thoth-app/index.html @@ -29,7 +29,7 @@ - + diff --git a/thoth-app/src/component/institution.rs b/thoth-app/src/component/institution.rs index da58b356..b80e3a49 100644 --- a/thoth-app/src/component/institution.rs +++ b/thoth-app/src/component/institution.rs @@ -294,7 +294,7 @@ impl Component for InstitutionComponent { institution_name: self.institution.institution_name.clone(), institution_doi: self.institution.institution_doi.clone(), ror: self.institution.ror.clone(), - country_code: self.institution.country_code.clone(), + country_code: self.institution.country_code, }, ..Default::default() }; @@ -476,7 +476,7 @@ impl Component for InstitutionComponent { /> diff --git a/thoth-app/src/component/languages_form.rs b/thoth-app/src/component/languages_form.rs index 4f7363ba..d38e0004 100644 --- a/thoth-app/src/component/languages_form.rs +++ b/thoth-app/src/component/languages_form.rs @@ -194,8 +194,8 @@ impl Component for LanguagesFormComponent { let body = CreateLanguageRequestBody { variables: Variables { work_id: ctx.props().work_id, - language_relation: self.new_language.language_relation.clone(), - language_code: self.new_language.language_code.clone(), + language_relation: self.new_language.language_relation, + language_code: self.new_language.language_code, main_language: self.new_language.main_language, }, ..Default::default() @@ -304,7 +304,7 @@ impl Component for LanguagesFormComponent { > diff --git a/thoth-app/src/component/new_series.rs b/thoth-app/src/component/new_series.rs index 5ab0732c..1154a3d9 100644 --- a/thoth-app/src/component/new_series.rs +++ b/thoth-app/src/component/new_series.rs @@ -190,7 +190,7 @@ impl Component for NewSeriesComponent { Msg::CreateSeries => { let body = CreateSeriesRequestBody { variables: Variables { - series_type: self.series.series_type.clone(), + series_type: self.series.series_type, series_name: 
self.series.series_name.clone(), issn_print: self.series.issn_print.clone(), issn_digital: self.series.issn_digital.clone(), @@ -263,7 +263,7 @@ impl Component for NewSeriesComponent {
Some(Publication { publication_id: self.publication.publication_id, - publication_type: self.publication.publication_type.clone(), + publication_type: self.publication.publication_type, work_id: self.publication.work_id, isbn: self.publication.isbn.clone(), // Not used by child form created_at: Default::default(), - updated_at: self.publication.updated_at.clone(), + updated_at: self.publication.updated_at, width_mm: self.publication.width_mm, width_in: self.publication.width_in, height_mm: self.publication.height_mm, @@ -299,7 +299,7 @@ impl Component for PublicationComponent { VNode { - if Some(c.name.clone()) == self.value { + if Some(c.name) == self.value { html! {