diff --git a/.gitmodules b/.gitmodules index 8ef5ef40..e69de29b 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,15 +0,0 @@ -[submodule "http/tests/di-ed25519-test-suite"] - path = http/tests/di-ed25519-test-suite - url = https://github.com/w3c-ccg/di-ed25519-test-suite.git -[submodule "http/tests/vc-api-issuer-test-suite"] - path = http/tests/vc-api-issuer-test-suite - url = https://github.com/w3c-ccg/vc-api-issuer-test-suite.git -[submodule "http/tests/vc-api-verifier-test-suite"] - path = http/tests/vc-api-verifier-test-suite - url = https://github.com/w3c-ccg/vc-api-verifier-test-suite.git -[submodule "http/tests/did-key-test-suite"] - path = http/tests/did-key-test-suite - url = https://github.com/w3c-ccg/did-key-test-suite.git -[submodule "http/tests/di-eddsa-2022-test-suite"] - path = http/tests/di-eddsa-2022-test-suite - url = https://github.com/w3c-ccg/di-eddsa-2022-test-suite.git diff --git a/Cargo.toml b/Cargo.toml index 1653620c..640bb4e0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,18 +1,7 @@ [workspace] -members = [ - "http", - "cli", - "lib", - "lib/cbindings", - "lib/web", -] +members = ["cli", "lib", "lib/cbindings", "lib/web"] -default-members = [ - "http", - "cli", - "lib", - "lib/cbindings", -] +default-members = ["cli", "lib", "lib/cbindings"] # $ cargo release [workspace.metadata.release] diff --git a/README.md b/README.md index bf93cdb6..78b98923 100644 --- a/README.md +++ b/README.md @@ -38,41 +38,15 @@ Build DIDKit using [Cargo][]: ```sh $ cargo build ``` -That will give you the DIDKit CLI and HTTP server executables located at -`target/debug/didkit` and `target/debug/didkit-http`, respectively. You can also build and install DIDKit's components separately. Building the FFI libraries will require additional dependencies. See the corresponding readmes linked below for more info. - - -### Container - -Both the CLI and HTTP server are containerised and available under -`ghcr.io/spruceid/didkit-(cli|http)`. - -You can use the images like CLIs: -```bash -$ docker run ghcr.io/spruceid/didkit-cli:latest --help -$ docker run --init -p 8080 ghcr.io/spruceid/didkit-http:latest --port 8080 -``` - -> You can pass JWKs either by sharing a volume with `docker run --volume`, or by passing the JWK directly with `docker run -e JWK=$MY_JWK` or `docker run didkit-http --jwk $MY_JWK`. - -#### Build Images - -The Dockerfiles rely on having `ssi` in the root of `didkit` (a symbolic link will not work unfortunately). - -Then the images can be built with: -```bash -$ docker build -f Dockerfile-cli . -t didkit-cli -$ docker build -f Dockerfile-http . -t didkit-http -``` - -And to use them, replace `ghcr.io/spruceid/didkit-(cli|http):latest` with `didkit-(cli|http)`. +That will give you the DIDKit CLI executable located at +`target/debug/didkit`. You can also build and install DIDKit's components separately. Building the FFI libraries will require additional dependencies. See the corresponding readmes linked below for more info.
## Usage DIDKit can be used in any of the following ways: - [CLI](cli/) - `didkit` command-line program -- [HTTP](http/) - HTTP server (Rust library and CLI program) +- [HTTP](https://github.com/spruceid/didkit-http/) - HTTP server (Rust library and CLI program) - [FFI](lib/FFI.md) - libraries for C, Java, Android, and Dart/Flutter [Rust]: https://www.rust-lang.org/ diff --git a/http/Cargo.toml b/http/Cargo.toml deleted file mode 100644 index d13fbeb0..00000000 --- a/http/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -name = "didkit-http" -version = "0.3.0" -authors = ["Spruce Systems, Inc."] -edition = "2021" -description = "HTTP server for Verifiable Credentials and Decentralized Identifiers." -keywords = ["ssi", "did", "vc", "http", "api"] -license = "MIT OR Apache-2.0" -homepage = "https://github.com/spruceid/didkit/tree/main/http/" -repository = "https://github.com/spruceid/didkit/" - -exclude = [ - "/tests" -] - -[dependencies] -didkit = { version = "0.6", path = "../lib", features = ["http-did"] } - -anyhow = "1.0.70" -axum = { version = "0.6.16", features = ["macros", "headers"] } -figment = { version = "0.10.8", features = ["env", "toml"] } -tokio = { version = "1.0", features = ["macros", "rt-multi-thread"] } -tower = "0.4.13" -tower-http = { version = "0.4.0", features = ["trace", "limit"] } -tracing = "0.1.37" -tracing-subscriber = "0.3.16" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -serde_with = { version = "2.1.0", features = ["json"] } -serde_urlencoded = "0.7" -url = { version = "2.3.1", features = ["serde"] } -percent-encoding = "2.2.0" diff --git a/http/Dockerfile b/http/Dockerfile deleted file mode 100644 index e8fc0e70..00000000 --- a/http/Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -FROM clux/muslrust:stable AS chef -USER root -RUN cargo install cargo-chef -WORKDIR /app - -FROM chef AS planner -COPY ./defaults.toml ./ -COPY ./Cargo.toml ./ -COPY ./src/ ./src/ -RUN sed -i -e 's/path .*,//g' ./Cargo.toml -RUN cargo chef prepare --recipe-path recipe.json - -FROM chef AS builder -COPY --from=planner /app/recipe.json recipe.json -RUN cargo chef cook --release --target x86_64-unknown-linux-musl --recipe-path recipe.json -COPY --from=planner /app/ ./ -RUN cargo build --release --target x86_64-unknown-linux-musl --bin didkit-http - -FROM alpine AS runtime -RUN addgroup -S didkit-http && adduser -S didkit-http -G didkit-http -COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/didkit-http /usr/local/bin/didkit-http -USER didkit-http -EXPOSE 3000 -ENV DIDKIT_HTTP_HTTP_ADDRESS=[0,0,0,0] -CMD ["didkit-http"] -HEALTHCHECK --interval=5s --timeout=3s \ - CMD wget --no-verbose --tries=1 --spider http://localhost:3000/healthz || exit 1 -LABEL org.opencontainers.image.source https://github.com/spruceid/didkit diff --git a/http/README.md b/http/README.md deleted file mode 100644 index 1664ec57..00000000 --- a/http/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# DIDKit HTTP - -Implementation of the [Verifiable Credentials API](https://w3c-ccg.github.io/vc-api/) -using DIDKit. - -## Usage - -The Docker image is available at -https://github.com/spruceid/didkit/pkgs/container/didkit-http. - -### Configuration - -Refer to the [defaults configuration file](./defaults.toml). 
- -To provide overrides you can either: -- use a configuration file named `didkit-http.toml` which follows the structure - as the defaults files; or -- use environment variables, which are prefixed with `DIDKIT_HTTP_` and follow - the same names, with a separating `_` between sections. - -## Security Considerations - -Spruce does not use DIDKit HTTP in any production environments except with a reverse proxy, and does not recommend them for production use-cases without a holistic review of security levels. The following is not an exhaustive list, but should be considered in any such review. - -### Authorization - -DIDKit HTTP does not implement any endpoint authorization or access control. Any client can request a signature/proof creation from the server's key(s) using the issue credential/presentation endpoints. To limit access to some or all of DIDKit HTTP's endpoints, a deployment should place DIDKit HTTP behind a reverse proxy with appropriate settings. - -### Denial of Service - -DIDKit HTTP does not implement complete protection against resource exhaustion. Clients may be able to overwhelm the server with excessively slow and/or concurrent requests. To protect against resource exhaustion, deployments should use a reverse proxy with rate limiting, load balancing across multiple DIDKit HTTP instances, and/or other protections. - -[did-http]: https://w3c-ccg.github.io/did-resolution/#bindings-https -[vc-api]: https://w3c-ccg.github.io/vc-api/ -[vc-http-api-0.0.1]: https://github.com/w3c-ccg/vc-api/pull/72 -[did-resolution-https-binding]: https://w3c-ccg.github.io/did-resolution/#bindings-https diff --git a/http/defaults.toml b/http/defaults.toml deleted file mode 100644 index a20301c9..00000000 --- a/http/defaults.toml +++ /dev/null @@ -1,11 +0,0 @@ -[default.http] -# DIDKIT_HTTP_HTTP_PORT -port = 3000 -# DIDKIT_HTTP_HTTP_ADDRESS -address = [127, 0, 0, 1] -# DIDKIT_HTTP_HTTP_BODYSIZELIMIT -bodysizelimit = 2097152 # 2MiB - -[default.issuer] -# DIDKIT_HTTP_ISSUER_KEYS -# keys = '[, ]' diff --git a/http/src/config.rs b/http/src/config.rs deleted file mode 100644 index 0725bf62..00000000 --- a/http/src/config.rs +++ /dev/null @@ -1,29 +0,0 @@ -use didkit::JWK; -use serde::Deserialize; -use serde_with::{json::JsonString, serde_as}; - -#[derive(Deserialize, Debug, Clone, Hash, PartialEq, Eq)] -pub struct Config { - pub http: Http, - pub issuer: Issuer, -} - -#[derive(Deserialize, Debug, Clone, Hash, PartialEq, Eq)] -pub struct Http { - pub port: u16, - pub address: [u8; 4], - #[serde(rename = "bodysizelimit")] - pub body_size_limit: usize, -} - -// #[derive(Deserialize, Debug, Clone, Hash, PartialEq, Eq)] -// pub struct Resolver { -// pub fallback: Option, -// } - -#[serde_as] -#[derive(Deserialize, Debug, Clone, Hash, PartialEq, Eq)] -pub struct Issuer { - #[serde_as(as = "Option")] - pub keys: Option>, -} diff --git a/http/src/credentials.rs b/http/src/credentials.rs deleted file mode 100644 index f05009c3..00000000 --- a/http/src/credentials.rs +++ /dev/null @@ -1,193 +0,0 @@ -use anyhow::Context; -use axum::{http::StatusCode, Extension, Json}; -use didkit::{ - ssi::ldp::{now_ns, Error as LdpError}, - ContextLoader, CredentialOrJWT, JWTOrLDPOptions, ProofFormat, VerifiableCredential, - VerificationResult, DID_METHODS, -}; -use serde::{Deserialize, Serialize}; - -use crate::{error::Error, keys::pick_key, utils::CustomErrorJson, KeyMap}; - -#[derive(Deserialize)] -pub struct IssueRequest { - pub credential: VerifiableCredential, - pub options: Option, -} - -#[derive(Serialize)] 
-#[serde(rename_all = "camelCase")] -pub struct IssueResponse { - pub verifiable_credential: CredentialOrJWT, -} - -pub async fn issue( - Extension(keys): Extension, - CustomErrorJson(req): CustomErrorJson, -) -> Result<(StatusCode, Json), Error> { - let mut credential = req.credential; - let options = req.options.unwrap_or_default(); - let proof_format = options.proof_format.unwrap_or_default(); - let resolver = DID_METHODS.to_resolver(); - let mut context_loader = ContextLoader::default(); - let key = match pick_key( - &keys, - &credential.issuer.clone().map(|i| i.get_id()), - &options.ldp_options, - resolver, - ) - .await - { - Some(key) => key, - None => return Err((StatusCode::NOT_FOUND, "Missing key".to_string()).into()), - }; - if credential.issuance_date.is_none() { - credential.issuance_date = Some(now_ns().into()); - } - if let Err(e) = credential.validate_unsigned() { - return Err((StatusCode::BAD_REQUEST, e.to_string()).into()); - } - let res = match proof_format { - ProofFormat::JWT => CredentialOrJWT::JWT( - credential - .generate_jwt(Some(key), &options.ldp_options, resolver) - .await - .context("Failed to issue JWT VC")?, - ), - ProofFormat::LDP => { - let proof = match credential - .generate_proof(key, &options.ldp_options, resolver, &mut context_loader) - .await - { - Ok(p) => p, - Err(LdpError::ToRdfError(e)) => { - return Err( - (StatusCode::BAD_REQUEST, LdpError::ToRdfError(e).to_string()).into(), - ) - } - e => e.context("Faield to generate proof")?, - }; - credential.add_proof(proof); - CredentialOrJWT::Credential(credential) - } - _ => return Err((StatusCode::BAD_REQUEST, "Unknown proof format".to_string()).into()), - }; - Ok(( - StatusCode::CREATED, - Json(IssueResponse { - verifiable_credential: res, - }), - )) -} - -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct VerifyRequest { - pub verifiable_credential: CredentialOrJWT, - pub options: Option, -} - -pub async fn verify( - CustomErrorJson(req): CustomErrorJson, -) -> Result, Error> { - let resolver = DID_METHODS.to_resolver(); - let mut context_loader = ContextLoader::default(); - let options = req.options.unwrap_or_default(); - let ldp_options = options.ldp_options; - let res = match (options.proof_format, req.verifiable_credential) { - (Some(ProofFormat::LDP), CredentialOrJWT::Credential(vc)) - | (None, CredentialOrJWT::Credential(vc)) => { - if let Err(e) = vc.validate() { - return Err((StatusCode::BAD_REQUEST, e.to_string()).into()); - } - vc.verify(Some(ldp_options), resolver, &mut context_loader) - .await - } - (Some(ProofFormat::JWT), CredentialOrJWT::JWT(vc_jwt)) - | (None, CredentialOrJWT::JWT(vc_jwt)) => { - VerifiableCredential::verify_jwt( - &vc_jwt, - Some(ldp_options), - resolver, - &mut context_loader, - ) - .await - } - (Some(proof_format), vc) => { - let err_msg = format!( - "Credential/proof format mismatch. 
Proof format: {}, credential format: {}", - proof_format, - match vc { - CredentialOrJWT::JWT(_) => "JWT".to_string(), - CredentialOrJWT::Credential(_) => "LDP".to_string(), - } - ); - return Err((StatusCode::BAD_REQUEST, err_msg).into()); - } - }; - if !res.errors.is_empty() { - return Err((StatusCode::BAD_REQUEST, format!("{:?}", res.errors)).into()); - } - Ok(Json(res)) -} - -#[cfg(test)] -mod test { - use serde_json::json; - - use crate::test::default_keys; - - use super::*; - - #[tokio::test] - async fn issue_ed25519() { - let keys = default_keys(); - let req = serde_json::from_value(json!({ - "credential": { - "@context": [ - "https://www.w3.org/2018/credentials/v1" - ], - "id": "urn:uuid:040d4921-4756-447b-99ad-8d4978420e91", - "type": [ - "VerifiableCredential" - ], - "issuer": "did:key:z6MkgYAGxLBSXa6Ygk1PnUbK2F7zya8juE9nfsZhrvY7c9GD", - "credentialSubject": { - "id": "did:key:z6MktKwz7Ge1Yxzr4JHavN33wiwa8y81QdcMRLXQsrH9T53b" - } - }, - "options": { - "type": "DataIntegrityProof" - } - })) - .unwrap(); - - let _ = issue(Extension(keys), CustomErrorJson(req)).await.unwrap(); - } - - #[tokio::test] - async fn issue_p256() { - let keys = default_keys(); - let req = serde_json::from_value(json!({ - "credential": { - "@context": [ - "https://www.w3.org/2018/credentials/v1" - ], - "id": "urn:uuid:040d4921-4756-447b-99ad-8d4978420e91", - "type": [ - "VerifiableCredential" - ], - "issuer": "did:key:zDnaej4NHTz2DtpMByubtLGzZfEjYor4ffJWLuW2eJ4KkZ3r2", - "credentialSubject": { - "id": "did:key:z6MktKwz7Ge1Yxzr4JHavN33wiwa8y81QdcMRLXQsrH9T53b" - } - }, - "options": { - "type": "DataIntegrityProof" - } - })) - .unwrap(); - - let _ = issue(Extension(keys), CustomErrorJson(req)).await.unwrap(); - } -} diff --git a/http/src/error.rs b/http/src/error.rs deleted file mode 100644 index 38b64ce1..00000000 --- a/http/src/error.rs +++ /dev/null @@ -1,47 +0,0 @@ -use axum::{ - http::StatusCode, - response::{IntoResponse, Response}, -}; -use tracing::{debug, error}; - -#[derive(Debug, Clone)] -pub struct Error { - status: StatusCode, - body: ErrorBody, -} - -#[derive(Debug, Clone)] -pub enum ErrorBody { - Text(String), - // Json(serde_json::Value), -} - -impl From<(StatusCode, String)> for Error { - fn from(e: (StatusCode, String)) -> Error { - Error { - status: e.0, - body: ErrorBody::Text(e.1), - } - } -} - -impl From for Error { - fn from(e: anyhow::Error) -> Self { - error!("{:?}", e); - Error { - status: StatusCode::INTERNAL_SERVER_ERROR, - body: ErrorBody::Text(e.to_string()), - } - } -} - -impl IntoResponse for Error { - fn into_response(self) -> Response { - match self.body { - ErrorBody::Text(t) => { - debug!("{t}"); - (self.status, t).into_response() - } // ErrorBody::Json(j) => (self.status, axum::Json(j)).into_response(), - } - } -} diff --git a/http/src/identifiers.rs b/http/src/identifiers.rs deleted file mode 100644 index 4af07d98..00000000 --- a/http/src/identifiers.rs +++ /dev/null @@ -1,150 +0,0 @@ -use anyhow::Context; -use axum::{ - body::Bytes, - extract::{Path, Query}, - http::{ - header::{CONTENT_TYPE, LOCATION}, - HeaderMap, StatusCode, - }, -}; -use didkit::{ - dereference, - ssi::did_resolve::{ - ERROR_INVALID_DID, ERROR_INVALID_DID_URL, ERROR_METHOD_NOT_SUPPORTED, ERROR_NOT_FOUND, - ERROR_REPRESENTATION_NOT_SUPPORTED, TYPE_DID_LD_JSON, TYPE_DID_RESOLUTION, - }, - Content, ContentMetadata, DereferencingInputMetadata, ResolutionResult, DID_METHODS, -}; -use percent_encoding::percent_decode; - -use crate::error::Error; - -pub async fn resolve( - Path(path): Path, - 
Query(metadata): Query, -) -> Result<(StatusCode, HeaderMap, Bytes), Error> { - let did_url = percent_decode(path.as_bytes()) - .decode_utf8() - .context("Could not percent decode path") - .map_err(|e| (StatusCode::BAD_REQUEST, format!("{e:?}")))?; - let resolver = DID_METHODS.to_resolver(); - let (deref_meta, content, content_meta) = dereference(resolver, &did_url, &metadata).await; - if let Some(ref error) = deref_meta.error { - // 1.6, 1.7, 1.8 - let status = match &error[..] { - ERROR_NOT_FOUND => StatusCode::NOT_FOUND, - ERROR_INVALID_DID | ERROR_INVALID_DID_URL => StatusCode::BAD_REQUEST, - ERROR_REPRESENTATION_NOT_SUPPORTED => StatusCode::NOT_ACCEPTABLE, - ERROR_METHOD_NOT_SUPPORTED => StatusCode::NOT_IMPLEMENTED, - _ => StatusCode::INTERNAL_SERVER_ERROR, - }; - return Err((status, format!("Dereferencing failed: {error}")))?; - } - if let ContentMetadata::DIDDocument(ref did_doc_meta) = content_meta { - if did_doc_meta.deactivated == Some(true) { - return Err((StatusCode::GONE, "".to_string()))?; - } - } - - let mut headers = HeaderMap::new(); - - let body = match content { - Content::DIDDocument(did_doc) => { - if metadata.accept != Some(TYPE_DID_RESOLUTION.to_string()) { - // 1.10.1 - let content_type = deref_meta - .content_type - .unwrap_or_else(|| TYPE_DID_LD_JSON.to_string()); - let content_type_header = content_type.parse().map_err(|e| { - ( - StatusCode::BAD_REQUEST, - format!("Unable to parse Content-Type: {e}"), - ) - })?; - headers.insert(CONTENT_TYPE, content_type_header); - // 1.10.1.3 - match did_doc.to_representation(&content_type) { - Err(err) => { - return Err(( - StatusCode::NOT_ACCEPTABLE, - format!("Unable to represent DID document: {}", err), - ))?; - } - Ok(content_type) => content_type, - } - } else { - // 1.10.2 - // 1.10.2.1 - let did_doc_meta_opt = match content_meta { - ContentMetadata::DIDDocument(meta) => Some(meta), - ContentMetadata::Other(map) if map.is_empty() => None, - _ => { - return Err(( - StatusCode::NOT_ACCEPTABLE, - format!( - "Expected content-metadata to be a DID Document metadata structure, but found: {:?}", content_meta - ) - ))? 
- } - }; - let result = ResolutionResult { - did_document: Some(did_doc), - did_resolution_metadata: Some(deref_meta.into()), - did_document_metadata: did_doc_meta_opt, - ..Default::default() - }; - // 1.10.2.3 - let content_type = match TYPE_DID_RESOLUTION.parse() { - Ok(content_type) => content_type, - Err(err) => { - return Err(( - StatusCode::BAD_REQUEST, - format!("Unable to parse Content-Type: {}", err), - ))?; - } - }; - headers.insert(CONTENT_TYPE, content_type); - - // 1.10.2.4 - match serde_json::to_vec(&result) { - Ok(data) => data, - Err(err) => { - return Err(( - StatusCode::INTERNAL_SERVER_ERROR, - format!("Unable to serialize resolution result: {}", err), - ))?; - } - } - } - } - Content::URL(url) => { - // 1.11 - let location = match url.parse() { - Ok(location) => location, - Err(err) => { - return Err(( - StatusCode::BAD_REQUEST, - format!("Unable to parse service endpoint URL: {}", err), - ))?; - } - }; - headers.insert(LOCATION, location); - return Ok((StatusCode::SEE_OTHER, headers, vec![].into())); - } - Content::Object(object) => match serde_json::to_vec(&object) { - Ok(data) => data, - Err(err) => { - return Err(( - StatusCode::INTERNAL_SERVER_ERROR, - format!("Unable to serialize dereferenced object: {}", err), - ))?; - } - }, - Content::Data(data) => data, - Content::Null => { - vec![] - } - }; - - Ok((StatusCode::OK, headers, body.into())) -} diff --git a/http/src/keys.rs b/http/src/keys.rs deleted file mode 100644 index 43ddee69..00000000 --- a/http/src/keys.rs +++ /dev/null @@ -1,111 +0,0 @@ -use std::collections::HashMap; - -use didkit::{resolve_key, DIDResolver, LinkedDataProofOptions, Source, DID_METHODS, JWK}; - -pub type KeyMap = HashMap; - -pub async fn pick_key<'a>( - keys: &'a KeyMap, - issuer: &Option, - options: &LinkedDataProofOptions, - did_resolver: &dyn DIDResolver, -) -> Option<&'a JWK> { - if keys.len() <= 1 { - return keys.values().next(); - } - let public_key = match (issuer, options.verification_method.clone()) { - (_, Some(vm)) => { - match resolve_key(&vm.to_string(), did_resolver).await { - Err(_err) => { - // TODO: return error - return None; - } - Ok(key) => key, - } - } - (Some(issuer), None) => { - let method = match DID_METHODS.get_method(issuer) { - Ok(m) => m, - Err(_) => { - return None; - } - }; - for jwk in keys.keys() { - let did = match method.generate(&Source::Key(jwk)) { - Some(d) => d, - None => continue, - }; - if &did == issuer { - return keys.get(jwk); - } - } - return None; - } - (None, None) => return keys.values().next(), - }; - keys.get(&public_key) -} - -#[cfg(test)] -mod test { - use didkit::URI; - - use crate::test::default_keys; - - use super::*; - - #[tokio::test] - async fn pick_key_only_issuer() { - let keys = default_keys(); - - let p256_did = "did:key:zDnaej4NHTz2DtpMByubtLGzZfEjYor4ffJWLuW2eJ4KkZ3r2".to_string(); - let ed25519_did = "did:key:z6MkgYAGxLBSXa6Ygk1PnUbK2F7zya8juE9nfsZhrvY7c9GD".to_string(); - - let options = LinkedDataProofOptions::default(); - - let key1 = pick_key(&keys, &Some(p256_did), &options, DID_METHODS.to_resolver()) - .await - .unwrap(); - let key2 = pick_key( - &keys, - &Some(ed25519_did), - &options, - DID_METHODS.to_resolver(), - ) - .await - .unwrap(); - - assert_ne!(key1, key2); - } - - #[tokio::test] - async fn pick_key_ldp_options() { - let keys = default_keys(); - - let p256_did = "did:key:zDnaej4NHTz2DtpMByubtLGzZfEjYor4ffJWLuW2eJ4KkZ3r2".to_string(); - - let options = LinkedDataProofOptions { - verification_method: 
Some(URI::String("did:key:zDnaej4NHTz2DtpMByubtLGzZfEjYor4ffJWLuW2eJ4KkZ3r2#zDnaej4NHTz2DtpMByubtLGzZfEjYor4ffJWLuW2eJ4KkZ3r2".to_string())), - ..Default::default() - }; - - let key1 = pick_key( - &keys, - &Some(p256_did.clone()), - &options, - DID_METHODS.to_resolver(), - ) - .await - .unwrap(); - let key2 = pick_key( - &keys, - &Some(p256_did), - &LinkedDataProofOptions::default(), - DID_METHODS.to_resolver(), - ) - .await - .unwrap(); - - assert_eq!(key1, key2); - } -} diff --git a/http/src/main.rs b/http/src/main.rs deleted file mode 100644 index 7728e66b..00000000 --- a/http/src/main.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::net::SocketAddr; - -use axum::{ - routing::{get, post}, - Extension, Router, -}; -use figment::{ - providers::{Env, Format, Toml}, - Figment, -}; -use tower::ServiceBuilder; -use tower_http::{limit::RequestBodyLimitLayer, trace::TraceLayer}; -use tracing::info; - -use crate::keys::KeyMap; - -mod config; -mod credentials; -mod error; -mod identifiers; -mod keys; -mod presentations; -mod utils; - -pub async fn healthcheck() {} - -#[tokio::main] -async fn main() { - tracing_subscriber::fmt::init(); - - let pkg_name = env!("CARGO_PKG_NAME").replace('-', "_"); - let config: config::Config = Figment::new() - .merge(Toml::string(include_str!("../defaults.toml")).nested()) - .merge(Toml::file(format!("{pkg_name}.toml")).nested()) - .merge( - Env::prefixed(&format!("{}_", pkg_name.to_uppercase())) - .split("_") - .global(), - ) - .extract() - .expect("Unable to load config"); - - let keys: KeyMap = config - .issuer - .keys - .clone() - .unwrap_or_default() - .into_iter() - .map(|jwk| (jwk.to_public(), jwk)) - .collect(); - - let app = Router::new() - .route("/healthz", get(healthcheck)) - // vc-http-api 0.0.1 - .route("/issue/credentials", post(credentials::issue)) - .route("/verify/credentials", post(credentials::verify)) - .route("/issue/presentations", post(presentations::issue)) - .route("/verify/presentations", post(presentations::verify)) - // - .route("/credentials/issue", post(credentials::issue)) - .route("/credentials/verify", post(credentials::verify)) - .route("/presentations/issue", post(presentations::issue)) - .route("/presentations/verify", post(presentations::verify)) - .route("/identifiers/:id", get(identifiers::resolve)) - .layer(TraceLayer::new_for_http()) - .layer(RequestBodyLimitLayer::new(config.http.body_size_limit)) - .layer( - ServiceBuilder::new() - .layer(Extension(config.clone())) - .layer(Extension(keys.clone())), - ); - - let addr = SocketAddr::from((config.http.address, config.http.port)); - info!("listening on {}", addr); - axum::Server::bind(&addr) - .serve(app.into_make_service()) - .await - .expect("failed to start server"); -} - -#[cfg(test)] -mod test { - use didkit::JWK; - use figment::providers::Format; - use serde_json::json; - - use super::*; - - pub fn default_config() -> config::Config { - Figment::new() - .merge(Toml::string(include_str!("../defaults.toml")).nested()) - .extract() - .expect("Unable to load config") - } - - #[test] - fn can_generate_default_config() { - default_config(); - } - - pub fn default_keys() -> KeyMap { - let keys: Vec = serde_json::from_value(json!([{"kty":"OKP","crv":"Ed25519","x":"HvjBEw94RHAh9KkiD385aYZNxGkxIkwBcrLBY5Z7Koo","d":"1onWu34oC29Y09qCRl0aD2FOp5y5obTqHZxQQRT3-bs"}, {"kty":"EC","crv":"P-256","x":"FMWMt6D0SymYPdlxXzeGMo1OrZLTrZ44aaW0_gyqCZM","y":"3DOY-ceh9ivyq9CzrmWR67ILrC7e3_FegeBxixWoiYc","d":"DjD-ngByYFcS6bfmofNeT7WNJBtWcO2GnGHJq1S9zkU"}])).unwrap(); - keys.into_iter().map(|jwk| 
(jwk.to_public(), jwk)).collect() - } -} diff --git a/http/src/presentations.rs b/http/src/presentations.rs deleted file mode 100644 index 7adf31cf..00000000 --- a/http/src/presentations.rs +++ /dev/null @@ -1,134 +0,0 @@ -use anyhow::Context; -use axum::{http::StatusCode, Extension, Json}; -use didkit::{ - ssi::{ldp::Error as LdpError, vc::Error as VCError}, - ContextLoader, JWTOrLDPOptions, ProofFormat, VerifiablePresentation, VerificationResult, - DID_METHODS, -}; -use serde::{Deserialize, Serialize}; - -use crate::{error::Error, keys::pick_key, KeyMap}; - -// TODO move to ssi -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(untagged)] -pub enum PresentationOrJWT { - VP(VerifiablePresentation), - Jwt(String), -} - -#[derive(Deserialize)] -pub struct IssueRequest { - pub presentation: VerifiablePresentation, - pub options: Option, -} - -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct IssueResponse { - pub verifiable_presentation: PresentationOrJWT, -} - -pub async fn issue( - Extension(keys): Extension, - Json(req): Json, -) -> Result<(StatusCode, Json), Error> { - let mut presentation = req.presentation; - let options = req.options.unwrap_or_default(); - let proof_format = options.proof_format.unwrap_or_default(); - let resolver = DID_METHODS.to_resolver(); - let mut context_loader = ContextLoader::default(); - let key = match pick_key( - &keys, - &presentation.holder.clone().map(String::from), - &options.ldp_options, - resolver, - ) - .await - { - Some(key) => key, - None => return Err((StatusCode::NOT_FOUND, "Missing key".to_string()).into()), - }; - if let Err(e) = presentation.validate_unsigned() { - return Err((StatusCode::BAD_REQUEST, e.to_string()).into()); - } - let res = match proof_format { - ProofFormat::JWT => PresentationOrJWT::Jwt( - presentation - .generate_jwt(Some(key), &options.ldp_options, resolver) - .await - .context("Failed to issue JWT VC")?, - ), - ProofFormat::LDP => { - let proof = match presentation - .generate_proof(key, &options.ldp_options, resolver, &mut context_loader) - .await - { - Ok(p) => p, - Err(VCError::LDP(LdpError::ToRdfError(e))) => { - return Err( - (StatusCode::BAD_REQUEST, LdpError::ToRdfError(e).to_string()).into(), - ) - } - e => e.context("Faield to generate proof")?, - }; - presentation.add_proof(proof); - PresentationOrJWT::VP(presentation) - } - _ => return Err((StatusCode::BAD_REQUEST, "Unknown proof format".to_string()).into()), - }; - Ok(( - StatusCode::CREATED, - Json(IssueResponse { - verifiable_presentation: res, - }), - )) -} - -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct VerifyRequest { - pub verifiable_presentation: PresentationOrJWT, - pub options: Option, -} - -pub async fn verify(Json(req): Json) -> Result, Error> { - let resolver = DID_METHODS.to_resolver(); - let mut context_loader = ContextLoader::default(); - let options = req.options.unwrap_or_default(); - let ldp_options = options.ldp_options; - let res = match (options.proof_format, req.verifiable_presentation) { - (Some(ProofFormat::LDP), PresentationOrJWT::VP(vp)) | (None, PresentationOrJWT::VP(vp)) => { - if let Err(e) = vp.validate() { - return Err((StatusCode::BAD_REQUEST, e.to_string()).into()); - } - vp.verify(Some(ldp_options), resolver, &mut context_loader) - .await - } - (Some(ProofFormat::JWT), PresentationOrJWT::Jwt(vc_jwt)) - | (None, PresentationOrJWT::Jwt(vc_jwt)) => { - VerifiablePresentation::verify_jwt( - &vc_jwt, - Some(ldp_options), - resolver, - &mut context_loader, - ) - .await - } - 
(Some(proof_format), vc) => { - let err_msg = format!( - "Credential/proof format mismatch. Proof format: {}, presentation format: {}", - proof_format, - match vc { - PresentationOrJWT::Jwt(_) => "JWT".to_string(), - PresentationOrJWT::VP(_) => "LDP".to_string(), - } - ); - return Err((StatusCode::BAD_REQUEST, err_msg).into()); - } - }; - if !res.errors.is_empty() { - return Err((StatusCode::BAD_REQUEST, format!("{:?}", res.errors)).into()); - } - Ok(Json(res)) -} diff --git a/http/src/utils.rs b/http/src/utils.rs deleted file mode 100644 index 83def5ec..00000000 --- a/http/src/utils.rs +++ /dev/null @@ -1,36 +0,0 @@ -use axum::{ - async_trait, - extract::{rejection::JsonRejection, FromRequest}, - http::Request, - http::StatusCode, -}; - -pub struct CustomErrorJson(pub T); - -#[async_trait] -impl FromRequest for CustomErrorJson -where - axum::Json: FromRequest, - S: Send + Sync, - B: Send + 'static, -{ - type Rejection = (StatusCode, String); - - async fn from_request(req: Request, state: &S) -> Result { - let (parts, body) = req.into_parts(); - let req = Request::from_parts(parts, body); - - match axum::Json::::from_request(req, state).await { - Ok(value) => Ok(Self(value.0)), - Err(rejection) => { - let message = rejection.to_string(); - let code = if let JsonRejection::JsonDataError(_) = rejection { - StatusCode::BAD_REQUEST - } else { - rejection.status() - }; - Err((code, message)) - } - } - } -} diff --git a/http/tests/di-ed25519-test-suite b/http/tests/di-ed25519-test-suite deleted file mode 160000 index 1558d696..00000000 --- a/http/tests/di-ed25519-test-suite +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 1558d6965b7b89e7ca06200b06e037292016f05e diff --git a/http/tests/di-eddsa-2022-test-suite b/http/tests/di-eddsa-2022-test-suite deleted file mode 160000 index 59427e3f..00000000 --- a/http/tests/di-eddsa-2022-test-suite +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 59427e3ff33a1dd9a7599b23629e135bb2b85717 diff --git a/http/tests/did-key-test-suite b/http/tests/did-key-test-suite deleted file mode 160000 index 3e1862c0..00000000 --- a/http/tests/did-key-test-suite +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 3e1862c04032e26c533841eab56e75dc643f24da diff --git a/http/tests/vc-api-issuer-test-suite b/http/tests/vc-api-issuer-test-suite deleted file mode 160000 index 1effe244..00000000 --- a/http/tests/vc-api-issuer-test-suite +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 1effe244026fd3b3d6bcd42ca54dd0bf7bc9d5cd diff --git a/http/tests/vc-api-verifier-test-suite b/http/tests/vc-api-verifier-test-suite deleted file mode 160000 index 07ae240e..00000000 --- a/http/tests/vc-api-verifier-test-suite +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 07ae240e9cc9f2a543d2c981bbab8e25d96572f8 diff --git a/http/tests/vcApiTestImplementationsConfig.cjs b/http/tests/vcApiTestImplementationsConfig.cjs deleted file mode 100644 index 86c8bccb..00000000 --- a/http/tests/vcApiTestImplementationsConfig.cjs +++ /dev/null @@ -1,29 +0,0 @@ -module.exports = [{ - "name": "Spruce", - "implementation": "Spruce", - "issuers": [{ - "id": "did:key:z6MkgYAGxLBSXa6Ygk1PnUbK2F7zya8juE9nfsZhrvY7c9GD", - "endpoint": "https://127.0.0.1:9000/credentials/issue", - "options": { - "type": "Ed25519Signature2020" - }, - "tags": ["vc-api", "Ed25519Signature2020", "JWT"] - }, { - "id": "did:key:z6MkgYAGxLBSXa6Ygk1PnUbK2F7zya8juE9nfsZhrvY7c9GD", - "endpoint": "https://127.0.0.1:9000/credentials/issue", - "options": { - "type": "DataIntegrityProof" - }, - "tags": ["vc-api", "eddsa-2022", "JWT"] - }], - "verifiers": [{ - "id": 
"https://spruceid.com", - "endpoint": "https://127.0.0.1:9000/credentials/verify", - "tags": ["vc-api", "Ed25519Signature2020", "JWT", "eddsa-2022"] - }], - "didResolvers": [{ - "id": "https://spruceid.com", - "endpoint": "https://127.0.0.1:9000/identifiers", - "tags": ["did-key"] - }] -}];