diff --git a/.github/workflows/build-and-test.yaml b/.github/workflows/build-and-test.yaml new file mode 100644 index 0000000..274d119 --- /dev/null +++ b/.github/workflows/build-and-test.yaml @@ -0,0 +1,34 @@ +name: "Test Suite" +on: + pull_request: + +jobs: + test: + name: cargo test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + # this defaults to "-D warnings", making warnings fail the entire build. + # setting to empty string to allow builds with warnings + # todo: consider removing this, and disallowing pushing with warnings? + rustflags: "" + - run: cargo test --all-features + + # Check formatting with rustfmt + formatting: + name: cargo fmt + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + # Ensure rustfmt is installed and setup problem matcher + - uses: actions-rust-lang/setup-rust-toolchain@v1 + with: + components: rustfmt + # this defaults to "-D warnings", making warnings fail the entire build. + # setting to empty string to allow builds with warnings + # todo: consider removing this, and disallowing pushing with warnings? 
+ rustflags: "" + - name: Rustfmt Check + uses: actions-rust-lang/rustfmt@v1 \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index af24efa..15c5281 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1157,8 +1157,8 @@ dependencies = [ [[package]] name = "ndc-models" -version = "0.1.3" -source = "git+http://github.com/hasura/ndc-spec.git?tag=v0.1.3#b9316d206a6aece470531937f6e1ea9223e88122" +version = "0.1.4" +source = "git+http://github.com/hasura/ndc-spec.git?tag=v0.1.4#20172e3b2552b78d16dbafcd047f559ced420309" dependencies = [ "indexmap 2.2.6", "schemars", @@ -1169,8 +1169,8 @@ dependencies = [ [[package]] name = "ndc-sdk" -version = "0.1.3" -source = "git+https://github.com/hasura/ndc-sdk-rs?tag=v0.1.3#266f4db31777d2a0885a665e0d982237503b223c" +version = "0.1.5" +source = "git+https://github.com/hasura/ndc-sdk-rs?tag=v0.1.5#7f8382001b745c24b5f066411dde6822df65f545" dependencies = [ "async-trait", "axum", diff --git a/crates/common/src/client.rs b/crates/common/src/client.rs index db22fee..e5835b7 100644 --- a/crates/common/src/client.rs +++ b/crates/common/src/client.rs @@ -1,13 +1,19 @@ use crate::config::ConnectionConfig; use glob_match::glob_match; +use reqwest::header::{HeaderMap, HeaderValue, CONTENT_TYPE}; use serde::Serialize; use std::{collections::BTreeMap, error::Error, fmt::Debug}; pub fn get_http_client( _connection_config: &ConnectionConfig, ) -> Result> { - // todo: we could make client come preconfigured with some headers such as for username and password? 
- let client = reqwest::Client::builder().build()?; + let mut headers = HeaderMap::new(); + headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + let client = reqwest::Client::builder() + .default_headers(headers) + .build()?; + Ok(client) } diff --git a/crates/common/src/config.rs b/crates/common/src/config.rs index 878e539..fddbc7f 100644 --- a/crates/common/src/config.rs +++ b/crates/common/src/config.rs @@ -1,15 +1,15 @@ use std::collections::BTreeMap; use crate::{ - config_file::{RequestConfig, ResponseConfig}, + config_file::{RequestConfigFile, ResponseConfigFile}, schema::SchemaDefinition, }; #[derive(Debug, Clone)] pub struct ServerConfig { pub connection: ConnectionConfig, - pub request: RequestConfig, - pub response: ResponseConfig, + pub request: RequestConfig, + pub response: ResponseConfig, pub schema: SchemaDefinition, } @@ -19,7 +19,77 @@ pub struct ConnectionConfig { pub headers: BTreeMap, } -impl ResponseConfig { +#[derive(Debug, Clone)] +pub struct RequestConfig { + pub headers_argument: String, + pub headers_type_name: String, + pub forward_headers: Vec, +} +#[derive(Debug, Clone)] +pub struct ResponseConfig { + pub headers_field: String, + pub response_field: String, + pub type_name_prefix: String, + pub type_name_suffix: String, + pub forward_headers: Vec, +} + +impl Default for RequestConfig { + fn default() -> Self { + Self { + headers_argument: "_headers".to_owned(), + headers_type_name: "_HeaderMap".to_owned(), + forward_headers: vec![], + } + } +} + +impl Default for ResponseConfig { + fn default() -> Self { + Self { + headers_field: "headers".to_owned(), + response_field: "response".to_owned(), + type_name_prefix: "_".to_owned(), + type_name_suffix: "Response".to_owned(), + forward_headers: vec![], + } + } +} + +impl From for RequestConfig { + fn from(value: RequestConfigFile) -> Self { + RequestConfig { + headers_argument: value + .headers_argument + .unwrap_or_else(|| Self::default().headers_argument), + 
headers_type_name: value + .headers_type_name + .unwrap_or_else(|| Self::default().headers_type_name), + forward_headers: value.forward_headers.unwrap_or_default(), + } + } +} +impl From for ResponseConfig { + fn from(value: ResponseConfigFile) -> Self { + ResponseConfig { + headers_field: value + .headers_field + .unwrap_or_else(|| Self::default().headers_field), + response_field: value + .response_field + .unwrap_or_else(|| Self::default().response_field), + type_name_prefix: value + .type_name_prefix + .unwrap_or_else(|| Self::default().type_name_prefix), + type_name_suffix: value + .type_name_suffix + .unwrap_or_else(|| Self::default().type_name_suffix), + forward_headers: value.forward_headers.unwrap_or_default(), + } + } +} + +impl ResponseConfig { pub fn query_response_type_name(&self, query: &str) -> String { format!( "{}{}Query{}", diff --git a/crates/common/src/config_file.rs b/crates/common/src/config_file.rs index 7583a4f..40d3d24 100644 --- a/crates/common/src/config_file.rs +++ b/crates/common/src/config_file.rs @@ -10,14 +10,14 @@ pub const CONFIG_SCHEMA_FILE_NAME: &str = "configuration.schema.json"; pub struct ServerConfigFile { #[serde(rename = "$schema")] pub json_schema: String, + /// Connection Configuration for introspection + pub introspection: ConnectionConfigFile, /// Connection configuration for query execution pub execution: ConnectionConfigFile, - /// Optional Connection Configuration for introspection - pub introspection: ConnectionConfigFile, /// Optional configuration for requests - pub request: RequestConfig>, + pub request: RequestConfigFile, /// Optional configuration for responses - pub response: ResponseConfig>, + pub response: ResponseConfigFile, } impl Default for ServerConfigFile { @@ -26,8 +26,8 @@ impl Default for ServerConfigFile { json_schema: CONFIG_SCHEMA_FILE_NAME.to_owned(), execution: ConnectionConfigFile::default(), introspection: ConnectionConfigFile::default(), - request: RequestConfig::default(), - response: 
ResponseConfig::default(), + request: RequestConfigFile::default(), + response: ResponseConfigFile::default(), } } } @@ -43,56 +43,60 @@ impl Default for ConnectionConfigFile { fn default() -> Self { Self { endpoint: ConfigValue::Value("".to_string()), - headers: BTreeMap::from_iter(vec![ - ( - "Content-Type".to_owned(), - ConfigValue::Value("application/json".to_string()), - ), - ( - "Authorization".to_owned(), - ConfigValue::ValueFromEnv("GRAPHQL_ENDPOINT_AUTHORIZATION".to_string()), - ), - ]), + headers: BTreeMap::from_iter(vec![( + "Authorization".to_owned(), + ConfigValue::ValueFromEnv("GRAPHQL_ENDPOINT_AUTHORIZATION".to_string()), + )]), } } } #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] -pub struct RequestConfig { +pub struct RequestConfigFile { /// Name of the headers argument /// Must not conflict with any arguments of root fields in the target schema /// Defaults to "_headers", set to a different value if there is a conflict - pub headers_argument: T, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub headers_argument: Option, /// Name of the headers argument type /// Must not conflict with other types in the target schema /// Defaults to "_HeaderMap", set to a different value if there is a conflict - pub headers_type_name: T, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub headers_type_name: Option, /// List of headers to from the request - /// Defaults to ["*"], AKA all headers + /// Defaults to [], AKA no headers/disabled /// Supports glob patterns eg. 
"X-Hasura-*" + /// Enabling this requires additional configuration on the ddn side, see docs for more + #[serde(skip_serializing_if = "Option::is_none", default)] pub forward_headers: Option>, } #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] -pub struct ResponseConfig { +pub struct ResponseConfigFile { /// Name of the headers field in the response type /// Defaults to "headers" - pub headers_field: T, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub headers_field: Option, /// Name of the response field in the response type /// Defaults to "response" - pub response_field: T, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub response_field: Option, /// Prefix for response type names /// Defaults to "_" /// Generated response type names must be unique once prefix and suffix are applied - pub type_name_prefix: T, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub type_name_prefix: Option, /// Suffix for response type names /// Defaults to "Response" /// Generated response type names must be unique once prefix and suffix are applied - pub type_name_suffix: T, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub type_name_suffix: Option, /// List of headers to from the response - /// Defaults to ["*"], AKA all headers + /// Defaults to [], AKA no headers/disabled /// Supports glob patterns eg. 
"X-Hasura-*" + /// Enabling this requires additional configuration on the ddn side, see docs for more + #[serde(skip_serializing_if = "Option::is_none", default)] pub forward_headers: Option>, } @@ -104,91 +108,24 @@ pub enum ConfigValue { ValueFromEnv(String), } -impl Default for RequestConfig { - fn default() -> Self { - Self { - headers_argument: "_headers".to_owned(), - headers_type_name: "_HeaderMap".to_owned(), - forward_headers: Some(vec!["*".to_owned()]), - } - } -} - -impl Default for RequestConfig> { +impl Default for RequestConfigFile { fn default() -> Self { Self { headers_argument: None, headers_type_name: None, - forward_headers: Some(vec!["*".to_owned()]), + forward_headers: Some(vec![]), } } } -impl Default for ResponseConfig { - fn default() -> Self { - Self { - headers_field: "headers".to_owned(), - response_field: "response".to_owned(), - type_name_prefix: "_".to_owned(), - type_name_suffix: "Response".to_owned(), - forward_headers: Some(vec!["*".to_owned()]), - } - } -} - -impl Default for ResponseConfig> { +impl Default for ResponseConfigFile { fn default() -> Self { Self { headers_field: None, response_field: None, type_name_prefix: None, type_name_suffix: None, - forward_headers: Some(vec!["*".to_owned()]), - } - } -} - -impl From>> for RequestConfig { - fn from(value: RequestConfig>) -> Self { - RequestConfig { - headers_argument: value - .headers_argument - .unwrap_or_else(|| Self::default().headers_argument), - headers_type_name: value - .headers_type_name - .unwrap_or_else(|| Self::default().headers_type_name), - forward_headers: value.forward_headers.and_then(|forward_headers| { - if forward_headers.is_empty() { - None - } else { - Some(forward_headers) - } - }), - } - } -} -impl From>> for ResponseConfig { - fn from(value: ResponseConfig>) -> Self { - ResponseConfig { - headers_field: value - .headers_field - .unwrap_or_else(|| Self::default().headers_field), - response_field: value - .response_field - .unwrap_or_else(|| 
Self::default().response_field), - type_name_prefix: value - .type_name_prefix - .unwrap_or_else(|| Self::default().type_name_prefix), - type_name_suffix: value - .type_name_suffix - .unwrap_or_else(|| Self::default().type_name_suffix), - forward_headers: value.forward_headers.and_then(|forward_headers| { - if forward_headers.is_empty() { - None - } else { - Some(forward_headers) - } - }), + forward_headers: Some(vec![]), } } } diff --git a/crates/common/src/schema.rs b/crates/common/src/schema.rs index b5053cf..67537b6 100644 --- a/crates/common/src/schema.rs +++ b/crates/common/src/schema.rs @@ -1,10 +1,12 @@ -use crate::config_file::{RequestConfig, ResponseConfig}; +use crate::config::{RequestConfig, ResponseConfig}; use graphql_parser::schema; use std::{collections::BTreeMap, fmt::Display}; #[derive(Debug, Clone)] pub struct SchemaDefinition { + pub query_type_name: Option, pub query_fields: BTreeMap, + pub mutation_type_name: Option, pub mutation_fields: BTreeMap, pub definitions: BTreeMap, } @@ -12,8 +14,8 @@ pub struct SchemaDefinition { impl SchemaDefinition { pub fn new( schema_document: &schema::Document<'_, String>, - request_config: &RequestConfig, - response_config: &ResponseConfig, + request_config: &RequestConfig, + response_config: &ResponseConfig, ) -> Result { let schema_definition = schema_document .definitions @@ -165,7 +167,9 @@ impl SchemaDefinition { Ok(Self { query_fields, + query_type_name: schema_definition.query.to_owned(), mutation_fields, + mutation_type_name: schema_definition.mutation.to_owned(), definitions, }) } diff --git a/crates/ndc-graphql-cli/src/main.rs b/crates/ndc-graphql-cli/src/main.rs index 712674f..32bae16 100644 --- a/crates/ndc-graphql-cli/src/main.rs +++ b/crates/ndc-graphql-cli/src/main.rs @@ -220,7 +220,13 @@ async fn update_config( let response = execute_graphql_introspection(&connection).await?; // todo: handle graphql errors! 
- let introspection = response.data.expect("Successful introspection"); + if let Some(errors) = response.errors { + return Err(format!("Introspection error: {}", serde_json::to_string(&errors)?).into()); + } + + let introspection = response + .data + .expect("Introspection without error should have data"); let schema_document = schema_from_introspection(introspection); diff --git a/crates/ndc-graphql/Cargo.toml b/crates/ndc-graphql/Cargo.toml index cd0404d..7f8ea26 100644 --- a/crates/ndc-graphql/Cargo.toml +++ b/crates/ndc-graphql/Cargo.toml @@ -9,7 +9,7 @@ common = { path = "../common" } glob-match = "0.2.1" graphql-parser = "0.4.0" indexmap = "2.1.0" -ndc-sdk = { git = "https://github.com/hasura/ndc-sdk-rs", tag = "v0.1.3", package = "ndc-sdk", features = [ +ndc-sdk = { git = "https://github.com/hasura/ndc-sdk-rs", tag = "v0.1.5", package = "ndc-sdk", features = [ "rustls", ], default-features = false } prometheus = "0.13.3" diff --git a/crates/ndc-graphql/src/connector.rs b/crates/ndc-graphql/src/connector.rs index a147363..7aece60 100644 --- a/crates/ndc-graphql/src/connector.rs +++ b/crates/ndc-graphql/src/connector.rs @@ -1,4 +1,4 @@ -use self::{configuration::read_configuration, state::ServerState}; +use self::state::ServerState; use crate::query_builder::{build_mutation_document, build_query_document}; use async_trait::async_trait; use common::{ @@ -8,8 +8,8 @@ use common::{ use indexmap::IndexMap; use ndc_sdk::{ connector::{ - Connector, ConnectorSetup, ExplainError, FetchMetricsError, HealthError, - InitializationError, MutationError, ParseError, QueryError, SchemaError, + Connector, ExplainError, FetchMetricsError, HealthError, MutationError, QueryError, + SchemaError, }, json_response::JsonResponse, models::{ @@ -17,36 +17,15 @@ use ndc_sdk::{ }, }; use schema::schema_response; -use std::{collections::BTreeMap, mem, path::Path}; +use std::{collections::BTreeMap, mem}; use tracing::Instrument; - -mod configuration; mod schema; +pub mod setup; mod state; 
#[derive(Debug, Default, Clone)] pub struct GraphQLConnector; -#[async_trait] -impl ConnectorSetup for GraphQLConnector { - type Connector = Self; - - async fn parse_configuration( - &self, - configuration_dir: impl AsRef + Send, - ) -> Result<::Configuration, ParseError> { - read_configuration(configuration_dir.as_ref()).await - } - - async fn try_init_state( - &self, - configuration: &::Configuration, - _metrics: &mut prometheus::Registry, - ) -> Result<::State, InitializationError> { - Ok(ServerState::new(configuration)) - } -} - #[async_trait] impl Connector for GraphQLConnector { type Configuration = ServerConfig; @@ -75,6 +54,7 @@ impl Connector for GraphQLConnector { variables: None, explain: Some(LeafCapability {}), nested_fields: models::NestedFieldCapabilities { + aggregates: None, filter_by: None, order_by: None, }, @@ -106,7 +86,7 @@ impl Connector for GraphQLConnector { &operation.query, &operation.variables, )) - .map_err(|err| ExplainError::InvalidRequest(err.to_string()))?; + .map_err(ExplainError::new)?; let details = BTreeMap::from_iter(vec![ ("SQL Query".to_string(), operation.query), @@ -133,7 +113,7 @@ impl Connector for GraphQLConnector { &operation.query, &operation.variables, )) - .map_err(|err| ExplainError::InvalidRequest(err.to_string()))?; + .map_err(ExplainError::new)?; let details = BTreeMap::from_iter(vec![ ("SQL Query".to_string(), operation.query), @@ -159,7 +139,7 @@ impl Connector for GraphQLConnector { let client = state .client(configuration) .await - .map_err(|err| MutationError::Other(err.to_string().into()))?; + .map_err(MutationError::new)?; let execution_span = tracing::info_span!("Execute GraphQL Mutation", internal.visibility = "user"); @@ -170,24 +150,18 @@ impl Connector for GraphQLConnector { &configuration.connection.endpoint, &operation.headers, &client, - &configuration - .response - .forward_headers - .clone() - .unwrap_or_default(), + &configuration.response.forward_headers, ) .instrument(execution_span) .await - 
.map_err(|err| MutationError::Other(err.to_string().into()))?; + .map_err(MutationError::new)?; tracing::info_span!("Process Response").in_scope(|| { if let Some(errors) = response.errors { - Err(MutationError::InvalidRequest( - serde_json::to_string(&errors) - .map_err(|err| MutationError::Other(err.into()))?, - )) + Err(MutationError::new_unprocessable_content(&errors[0].message) + .with_details(serde_json::json!({ "errors": errors }))) } else if let Some(mut data) = response.data { - let forward_response_headers = configuration.response.forward_headers.is_some(); + let forward_response_headers = !configuration.response.forward_headers.is_empty(); let operation_results = request .operations @@ -216,14 +190,14 @@ impl Connector for GraphQLConnector { }), }) .collect::, serde_json::Error>>() - .map_err(|err| MutationError::Other(err.into()))?; + .map_err(MutationError::new)?; Ok(JsonResponse::Value(models::MutationResponse { operation_results, })) } else { - Err(MutationError::UnprocessableContent( - "No data or errors in response".into(), + Err(MutationError::new_unprocessable_content( + &"No data or errors in response", )) } }) @@ -237,10 +211,7 @@ impl Connector for GraphQLConnector { let operation = tracing::info_span!("Build Query Document", internal.visibility = "user") .in_scope(|| build_query_document(&request, configuration))?; - let client = state - .client(configuration) - .await - .map_err(|err| QueryError::Other(err.to_string().into()))?; + let client = state.client(configuration).await.map_err(QueryError::new)?; let execution_span = tracing::info_span!("Execute GraphQL Query", internal.visibility = "user"); @@ -251,31 +222,22 @@ impl Connector for GraphQLConnector { &configuration.connection.endpoint, &operation.headers, &client, - &configuration - .response - .forward_headers - .clone() - .unwrap_or_default(), + &configuration.response.forward_headers, ) .instrument(execution_span) .await - .map_err(|err| QueryError::Other(err.to_string().into()))?; 
+ .map_err(QueryError::new)?; tracing::info_span!("Process Response").in_scope(|| { if let Some(errors) = response.errors { - Err(QueryError::Other( - serde_json::to_string(&errors) - .map_err(|err| QueryError::Other(err.into()))? - .into(), - )) + Err(QueryError::new_unprocessable_content(&errors[0].message) + .with_details(serde_json::json!({ "errors": errors }))) } else if let Some(data) = response.data { - let forward_response_headers = configuration.response.forward_headers.is_some(); + let forward_response_headers = !configuration.response.forward_headers.is_empty(); let row = if forward_response_headers { - let headers = serde_json::to_value(headers) - .map_err(|err| QueryError::Other(err.into()))?; - let data = - serde_json::to_value(data).map_err(|err| QueryError::Other(err.into()))?; + let headers = serde_json::to_value(headers).map_err(QueryError::new)?; + let data = serde_json::to_value(data).map_err(QueryError::new)?; IndexMap::from_iter(vec![ ( @@ -296,8 +258,8 @@ impl Connector for GraphQLConnector { rows: Some(vec![row]), }]))) } else { - Err(QueryError::UnprocessableContent( - "No data or errors in response".into(), + Err(QueryError::new_unprocessable_content( + &"No data or errors in response", )) } }) diff --git a/crates/ndc-graphql/src/connector/configuration.rs b/crates/ndc-graphql/src/connector/configuration.rs deleted file mode 100644 index 3b88c09..0000000 --- a/crates/ndc-graphql/src/connector/configuration.rs +++ /dev/null @@ -1,104 +0,0 @@ -use std::{ - env, - iter::once, - path::{Path, PathBuf}, -}; - -use common::{ - config::{ConnectionConfig, ServerConfig}, - config_file::{ConfigValue, ServerConfigFile, CONFIG_FILE_NAME, SCHEMA_FILE_NAME}, - schema::SchemaDefinition, -}; -use graphql_parser::parse_schema; -use ndc_sdk::connector::{InvalidNode, InvalidNodes, KeyOrIndex, LocatedError, ParseError}; -use tokio::fs; - -pub async fn read_configuration(context_path: &Path) -> Result { - let config_file_path = 
context_path.join(CONFIG_FILE_NAME); - let config_file = fs::read_to_string(&config_file_path) - .await - .map_err(ParseError::IoError)?; - let config_file: ServerConfigFile = serde_json::from_str(&config_file).map_err(|err| { - ParseError::ParseError(LocatedError { - file_path: config_file_path.clone(), - line: err.line(), - column: err.column(), - message: err.to_string(), - }) - })?; - - let schema_file_path = context_path.join(SCHEMA_FILE_NAME); - let schema_string = fs::read_to_string(&schema_file_path) - .await - .map_err(ParseError::IoError)?; - - let schema_document = parse_schema(&schema_string).map_err(|err| { - ParseError::ParseError(LocatedError { - file_path: config_file_path.clone(), - line: 0, - column: 0, - message: err.to_string(), - }) - })?; - - let request_config = config_file.request.into(); - let response_config = config_file.response.into(); - - let schema = SchemaDefinition::new(&schema_document, &request_config, &response_config) - .map_err(|err| { - ParseError::ValidateError(InvalidNodes(vec![InvalidNode { - file_path: schema_file_path, - node_path: vec![], - message: err.to_string(), - }])) - })?; - - let config = ServerConfig { - schema, - connection: ConnectionConfig { - endpoint: read_config_value( - &config_file_path, - &["connection", "endpoint"], - config_file.execution.endpoint, - )?, - headers: config_file - .execution - .headers - .into_iter() - .map(|(header_name, header_value)| { - let value = read_config_value( - &config_file_path, - &["connection", "headers", &header_name, "value"], - header_value, - )?; - Ok((header_name, value)) - }) - .collect::>()?, - }, - request: request_config, - response: response_config, - }; - - Ok(config) -} - -fn read_config_value( - file_path: &PathBuf, - node_path: &[&str], - value: ConfigValue, -) -> Result { - match value { - ConfigValue::Value(v) => Ok(v), - ConfigValue::ValueFromEnv(e) => Ok(env::var(&e).map_err(|err| { - ParseError::ValidateError(InvalidNodes(vec![InvalidNode { - file_path: 
file_path.to_owned(), - node_path: node_path - .iter() - .map(|s| KeyOrIndex::Key((*s).to_owned())) - .chain(once(KeyOrIndex::Key("valueFromEnv".to_owned()))) - .collect(), - message: format!("Error reading env var {}: {}", e, err), - }])) - })?), - } -} diff --git a/crates/ndc-graphql/src/connector/schema.rs b/crates/ndc-graphql/src/connector/schema.rs index bf36509..6935675 100644 --- a/crates/ndc-graphql/src/connector/schema.rs +++ b/crates/ndc-graphql/src/connector/schema.rs @@ -8,8 +8,8 @@ use ndc_sdk::models; use std::{collections::BTreeMap, iter}; pub fn schema_response(configuration: &ServerConfig) -> models::SchemaResponse { - let forward_request_headers = configuration.request.forward_headers.is_some(); - let forward_response_headers = configuration.response.forward_headers.is_some(); + let forward_request_headers = !configuration.request.forward_headers.is_empty(); + let forward_response_headers = !configuration.response.forward_headers.is_empty(); let mut scalar_types: BTreeMap<_, _> = configuration .schema diff --git a/crates/ndc-graphql/src/connector/setup.rs b/crates/ndc-graphql/src/connector/setup.rs new file mode 100644 index 0000000..605b992 --- /dev/null +++ b/crates/ndc-graphql/src/connector/setup.rs @@ -0,0 +1,152 @@ +use super::{state::ServerState, GraphQLConnector}; +use async_trait::async_trait; +use common::{ + config::{ConnectionConfig, ServerConfig}, + config_file::{ConfigValue, ServerConfigFile, CONFIG_FILE_NAME, SCHEMA_FILE_NAME}, + schema::SchemaDefinition, +}; +use graphql_parser::parse_schema; +use ndc_sdk::connector::{ + Connector, ConnectorSetup, InitializationError, InvalidNode, InvalidNodes, KeyOrIndex, + LocatedError, ParseError, +}; +use std::{ + collections::HashMap, + env, + iter::once, + path::{Path, PathBuf}, +}; +use tokio::fs; + +pub struct GraphQLConnectorSetup { + environment: HashMap, +} + +#[async_trait] +impl ConnectorSetup for GraphQLConnectorSetup { + type Connector = GraphQLConnector; + + async fn 
parse_configuration( + &self, + configuration_dir: impl AsRef + Send, + ) -> Result<::Configuration, ParseError> { + self.read_configuration(configuration_dir).await + } + + async fn try_init_state( + &self, + configuration: &::Configuration, + _metrics: &mut prometheus::Registry, + ) -> Result<::State, InitializationError> { + Ok(ServerState::new(configuration)) + } +} + +impl Default for GraphQLConnectorSetup { + fn default() -> Self { + Self { + environment: env::vars().collect(), + } + } +} + +impl GraphQLConnectorSetup { + pub fn new(environment: HashMap) -> Self { + Self { environment } + } + async fn read_configuration( + &self, + configuration_dir: impl AsRef + Send, + ) -> Result { + let config_file_path = configuration_dir.as_ref().join(CONFIG_FILE_NAME); + let config_file = fs::read_to_string(&config_file_path) + .await + .map_err(ParseError::IoError)?; + let config_file: ServerConfigFile = serde_json::from_str(&config_file).map_err(|err| { + ParseError::ParseError(LocatedError { + file_path: config_file_path.clone(), + line: err.line(), + column: err.column(), + message: err.to_string(), + }) + })?; + + let schema_file_path = configuration_dir.as_ref().join(SCHEMA_FILE_NAME); + let schema_string = fs::read_to_string(&schema_file_path) + .await + .map_err(ParseError::IoError)?; + + let schema_document = parse_schema(&schema_string).map_err(|err| { + ParseError::ParseError(LocatedError { + file_path: config_file_path.clone(), + line: 0, + column: 0, + message: err.to_string(), + }) + })?; + + let request_config = config_file.request.into(); + let response_config = config_file.response.into(); + + let schema = SchemaDefinition::new(&schema_document, &request_config, &response_config) + .map_err(|err| { + ParseError::ValidateError(InvalidNodes(vec![InvalidNode { + file_path: schema_file_path, + node_path: vec![], + message: err.to_string(), + }])) + })?; + + let config = ServerConfig { + schema, + connection: ConnectionConfig { + endpoint: 
self.read_config_value( + &config_file_path, + &["connection", "endpoint"], + config_file.execution.endpoint, + )?, + headers: config_file + .execution + .headers + .into_iter() + .map(|(header_name, header_value)| { + let value = self.read_config_value( + &config_file_path, + &["connection", "headers", &header_name, "value"], + header_value, + )?; + Ok((header_name, value)) + }) + .collect::>()?, + }, + request: request_config, + response: response_config, + }; + + Ok(config) + } + + fn read_config_value( + &self, + file_path: &PathBuf, + node_path: &[&str], + value: ConfigValue, + ) -> Result { + match value { + ConfigValue::Value(v) => Ok(v), + ConfigValue::ValueFromEnv(e) => { + Ok(self.environment.get(&e).cloned().ok_or_else(|| { + ParseError::ValidateError(InvalidNodes(vec![InvalidNode { + file_path: file_path.to_owned(), + node_path: node_path + .iter() + .map(|s| KeyOrIndex::Key((*s).to_owned())) + .chain(once(KeyOrIndex::Key("valueFromEnv".to_owned()))) + .collect(), + message: format!("Environment Variable {e} not set"), + }])) + })?) 
+ } + } + } +} diff --git a/crates/ndc-graphql/src/main.rs b/crates/ndc-graphql/src/main.rs index 2ce2f86..f7757c6 100644 --- a/crates/ndc-graphql/src/main.rs +++ b/crates/ndc-graphql/src/main.rs @@ -1,9 +1,9 @@ -use ndc_graphql::connector::GraphQLConnector; +use ndc_graphql::connector::setup::GraphQLConnectorSetup; use ndc_sdk::default_main::default_main; use std::error::Error; #[tokio::main] async fn main() -> Result<(), Box> { - default_main::().await + default_main::().await } diff --git a/crates/ndc-graphql/src/query_builder.rs b/crates/ndc-graphql/src/query_builder.rs index 5c96575..137d6a2 100644 --- a/crates/ndc-graphql/src/query_builder.rs +++ b/crates/ndc-graphql/src/query_builder.rs @@ -37,6 +37,12 @@ pub fn build_mutation_document( let mut request_headers = BTreeMap::new(); let mut items = vec![]; + let mutation_type_name = configuration + .schema + .mutation_type_name + .as_ref() + .ok_or(QueryBuilderError::NoMutationType)?; + for (index, operation) in request.operations.iter().enumerate() { match operation { models::MutationOperation::Procedure { @@ -69,6 +75,8 @@ pub fn build_mutation_document( |v| Ok(v.to_owned()), field_definition, &mut variables, + name, + &mutation_type_name, )?, fields, field_definition, @@ -117,6 +125,12 @@ pub fn build_query_document( let mut items = vec![]; + let query_type_name = configuration + .schema + .query_type_name + .as_ref() + .ok_or(QueryBuilderError::NoQueryType)?; + for (alias, field) in request .query .fields @@ -165,6 +179,8 @@ pub fn build_query_document( map_arg, field_definition, &mut variables, + &request.collection, + &query_type_name, )?, fields, field_definition, @@ -215,11 +231,7 @@ where let mut request_arguments = BTreeMap::new(); let mut headers = configuration.connection.headers.clone(); - let patterns = configuration - .request - .forward_headers - .clone() - .unwrap_or_default(); + let patterns = &configuration.request.forward_headers; for (name, argument) in arguments { let value = 
map_argument(argument)?; @@ -248,7 +260,7 @@ where )) } serde_json::Value::String(header) => { - for pattern in &patterns { + for pattern in patterns { if glob_match(&pattern.to_lowercase(), &name.to_lowercase()) { headers.insert(name, header); break; @@ -280,7 +292,7 @@ fn selection_set_field<'a>( let items = fields .iter() .map(|(alias, field)| { - let (name, fields, arguments) = match field { + let (field_name, fields, arguments) = match field { models::Field::Column { column, fields, @@ -293,31 +305,35 @@ fn selection_set_field<'a>( } }; + let object_name = field_definition.r#type.name(); + // subfield selection should only exist on object types - let field_definition = - match configuration - .schema - .definitions - .get(field_definition.r#type.name()) - { - Some(TypeDef::Object { - fields, - description: _, - }) => fields.get(name).ok_or_else(|| { - QueryBuilderError::ObjectFieldNotFound { - object: field_definition.r#type.name().to_owned(), - field: name.to_owned(), - } - }), - Some(_) | None => Err(QueryBuilderError::ObjectTypeNotFound( - field_definition.r#type.name().to_owned(), - )), - }?; + let field_definition = match configuration.schema.definitions.get(object_name) { + Some(TypeDef::Object { + fields, + description: _, + }) => fields.get(field_name).ok_or_else(|| { + QueryBuilderError::ObjectFieldNotFound { + object: field_definition.r#type.name().to_owned(), + field: field_name.to_owned(), + } + }), + Some(_) | None => Err(QueryBuilderError::ObjectTypeNotFound( + field_definition.r#type.name().to_owned(), + )), + }?; selection_set_field( alias, - name, - field_arguments(arguments, map_query_arg, field_definition, variables)?, + field_name, + field_arguments( + arguments, + map_query_arg, + field_definition, + variables, + field_name, + object_name, + )?, fields, field_definition, variables, @@ -354,6 +370,8 @@ fn field_arguments<'a, A, M>( map_argument: M, field_definition: &ObjectFieldDefinition, variables: &mut OperationVariables, + field_name: 
&str, + object_name: &str, ) -> Result)>, QueryBuilderError> where M: Fn(&A) -> Result, @@ -361,7 +379,15 @@ where arguments .iter() .map(|(name, arg)| { - let input_type = &field_definition.arguments.get(name).unwrap().r#type; + let input_type = &field_definition + .arguments + .get(name) + .ok_or(QueryBuilderError::ArgumentNotFound { + object: object_name.to_owned(), + field: field_name.to_owned(), + argument: name.to_owned(), + })? + .r#type; let value = map_argument(arg)?; @@ -396,8 +422,7 @@ mod test { use std::collections::BTreeMap; use common::{ - config::{ConnectionConfig, ServerConfig}, - config_file::{RequestConfig, ResponseConfig}, + config::{ConnectionConfig, RequestConfig, ResponseConfig, ServerConfig}, schema::SchemaDefinition, }; use graphql_parser; @@ -430,7 +455,10 @@ mod test { "#; let schema_document = graphql_parser::parse_schema(schema_string)?; - let request_config = RequestConfig::default(); + let request_config = RequestConfig { + forward_headers: vec!["Authorization".to_string()], + ..RequestConfig::default() + }; let response_config = ResponseConfig::default(); let schema = SchemaDefinition::new(&schema_document, &request_config, &response_config)?; diff --git a/crates/ndc-graphql/src/query_builder/error.rs b/crates/ndc-graphql/src/query_builder/error.rs index b216028..2f41ac1 100644 --- a/crates/ndc-graphql/src/query_builder/error.rs +++ b/crates/ndc-graphql/src/query_builder/error.rs @@ -38,17 +38,17 @@ impl std::error::Error for QueryBuilderError {} impl From for QueryError { fn from(value: QueryBuilderError) -> Self { - QueryError::InvalidRequest(value.to_string()) + QueryError::new_invalid_request(&value) } } impl From for MutationError { fn from(value: QueryBuilderError) -> Self { - MutationError::InvalidRequest(value.to_string()) + MutationError::new_invalid_request(&value) } } impl From for ExplainError { fn from(value: QueryBuilderError) -> Self { - ExplainError::InvalidRequest(value.to_string()) + 
ExplainError::new_invalid_request(&value) } }