fix(cubesql): Support new QuickSight meta queries
1 parent df53c51 · commit 148e4cf
15 changed files with 676 additions and 19 deletions.
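QuickSight's PostgreSQL/Redshift driver probes server metadata when it connects, and this commit adds providers for two of the tables it asks about. As an illustration only (the exact statements are an assumption, not taken from this commit), the probes served by the new tables would be along these lines:

// Hypothetical metadata probes of the kind a BI client might send; the table
// names match the providers added below, but the statements themselves are
// illustrative assumptions, not queries captured from QuickSight.
const SQL_SIZING_PROBE: &str =
    "SELECT sizing_id, sizing_name, supported_value, comments FROM information_schema.sql_sizing";
const EXTERNAL_SCHEMA_PROBE: &str =
    "SELECT esoid, eskind, esdbname, esoptions FROM pg_external_schema";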
rust/cubesql/cubesql/src/compile/engine/information_schema/postgres/sql_sizing.rs (159 additions, 0 deletions)
@@ -0,0 +1,159 @@
use std::{any::Any, sync::Arc};

use async_trait::async_trait;
use datafusion::{
    arrow::{
        array::{Array, ArrayRef, StringBuilder, UInt32Builder},
        datatypes::{DataType, Field, Schema, SchemaRef},
        record_batch::RecordBatch,
    },
    datasource::{datasource::TableProviderFilterPushDown, TableProvider, TableType},
    error::DataFusionError,
    logical_plan::Expr,
    physical_plan::{memory::MemoryExec, ExecutionPlan},
};

struct InfoSchemaSqlSizingBuilder {
    sizing_id: UInt32Builder,
    sizing_name: StringBuilder,
    supported_value: UInt32Builder,
    comments: StringBuilder,
}

impl InfoSchemaSqlSizingBuilder {
    fn new(capacity: usize) -> Self {
        Self {
            sizing_id: UInt32Builder::new(capacity),
            sizing_name: StringBuilder::new(capacity),
            supported_value: UInt32Builder::new(capacity),
            comments: StringBuilder::new(capacity),
        }
    }

    fn add_info(
        &mut self,
        sizing_id: u32,
        sizing_name: impl AsRef<str>,
        supported_value: Option<u32>,
        comments: Option<&str>,
    ) {
        self.sizing_id.append_value(sizing_id).unwrap();
        self.sizing_name.append_value(sizing_name).unwrap();
        self.supported_value.append_option(supported_value).unwrap();
        self.comments.append_option(comments).unwrap();
    }

    fn finish(mut self) -> Vec<Arc<dyn Array>> {
        let columns: Vec<Arc<dyn Array>> = vec![
            Arc::new(self.sizing_id.finish()),
            Arc::new(self.sizing_name.finish()),
            Arc::new(self.supported_value.finish()),
            Arc::new(self.comments.finish()),
        ];

        columns
    }
}

pub struct InfoSchemaSqlSizingProvider {
    data: Arc<Vec<ArrayRef>>,
}

impl InfoSchemaSqlSizingProvider {
    pub fn new() -> Self {
        let mut builder = InfoSchemaSqlSizingBuilder::new(11);

        builder.add_info(97, "MAXIMUM COLUMNS IN GROUP BY", Some(0), None);
        builder.add_info(99, "MAXIMUM COLUMNS IN ORDER BY", Some(0), None);
        builder.add_info(100, "MAXIMUM COLUMNS IN SELECT", Some(1664), None);
        builder.add_info(101, "MAXIMUM COLUMNS IN TABLE", Some(1600), None);
        builder.add_info(
            34,
            "MAXIMUM CATALOG NAME LENGTH",
            Some(63),
            Some("Might be less, depending on character set."),
        );
        builder.add_info(
            30,
            "MAXIMUM COLUMN NAME LENGTH",
            Some(63),
            Some("Might be less, depending on character set."),
        );
        builder.add_info(
            31,
            "MAXIMUM CURSOR NAME LENGTH",
            Some(63),
            Some("Might be less, depending on character set."),
        );
        builder.add_info(
            10005,
            "MAXIMUM IDENTIFIER LENGTH",
            Some(63),
            Some("Might be less, depending on character set."),
        );
        builder.add_info(
            32,
            "MAXIMUM SCHEMA NAME LENGTH",
            Some(63),
            Some("Might be less, depending on character set."),
        );
        builder.add_info(
            35,
            "MAXIMUM TABLE NAME LENGTH",
            Some(63),
            Some("Might be less, depending on character set."),
        );
        builder.add_info(
            107,
            "MAXIMUM USER NAME LENGTH",
            Some(63),
            Some("Might be less, depending on character set."),
        );

        Self {
            data: Arc::new(builder.finish()),
        }
    }
}

#[async_trait]
impl TableProvider for InfoSchemaSqlSizingProvider {
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn table_type(&self) -> TableType {
        TableType::View
    }

    fn schema(&self) -> SchemaRef {
        Arc::new(Schema::new(vec![
            Field::new("sizing_id", DataType::UInt32, false),
            Field::new("sizing_name", DataType::Utf8, false),
            Field::new("supported_value", DataType::UInt32, true),
            Field::new("comments", DataType::Utf8, true),
        ]))
    }

    async fn scan(
        &self,
        projection: &Option<Vec<usize>>,
        _filters: &[Expr],
        _limit: Option<usize>,
    ) -> Result<Arc<dyn ExecutionPlan>, DataFusionError> {
        let batch = RecordBatch::try_new(self.schema(), self.data.to_vec())?;

        Ok(Arc::new(MemoryExec::try_new(
            &[vec![batch]],
            self.schema(),
            projection.clone(),
        )?))
    }

    fn supports_filter_pushdown(
        &self,
        _filter: &Expr,
    ) -> Result<TableProviderFilterPushDown, DataFusionError> {
        Ok(TableProviderFilterPushDown::Unsupported)
    }
}
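A minimal usage sketch (not part of the commit) for the provider above: it constructs InfoSchemaSqlSizingProvider and checks the Arrow schema it reports, assuming the same datafusion version and imports as the file itself.

use datafusion::datasource::TableProvider;

fn sql_sizing_schema_sanity_check() {
    let provider = InfoSchemaSqlSizingProvider::new();
    let schema = provider.schema();

    // The provider exposes four columns; supported_value and comments are nullable.
    assert_eq!(schema.fields().len(), 4);
    assert_eq!(schema.field(0).name(), "sizing_id");
    assert!(schema.field(2).is_nullable());
    assert!(schema.field(3).is_nullable());
}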
rust/cubesql/cubesql/src/compile/engine/information_schema/redshift/mod.rs (4 additions, 0 deletions)
@@ -1,15 +1,19 @@
pub mod late_binding_view_unpacked;
pub mod pg_external_schema;
pub mod stl_ddltext;
pub mod stl_query;
pub mod stl_querytext;
pub mod stv_slices;
pub mod svv_external_schemas;
pub mod svv_table_info;
pub mod svv_tables;

pub use late_binding_view_unpacked::*;
pub use pg_external_schema::*;
pub use stl_ddltext::*;
pub use stl_query::*;
pub use stl_querytext::*;
pub use stv_slices::*;
pub use svv_external_schemas::*;
pub use svv_table_info::*;
pub use svv_tables::*;
rust/cubesql/cubesql/src/compile/engine/information_schema/redshift/pg_external_schema.rs (99 additions, 0 deletions)
@@ -0,0 +1,99 @@
use std::{any::Any, sync::Arc};

use async_trait::async_trait;
use datafusion::{
    arrow::{
        array::{Array, ArrayRef, Int32Builder, StringBuilder, UInt32Builder},
        datatypes::{DataType, Field, Schema, SchemaRef},
        record_batch::RecordBatch,
    },
    datasource::{datasource::TableProviderFilterPushDown, TableProvider, TableType},
    error::DataFusionError,
    logical_plan::Expr,
    physical_plan::{memory::MemoryExec, ExecutionPlan},
};

struct RedshiftPgExternalSchemaBuilder {
    esoid: UInt32Builder,
    eskind: Int32Builder,
    esdbname: StringBuilder,
    esoptions: StringBuilder,
}

impl RedshiftPgExternalSchemaBuilder {
    fn new(capacity: usize) -> Self {
        Self {
            esoid: UInt32Builder::new(capacity),
            eskind: Int32Builder::new(capacity),
            esdbname: StringBuilder::new(capacity),
            esoptions: StringBuilder::new(capacity),
        }
    }

    fn finish(mut self) -> Vec<Arc<dyn Array>> {
        let columns: Vec<Arc<dyn Array>> = vec![
            Arc::new(self.esoid.finish()),
            Arc::new(self.eskind.finish()),
            Arc::new(self.esdbname.finish()),
            Arc::new(self.esoptions.finish()),
        ];

        columns
    }
}

pub struct RedshiftPgExternalSchemaProvider {
    data: Arc<Vec<ArrayRef>>,
}

impl RedshiftPgExternalSchemaProvider {
    pub fn new() -> Self {
        let builder = RedshiftPgExternalSchemaBuilder::new(0);

        Self {
            data: Arc::new(builder.finish()),
        }
    }
}

#[async_trait]
impl TableProvider for RedshiftPgExternalSchemaProvider {
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn table_type(&self) -> TableType {
        TableType::View
    }

    fn schema(&self) -> SchemaRef {
        Arc::new(Schema::new(vec![
            Field::new("esoid", DataType::UInt32, false),
            Field::new("eskind", DataType::Int32, false),
            Field::new("esdbname", DataType::Utf8, false),
            Field::new("esoptions", DataType::Utf8, false),
        ]))
    }

    async fn scan(
        &self,
        projection: &Option<Vec<usize>>,
        _filters: &[Expr],
        _limit: Option<usize>,
    ) -> Result<Arc<dyn ExecutionPlan>, DataFusionError> {
        let batch = RecordBatch::try_new(self.schema(), self.data.to_vec())?;

        Ok(Arc::new(MemoryExec::try_new(
            &[vec![batch]],
            self.schema(),
            projection.clone(),
        )?))
    }

    fn supports_filter_pushdown(
        &self,
        _filter: &Expr,
    ) -> Result<TableProviderFilterPushDown, DataFusionError> {
        Ok(TableProviderFilterPushDown::Unsupported)
    }
}
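And a similar sketch (again an assumption, not part of the diff) exercising the empty pg_external_schema provider through its scan method; it reuses the imports from the file above, and the async runtime and error handling are elided.

use datafusion::datasource::TableProvider;

async fn scan_pg_external_schema() -> Result<(), DataFusionError> {
    let provider = RedshiftPgExternalSchemaProvider::new();

    // The builder is created with zero capacity, so the scan yields an empty
    // four-column result set.
    let plan = provider.scan(&None, &[], None).await?;
    assert_eq!(plan.schema().fields().len(), 4);

    Ok(())
}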