feat: undefined json keys as defaults w/ Prefer:undefined-keys
steve-chavez authored Mar 22, 2023
1 parent e731241 commit 439a96c
Showing 16 changed files with 232 additions and 38 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -24,6 +24,7 @@ This project adheres to [Semantic Versioning](http://semver.org/).
+ When the client sends the request header specified in the config it will be included in the response headers.
- #2694, Make `db-root-spec` stable. - @steve-chavez
+ This can be used to override the OpenAPI spec with a custom database function
- #1567, On bulk inserts with `?columns`, undefined json keys can get columns' DEFAULT values by using the `Prefer: undefined-keys=apply-defaults` header - @steve-chavez

### Fixed

23 changes: 22 additions & 1 deletion src/PostgREST/ApiRequest/Preferences.hs
@@ -9,6 +9,7 @@
module PostgREST.ApiRequest.Preferences
( Preferences(..)
, PreferCount(..)
, PreferUndefinedKeys(..)
, PreferParameters(..)
, PreferRepresentation(..)
, PreferResolution(..)
@@ -33,6 +34,7 @@ import Protolude
-- >>> deriving instance Show PreferParameters
-- >>> deriving instance Show PreferCount
-- >>> deriving instance Show PreferTransaction
-- >>> deriving instance Show PreferUndefinedKeys
-- >>> deriving instance Show Preferences

-- | Preferences recognized by the application.
@@ -43,6 +45,7 @@ data Preferences
, preferParameters :: Maybe PreferParameters
, preferCount :: Maybe PreferCount
, preferTransaction :: Maybe PreferTransaction
, preferUndefinedKeys :: Maybe PreferUndefinedKeys
}

-- |
@@ -57,6 +60,7 @@ data Preferences
-- , preferParameters = Nothing
-- , preferCount = Just ExactCount
-- , preferTransaction = Nothing
-- , preferUndefinedKeys = Nothing
-- }
--
-- Multiple headers can also be used:
@@ -68,6 +72,7 @@ data Preferences
-- , preferParameters = Nothing
-- , preferCount = Just ExactCount
-- , preferTransaction = Nothing
-- , preferUndefinedKeys = Nothing
-- }
--
-- If a preference is set more than once, only the first is used:
@@ -92,13 +97,14 @@ data Preferences
--
-- Preferences can be separated by arbitrary amounts of space, lower-case header is also recognized:
--
-- >>> pPrint $ fromHeaders [("prefer", "count=exact, tx=commit ,return=representation")]
-- >>> pPrint $ fromHeaders [("prefer", "count=exact, tx=commit ,return=representation , undefined-keys=apply-defaults")]
-- Preferences
-- { preferResolution = Nothing
-- , preferRepresentation = Full
-- , preferParameters = Nothing
-- , preferCount = Just ExactCount
-- , preferTransaction = Just Commit
-- , preferUndefinedKeys = Just ApplyDefaults
-- }
--
fromHeaders :: [HTTP.Header] -> Preferences
@@ -109,6 +115,7 @@ fromHeaders headers =
, preferParameters = parsePrefs [SingleObject, MultipleObjects]
, preferCount = parsePrefs [ExactCount, PlannedCount, EstimatedCount]
, preferTransaction = parsePrefs [Commit, Rollback]
, preferUndefinedKeys = parsePrefs [ApplyDefaults, IgnoreDefaults]
}
where
prefHeaders = filter ((==) HTTP.hPrefer . fst) headers
@@ -204,3 +211,17 @@ instance ToHeaderValue PreferTransaction where
toHeaderValue Rollback = "tx=rollback"

instance ToAppliedHeader PreferTransaction

-- |
-- How to handle the insertion/update when the keys specified in ?columns are not present
-- in the json body.
data PreferUndefinedKeys
= ApplyDefaults -- ^ Use the default column value for the unspecified keys.
| IgnoreDefaults -- ^ Inserts: null values / Updates: the keys are not SET to any value
deriving Eq

instance ToHeaderValue PreferUndefinedKeys where
toHeaderValue ApplyDefaults = "undefined-keys=apply-defaults"
toHeaderValue IgnoreDefaults = "undefined-keys=ignore-defaults"

instance ToAppliedHeader PreferUndefinedKeys
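
To make the two values concrete, here is a minimal psql-style sketch of what each preference means for a bulk insert through `?columns=id,name`. The table, payload, and default are made up, and this is not the SQL PostgREST generates; it only illustrates the intended behavior.

```sql
-- Hypothetical table; 'unnamed' stands in for whatever DEFAULT the column declares.
CREATE TEMP TABLE items (id bigint, name text DEFAULT 'unnamed');

-- undefined-keys=ignore-defaults: a key missing from a payload element is read as NULL
-- (for updates, the column is simply not SET).
INSERT INTO items (id, name)
SELECT id, name
FROM json_to_recordset('[{"id": 1}, {"id": 2, "name": "two"}]'::json) AS _(id bigint, name text);
-- inserts (1, NULL) and (2, 'two')

-- undefined-keys=apply-defaults: a defaults object is merged beneath each element first,
-- so the missing key picks up the column's DEFAULT instead.
INSERT INTO items (id, name)
SELECT id, name
FROM jsonb_to_recordset(
       (SELECT jsonb_agg(jsonb_build_object('name', 'unnamed') || elem)
        FROM jsonb_array_elements('[{"id": 1}, {"id": 2, "name": "two"}]'::jsonb) AS elem)
     ) AS _(id bigint, name text);
-- inserts (1, 'unnamed') and (2, 'two')
```
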
12 changes: 7 additions & 5 deletions src/PostgREST/Plan.hs
@@ -13,6 +13,7 @@ resource.
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE RecordWildCards #-}

module PostgREST.Plan
@@ -501,28 +502,28 @@ updateNode f (targetNodeName:remainingPath, a) (Right (Node rootNode forest)) =
findNode = find (\(Node ReadPlan{relName, relAlias} _) -> relName == targetNodeName || relAlias == Just targetNodeName) forest

mutatePlan :: Mutation -> QualifiedIdentifier -> ApiRequest -> SchemaCache -> ReadPlanTree -> Either Error MutatePlan
mutatePlan mutation qi ApiRequest{iPreferences=Preferences{..}, ..} sCache readReq = mapLeft ApiRequestError $
mutatePlan mutation qi ApiRequest{iPreferences=preferences, ..} sCache readReq = mapLeft ApiRequestError $
case mutation of
MutationCreate ->
mapRight (\typedColumns -> Insert qi typedColumns body ((,) <$> preferResolution <*> Just confCols) [] returnings pkCols) typedColumnsOrError
mapRight (\typedColumns -> Insert qi typedColumns body ((,) <$> preferences.preferResolution <*> Just confCols) [] returnings pkCols applyDefaults) typedColumnsOrError
MutationUpdate ->
mapRight (\typedColumns -> Update qi typedColumns body combinedLogic iTopLevelRange rootOrder returnings) typedColumnsOrError
mapRight (\typedColumns -> Update qi typedColumns body combinedLogic iTopLevelRange rootOrder returnings applyDefaults) typedColumnsOrError
MutationSingleUpsert ->
if null qsLogic &&
qsFilterFields == S.fromList pkCols &&
not (null (S.fromList pkCols)) &&
all (\case
Filter _ (OpExpr False (Op OpEqual _)) -> True
_ -> False) qsFiltersRoot
then mapRight (\typedColumns -> Insert qi typedColumns body (Just (MergeDuplicates, pkCols)) combinedLogic returnings mempty) typedColumnsOrError
then mapRight (\typedColumns -> Insert qi typedColumns body (Just (MergeDuplicates, pkCols)) combinedLogic returnings mempty False) typedColumnsOrError
else
Left InvalidFilters
MutationDelete -> Right $ Delete qi combinedLogic iTopLevelRange rootOrder returnings
where
confCols = fromMaybe pkCols qsOnConflict
QueryParams.QueryParams{..} = iQueryParams
returnings =
if preferRepresentation == None
if preferences.preferRepresentation == None
then []
else inferColsEmbedNeeds readReq pkCols
pkCols = maybe mempty tablePKCols $ HM.lookup qi $ dbTables sCache
@@ -532,6 +533,7 @@ mutatePlan mutation qi ApiRequest{iPreferences=Preferences{..}, ..} sCache readR
body = payRaw <$> iPayload -- the body is assumed to be json at this stage(ApiRequest validates)
tbl = HM.lookup qi $ dbTables sCache
typedColumnsOrError = resolveOrError tbl `traverse` S.toList iColumns
applyDefaults = preferences.preferUndefinedKeys == Just ApplyDefaults

resolveOrError :: Maybe Table -> FieldName -> Either ApiRequestError TypedField
resolveOrError Nothing _ = Left NotFound
2 changes: 2 additions & 0 deletions src/PostgREST/Plan/MutatePlan.hs
@@ -24,6 +24,7 @@ data MutatePlan
, where_ :: [LogicTree]
, returning :: [FieldName]
, insPkCols :: [FieldName]
, applyDefs :: Bool
}
| Update
{ in_ :: QualifiedIdentifier
@@ -33,6 +34,7 @@ data MutatePlan
, mutRange :: NonnegRange
, mutOrder :: [OrderTerm]
, returning :: [FieldName]
, applyDefs :: Bool
}
| Delete
{ in_ :: QualifiedIdentifier
7 changes: 4 additions & 3 deletions src/PostgREST/Plan/Types.hs
@@ -13,12 +13,13 @@ import Protolude

-- | A TypedField is a field with sufficient information to be read from JSON with `json_to_recordset`.
data TypedField = TypedField
{ tfName :: FieldName
, tfIRType :: Text -- ^ The initial type of the field, before any casting.
{ tfName :: FieldName
, tfIRType :: Text -- ^ The initial type of the field, before any casting.
, tfDefault :: Maybe Text
} deriving (Eq)

resolveTableField :: Table -> FieldName -> Maybe TypedField
resolveTableField table fieldName =
case HMI.lookup fieldName (tableColumns table) of
Just column -> Just $ TypedField (colName column) (colNominalType column)
Just column -> Just $ TypedField (colName column) (colNominalType column) (colDefault column)
Nothing -> Nothing
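
As a side note, the `Maybe Text` carried in `tfDefault` is the column's textual DEFAULT expression. A hypothetical catalog lookup (not necessarily the query the schema cache actually runs) that surfaces the same expression for the `test.complex_items` fixture used further below would be:

```sql
-- Hypothetical lookup: pg_get_expr renders each column's stored DEFAULT expression,
-- the kind of value that ends up in tfDefault.
SELECT a.attname                       AS column_name,
       pg_get_expr(d.adbin, d.adrelid) AS default_expr
FROM pg_attribute a
JOIN pg_attrdef  d ON d.adrelid = a.attrelid AND d.adnum = a.attnum
WHERE a.attrelid = 'test.complex_items'::regclass
  AND a.attnum > 0
  AND NOT a.attisdropped;
```
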
12 changes: 6 additions & 6 deletions src/PostgREST/Query/QueryBuilder.hs
@@ -81,9 +81,9 @@ getSelectsJoins rr@(Node ReadPlan{select, relName, relToParent=Just rel, relAggA
(if null select && null forest then selects else sel:selects, joi:joins)

mutatePlanToQuery :: MutatePlan -> SQL.Snippet
mutatePlanToQuery (Insert mainQi iCols body onConflct putConditions returnings _) =
mutatePlanToQuery (Insert mainQi iCols body onConflct putConditions returnings _ applyDefaults) =
"INSERT INTO " <> SQL.sql (fromQi mainQi) <> SQL.sql (if null iCols then " " else "(" <> cols <> ") ") <>
fromJsonBodyF body iCols True False <>
fromJsonBodyF body iCols True False applyDefaults <>
-- Only used for PUT
(if null putConditions then mempty else "WHERE " <> intercalateSnippet " AND " (pgFmtLogicTree (QualifiedIdentifier mempty "pgrst_body") <$> putConditions)) <>
SQL.sql (BS.unwords [
@@ -105,7 +105,7 @@ mutatePlanToQuery (Insert mainQi iCols body onConflct putConditions returnings _
cols = BS.intercalate ", " $ pgFmtIdent . tfName <$> iCols

-- An update without a limit is always filtered with a WHERE
mutatePlanToQuery (Update mainQi uCols body logicForest range ordts returnings)
mutatePlanToQuery (Update mainQi uCols body logicForest range ordts returnings applyDefaults)
| null uCols =
-- if there are no columns we cannot do UPDATE table SET {empty}, it'd be invalid syntax
-- selecting an empty resultset from mainQi gives us the column names to prevent errors when using &select=
@@ -114,13 +114,13 @@ mutatePlanToQuery (Update mainQi uCols body logicForest range ordts returnings)

| range == allRange =
"UPDATE " <> mainTbl <> " SET " <> SQL.sql nonRangeCols <> " " <>
fromJsonBodyF body uCols False False <>
fromJsonBodyF body uCols False False applyDefaults <>
whereLogic <> " " <>
SQL.sql (returningF mainQi returnings)

| otherwise =
"WITH " <>
"pgrst_update_body AS (" <> fromJsonBodyF body uCols True True <> "), " <>
"pgrst_update_body AS (" <> fromJsonBodyF body uCols True True applyDefaults <> "), " <>
"pgrst_affected_rows AS (" <>
"SELECT " <> SQL.sql rangeIdF <> " FROM " <> mainTbl <>
whereLogic <> " " <>
@@ -171,7 +171,7 @@ callPlanToQuery (FunctionCall qi params args returnsScalar multipleCall returnin
fromCall = case params of
OnePosParam prm -> "FROM " <> callIt (singleParameter args $ encodeUtf8 $ ppType prm)
KeyParams [] -> "FROM " <> callIt mempty
KeyParams prms -> fromJsonBodyF args ((\p -> TypedField (ppName p) (ppType p)) <$> prms) False (not multipleCall) <> ", " <>
KeyParams prms -> fromJsonBodyF args ((\p -> TypedField (ppName p) (ppType p) Nothing) <$> prms) False (not multipleCall) False <> ", " <>
"LATERAL " <> callIt (fmtParams prms)

callIt :: SQL.Snippet -> SQL.Snippet
34 changes: 28 additions & 6 deletions src/PostgREST/Query/SqlFragment.hs
@@ -143,6 +143,16 @@ pgBuildArrayLiteral vals =
pgFmtIdent :: Text -> SqlFragment
pgFmtIdent x = encodeUtf8 $ "\"" <> T.replace "\"" "\"\"" (trimNullChars x) <> "\""

-- Only use it if the input comes from the database itself, like on `jsonb_build_object('column_from_a_table', val)..`
pgFmtLit :: Text -> Text
pgFmtLit x =
let trimmed = trimNullChars x
escaped = "'" <> T.replace "'" "''" trimmed <> "'"
slashed = T.replace "\\" "\\\\" escaped in
if "\\" `T.isInfixOf` escaped
then "E" <> slashed
else slashed

trimNullChars :: Text -> Text
trimNullChars = T.takeWhile (/= '\x0')

@@ -221,28 +231,40 @@ pgFmtSelectItem table (f@(fName, jp), Nothing, alias) = pgFmtField table f <> SQ
pgFmtSelectItem table (f@(fName, jp), Just cast, alias) = "CAST (" <> pgFmtField table f <> " AS " <> SQL.sql (encodeUtf8 cast) <> " )" <> SQL.sql (pgFmtAs fName jp alias)

-- TODO: At this stage there shouldn't be a Maybe since ApiRequest should ensure that an INSERT/UPDATE has a body
fromJsonBodyF :: Maybe LBS.ByteString -> [TypedField] -> Bool -> Bool -> SQL.Snippet
fromJsonBodyF body fields includeSelect includeLimitOne =
fromJsonBodyF :: Maybe LBS.ByteString -> [TypedField] -> Bool -> Bool -> Bool -> SQL.Snippet
fromJsonBodyF body fields includeSelect includeLimitOne includeDefaults =
SQL.sql
(if includeSelect then "SELECT " <> parsedCols <> " " else mempty) <>
"FROM (SELECT " <> jsonPlaceHolder <> " AS json_data) pgrst_payload, " <>
-- convert a json object into a json array, this way we can use json_to_recordset for all json payloads
-- Otherwise we'd have to use json_to_record for json objects and json_to_recordset for json arrays
-- We do this in SQL to avoid processing the JSON in application code
"LATERAL (SELECT CASE WHEN json_typeof(pgrst_payload.json_data) = 'array' THEN pgrst_payload.json_data ELSE json_build_array(pgrst_payload.json_data) END AS val) pgrst_uniform_json, " <>
"LATERAL (SELECT CASE WHEN " <> jsonTypeofF <> "(pgrst_payload.json_data) = 'array' THEN pgrst_payload.json_data ELSE " <> jsonBuildArrayF <> "(pgrst_payload.json_data) END AS val) pgrst_uniform_json, " <>
(if includeDefaults
then "LATERAL (SELECT jsonb_agg(jsonb_build_object(" <> defsJsonb <> ") || elem) AS val from jsonb_array_elements(pgrst_uniform_json.val) elem) pgrst_json_defs, "
else mempty) <>
"LATERAL (SELECT * FROM " <>
(if null fields
-- When we are inserting no columns (e.g. using default values), we can't use our ordinary `json_to_recordset`
-- because it can't extract records with no columns (there's no valid syntax for the `AS (colName colType,...)`
-- part). But we still need to ensure as many rows are created as there are array elements.
then SQL.sql "json_array_elements(pgrst_uniform_json.val) _ "
else SQL.sql ("json_to_recordset(pgrst_uniform_json.val) AS _(" <> typedCols <> ") " <> if includeLimitOne then "LIMIT 1" else mempty)
then SQL.sql $ jsonArrayElementsF <> "(" <> finalBodyF <> ") _ "
else SQL.sql $ jsonToRecordsetF <> "(" <> finalBodyF <> ") AS _(" <> typedCols <> ") " <> if includeLimitOne then "LIMIT 1" else mempty
) <>
") pgrst_body "
where
parsedCols = BS.intercalate ", " $ fromQi . QualifiedIdentifier "pgrst_body" . tfName <$> fields
typedCols = BS.intercalate ", " $ pgFmtIdent . tfName <> const " " <> encodeUtf8 . tfIRType <$> fields
jsonPlaceHolder = SQL.encoderAndParam (HE.nullable HE.jsonLazyBytes) body
defsJsonb = SQL.sql $ BS.intercalate "," fieldsWDefaults
fieldsWDefaults = mapMaybe (\case
TypedField{tfName=nam, tfDefault=Just def} -> Just $ encodeUtf8 (pgFmtLit nam <> ", " <> def)
TypedField{tfDefault=Nothing} -> Nothing
) fields
(finalBodyF, jsonTypeofF, jsonBuildArrayF, jsonArrayElementsF, jsonToRecordsetF) =
if includeDefaults
then ("pgrst_json_defs.val", "jsonb_typeof", "jsonb_build_array", "jsonb_array_elements", "jsonb_to_recordset")
else ("pgrst_uniform_json.val", "json_typeof", "json_build_array", "json_array_elements", "json_to_recordset")
jsonPlaceHolder = SQL.encoderAndParam (HE.nullable $ if includeDefaults then HE.jsonbLazyBytes else HE.jsonLazyBytes) body

pgFmtOrderTerm :: QualifiedIdentifier -> OrderTerm -> SQL.Snippet
pgFmtOrderTerm qi ot =
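
A standalone sketch of the two steps this fragment performs when `includeDefaults` is set: wrap a lone object payload into an array, then prepend the defaults object so that keys present in the payload win the merge. The defaults object here is illustrative (the real one is built from each column's DEFAULT expression), and this is not the exact snippet `fromJsonBodyF` emits.

```sql
-- Reduced illustration of the same pipeline, not the SQL fromJsonBodyF emits.
WITH pgrst_payload AS (
  SELECT '{"id": 7}'::jsonb AS json_data            -- a single-object payload
),
pgrst_uniform_json AS (
  -- wrap non-array payloads so jsonb_array_elements works uniformly
  SELECT CASE WHEN jsonb_typeof(json_data) = 'array'
              THEN json_data
              ELSE jsonb_build_array(json_data) END AS val
  FROM pgrst_payload
)
SELECT jsonb_agg(jsonb_build_object('name', 'unnamed') || elem) AS val
FROM pgrst_uniform_json, jsonb_array_elements(pgrst_uniform_json.val) AS elem;
-- => [{"id": 7, "name": "unnamed"}]
```
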
5 changes: 3 additions & 2 deletions src/PostgREST/Response.hs
@@ -109,6 +109,7 @@ createResponse QualifiedIdentifier{..} MutateReadPlan{mrMutatePlan} ctxApiReques
Nothing
else
toAppliedHeader <$> preferResolution
, toAppliedHeader <$> preferUndefinedKeys
]

if preferRepresentation == Full then
@@ -125,9 +126,9 @@ updateResponse ctxApiRequest@ApiRequest{iPreferences=Preferences{..}} resultSet
let
response = gucResponse rsGucStatus rsGucHeaders
contentRangeHeader =
RangeQuery.contentRangeH 0 (rsQueryTotal - 1) $
Just . RangeQuery.contentRangeH 0 (rsQueryTotal - 1) $
if shouldCount preferCount then Just rsQueryTotal else Nothing
headers = [contentRangeHeader]
headers = catMaybes [contentRangeHeader, toAppliedHeader <$> preferUndefinedKeys]

if preferRepresentation == Full then
response HTTP.status200
15 changes: 15 additions & 0 deletions test/pgbench/1567/new.sql
@@ -0,0 +1,15 @@
INSERT INTO "test"."complex_items"("arr_data", "field-with_sep", "id", "name")
SELECT pgrst_body."arr_data", pgrst_body."field-with_sep", pgrst_body."id", pgrst_body."name"
FROM (
SELECT '[{"id": 4, "name": "Vier"}, {"id": 5, "name": "Funf", "arr_data": null}, {"id": 6, "name": "Sechs", "arr_data": [1, 2, 3], "field-with_sep": 6}]'::jsonb as json_data
) pgrst_payload,
LATERAL (
SELECT CASE WHEN jsonb_typeof(pgrst_payload.json_data) = 'array' THEN pgrst_payload.json_data ELSE jsonb_build_array(pgrst_payload.json_data) END AS val
) pgrst_uniform_json,
LATERAL (
SELECT jsonb_agg(jsonb_build_object('field-with_sep', 1) || elem) AS vals from jsonb_array_elements(pgrst_uniform_json.val) elem
) pgrst_json_defs,
LATERAL (
SELECT * FROM jsonb_to_recordset (pgrst_json_defs.vals) AS _ ("arr_data" integer[], "field-with_sep" integer, "id" bigint, "name" text)
) pgrst_body
RETURNING "test"."complex_items".*;
12 changes: 12 additions & 0 deletions test/pgbench/1567/old.sql
@@ -0,0 +1,12 @@
INSERT INTO "test"."complex_items"("arr_data", "field-with_sep", "id", "name")
SELECT pgrst_body."arr_data", pgrst_body."field-with_sep", pgrst_body."id", pgrst_body."name"
FROM (
SELECT '[{"id": 4, "name": "Vier"}, {"id": 5, "name": "Funf", "arr_data": null}, {"id": 6, "name": "Sechs", "arr_data": [1, 2, 3], "field-with_sep": 6}]'::jsonb as json_data
) pgrst_payload,
LATERAL (
SELECT CASE WHEN jsonb_typeof(pgrst_payload.json_data) = 'array' THEN pgrst_payload.json_data ELSE jsonb_build_array(pgrst_payload.json_data) END AS val
) pgrst_uniform_json,
LATERAL (
SELECT * FROM jsonb_to_recordset (pgrst_uniform_json.val) AS _ ("arr_data" integer[], "field-with_sep" integer, "id" bigint, "name" text)
) pgrst_body
RETURNING "test"."complex_items".*
9 changes: 7 additions & 2 deletions test/pgbench/README.md
@@ -3,6 +3,11 @@
Can be used as:

```
postgrest-with-postgresql-15 -f test/pgbench/fixtures.sql pgbench -n -T 10 -f test/pgbench/2677/old.sql
postgrest-with-postgresql-15 -f test/pgbench/fixtures.sql pgbench -n -T 10 -f test/pgbench/2677/new.sql
postgrest-with-postgresql-15 -f test/pgbench/fixtures.sql pgbench -n -T 10 -f test/pgbench/1567/old.sql
postgrest-with-postgresql-15 -f test/pgbench/fixtures.sql pgbench -n -T 10 -f test/pgbench/1567/new.sql
```

## Directory structure

The directory name is the issue number on github.