fix: allow using special characters in json keys #3081

Merged: 4 commits, Dec 6, 2023
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -36,6 +36,7 @@ This project adheres to [Semantic Versioning](http://semver.org/).
+ Does not apply to role settings and `app.settings.*`
- #2420, Fix bogus message when listening on port 0 - @develop7
- #3067, Fix Acquisition Timeout errors logging to stderr when `log-level=crit` - @laurenceisla
- #3054, Fix not allowing special characters in JSON keys - @laurenceisla

### Changed

32 changes: 30 additions & 2 deletions src/PostgREST/ApiRequest/QueryParams.hs
@@ -350,8 +350,12 @@ pFieldForest = pFieldTree `sepBy` lexeme (char ',')
pFieldName :: Parser Text
pFieldName =
pQuotedValue <|>
- T.intercalate "-" . map toS <$> (pIdentifier `sepBy1` dash) <?>
+ sepByDash pIdentifier <?>
"field name (* or [a..z0..9_$])"

sepByDash :: Parser Text -> Parser Text
sepByDash fieldIdent =
T.intercalate "-" . map toS <$> (fieldIdent `sepBy1` dash)
where
isDash :: GenParser Char st ()
isDash = try ( char '-' >> notFollowedBy (char '>') )
@@ -364,12 +368,18 @@ pFieldName =
-- >>> P.parse pJsonPath "" "->text"
-- Right [JArrow {jOp = JKey {jVal = "text"}}]
--
-- >>> P.parse pJsonPath "" "->!@#$%^&*_a"
-- Right [JArrow {jOp = JKey {jVal = "!@#$%^&*_a"}}]
--
-- >>> P.parse pJsonPath "" "->1"
-- Right [JArrow {jOp = JIdx {jVal = "+1"}}]
--
-- >>> P.parse pJsonPath "" "->>text"
-- Right [J2Arrow {jOp = JKey {jVal = "text"}}]
--
-- >>> P.parse pJsonPath "" "->>!@#$%^&*_a"
-- Right [J2Arrow {jOp = JKey {jVal = "!@#$%^&*_a"}}]
--
-- >>> P.parse pJsonPath "" "->>1"
-- Right [J2Arrow {jOp = JIdx {jVal = "+1"}}]
--
@@ -406,7 +416,7 @@ pJsonPath = many pJsonOperation
try (string "->" $> JArrow)

pJsonOperand =
- let pJKey = JKey . toS <$> pFieldName
+ let pJKey = JKey . toS <$> pJsonKeyName
pJIdx = JIdx . toS <$> ((:) <$> P.option '+' (char '-') <*> many1 digit) <* pEnd
pEnd = try (void $ lookAhead (string "->")) <|>
try (void $ lookAhead (string "::")) <|>
@@ -415,6 +425,15 @@ pJsonPath = many pJsonOperation
try eof in
try pJIdx <|> try pJKey

pJsonKeyName :: Parser Text
pJsonKeyName =
pQuotedValue <|>
sepByDash pJsonKeyIdentifier <?>
"any non reserved character different from: .,>()"

pJsonKeyIdentifier :: Parser Text
pJsonKeyIdentifier = T.strip . toS <$> many1 (noneOf "(-:.,>)")

pField :: Parser Field
pField = lexeme $ (,) <$> pFieldName <*> P.option [] pJsonPath

@@ -477,6 +496,9 @@ pRelationSelect = lexeme $ do
-- >>> P.parse pFieldSelect "" "alias:name->jsonpath::cast"
-- Right (SelectField {selField = ("name",[JArrow {jOp = JKey {jVal = "jsonpath"}}]), selAggregateFunction = Nothing, selAggregateCast = Nothing, selCast = Just "cast", selAlias = Just "alias"})
--
-- >>> P.parse pFieldSelect "" "alias:name->!@#$%^&*_a::cast"
-- Right (SelectField {selField = ("name",[JArrow {jOp = JKey {jVal = "!@#$%^&*_a"}}]), selAggregateFunction = Nothing, selAggregateCast = Nothing, selCast = Just "cast", selAlias = Just "alias"})
--
-- >>> P.parse pFieldSelect "" "*"
-- Right (SelectField {selField = ("*",[]), selAggregateFunction = Nothing, selAggregateCast = Nothing, selCast = Nothing, selAlias = Nothing})
--
@@ -678,9 +700,15 @@ pDelimiter = char '.' <?> "delimiter (.)"
-- >>> P.parse pOrder "" "json_col->key.asc.nullslast"
-- Right [OrderTerm {otTerm = ("json_col",[JArrow {jOp = JKey {jVal = "key"}}]), otDirection = Just OrderAsc, otNullOrder = Just OrderNullsLast}]
--
-- >>> P.parse pOrder "" "json_col->!@#$%^&*_a.asc.nullslast"
-- Right [OrderTerm {otTerm = ("json_col",[JArrow {jOp = JKey {jVal = "!@#$%^&*_a"}}]), otDirection = Just OrderAsc, otNullOrder = Just OrderNullsLast}]
--
-- >>> P.parse pOrder "" "clients(json_col->key).desc.nullsfirst"
-- Right [OrderRelationTerm {otRelation = "clients", otRelTerm = ("json_col",[JArrow {jOp = JKey {jVal = "key"}}]), otDirection = Just OrderDesc, otNullOrder = Just OrderNullsFirst}]
--
-- >>> P.parse pOrder "" "clients(json_col->!@#$%^&*_a).desc.nullsfirst"
-- Right [OrderRelationTerm {otRelation = "clients", otRelTerm = ("json_col",[JArrow {jOp = JKey {jVal = "!@#$%^&*_a"}}]), otDirection = Just OrderDesc, otNullOrder = Just OrderNullsFirst}]
--
-- >>> P.parse pOrder "" "clients(name,id)"
-- Left (line 1, column 8):
-- unexpected '('
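For readers skimming the diff: the QueryParams.hs change comes down to two new pieces. `pJsonKeyIdentifier` accepts any run of characters outside the reserved set `(-:.,>)`, and `sepByDash` (factored out of `pFieldName`) joins those runs across dashes that do not begin a `->`/`->>` arrow, so JSON keys are no longer limited to the `[a..z0..9_$]` field-name grammar. A minimal standalone sketch of that idea, with simplified names rather than the PR's exact code:

```haskell
{-# LANGUAGE OverloadedStrings #-}
module Main where

import qualified Data.Text as T
import Text.Parsec
import Text.Parsec.String (Parser)

-- One run of characters that the query grammar does not reserve
-- (roughly what pJsonKeyIdentifier does).
keyIdentifier :: Parser T.Text
keyIdentifier = T.strip . T.pack <$> many1 (noneOf "(-:.,>)")

-- Identifiers joined by '-' unless that '-' starts an arrow
-- (roughly pJsonKeyName built on sepByDash).
keyName :: Parser T.Text
keyName =
  (T.intercalate "-" <$> keyIdentifier `sepBy1` dash)
    <?> "any non reserved character different from: .,>()"
  where
    dash = try (char '-' >> notFollowedBy (char '>'))

main :: IO ()
main = do
  print (parse keyName "" "!@#$%^&*_a")   -- Right "!@#$%^&*_a"
  print (parse keyName "" "my-key->rest") -- Right "my-key" (stops at the arrow)
  print (parse keyName "" "(oops")        -- Left ..., showing the "expecting ..." label
```

The `<?>` label in the sketch plays the same role as the one on `pJsonKeyName`: it is where the `expecting ... any non reserved character different from: .,>()` text asserted in the new JsonOperatorSpec error test comes from.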
2 changes: 1 addition & 1 deletion test/memory/memory-tests.sh
@@ -102,7 +102,7 @@ postJsonArrayTest(){

echo "Running memory usage tests.."

jsonKeyTest "1M" "POST" "/rpc/leak?columns=blob" "24M"
jsonKeyTest "1M" "POST" "/rpc/leak?columns=blob" "26M"
jsonKeyTest "1M" "POST" "/leak?columns=blob" "16M"
jsonKeyTest "1M" "PATCH" "/leak?id=eq.1&columns=blob" "16M"

20 changes: 20 additions & 0 deletions test/spec/Feature/Query/JsonOperatorSpec.hs
@@ -56,6 +56,21 @@ spec actualPgVersion = describe "json and jsonb operators" $ do
[json| [{"myInt":1}] |] -- the value in the db is an int, but here we expect a string for now
{ matchHeaders = [matchContentTypeJson] }

it "accepts non reserved special characters in the key's name" $
get "/json_arr?id=eq.10&select=data->!@#$%^%26*_d->>!@#$%^%26*_e::integer" `shouldRespondWith`
[json| [{"!@#$%^&*_e":3}] |]
{ matchHeaders = [matchContentTypeJson] }

it "fails when there is a reserved special character in the key's name" $
get "/json_arr?id=eq.10&select=data->(!@#$%^%26*_d->>!@#$%^%26*_e::integer" `shouldRespondWith`
[json| {
"code":"PGRST100",
"details":"unexpected \"(\" expecting \"-\", digit or any non reserved character different from: .,>()",
"hint":null,
"message":"\"failed to parse select parameter (data->(!@#$%^&*_d->>!@#$%^&*_e::integer)\" (line 1, column 7)"}
|]
{ matchStatus = 400 , matchHeaders = [] }

-- TODO the status code for the error is 404, this is because 42883 represents undefined function
-- this works fine for /rpc/unexistent requests, but for this case a 500 seems more appropriate
it "fails when a double arrow ->> is followed with a single arrow ->" $ do
@@ -178,6 +193,11 @@ spec actualPgVersion = describe "json and jsonb operators" $ do
get "/grandchild_entities?or=(jsonb_col->a->>b.eq.foo, jsonb_col->>b.eq.bar)&select=id" `shouldRespondWith`
[json|[{id: 4}, {id: 5}]|] { matchStatus = 200, matchHeaders = [matchContentTypeJson] }

it "can filter when the key's name has non reserved special characters" $
get "/json_arr?select=data->!@#$%^%26*_d&data->!@#$%^%26*_d->>!@#$%^%26*_e=eq.3" `shouldRespondWith`
[json| [{"!@#$%^&*_d": {"!@#$%^&*_e": 3}}] |]
{ matchHeaders = [matchContentTypeJson] }

it "can filter by array indexes" $ do
get "/json_arr?select=data&data->>0=eq.1" `shouldRespondWith`
[json| [{"data":[1, 2, 3]}] |]
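A small note on the spec URLs above: the keys travel almost verbatim, but `&` is written as `%26` because a raw ampersand would terminate the query parameter before PostgREST ever parses the key; the new grammar operates on the already-decoded text. A rough sketch of that client-side encoding step, using a hypothetical helper that is not part of this PR:

```haskell
import Data.Char (ord)
import Text.Printf (printf)

-- Percent-encode only the characters that the query-string layer itself
-- interprets; everything else in a key like "!@#$%^&*_d" can be sent as-is.
-- (Illustrative helper only; a real client would use its HTTP library's encoder.)
encodeForQueryString :: String -> String
encodeForQueryString = concatMap enc
  where
    reserved = "&=#+% " :: String
    enc c
      | c `elem` reserved = printf "%%%02X" (ord c)
      | otherwise         = [c]

main :: IO ()
main = putStrLn (encodeForQueryString "data->!@#$%^&*_d")
-- prints: data->!@%23$%25^%26*_d
```

The specs only rewrite `&`, and the remaining characters happen to pass through the test harness unchanged; a more cautious client would also encode `#` and `%`, as the sketch does.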
1 change: 1 addition & 0 deletions test/spec/fixtures/data.sql
@@ -512,6 +512,7 @@ INSERT INTO json_arr VALUES (6, '[{"a": [1,2,3]}, {"b": [4,5]}]');
INSERT INTO json_arr VALUES (7, '{"c": [1,2,3], "d": [4,5]}');
INSERT INTO json_arr VALUES (8, '{"c": [{"d": [4,5,6,7,8]}]}');
INSERT INTO json_arr VALUES (9, '[{"0xy1": [1,{"23-xy-45": [2, {"xy-6": [3]}]}]}]');
INSERT INTO json_arr VALUES (10, '{"!@#$%^&*_a": [{"!@#$%^&*_b": 1}, {"!@#$%^&*_c": [2]}], "!@#$%^&*_d": {"!@#$%^&*_e": 3}}');

TRUNCATE TABLE jsonb_test CASCADE;
INSERT INTO jsonb_test VALUES (1, '{ "a": {"b": 2} }');