Commit

Issue #26: Handle duplicate domains (#103)

Marko Dimjašević authored Nov 25, 2019
1 parent bf41f51 commit cb8afc7
Showing 5 changed files with 118 additions and 35 deletions.
46 changes: 23 additions & 23 deletions lib/Fencer/Main.hs
@@ -13,6 +13,7 @@ where
import BasePrelude

import Control.Concurrent.STM (atomically)
import qualified Data.List.NonEmpty as NE
import System.FilePath ((</>))
import qualified System.Logger as Logger
import System.Logger (Logger)
@@ -77,27 +78,26 @@ reloadRules logger settings appState = do
Logger.msg ("Loading rules from " ++ configDir)

-- Read and parse the rules
ruleDefinitionsVal :: Either [LoadRulesError] [DomainDefinition] <-
loadRulesFromDirectory
(#rootDirectory $ settingsRoot settings)
(#subDirectory $ settingsSubdirectory settings </> "config")
(#ignoreDotFiles (settingsIgnoreDotFiles settings))
case ruleDefinitionsVal of
Left fs ->
Logger.err logger $
Logger.msg ("error loading new configuration from runtime: " ++
prettyPrintErrors fs)
Right ruleDefinitions -> do
Logger.info logger $
Logger.msg ("Parsed rules for domains: " ++
show (map (unDomainId . domainDefinitionId) ruleDefinitions))
loadRulesFromDirectory
(#rootDirectory $ settingsRoot settings)
(#subDirectory $ settingsSubdirectory settings </> "config")
(#ignoreDotFiles (settingsIgnoreDotFiles settings))
>>= \case
Left fs ->
Logger.err logger $
Logger.msg ("error loading new configuration from runtime: " ++
prettyPrintErrors (NE.toList fs))
Right ruleDefinitions -> do
Logger.info logger $
Logger.msg ("Parsed rules for domains: " ++
show (map (unDomainId . domainDefinitionId) ruleDefinitions))

-- Recreate 'appStateRules'
--
-- There is no need to remove old rate limiting rules
atomically $
-- See the documentation of 'setRules' for details on what
-- happens with counters during rule reloading.
setRules appState (map domainToRuleTree ruleDefinitions)
Logger.info logger $
Logger.msg (Logger.val "Applied new rules")
-- Recreate 'appStateRules'
--
-- There is no need to remove old rate limiting rules
atomically $
-- See the documentation of 'setRules' for details on what
-- happens with counters during rule reloading.
setRules appState (map domainToRuleTree ruleDefinitions)
Logger.info logger $
Logger.msg (Logger.val "Applied new rules")
33 changes: 29 additions & 4 deletions lib/Fencer/Rules.hs
@@ -8,6 +8,7 @@ module Fencer.Rules
, prettyPrintErrors
, showError
, loadRulesFromDirectory
, validatePotentialDomains
, definitionsToRuleTree
, domainToRuleTree
, applyRules
@@ -19,8 +20,10 @@ import BasePrelude
import Control.Applicative (liftA2)
import Control.Monad.Extra (partitionM, concatMapM, ifM)
import Data.Either (partitionEithers)
import Data.Maybe (catMaybes)
import qualified Data.HashMap.Strict as HM
import Data.List.NonEmpty (NonEmpty)
import Data.Maybe (catMaybes)
import qualified Data.List.NonEmpty as NE
import Named ((:!), arg)
import System.Directory (listDirectory, doesFileExist, doesDirectoryExist, getPermissions, pathIsSymbolicLink, readable)
import System.FilePath ((</>), makeRelative, normalise, splitDirectories)
@@ -31,13 +34,16 @@ import Fencer.Types
data LoadRulesError
= LoadRulesParseError FilePath Yaml.ParseException
| LoadRulesIOError IOException
| LoadRulesDuplicateDomain DomainId
deriving stock (Show)

-- | Pretty-print a 'LoadRulesError'.
showError :: LoadRulesError -> String
showError (LoadRulesParseError file yamlEx) =
show file ++ ", " ++ (Yaml.prettyPrintParseException yamlEx)
showError (LoadRulesIOError ex) = "IO error: " ++ displayException ex
showError (LoadRulesDuplicateDomain d) =
"duplicate domain " ++ (show . unDomainId $ d) ++ " in config file"

-- | Pretty-print a list of 'LoadRulesError's.
prettyPrintErrors :: [LoadRulesError] -> String
@@ -55,7 +61,7 @@ loadRulesFromDirectory
:: "rootDirectory" :! FilePath
-> "subDirectory" :! FilePath
-> "ignoreDotFiles" :! Bool
-> IO (Either [LoadRulesError] [DomainDefinition])
-> IO (Either (NonEmpty LoadRulesError) [DomainDefinition])
loadRulesFromDirectory
(arg #rootDirectory -> rootDirectory)
(arg #subDirectory -> subDirectory)
@@ -67,8 +73,7 @@ loadRulesFromDirectory
let filteredFiles = if ignoreDotFiles
then filter (not . isDotFile) files
else files
(errs, mRules) <- partitionEithers <$> mapM loadFile filteredFiles
pure $ if (null @[] errs) then Right (catMaybes mRules) else Left errs
validatePotentialDomains <$> mapM loadFile filteredFiles
where
loadFile :: FilePath -> IO (Either LoadRulesError (Maybe DomainDefinition))
loadFile file =
@@ -115,6 +120,26 @@ loadRulesFromDirectory
dirs <- filterM isDirectory other
(files ++) <$> concatMapM listAllFiles dirs

-- | Perform validation checks to make sure the behavior matches that
-- of @lyft/ratelimit@.
validatePotentialDomains
:: [Either LoadRulesError (Maybe DomainDefinition)]
-> Either (NonEmpty LoadRulesError) [DomainDefinition]
validatePotentialDomains res = case partitionEithers res of
(errs@(_:_), _ ) -> Left $ NE.fromList errs
([] , mRules) -> do
-- check if there are any duplicate domains
let
rules = catMaybes mRules
groupedRules :: [NonEmpty DomainDefinition] = NE.groupBy
((==) `on` (unDomainId . domainDefinitionId))
(NE.fromList $ sortOn domainDefinitionId rules)
if (length @[] rules /= length @[] groupedRules)
then
let dupDomain = NE.head . head $ filter (\l -> NE.length l > 1) groupedRules
in Left . pure . LoadRulesDuplicateDomain . domainDefinitionId $ dupDomain
else Right rules
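-- A rough usage sketch of the duplicate check (hypothetical 'defA' and
-- 'defB' are parsed definitions that share the domain id "domain1"; a
-- 'Right Nothing' stands for an empty file that was skipped; output shown
-- assuming a derived 'Show' for 'DomainId'):
--
-- >>> validatePotentialDomains [Right (Just defA), Right Nothing, Right (Just defB)]
-- Left (LoadRulesDuplicateDomain (DomainId "domain1") :| [])
--
-- With distinct domain ids the same call would return 'Right [defA, defB]'.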

-- | Convert a list of descriptors to a 'RuleTree'.
definitionsToRuleTree :: [DescriptorDefinition] -> RuleTree
definitionsToRuleTree = HM.fromList . map (\d -> (makeKey d, makeBranch d))
22 changes: 21 additions & 1 deletion test/Fencer/Rules/Test.hs
@@ -9,6 +9,7 @@ module Fencer.Rules.Test

import BasePrelude

import qualified Data.List.NonEmpty as NE
import qualified Data.Yaml as Yaml
import qualified System.Directory as Dir
import System.FilePath ((</>))
@@ -19,6 +20,7 @@ import Fencer.Rules
import Fencer.Rules.Test.Examples
import Fencer.Rules.Test.Helpers (expectLoadRules)
import Fencer.Rules.Test.Types
import Fencer.Types (DomainId(..))


tests :: TestTree
@@ -33,6 +35,7 @@ tests = testGroup "Rule tests"
, test_rulesLoadRulesMinimal
, test_rulesYAMLSeparator
, test_rulesLoadRulesReadPermissions
, test_rulesLoadRulesDuplicateDomain
]

-- | test that 'loadRulesFromDirectory' loads rules from YAML files.
@@ -136,7 +139,7 @@ test_rulesLoadRulesException =
, simpleRuleFile "faultyDomain.yaml" faultyDomain
]
)
(#result $ Left
(#result $ Left $ NE.fromList
[LoadRulesParseError "faultyDomain.yaml" $
Yaml.AesonException
"Error in $.descriptors[1]: key \"key\" not present"])
@@ -167,6 +170,23 @@ test_rulesYAMLSeparator =
(#files [simpleRuleFile "sep.yaml" separatorDomainText] )
(#result $ Right [separatorDomain])

-- | test that 'loadRulesFromDirectory' rejects a configuration with a
-- duplicate domain.
--
-- This matches the behavior of @lyft/ratelimit@.
test_rulesLoadRulesDuplicateDomain :: TestTree
test_rulesLoadRulesDuplicateDomain =
testCase "Error on a configuration with a duplicate domain" $
expectLoadRules
(#ignoreDotFiles False)
(#files
[ simpleRuleFile "one.yaml" domainDescriptorKeyValueText
, simpleRuleFile "two.yaml" domainDescriptorKeyValueText
]
)
(#result $
Left $ NE.fromList [LoadRulesDuplicateDomain $ DomainId "domain1"])
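-- Note that the two rule files above are written from the same fixture text
-- and differ only in name, so the loader parses the domain "domain1" twice
-- and the expected error carries that single duplicated id.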

-- | test that 'loadRulesFromDirectory' loads a configuration file in
-- presence of another configuration file without read permissions.
--
14 changes: 8 additions & 6 deletions test/Fencer/Rules/Test/Helpers.hs
@@ -11,6 +11,8 @@ where

import BasePrelude

import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import qualified Data.Text.IO as TIO
import Named ((:!), arg)
import qualified System.Directory as Dir
@@ -46,7 +48,7 @@ writeAndLoadRules
:: "ignoreDotFiles" :! Bool
-> "root" :! FilePath
-> "files" :! [RuleFile]
-> IO (Either [LoadRulesError] [DomainDefinition])
-> IO (Either (NonEmpty LoadRulesError) [DomainDefinition])
writeAndLoadRules
(arg #ignoreDotFiles -> ignoreDotFiles)
(arg #root -> root)
@@ -66,7 +68,7 @@ writeAndLoadRules
expectLoadRules
:: "ignoreDotFiles" :! Bool
-> "files" :! [RuleFile]
-> "result" :! Either [LoadRulesError] [DomainDefinition]
-> "result" :! Either (NonEmpty LoadRulesError) [DomainDefinition]
-> Assertion
expectLoadRules
(arg #ignoreDotFiles -> ignoreDotFiles)
@@ -84,11 +86,11 @@ expectLoadRules
assertFailure "Expected failures, got domain definitions!"
Left expectedErrs ->
assertBool ("Exceptions differ! Expected: " ++
(prettyPrintErrors expectedErrs) ++ "\nGot: " ++
(prettyPrintErrors errs))
(prettyPrintErrors $ NE.toList expectedErrs) ++ "\nGot: " ++
(prettyPrintErrors $ NE.toList errs))
(((==) `on` (fmap showError))
(sortBy (compare `on` showError) (trimPath <$> expectedErrs))
(sortBy (compare `on` showError) (trimPath <$> errs)))
(NE.sortBy (compare `on` showError) (trimPath <$> expectedErrs))
(NE.sortBy (compare `on` showError) (trimPath <$> errs)))
Right definitions -> assertBool "unexpected definitions"
(((==) `on` show)
(sortOn domainDefinitionId <$> result)
38 changes: 37 additions & 1 deletion test/Fencer/Server/Test.hs
@@ -31,7 +31,7 @@ import Fencer.Server
import Fencer.Settings (defaultGRPCPort, getLogLevel, newLogger)
import Fencer.Types
import Fencer.Rules
import Fencer.Rules.Test.Examples (domainDescriptorKeyValueText, domainDescriptorKeyText)
import Fencer.Rules.Test.Examples (domainDescriptorKeyValueText, domainDescriptorKeyText, domainDescriptorKeyValue)
import Fencer.Rules.Test.Helpers (writeAndLoadRules)
import Fencer.Rules.Test.Types (RuleFile(..), simpleRuleFile)
import qualified Fencer.Proto as Proto
@@ -48,6 +48,7 @@ tests = testGroup "Server tests"
, test_serverResponseEmptyDomain
, test_serverResponseEmptyDescriptorList
, test_serverResponseReadPermissions
, test_serverResponseDuplicateDomain
]

-- | Test that when Fencer is started without any rules provided to it (i.e.
Expand Down Expand Up @@ -196,6 +197,41 @@ test_serverResponseReadPermissions =
, rateLimitResponseHeaders = Vector.empty
}

-- | Test that a request with a non-empty descriptor list results in a
-- response with an unknown status code in presence of a configuration
-- with a duplicate domain.
--
-- This behavior matches @lyft/ratelimit@.
test_serverResponseDuplicateDomain :: TestTree
test_serverResponseDuplicateDomain =
withResource createServer destroyServer $ \serverIO ->
testCase "In presence of duplicate domains all requests error" $ do
server <- serverIO
pure (validatePotentialDomains $ Right . Just <$> domains) >>= \case
Left _ ->
withService server $ \service -> do
response <- Proto.rateLimitServiceShouldRateLimit service $
Grpc.ClientNormalRequest request 1 mempty
expectError
(unknownError "no rate limit configuration loaded")
response
Right _ -> assertFailure $
"Expected a failure, and got domain definitions instead"
where
domains :: [DomainDefinition]
domains = replicate 2 domainDescriptorKeyValue

request :: Proto.RateLimitRequest
request = Proto.RateLimitRequest
{ Proto.rateLimitRequestDomain = "domain1"
, Proto.rateLimitRequestDescriptors =
fromList $
[ Proto.RateLimitDescriptor $
fromList [Proto.RateLimitDescriptor_Entry "some key" ""]
]
, Proto.rateLimitRequestHitsAddend = 0
}
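-- Because 'domains' is two copies of the same definition,
-- 'validatePotentialDomains' is expected to return 'Left'; only in that
-- branch does the test query the server, which should answer every request
-- with the generic "no rate limit configuration loaded" error.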

----------------------------------------------------------------------------
-- Helpers
----------------------------------------------------------------------------