From 8625f33b8e9f8b162fbc176b56edbb5c48a64145 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Tue, 31 Aug 2021 10:15:03 -0400 Subject: [PATCH 001/297] eyeballed wip of cycle-length-removal --- parser-typechecker/src/Unison/Codebase.hs | 7 ++ .../src/Unison/Codebase/Conversion/Sync12.hs | 88 +++++++++---------- .../src/Unison/Codebase/Editor/Command.hs | 9 +- .../src/Unison/Codebase/Editor/HandleInput.hs | 29 +----- .../src/Unison/Codebase/SqliteCodebase.hs | 2 +- .../SqliteCodebase/Branch/Dependencies.hs | 10 +-- .../Codebase/SqliteCodebase/Conversions.hs | 86 +++++++++--------- .../src/Unison/PrettyPrintEnv.hs | 6 +- .../tests/Unison/Test/Referent.hs | 4 +- unison-core/src/Unison/ABT.hs | 5 +- unison-core/src/Unison/Reference.hs | 82 +++++++---------- unison-core/src/Unison/Reference/Util.hs | 2 +- unison-core/src/Unison/Term.hs | 7 +- unison-core/src/Unison/Type.hs | 2 +- 14 files changed, 150 insertions(+), 189 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase.hs b/parser-typechecker/src/Unison/Codebase.hs index 9e858a0ea9..f6ae692caf 100644 --- a/parser-typechecker/src/Unison/Codebase.hs +++ b/parser-typechecker/src/Unison/Codebase.hs @@ -45,6 +45,7 @@ import Unison.Var (Var) import qualified Unison.Var as Var import UnliftIO.Directory (getHomeDirectory) import qualified Unison.Codebase.GitError as GitError +import Unison.Hash (Hash) type DataDeclaration v a = DD.DataDeclaration v a @@ -67,6 +68,12 @@ data Codebase m v a = , getTypeOfTermImpl :: Reference.Id -> m (Maybe (Type v a)) , getTypeDeclaration :: Reference.Id -> m (Maybe (Decl v a)) + , getTermComponent :: Hash -> m (Maybe [Term v a]) + , getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]) + , getTermComponentLength :: Hash -> m (Reference.CycleSize) + , getDeclComponent :: Hash -> m (Maybe [Decl v a]) + , getDeclComponentLength :: Hash -> m (Reference.CycleSize) + , putTerm :: Reference.Id -> Term v a -> Type v a -> m () , putTypeDeclaration :: Reference.Id -> Decl v a -> m () diff --git a/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs b/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs index 1de51313a8..8f99ba01db 100644 --- a/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs +++ b/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs @@ -24,16 +24,10 @@ import Control.Monad.Validate (MonadValidate, runValidateT) import qualified Control.Monad.Validate as Validate import Control.Natural (type (~>)) import Data.Bifoldable (bitraverse_) -import Data.Foldable (traverse_) import qualified Data.Foldable as Foldable -import Data.Functor (($>)) import qualified Data.List as List -import Data.Map (Map) import qualified Data.Map as Map -import Data.Maybe (catMaybes) import qualified Data.Set as Set -import Data.Traversable (for) -import Debug.Trace (traceM) import System.IO (stdout) import System.IO.Extra (hFlush) import U.Codebase.Sqlite.Connection (Connection) @@ -55,7 +49,7 @@ import Unison.Hash (Hash) import qualified Unison.Hashable as H import qualified Unison.LabeledDependency as LD import Unison.NameSegment (NameSegment) -import Unison.Prelude (Set) +import Unison.Prelude import qualified Unison.Reference as Reference import Unison.Referent (Referent) import qualified Unison.Referent as Referent @@ -80,9 +74,9 @@ data Env m a = Env data Entity m = C Branch.Hash (m (UnwrappedBranch m)) - | T Hash Reference.Size + | T Hash | W WatchKind Reference.Id - | D Hash Reference.Size + | D Hash | P Branch.EditHash type V m n = MonadValidate (Set (Entity m)) n @@ -179,11 +173,11 @@ trySync t e = do else do setBranchStatus h (BranchReplaced h' c') pure Sync.NonFatalError - T h n -> + T h -> getTermStatus h >>= \case Just {} -> pure Sync.PreviouslyDone Nothing -> do - runExceptT (runValidateT (checkTermComponent (lift . lift . t) h n)) >>= \case + runExceptT (runValidateT (checkTermComponent (lift . lift . t) h)) >>= \case Left status -> do setTermStatus h status pure Sync.NonFatalError @@ -191,7 +185,7 @@ trySync t e = do pure . Sync.Missing $ Foldable.toList deps Right (Right component) -> do Foldable.for_ (zip component [0 ..]) \((term, typ), i) -> - t $ Codebase.putTerm dest (Reference.Id h i n) term typ + t $ Codebase.putTerm dest (Reference.Id h i) term typ setTermStatus h TermOk pure Sync.Done W k r -> @@ -208,11 +202,11 @@ trySync t e = do t $ Codebase.putWatch dest k r watchResult setWatchStatus k r WatchOk pure Sync.Done - D h n -> + D h -> getDeclStatus h >>= \case Just {} -> pure Sync.PreviouslyDone Nothing -> - runExceptT (runValidateT (checkDeclComponent (lift . lift . t) h n)) >>= \case + runExceptT (runValidateT (checkDeclComponent (lift . lift . t) h)) >>= \case Left status -> do setDeclStatus h status pure Sync.NonFatalError @@ -220,7 +214,7 @@ trySync t e = do pure . Sync.Missing $ Foldable.toList deps Right (Right component) -> do Foldable.for_ (zip component [0 ..]) \(decl, i) -> - t $ Codebase.putTypeDeclaration dest (Reference.Id h i n) decl + t $ Codebase.putTypeDeclaration dest (Reference.Id h i) decl setDeclStatus h DeclOk pure Sync.Done P h -> @@ -271,20 +265,21 @@ setBranchStatus h s = do branchStatus . at h .= Just s setWatchStatus :: S m n => WatchKind -> Reference.Id -> WatchStatus -> n () -setWatchStatus k r@(Reference.Id h i _) s = do +setWatchStatus k r@(Reference.Id h i) s = do when debug (traceM $ "setWatchStatus " ++ show k ++ " " ++ take 10 (show h) ++ " " ++ show i) watchStatus . at (k, r) .= Just s +-- | verifies that the entire term component, the types-of-terms, and dependencies are available checkTermComponent :: forall m n a. (RS m n a, V m n, E TermStatus n) => (m ~> n) -> Hash -> - Reference.Size -> n [(Term Symbol a, Type Symbol a)] -checkTermComponent t h n = do +checkTermComponent t h = do Env src _ _ <- Reader.ask - for [Reference.Id h i n | i <- [0 .. n -1]] \r -> do + n <- t $ Codebase.getTermComponentLength src h + for [Reference.Id h i | i <- [0 .. n -1]] \r -> do term <- t $ Codebase.getTerm src r typ <- t $ Codebase.getTypeOfTermImpl src r case (term, typ) of @@ -295,22 +290,22 @@ checkTermComponent t h n = do typeDeps = Type.dependencies typ let checkDecl = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.DerivedId (Reference.Id h' _) -> getDeclStatus h' >>= \case Just DeclOk -> pure () Just _ -> Except.throwError TermMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ D h' n' + Nothing -> Validate.dispute . Set.singleton $ D h' checkTerm = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ _) + Reference.DerivedId (Reference.Id h' _) | h == h' -> pure () -- ignore self-references - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.DerivedId (Reference.Id h' _) -> getTermStatus h' >>= \case Just TermOk -> pure () Just _ -> Except.throwError TermMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ T h' n' + Nothing -> Validate.dispute . Set.singleton $ T h' traverse_ (bitraverse_ checkDecl checkTerm . LD.toReference) termDeps traverse_ checkDecl typeDeps pure (term, typ) @@ -322,7 +317,7 @@ checkWatchComponent :: WatchKind -> Reference.Id -> n (Term Symbol a) -checkWatchComponent t k r@(Reference.Id h _ _) = do +checkWatchComponent t k r@(Reference.Id h _) = do Env src _ _ <- Reader.ask (t $ Codebase.getWatch src k r) >>= \case Nothing -> Except.throwError WatchNotCached @@ -330,22 +325,22 @@ checkWatchComponent t k r@(Reference.Id h _ _) = do let deps = Term.labeledDependencies watchResult let checkDecl = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.DerivedId (Reference.Id h' _) -> getDeclStatus h' >>= \case Just DeclOk -> pure () Just _ -> Except.throwError WatchMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ D h' n' + Nothing -> Validate.dispute . Set.singleton $ D h' checkTerm = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ _) + Reference.DerivedId (Reference.Id h' _) | h == h' -> pure () -- ignore self-references - Reference.DerivedId (Reference.Id h' _ n') -> + Reference.DerivedId (Reference.Id h' _) -> getTermStatus h' >>= \case Just TermOk -> pure () Just _ -> Except.throwError WatchMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ T h' n' + Nothing -> Validate.dispute . Set.singleton $ T h' traverse_ (bitraverse_ checkDecl checkTerm . LD.toReference) deps pure watchResult @@ -354,11 +349,11 @@ checkDeclComponent :: (RS m n a, E DeclStatus n, V m n) => (m ~> n) -> Hash -> - Reference.Size -> n [Decl Symbol a] -checkDeclComponent t h n = do +checkDeclComponent t h = do Env src _ _ <- Reader.ask - for [Reference.Id h i n | i <- [0 .. n -1]] \r -> do + n <- t $ Codebase.getDeclComponentLength src h + for [Reference.Id h i | i <- [0 .. n - 1]] \r -> do decl <- t $ Codebase.getTypeDeclaration src r case decl of Nothing -> Except.throwError DeclMissing @@ -366,12 +361,11 @@ checkDeclComponent t h n = do let deps = DD.declDependencies decl checkDecl = \case Reference.Builtin {} -> pure () - Reference.DerivedId (Reference.Id h' _ _) | h == h' -> pure () - Reference.DerivedId (Reference.Id h' _ n') -> - getDeclStatus h' >>= \case + Reference.DerivedId (Reference.Id h' _) -> + unless (h == h') $ getDeclStatus h' >>= \case Just DeclOk -> pure () Just _ -> Except.throwError DeclMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ D h' n' + Nothing -> Validate.dispute . Set.singleton $ D h' traverse_ checkDecl deps pure decl @@ -450,16 +444,16 @@ repairPatch (Patch termEdits typeEdits) = do -- reference to it. See Sync22.syncPatchLocalIds helpTermEdit = \case Reference.Builtin _ -> pure True - Reference.DerivedId (Reference.Id h _ n) -> + Reference.DerivedId (Reference.Id h _) -> getTermStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ T h n + Nothing -> Validate.refute . Set.singleton $ T h Just TermOk -> pure True Just _ -> pure False helpTypeEdit = \case Reference.Builtin _ -> pure True - Reference.DerivedId (Reference.Id h _ n) -> + Reference.DerivedId (Reference.Id h _) -> getDeclStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ D h n + Nothing -> Validate.refute . Set.singleton $ D h Just DeclOk -> pure True Just _ -> pure False filterTermEdit old new = do @@ -506,18 +500,18 @@ validateTermReferent = \case validateTermReference :: (S m n, V m n) => Reference.Reference -> n Bool validateTermReference = \case Reference.Builtin {} -> pure True - Reference.DerivedId (Reference.Id h _i n) -> + Reference.DerivedId (Reference.Id h _i) -> getTermStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ T h n + Nothing -> Validate.refute . Set.singleton $ T h Just TermOk -> pure True Just _ -> pure False validateTypeReference :: (S m n, V m n) => Reference.Reference -> n Bool validateTypeReference = \case Reference.Builtin {} -> pure True - Reference.DerivedId (Reference.Id h _i n) -> + Reference.DerivedId (Reference.Id h _i) -> getDeclStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ D h n + Nothing -> Validate.refute . Set.singleton $ D h Just DeclOk -> pure True Just _ -> pure False @@ -666,8 +660,8 @@ data Entity' toEntity' :: Entity m -> Entity' toEntity' = \case C h _ -> C' h - T h _ -> T' h - D h _ -> D' h + T h -> T' h + D h -> D' h P h -> P' h W k r -> W' k r diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs index 265e2bd451..90402f64f5 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs @@ -60,6 +60,7 @@ import Unison.Name (Name) import Unison.Server.QueryResult (QueryResult) import qualified Unison.Server.SearchResult as SR import qualified Unison.Server.SearchResult' as SR' +import qualified Unison.Hash as H type AmbientAbilities v = [Type v Ann] type SourceName = Text @@ -199,10 +200,13 @@ data Command m i v a where LoadReflog :: Command m i v [Reflog.Entry] LoadTerm :: Reference.Id -> Command m i v (Maybe (Term v Ann)) + LoadTermComponent :: H.Hash -> Command m i v (Maybe [Term v Ann]) + LoadTermComponentWithType :: H.Hash -> Command m i v (Maybe [(Term v Ann, Type v Ann)]) -- todo: change this to take Reference and return DeclOrBuiltin + -- todo: change this to LoadDecl LoadType :: Reference.Id -> Command m i v (Maybe (Decl v Ann)) - + LoadDeclComponent :: H.Hash -> Command m i v (Maybe [Decl v Ann]) LoadTypeOfTerm :: Reference -> Command m i v (Maybe (Type v Ann)) PutTerm :: Reference.Id -> Term v Ann -> Type v Ann -> Command m i v () @@ -213,11 +217,14 @@ data Command m i v a where -- (why, again? because we can know from the Reference?) IsTerm :: Reference -> Command m i v Bool IsType :: Reference -> Command m i v Bool + IsDerivedTerm :: H.Hash -> Command m i v Bool + IsDerivedType :: H.Hash -> Command m i v Bool -- Get the immediate (not transitive) dependents of the given reference -- This might include historical definitions not in any current path; these -- should be filtered by the caller of this command if that's not desired. GetDependents :: Reference -> Command m i v (Set Reference) + GetDependentsOfComponent :: H.Hash -> Command m i v (Set Reference) GetTermsOfType :: Type v Ann -> Command m i v (Set Referent) GetTermsMentioningType :: Type v Ann -> Command m i v (Set Referent) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs index d087a5ba18..abdf49d7ad 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs @@ -2036,7 +2036,8 @@ showTodoOutput getPpe patch names0 = do checkTodo :: Patch -> Names0 -> Action m i v (TO.TodoOutput v Ann) checkTodo patch names0 = do - f <- computeFrontier (eval . GetDependents) patch names0 + let careToUpdate = Names.contains names0 + f <- Propagate.computeFrontier (eval . GetDependents) patch careToUpdate let dirty = R.dom f frontier = R.ran f (frontierTerms, frontierTypes) <- loadDisplayInfo frontier @@ -2066,32 +2067,6 @@ checkTodo patch names0 = do -- we don't want the frontier in the result pure $ tdeps `Set.difference` rs --- (d, f) when d is "dirty" (needs update), --- f is in the frontier (an edited dependency of d), --- and d depends on f --- a ⋖ b = a depends directly on b --- dirty(d) ∧ frontier(f) <=> not(edited(d)) ∧ edited(f) ∧ d ⋖ f --- --- The range of this relation is the frontier, and the domain is --- the set of dirty references. -computeFrontier :: forall m . Monad m - => (Reference -> m (Set Reference)) -- eg Codebase.dependents codebase - -> Patch - -> Names0 - -> m (R.Relation Reference Reference) -computeFrontier getDependents patch names = let - edited :: Set Reference - edited = R.dom (Patch._termEdits patch) <> R.dom (Patch._typeEdits patch) - addDependents :: R.Relation Reference Reference -> Reference -> m (R.Relation Reference Reference) - addDependents dependents ref = - (\ds -> R.insertManyDom ds ref dependents) . Set.filter (Names.contains names) - <$> getDependents ref - in do - -- (r,r2) ∈ dependsOn if r depends on r2 - dependsOn <- foldM addDependents R.empty edited - -- Dirty is everything that `dependsOn` Frontier, minus already edited defns - pure $ R.filterDom (not . flip Set.member edited) dependsOn - eval :: Command m i v a -> Action m i v a eval = lift . lift . Free.eval diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 6cab43e216..fbf4f14cb7 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -284,7 +284,7 @@ sqliteCodebase debugName root = do cycleLengthCache <- Cache.semispaceCache 8192 declTypeCache <- Cache.semispaceCache 2048 let getTerm :: MonadIO m => Reference.Id -> m (Maybe (Term Symbol Ann)) - getTerm (Reference.Id h1@(Cv.hash1to2 -> h2) i _n) = + getTerm (Reference.Id h1@(Cv.hash1to2 -> h2) i) = runDB' conn do term2 <- Ops.loadTermByReference (C.Reference.Id h2 i) Cv.term2to1 h1 (getCycleLen "getTerm") getDeclType term2 diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs index df4dc0cef3..63584c47fc 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs @@ -74,13 +74,13 @@ fromBranch0 b = fromTermsStar :: Branch.Star Referent NameSegment -> Dependencies fromTermsStar s = Dependencies mempty terms decls where terms = Set.fromList $ - [ h | Referent.Ref (Derived h _ _) <- references s] ++ - [ h | (Derived h _ _) <- mdValues s] + [ h | Referent.Ref (Derived h _) <- references s] ++ + [ h | (Derived h _) <- mdValues s] decls = Set.fromList $ - [ h | Referent.Con (Derived h _i _n) _ _ <- references s ] + [ h | Referent.Con (Derived h _i) _ _ <- references s ] fromTypesStar :: Branch.Star Reference NameSegment -> Dependencies fromTypesStar s = Dependencies mempty terms decls where - terms = Set.fromList [ h | (Derived h _ _) <- mdValues s ] - decls = Set.fromList [ h | (Derived h _ _) <- references s ] + terms = Set.fromList [ h | (Derived h _) <- mdValues s ] + decls = Set.fromList [ h | (Derived h _) <- references s ] fromEdits :: Map NameSegment (EditHash, m Patch) -> Dependencies fromEdits m = Dependencies (Set.fromList . fmap fst $ toList m) mempty mempty diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index 34e9abae35..63cf8d418f 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -146,15 +146,15 @@ term1to2 h = V1.Pattern.Snoc -> V2.Term.PSnoc V1.Pattern.Concat -> V2.Term.PConcat -term2to1 :: forall m. Monad m => Hash -> (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.Term V2.Symbol -> m (V1.Term.Term V1.Symbol Ann) -term2to1 h lookupSize lookupCT tm = - V1.ABT.transformM (termF2to1 h lookupSize lookupCT) +term2to1 :: forall m. Monad m => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.Term V2.Symbol -> m (V1.Term.Term V1.Symbol Ann) +term2to1 h lookupCT tm = + V1.ABT.transformM (termF2to1 h lookupCT) . V1.ABT.vmap symbol2to1 . V1.ABT.amap (const Ann.External) $ abt2to1 tm where - termF2to1 :: forall m a. Monad m => Hash -> (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.F V2.Symbol a -> m (V1.Term.F V1.Symbol Ann Ann a) - termF2to1 h lookupSize lookupCT = go + termF2to1 :: forall m a. Monad m => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.F V2.Symbol a -> m (V1.Term.F V1.Symbol Ann Ann a) + termF2to1 h lookupCT = go where go :: V2.Term.F V2.Symbol a -> m (V1.Term.F V1.Symbol Ann Ann a) go = \case @@ -164,14 +164,14 @@ term2to1 h lookupSize lookupCT tm = V2.Term.Boolean b -> pure $ V1.Term.Boolean b V2.Term.Text t -> pure $ V1.Term.Text t V2.Term.Char c -> pure $ V1.Term.Char c - V2.Term.Ref r -> V1.Term.Ref <$> rreference2to1 h lookupSize r + V2.Term.Ref r -> pure $ V1.Term.Ref (rreference2to1 h r) V2.Term.Constructor r i -> - V1.Term.Constructor <$> reference2to1 lookupSize r <*> pure (fromIntegral i) + pure $ V1.Term.Constructor (reference2to1 r) (fromIntegral i) V2.Term.Request r i -> - V1.Term.Request <$> reference2to1 lookupSize r <*> pure (fromIntegral i) + pure $ V1.Term.Request (reference2to1 r) (fromIntegral i) V2.Term.Handle a a4 -> pure $ V1.Term.Handle a a4 V2.Term.App a a4 -> pure $ V1.Term.App a a4 - V2.Term.Ann a t2 -> V1.Term.Ann a <$> ttype2to1 lookupSize t2 + V2.Term.Ann a t2 -> V1.Term.Ann a <$> ttype2to1 t2 V2.Term.List sa -> pure $ V1.Term.List sa V2.Term.If a a4 a5 -> pure $ V1.Term.If a a4 a5 V2.Term.And a a4 -> pure $ V1.Term.And a a4 @@ -180,8 +180,8 @@ term2to1 h lookupSize lookupCT tm = V2.Term.LetRec as a -> pure $ V1.Term.LetRec False as a V2.Term.Let a a4 -> pure $ V1.Term.Let False a a4 V2.Term.Match a cases -> V1.Term.Match a <$> traverse goCase cases - V2.Term.TermLink rr -> V1.Term.TermLink <$> rreferent2to1 h lookupSize lookupCT rr - V2.Term.TypeLink r -> V1.Term.TypeLink <$> reference2to1 lookupSize r + V2.Term.TermLink rr -> V1.Term.TermLink <$> rreferent2to1 h lookupCT rr + V2.Term.TypeLink r -> pure $ V1.Term.TypeLink (reference2to1 r) goCase = \case V2.Term.MatchCase pat cond body -> V1.Term.MatchCase <$> (goPat pat) <*> pure cond <*> pure body @@ -195,10 +195,10 @@ term2to1 h lookupSize lookupCT tm = V2.Term.PText t -> pure $ V1.Pattern.Text a t V2.Term.PChar c -> pure $ V1.Pattern.Char a c V2.Term.PConstructor r i ps -> - V1.Pattern.Constructor a <$> reference2to1 lookupSize r <*> pure i <*> (traverse goPat ps) + V1.Pattern.Constructor a (reference2to1 r) i <$> (traverse goPat ps) V2.Term.PAs p -> V1.Pattern.As a <$> goPat p V2.Term.PEffectPure p -> V1.Pattern.EffectPure a <$> goPat p - V2.Term.PEffectBind r i ps p -> V1.Pattern.EffectBind a <$> reference2to1 lookupSize r <*> pure i <*> traverse goPat ps <*> goPat p + V2.Term.PEffectBind r i ps p -> V1.Pattern.EffectBind a (reference2to1 r) i <$> traverse goPat ps <*> goPat p V2.Term.PSequenceLiteral ps -> V1.Pattern.SequenceLiteral a <$> traverse goPat ps V2.Term.PSequenceOp p1 op p2 -> V1.Pattern.SequenceOp a <$> goPat p1 <*> pure (goOp op) <*> goPat p2 goOp = \case @@ -247,8 +247,7 @@ symbol1to2 x = error $ "unimplemented: symbol1to2 " ++ show x shortHashSuffix1to2 :: Text -> V1.Reference.Pos shortHashSuffix1to2 = - fst - . fromRight (error "todo: move suffix parsing to frontend") + fromRight (error "todo: move suffix parsing to frontend") . V1.Reference.readSuffix abt2to1 :: Functor f => V2.ABT.Term f v a -> V1.ABT.Term f v a @@ -269,24 +268,22 @@ abt1to2 (V1.ABT.Term fv a out) = V2.ABT.Term fv a (go out) V1.ABT.Var v -> V2.ABT.Var v V1.ABT.Tm tm -> V2.ABT.Tm (abt1to2 <$> tm) -rreference2to1 :: Applicative m => Hash -> (Hash -> m V1.Reference.Size) -> V2.Reference' Text (Maybe V2.Hash) -> m V1.Reference -rreference2to1 h lookupSize = \case +rreference2to1 :: Applicative m => Hash -> V2.Reference' Text (Maybe V2.Hash) -> m V1.Reference +rreference2to1 h = \case V2.ReferenceBuiltin t -> pure $ V1.Reference.Builtin t - V2.ReferenceDerived i -> V1.Reference.DerivedId <$> rreferenceid2to1 h lookupSize i + V2.ReferenceDerived i -> V1.Reference.DerivedId <$> rreferenceid2to1 h i rreference1to2 :: Hash -> V1.Reference -> V2.Reference' Text (Maybe V2.Hash) rreference1to2 h = \case V1.Reference.Builtin t -> V2.ReferenceBuiltin t V1.Reference.DerivedId i -> V2.ReferenceDerived (rreferenceid1to2 h i) -rreferenceid2to1 :: Functor m => Hash -> (Hash -> m V1.Reference.Size) -> V2.Reference.Id' (Maybe V2.Hash) -> m V1.Reference.Id -rreferenceid2to1 h lookupSize (V2.Reference.Id oh i) = - V1.Reference.Id h' i <$> lookupSize h' - where - h' = maybe h hash2to1 oh +rreferenceid2to1 :: Hash -> V2.Reference.Id' (Maybe V2.Hash) -> V1.Reference.Id +rreferenceid2to1 h (V2.Reference.Id oh i) = V1.Reference.Id h' i + where h' = maybe h hash2to1 oh rreferenceid1to2 :: Hash -> V1.Reference.Id -> V2.Reference.Id' (Maybe V2.Hash) -rreferenceid1to2 h (V1.Reference.Id h' i _n) = V2.Reference.Id oh i +rreferenceid1to2 h (V1.Reference.Id h' i) = V2.Reference.Id oh i where oh = if h == h' then Nothing else Just (hash1to2 h') @@ -302,10 +299,10 @@ branchHash2to1 = V1.Causal.RawHash . hash2to1 . V2.unCausalHash patchHash1to2 :: V1.Branch.EditHash -> V2.PatchHash patchHash1to2 = V2.PatchHash . hash1to2 -reference2to1 :: Applicative m => (Hash -> m V1.Reference.Size) -> V2.Reference -> m V1.Reference -reference2to1 lookupSize = \case - V2.ReferenceBuiltin t -> pure $ V1.Reference.Builtin t - V2.ReferenceDerived i -> V1.Reference.DerivedId <$> referenceid2to1 lookupSize i +reference2to1 :: V2.Reference -> V1.Reference +reference2to1 = \case + V2.ReferenceBuiltin t -> V1.Reference.Builtin t + V2.ReferenceDerived i -> V1.Reference.DerivedId $ referenceid2to1 i reference1to2 :: V1.Reference -> V2.Reference reference1to2 = \case @@ -313,41 +310,38 @@ reference1to2 = \case V1.Reference.DerivedId i -> V2.ReferenceDerived (referenceid1to2 i) referenceid1to2 :: V1.Reference.Id -> V2.Reference.Id -referenceid1to2 (V1.Reference.Id h i _n) = V2.Reference.Id (hash1to2 h) i +referenceid1to2 (V1.Reference.Id h i) = V2.Reference.Id (hash1to2 h) i -referenceid2to1 :: Functor m => (Hash -> m V1.Reference.Size) -> V2.Reference.Id -> m V1.Reference.Id -referenceid2to1 lookupSize (V2.Reference.Id h i) = - V1.Reference.Id sh i <$> lookupSize sh +referenceid2to1 :: V2.Reference.Id -> V1.Reference.Id +referenceid2to1 (V2.Reference.Id h i) = V1.Reference.Id sh i where sh = hash2to1 h -rreferent2to1 :: Applicative m => Hash -> (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.ReferentH -> m V1.Referent -rreferent2to1 h lookupSize lookupCT = \case - V2.Ref r -> V1.Ref <$> rreference2to1 h lookupSize r - V2.Con r i -> V1.Con <$> reference2to1 lookupSize r <*> pure (fromIntegral i) <*> lookupCT r +rreferent2to1 :: Applicative m => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.ReferentH -> m V1.Referent +rreferent2to1 h lookupCT = \case + V2.Ref r -> V1.Ref <$> rreference2to1 h r + V2.Con r i -> V1.Con (reference2to1 r) (fromIntegral i) <$> lookupCT r rreferent1to2 :: Hash -> V1.Referent -> V2.ReferentH rreferent1to2 h = \case V1.Ref r -> V2.Ref (rreference1to2 h r) V1.Con r i _ct -> V2.Con (reference1to2 r) (fromIntegral i) -referent2to1 :: Applicative m => (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Referent -> m V1.Referent -referent2to1 lookupSize lookupCT = \case - V2.Ref r -> V1.Ref <$> reference2to1 lookupSize r - V2.Con r i -> V1.Con <$> reference2to1 lookupSize r <*> pure (fromIntegral i) <*> lookupCT r +referent2to1 :: Applicative m => (V2.Reference -> m CT.ConstructorType) -> V2.Referent -> m V1.Referent +referent2to1 lookupCT = \case + V2.Ref r -> pure $ V1.Ref (reference2to1 r) + V2.Con r i -> V1.Con (reference2to1 r) (fromIntegral i) <$> lookupCT r referent1to2 :: V1.Referent -> V2.Referent referent1to2 = \case V1.Ref r -> V2.Ref $ reference1to2 r V1.Con r i _ct -> V2.Con (reference1to2 r) (fromIntegral i) -referentid2to1 :: Applicative m => (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Referent.Id -> m V1.Referent.Id -referentid2to1 lookupSize lookupCT = \case - V2.RefId r -> V1.Ref' <$> referenceid2to1 lookupSize r +referentid2to1 :: Applicative m => (V2.Reference -> m CT.ConstructorType) -> V2.Referent.Id -> m V1.Referent.Id +referentid2to1 lookupCT = \case + V2.RefId r -> pure $ V1.Ref' (referenceid2to1 r) V2.ConId r i -> - V1.Con' <$> referenceid2to1 lookupSize r - <*> pure (fromIntegral i) - <*> lookupCT (V2.ReferenceDerived r) + V1.Con' (referenceid2to1 r) (fromIntegral i) <$> lookupCT (V2.ReferenceDerived r) hash2to1 :: V2.Hash.Hash -> Hash hash2to1 (V2.Hash.Hash sbs) = V1.Hash (SBS.fromShort sbs) diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv.hs b/parser-typechecker/src/Unison/PrettyPrintEnv.hs index e01c10c48c..3364d6adac 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv.hs @@ -66,9 +66,9 @@ data PrettyPrintEnvDecl = PrettyPrintEnvDecl { -- foo.bar x = foo.bar x -- and not -- foo.bar x = bar x -declarationPPE :: PrettyPrintEnvDecl -> Reference -> PrettyPrintEnv -declarationPPE ppe rd = PrettyPrintEnv tm ty where - comp = Reference.members (Reference.componentFor rd) +declarationPPE :: PrettyPrintEnvDecl -> Reference -> Reference.CycleSize -> PrettyPrintEnv +declarationPPE ppe rd n = PrettyPrintEnv tm ty where + comp = Reference.members (Reference.componentFor rd n) tm r0@(Referent.Ref r) = if Set.member r comp then terms (unsuffixifiedPPE ppe) r0 else terms (suffixifiedPPE ppe) r0 diff --git a/parser-typechecker/tests/Unison/Test/Referent.hs b/parser-typechecker/tests/Unison/Test/Referent.hs index 9c9dfb51db..d94c94103a 100644 --- a/parser-typechecker/tests/Unison/Test/Referent.hs +++ b/parser-typechecker/tests/Unison/Test/Referent.hs @@ -61,8 +61,8 @@ test = scope "hashparsing" . tests $ ] where h = "#1tdqrgl90qnmqvrff0j76kg2rnajq7n8j54e9cbk4p8pdi41q343bnh8h2rv6nadhlin8teg8371d445pvo0as7j2sav8k401d2s3no" - suffix1 = Rf.showSuffix 0 10 - suffix2 = Rf.showSuffix 3 6 + suffix1 = Rf.showSuffix 0 + suffix2 = Rf.showSuffix 3 ref txt = scope (Text.unpack txt) $ case Rf.fromText txt of Left e -> fail e Right r1 -> case Rf.fromText (Rf.toText r1) of diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index eff7d13af4..c5100a5306 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -670,7 +670,7 @@ hashComponent byName = let -- overall component has no free variables. hashComponents :: (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v, Ord h, Accumulate h) - => (h -> Word64 -> Word64 -> Term f v ()) + => (h -> Word64 -> Term f v ()) -> Map.Map v (Term f v a) -> [(h, [(v, Term f v a)])] hashComponents termFromHash termsByName = let @@ -681,8 +681,7 @@ hashComponents termFromHash termsByName = let go prevHashes (component : rest) = let sub = substsInheritAnnotation (Map.toList prevHashes) (h, sortedComponent) = hashComponent $ Map.fromList [ (v, sub t) | (v, t) <- component ] - size = fromIntegral (length sortedComponent) - curHashes = Map.fromList [ (v, termFromHash h i size) | ((v, _),i) <- sortedComponent `zip` [0..]] + curHashes = Map.fromList [ (v, termFromHash h i) | ((v, _),i) <- sortedComponent `zip` [0..]] newHashes = prevHashes `Map.union` curHashes newHashesL = Map.toList newHashes sortedComponent' = [ (v, substsInheritAnnotation newHashesL t) | (v, t) <- sortedComponent ] diff --git a/unison-core/src/Unison/Reference.hs b/unison-core/src/Unison/Reference.hs index 8bf809b5a1..655afc683f 100644 --- a/unison-core/src/Unison/Reference.hs +++ b/unison-core/src/Unison/Reference.hs @@ -10,12 +10,13 @@ module Unison.Reference pattern DerivedId, Id(..), Pos, - Size, + CycleSize, Size, derivedBase32Hex, Component, members, components, groupByComponent, componentFor, + componentFor', unsafeFromText, idFromText, isPrefixOf, @@ -52,16 +53,15 @@ data Reference -- The `Pos` refers to a particular element of the component -- and the `Size` is the number of elements in the component. -- Using an ugly name so no one tempted to use this - | DerivedId Id deriving (Eq,Ord,Generic) + | DerivedId Id deriving (Eq, Ord) -pattern Derived :: H.Hash -> Pos -> Size -> Reference -pattern Derived h i n = DerivedId (Id h i n) +pattern Derived :: H.Hash -> Pos -> Reference +pattern Derived h i = DerivedId (Id h i) --- A good idea, but causes a weird problem with view patterns in PatternP.hs in ghc 8.4.3 ---{-# COMPLETE Builtin, Derived #-} +{-# COMPLETE Builtin, Derived #-} -- | @Pos@ is a position into a cycle of size @Size@, as cycles are hashed together. -data Id = Id H.Hash Pos Size deriving (Generic) +data Id = Id H.Hash Pos deriving (Eq, Ord) unsafeId :: Reference -> Id unsafeId (Builtin b) = @@ -71,16 +71,11 @@ unsafeId (DerivedId x) = x idToShortHash :: Id -> ShortHash idToShortHash = toShortHash . DerivedId --- todo: move these to ShortHash module? -- but Show Reference currently depends on SH toShortHash :: Reference -> ShortHash toShortHash (Builtin b) = SH.Builtin b -toShortHash (Derived h _ 1) = SH.ShortHash (H.base32Hex h) Nothing Nothing -toShortHash (Derived h i n) = SH.ShortHash (H.base32Hex h) index Nothing - where - -- todo: remove `n` parameter; must also update readSuffix - index = Just $ showSuffix i n -toShortHash (DerivedId _) = error "this should be covered above" +toShortHash (Derived h 0) = SH.ShortHash (H.base32Hex h) Nothing Nothing +toShortHash (Derived h i) = SH.ShortHash (H.base32Hex h) (Just $ showSuffix i) Nothing -- toShortHash . fromJust . fromShortHash == id and -- fromJust . fromShortHash . toShortHash == id @@ -93,23 +88,17 @@ fromShortHash (SH.Builtin b) = Just (Builtin b) fromShortHash (SH.ShortHash prefix cycle Nothing) = do h <- H.fromBase32Hex prefix case cycle of - Nothing -> Just (Derived h 0 1) - Just t -> case Text.splitOn "c" t of - [i,n] -> Derived h <$> readMay (Text.unpack i) <*> readMay (Text.unpack n) - _ -> Nothing + Nothing -> Just (Derived h 0) + Just i -> Derived h <$> readMay (Text.unpack i) fromShortHash (SH.ShortHash _prefix _cycle (Just _cid)) = Nothing --- (3,10) encoded as "3c10" --- (0,93) encoded as "0c93" -showSuffix :: Pos -> Size -> Text -showSuffix i n = Text.pack $ show i <> "c" <> show n +showSuffix :: Pos -> Text +showSuffix = Text.pack . show --- todo: don't read or return size; must also update showSuffix and fromText -readSuffix :: Text -> Either String (Pos, Size) -readSuffix t = case Text.breakOn "c" t of - (pos, Text.drop 1 -> size) | Text.all isDigit pos && Text.all isDigit size -> - Right (read (Text.unpack pos), read (Text.unpack size)) - _ -> Left "suffix decoding error" +readSuffix :: Text -> Either String Pos +readSuffix = \case + pos | Text.all isDigit pos -> Right (read (Text.unpack pos)) + t -> Left ("suffix decoding error: " ++ show t) isPrefixOf :: ShortHash -> Reference -> Bool isPrefixOf sh r = SH.isPrefixOf sh (toShortHash r) @@ -121,20 +110,22 @@ showShort :: Int -> Reference -> Text showShort numHashChars = SH.toText . SH.take numHashChars . toShortHash type Pos = Word64 -type Size = Word64 +type Size = CycleSize +type CycleSize = Word64 newtype Component = Component { members :: Set Reference } -- Gives the component (dependency cycle) that the reference is a part of -componentFor :: Reference -> Component -componentFor b@(Builtin _ ) = Component (Set.singleton b) -componentFor ( DerivedId (Id h _ n)) = Component - (Set.fromList - [ DerivedId (Id h i n) | i <- take (fromIntegral n) [0 ..] ] - ) - -derivedBase32Hex :: Text -> Pos -> Size -> Reference -derivedBase32Hex b32Hex i n = DerivedId (Id (fromMaybe msg h) i n) +componentFor :: Reference -> CycleSize -> Component +componentFor r n = case r of + b@Builtin{} -> Component (Set.singleton b) + DerivedId (Id h _) -> Component . Set.fromList $ DerivedId . Id h <$> [0 .. n] + +componentFor' :: H.Hash -> [a] -> [(Reference, a)] +componentFor' h as = [ (Derived h i, a) | (fromIntegral -> i, a) <- zip [0..] as] + +derivedBase32Hex :: Text -> Pos -> Reference +derivedBase32Hex b32Hex i = DerivedId (Id (fromMaybe msg h) i) where msg = error $ "Reference.derivedBase32Hex " <> show h h = H.fromBase32Hex b32Hex @@ -161,16 +152,15 @@ fromText :: Text -> Either String Reference fromText t = case Text.split (=='#') t of [_, "", b] -> Right (Builtin b) [_, h] -> case Text.split (=='.') h of - [hash] -> Right (derivedBase32Hex hash 0 1) - [hash, suffix] -> uncurry (derivedBase32Hex hash) <$> readSuffix suffix + [hash] -> Right (derivedBase32Hex hash 0) + [hash, suffix] -> derivedBase32Hex hash <$> readSuffix suffix _ -> bail _ -> bail where bail = Left $ "couldn't parse a Reference from " <> Text.unpack t component :: H.Hash -> [k] -> [(k, Id)] component h ks = let - size = fromIntegral (length ks) - in [ (k, (Id h i size)) | (k, i) <- ks `zip` [0..]] + in [ (k, (Id h i)) | (k, i) <- ks `zip` [0..]] components :: [(H.Hash, [k])] -> [(k, Id)] components sccs = uncurry component =<< sccs @@ -178,7 +168,7 @@ components sccs = uncurry component =<< sccs groupByComponent :: [(k, Reference)] -> [[(k, Reference)]] groupByComponent refs = done $ foldl' insert Map.empty refs where - insert m (k, r@(Derived h _ _)) = + insert m (k, r@(Derived h _)) = Map.unionWith (<>) m (Map.fromList [(Right h, [(k,r)])]) insert m (k, r) = Map.unionWith (<>) m (Map.fromList [(Left r, [(k,r)])]) @@ -189,8 +179,4 @@ instance Show Reference where show = SH.toString . SH.take 5 . toShortHash instance Hashable.Hashable Reference where tokens (Builtin txt) = [Hashable.Tag 0, Hashable.Text txt] - tokens (DerivedId (Id h i n)) = [Hashable.Tag 1, Hashable.Bytes (H.toBytes h), Hashable.Nat i, Hashable.Nat n] - --- | Two references mustn't differ in cycle length only. -instance Eq Id where x == y = compare x y == EQ -instance Ord Id where Id h i _ `compare` Id h2 i2 _ = compare h h2 <> compare i i2 + tokens (DerivedId (Id h i)) = [Hashable.Tag 1, Hashable.Bytes (H.toBytes h), Hashable.Nat i] diff --git a/unison-core/src/Unison/Reference/Util.hs b/unison-core/src/Unison/Reference/Util.hs index 2d63d2d6b1..ed58252855 100644 --- a/unison-core/src/Unison/Reference/Util.hs +++ b/unison-core/src/Unison/Reference/Util.hs @@ -17,6 +17,6 @@ hashComponents :: hashComponents embedRef tms = Map.fromList [ (v, (r,e)) | ((v,e), r) <- cs ] where cs = components $ ABT.hashComponents ref tms - ref h i n = embedRef (Id h i n) + ref h i = embedRef (Id h i) diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index 71526acc53..1300265a9f 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -994,7 +994,7 @@ hashComponents hashComponents = ReferenceUtil.hashComponents $ refId () hashClosedTerm :: Var v => Term v a -> Reference.Id -hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 1 +hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 -- The hash for a constructor hashConstructor' @@ -1035,12 +1035,11 @@ instance Var v => Hashable1 (F v a p) where -- are 'transparent' wrt hash and hashing is unaffected by whether -- expressions are linked. So for example `x = 1 + 1` and `y = x` hash -- the same. - Ref (Reference.Derived h 0 1) -> Hashable.fromBytes (Hash.toBytes h) - Ref (Reference.Derived h i n) -> Hashable.accumulate + Ref (Reference.Derived h 0) -> Hashable.fromBytes (Hash.toBytes h) + Ref (Reference.Derived h i) -> Hashable.accumulate [ tag 1 , hashed $ Hashable.fromBytes (Hash.toBytes h) , Hashable.Nat i - , Hashable.Nat n ] -- Note: start each layer with leading `1` byte, to avoid collisions -- with types, which start each layer with leading `0`. diff --git a/unison-core/src/Unison/Type.hs b/unison-core/src/Unison/Type.hs index 7559acc8b6..fa68466931 100644 --- a/unison-core/src/Unison/Type.hs +++ b/unison-core/src/Unison/Type.hs @@ -641,7 +641,7 @@ toReference :: (ABT.Var v, Show v) => Type v a -> Reference toReference (Ref' r) = r -- a bit of normalization - any unused type parameters aren't part of the hash toReference (ForallNamed' v body) | not (Set.member v (ABT.freeVars body)) = toReference body -toReference t = Reference.Derived (ABT.hash t) 0 1 +toReference t = Reference.Derived (ABT.hash t) 0 toReferenceMentions :: (ABT.Var v, Show v) => Type v a -> Set Reference toReferenceMentions ty = From 8532bf611a94d51e46585b0c71ee145d525e5bdc Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Tue, 31 Aug 2021 16:33:02 -0400 Subject: [PATCH 002/297] drop cycle length from SqliteCodebase, etc when calling putTerm / putTypeDecl, check all the dependencies of the term looking for evidence of a bigger cycle size. todo: fill in missing codebase functions --- .../U/Codebase/Sqlite/Operations.hs | 13 +- .../src/Unison/Codebase/Editor/HandleInput.hs | 6 +- .../src/Unison/Codebase/Editor/Propagate.hs | 10 +- .../src/Unison/Codebase/SqliteCodebase.hs | 94 +++++----- .../Codebase/SqliteCodebase/Conversions.hs | 170 +++++++----------- .../src/Unison/Codebase/Type.hs | 5 +- .../src/Unison/PrettyPrintEnv/Util.hs | 2 - .../src/Unison/Runtime/ANF/Serialize.hs | 5 +- 8 files changed, 133 insertions(+), 172 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 9c5535c94d..5df2360b64 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -1310,26 +1310,25 @@ declReferentsByPrefix :: Text -> Maybe C.Reference.Pos -> Maybe ConstructorId -> - m [(H.Hash, C.Reference.Pos, Word64, C.DeclType, [C.Decl.ConstructorId])] + m [(H.Hash, C.Reference.Pos, C.DeclType, [C.Decl.ConstructorId])] declReferentsByPrefix b32prefix pos cid = do componentReferencesByPrefix OT.DeclComponent b32prefix pos >>= traverse (loadConstructors cid) where - loadConstructors :: EDB m => Maybe Word64 -> S.Reference.Id -> m (H.Hash, C.Reference.Pos, Word64, C.DeclType, [ConstructorId]) + loadConstructors :: EDB m => Maybe Word64 -> S.Reference.Id -> m (H.Hash, C.Reference.Pos, C.DeclType, [ConstructorId]) loadConstructors cid rid@(C.Reference.Id oId pos) = do - (dt, len, ctorCount) <- getDeclCtorCount rid + (dt, ctorCount) <- getDeclCtorCount rid h <- loadHashByObjectId oId let test :: ConstructorId -> Bool test = maybe (const True) (==) cid cids = [cid | cid <- [0 :: ConstructorId .. ctorCount - 1], test cid] - pure (h, pos, len, dt, cids) - getDeclCtorCount :: EDB m => S.Reference.Id -> m (C.Decl.DeclType, Word64, ConstructorId) + pure (h, pos, dt, cids) + getDeclCtorCount :: EDB m => S.Reference.Id -> m (C.Decl.DeclType, ConstructorId) getDeclCtorCount id@(C.Reference.Id r i) = do when debug $ traceM $ "getDeclCtorCount " ++ show id bs <- liftQ (Q.loadObjectById r) - len <- decodeComponentLengthOnly bs (_localIds, decl) <- decodeDeclElement i bs - pure (C.Decl.declType decl, len, fromIntegral $ length (C.Decl.constructorTypes decl)) + pure (C.Decl.declType decl, fromIntegral $ length (C.Decl.constructorTypes decl)) branchHashesByPrefix :: EDB m => ShortBranchHash -> m (Set BranchHash) branchHashesByPrefix (ShortBranchHash b32prefix) = do diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs index 518b94e0a7..ce7447cfed 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs @@ -916,7 +916,7 @@ loop = do diffHelper (Branch.head prev) (Branch.head root') >>= respondNumbered . uncurry Output.ShowDiffAfterUndo - UiI -> eval UI + UiI -> eval UI AliasTermI src dest -> do referents <- resolveHHQS'Referents src @@ -2013,8 +2013,8 @@ showTodoOutput getPpe patch names0 = do checkTodo :: Patch -> Names0 -> Action m i v (TO.TodoOutput v Ann) checkTodo patch names0 = do - let careToUpdate = Names.contains names0 - f <- Propagate.computeFrontier (eval . GetDependents) patch careToUpdate + let shouldUpdate = Names.contains names0 + f <- Propagate.computeFrontier (eval . GetDependents) patch shouldUpdate let dirty = R.dom f frontier = R.ran f (frontierTerms, frontierTypes) <- loadDisplayInfo frontier diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs index 07034a5e10..93adff6848 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs @@ -5,7 +5,7 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} -module Unison.Codebase.Editor.Propagate (propagateAndApply) where +module Unison.Codebase.Editor.Propagate (computeFrontier, propagateAndApply) where import Control.Error.Util ( hush ) import Control.Lens @@ -256,7 +256,7 @@ propagate rootNames patch b = case validatePatch patch of [] -> Referent.toString r n : _ -> show n - initialDirty <- R.dom <$> computeFrontier (eval . GetDependents) patch names0 + initialDirty <- R.dom <$> computeFrontier (eval . GetDependents) patch (Names.contains names0) let initialTypeReplacements = Map.mapMaybe TypeEdit.toReference initialTypeEdits -- TODO: once patches can directly contain constructor replacements, this @@ -633,9 +633,9 @@ computeFrontier . Monad m => (Reference -> m (Set Reference)) -- eg Codebase.dependents codebase -> Patch - -> Names0 + -> (Reference -> Bool) -> m (R.Relation Reference Reference) -computeFrontier getDependents patch names = do +computeFrontier getDependents patch shouldUpdate = do -- (r,r2) ∈ dependsOn if r depends on r2 dependsOn <- foldM addDependents R.empty edited -- Dirty is everything that `dependsOn` Frontier, minus already edited defns @@ -649,5 +649,5 @@ computeFrontier getDependents patch names = do -> m (R.Relation Reference Reference) addDependents dependents ref = (\ds -> R.insertManyDom ds ref dependents) - . Set.filter (Names.contains names) + . Set.filter shouldUpdate <$> getDependents ref diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index b7af4ef418..d51eabde5c 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -32,6 +32,7 @@ import qualified Data.Either.Combinators as Either import Data.Foldable (Foldable (toList), for_, traverse_) import Data.Functor (void, ($>), (<&>)) import qualified Data.List as List +import Data.List.NonEmpty.Extra (NonEmpty ((:|)), maximum1) import Data.Map (Map) import qualified Data.Map as Map import Data.Maybe (fromJust) @@ -283,19 +284,12 @@ sqliteCodebase debugName root = do -- the individual definitions until a complete component has been written. termBuffer :: TVar (Map Hash TermBufferEntry) <- newTVarIO Map.empty declBuffer :: TVar (Map Hash DeclBufferEntry) <- newTVarIO Map.empty - cycleLengthCache <- Cache.semispaceCache 8192 declTypeCache <- Cache.semispaceCache 2048 let getTerm :: MonadIO m => Reference.Id -> m (Maybe (Term Symbol Ann)) getTerm (Reference.Id h1@(Cv.hash1to2 -> h2) i) = runDB' conn do term2 <- Ops.loadTermByReference (C.Reference.Id h2 i) - Cv.term2to1 h1 (getCycleLen "getTerm") getDeclType term2 - - getCycleLen :: EDB m => String -> Hash -> m Reference.Size - getCycleLen source = Cache.apply cycleLengthCache \h -> - (Ops.getCycleLen . Cv.hash1to2) h `Except.catchError` \case - e@(Ops.DatabaseIntegrityError (Q.NoObjectForPrimaryHashId {})) -> pure . error $ show e ++ " in " ++ source - e -> Except.throwError e + Cv.term2to1 h1 getDeclType term2 getDeclType :: EDB m => C.Reference.Reference -> m CT.ConstructorType getDeclType = Cache.apply declTypeCache \case @@ -314,42 +308,47 @@ sqliteCodebase debugName root = do getTypeOfTermImpl :: MonadIO m => Reference.Id -> m (Maybe (Type Symbol Ann)) getTypeOfTermImpl id | debug && trace ("getTypeOfTermImpl " ++ show id) False = undefined - getTypeOfTermImpl (Reference.Id (Cv.hash1to2 -> h2) i _n) = + getTypeOfTermImpl (Reference.Id (Cv.hash1to2 -> h2) i) = runDB' conn do type2 <- Ops.loadTypeOfTermByTermReference (C.Reference.Id h2 i) - Cv.ttype2to1 (getCycleLen "getTypeOfTermImpl") type2 + pure $ Cv.ttype2to1 type2 getTypeDeclaration :: MonadIO m => Reference.Id -> m (Maybe (Decl Symbol Ann)) - getTypeDeclaration (Reference.Id h1@(Cv.hash1to2 -> h2) i _n) = + getTypeDeclaration (Reference.Id h1@(Cv.hash1to2 -> h2) i) = runDB' conn do decl2 <- Ops.loadDeclByReference (C.Reference.Id h2 i) - Cv.decl2to1 h1 (getCycleLen "getTypeDeclaration") decl2 + pure $ Cv.decl2to1 h1 decl2 + + --putTermComponent :: MonadIO m => Hash -> [(Term Symbol Ann, Type Symbol Ann)] -> m () + --putTerms :: MonadIO m => Map Reference.Id (Term Symbol Ann, Type Symbol Ann) -> m () -- dies horribly if missing dependencies? + + -- option 1: tweak putTerm to incrementally notice the cycle length until each component is full + -- option 2: switch codebase interface from putTerm to putTerms -- buffering can be local to the function + -- option 3: switch from putTerm to putTermComponent -- needs to buffer dependencies non-locally (or require application to manage + die horribly) putTerm :: MonadIO m => Reference.Id -> Term Symbol Ann -> Type Symbol Ann -> m () putTerm id tm tp | debug && trace (show "SqliteCodebase.putTerm " ++ show id ++ " " ++ show tm ++ " " ++ show tp) False = undefined - putTerm (Reference.Id h@(Cv.hash1to2 -> h2) i n') tm tp = + putTerm (Reference.Id h@(Cv.hash1to2 -> h2) i) tm tp = runDB conn $ unlessM (Ops.objectExistsForHash h2 >>= if debug then \b -> do traceM $ "objectExistsForHash " ++ show h2 ++ " = " ++ show b; pure b else pure) ( withBuffer termBuffer h \be@(BufferEntry size comp missing waiting) -> do Monad.when debug $ traceM $ "adding to BufferEntry" ++ show be - let size' = Just n' - -- if size was previously set, it's expected to match size'. - case size of - Just n - | n /= n' -> - error $ "targetSize for term " ++ show h ++ " was " ++ show size ++ ", but now " ++ show size' - _ -> pure () + let termDependencies = Set.toList $ Term.termDependencies tm + -- update the component target size if we encounter any higher self-references + let size' = max size (Just $ biggestSelfReference + 1) where + biggestSelfReference = maximum1 $ + i :| [ i' | Reference.Derived h' i' <- termDependencies, h == h' ] let comp' = Map.insert i (tm, tp) comp -- for the component element that's been passed in, add its dependencies to missing' missingTerms' <- filterM (fmap not . Ops.objectExistsForHash . Cv.hash1to2) - [h | Reference.Derived h _i _n <- Set.toList $ Term.termDependencies tm] + [h | Reference.Derived h _i <- termDependencies] missingTypes' <- filterM (fmap not . Ops.objectExistsForHash . Cv.hash1to2) $ - [h | Reference.Derived h _i _n <- Set.toList $ Term.typeDependencies tm] - ++ [h | Reference.Derived h _i _n <- Set.toList $ Type.dependencies tp] + [h | Reference.Derived h _i <- Set.toList $ Term.typeDependencies tm] + ++ [h | Reference.Derived h _i <- Set.toList $ Type.dependencies tp] let missing' = missing <> Set.fromList (missingTerms' <> missingTypes') -- notify each of the dependencies that h depends on them. traverse (addBufferDependent h termBuffer) missingTerms' @@ -442,21 +441,19 @@ sqliteCodebase debugName root = do h putTypeDeclaration :: MonadIO m => Reference.Id -> Decl Symbol Ann -> m () - putTypeDeclaration (Reference.Id h@(Cv.hash1to2 -> h2) i n') decl = + putTypeDeclaration (Reference.Id h@(Cv.hash1to2 -> h2) i) decl = runDB conn $ unlessM (Ops.objectExistsForHash h2) ( withBuffer declBuffer h \(BufferEntry size comp missing waiting) -> do - let size' = Just n' - case size of - Just n - | n /= n' -> - error $ "targetSize for type " ++ show h ++ " was " ++ show size ++ ", but now " ++ show size' - _ -> pure () + let declDependencies = Set.toList $ Decl.declDependencies decl + let size' = max size (Just $ biggestSelfReference + 1) where + biggestSelfReference = maximum1 $ + i :| [i' | Reference.Derived h' i' <- declDependencies, h == h'] let comp' = Map.insert i decl comp moreMissing <- filterM (fmap not . Ops.objectExistsForHash . Cv.hash1to2) $ - [h | Reference.Derived h _i _n <- Set.toList $ Decl.declDependencies decl] + [h | Reference.Derived h _i <- declDependencies] let missing' = missing <> Set.fromList moreMissing traverse (addBufferDependent h declBuffer) moreMissing putBuffer declBuffer h (BufferEntry size' comp' missing' waiting) @@ -484,7 +481,7 @@ sqliteCodebase debugName root = do . runExceptT . flip runReaderT conn . fmap (Branch.transform (runDB conn)) - $ Cv.causalbranch2to1 getCycleLen getDeclType =<< Ops.loadRootCausal + $ Cv.causalbranch2to1 getDeclType =<< Ops.loadRootCausal v <- runDB conn Ops.dataVersion for_ b (atomically . writeTVar rootBranchCache . Just . (v,)) pure b @@ -556,7 +553,7 @@ sqliteCodebase debugName root = do Ops.loadCausalBranchByCausalHash (Cv.branchHash1to2 h) >>= \case Just b -> pure . Just . Branch.transform (runDB conn) - =<< Cv.causalbranch2to1 getCycleLen getDeclType b + =<< Cv.causalbranch2to1 getDeclType b Nothing -> pure Nothing putBranch :: MonadIO m => Branch m -> m () @@ -570,7 +567,7 @@ sqliteCodebase debugName root = do runDB conn . runMaybeT $ MaybeT (Ops.primaryHashToMaybePatchObjectId (Cv.patchHash1to2 h)) >>= Ops.loadPatchById - >>= Cv.patch2to1 getCycleLen + <&> Cv.patch2to1 putPatch :: MonadIO m => Branch.EditHash -> Patch -> m () putPatch h p = @@ -583,8 +580,8 @@ sqliteCodebase debugName root = do dependentsImpl :: MonadIO m => Reference -> m (Set Reference.Id) dependentsImpl r = runDB conn $ - Set.traverse (Cv.referenceid2to1 (getCycleLen "dependentsImpl")) - =<< Ops.dependents (Cv.reference1to2 r) + Set.map Cv.referenceid2to1 + <$> Ops.dependents (Cv.reference1to2 r) syncFromDirectory :: MonadIO m => Codebase1.CodebasePath -> SyncMode -> Branch m -> m () syncFromDirectory srcRoot _syncMode b = @@ -603,20 +600,20 @@ sqliteCodebase debugName root = do watches w = runDB conn $ Ops.listWatches (Cv.watchKind1to2 w) - >>= traverse (Cv.referenceid2to1 (getCycleLen "watches")) + <&> fmap Cv.referenceid2to1 getWatch :: MonadIO m => UF.WatchKind -> Reference.Id -> m (Maybe (Term Symbol Ann)) - getWatch k r@(Reference.Id h _i _n) + getWatch k r@(Reference.Id h _i) | elem k standardWatchKinds = runDB' conn $ Ops.loadWatch (Cv.watchKind1to2 k) (Cv.referenceid1to2 r) - >>= Cv.term2to1 h (getCycleLen "getWatch") getDeclType + >>= Cv.term2to1 h getDeclType getWatch _unknownKind _ = pure Nothing standardWatchKinds = [UF.RegularWatch, UF.TestWatch] putWatch :: MonadIO m => UF.WatchKind -> Reference.Id -> Term Symbol Ann -> m () - putWatch k r@(Reference.Id h _i _n) tm + putWatch k r@(Reference.Id h _i) tm | elem k standardWatchKinds = runDB conn $ Ops.saveWatch @@ -658,13 +655,13 @@ sqliteCodebase debugName root = do termsOfTypeImpl r = runDB conn $ Ops.termsHavingType (Cv.reference1to2 r) - >>= Set.traverse (Cv.referentid2to1 (getCycleLen "termsOfTypeImpl") getDeclType) + >>= Set.traverse (Cv.referentid2to1 getDeclType) termsMentioningTypeImpl :: MonadIO m => Reference -> m (Set Referent.Id) termsMentioningTypeImpl r = runDB conn $ Ops.termsMentioningType (Cv.reference1to2 r) - >>= Set.traverse (Cv.referentid2to1 (getCycleLen "termsMentioningTypeImpl") getDeclType) + >>= Set.traverse (Cv.referentid2to1 getDeclType) hashLength :: Applicative m => m Int hashLength = pure 10 @@ -681,7 +678,7 @@ sqliteCodebase debugName root = do >>= traverse (C.Reference.idH Ops.loadHashByObjectId) >>= pure . Set.fromList - Set.fromList <$> traverse (Cv.referenceid2to1 (getCycleLen "defnReferencesByPrefix")) (Set.toList refs) + pure $ Set.map Cv.referenceid2to1 refs termReferencesByPrefix :: MonadIO m => ShortHash -> m (Set Reference.Id) termReferencesByPrefix = defnReferencesByPrefix OT.TermComponent @@ -694,11 +691,11 @@ sqliteCodebase debugName root = do referentsByPrefix (SH.ShortHash prefix (fmap Cv.shortHashSuffix1to2 -> cycle) cid) = runDB conn do termReferents <- Ops.termReferentsByPrefix prefix cycle - >>= traverse (Cv.referentid2to1 (getCycleLen "referentsByPrefix") getDeclType) + >>= traverse (Cv.referentid2to1 getDeclType) declReferents' <- Ops.declReferentsByPrefix prefix cycle (read . Text.unpack <$> cid) let declReferents = - [ Referent.ConId (Reference.Id (Cv.hash2to1 h) pos len) (fromIntegral cid) (Cv.decltype2to1 ct) - | (h, pos, len, ct, cids) <- declReferents', + [ Referent.ConId (Reference.Id (Cv.hash2to1 h) pos) (fromIntegral cid) (Cv.decltype2to1 ct) + | (h, pos, ct, cids) <- declReferents', cid <- cids ] pure . Set.fromList $ termReferents <> declReferents @@ -743,6 +740,11 @@ sqliteCodebase debugName root = do (Cache.applyDefined declCache getTypeDeclaration) putTerm putTypeDeclaration + _getTermComponent + _getTermComponentWithTypes + _getTermComponentLength + _getDeclComponent + _getDeclComponentLength (getRootBranch rootBranchCache) (putRootBranch rootBranchCache) (rootBranchUpdates rootBranchCache) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index c9c1ad7ab3..e819a2ff22 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -2,12 +2,10 @@ module Unison.Codebase.SqliteCodebase.Conversions where -import Control.Monad (foldM) import Data.Bifunctor (Bifunctor (bimap)) -import Data.Bitraversable (Bitraversable (bitraverse)) import qualified Data.ByteString.Short as SBS import Data.Either (fromRight) -import Data.Foldable (Foldable (toList)) +import Data.Foldable (Foldable (foldl', toList)) import Data.Map (Map) import qualified Data.Map as Map import qualified Data.Set as Set @@ -33,7 +31,6 @@ import qualified U.Core.ABT as V2.ABT import qualified U.Util.Hash as V2 import qualified U.Util.Hash as V2.Hash import qualified U.Util.Map as Map -import qualified U.Util.Set as Set import qualified Unison.ABT as V1.ABT import qualified Unison.Codebase.Branch as V1.Branch import qualified Unison.Codebase.Causal as V1.Causal @@ -171,7 +168,7 @@ term2to1 h lookupCT tm = pure $ V1.Term.Request (reference2to1 r) (fromIntegral i) V2.Term.Handle a a4 -> pure $ V1.Term.Handle a a4 V2.Term.App a a4 -> pure $ V1.Term.App a a4 - V2.Term.Ann a t2 -> V1.Term.Ann a <$> ttype2to1 t2 + V2.Term.Ann a t2 -> pure $ V1.Term.Ann a (ttype2to1 t2) V2.Term.List sa -> pure $ V1.Term.List sa V2.Term.If a a4 a5 -> pure $ V1.Term.If a a4 a5 V2.Term.And a a4 -> pure $ V1.Term.And a a4 @@ -207,11 +204,10 @@ term2to1 h lookupCT tm = V2.Term.PConcat -> V1.Pattern.Concat a = Ann.External -decl2to1 :: Monad m => Hash -> (Hash -> m V1.Reference.Size) -> V2.Decl.Decl V2.Symbol -> m (V1.Decl.Decl V1.Symbol Ann) -decl2to1 h lookupSize (V2.Decl.DataDeclaration dt m bound cts) = +decl2to1 :: Hash -> V2.Decl.Decl V2.Symbol -> V1.Decl.Decl V1.Symbol Ann +decl2to1 h (V2.Decl.DataDeclaration dt m bound cts) = goCT dt - <$> V1.Decl.DataDeclaration (goMod m) Ann.External (symbol2to1 <$> bound) - <$> cts' + $ V1.Decl.DataDeclaration (goMod m) Ann.External (symbol2to1 <$> bound) cts' where goMod = \case V2.Decl.Structural -> V1.Decl.Structural @@ -219,10 +215,10 @@ decl2to1 h lookupSize (V2.Decl.DataDeclaration dt m bound cts) = goCT = \case V2.Decl.Data -> Right V2.Decl.Effect -> Left . V1.Decl.EffectDeclaration - cts' = traverse mkCtor (zip cts [0 ..]) - mkCtor (type1, i) = do - type2 <- dtype2to1 h lookupSize type1 - pure $ (Ann.External, V1.symbol . pack $ "Constructor" ++ show i, type2) + cts' = map mkCtor (zip cts [0 ..]) + mkCtor (type1, i) = + (Ann.External, V1.symbol . pack $ "Constructor" ++ show i, type2) + where type2 = dtype2to1 h type1 decl1to2 :: Hash -> V1.Decl.Decl V1.Symbol a -> V2.Decl.Decl V2.Symbol decl1to2 h decl1 = case V1.Decl.asDataDecl decl1 of @@ -267,10 +263,10 @@ abt1to2 (V1.ABT.Term fv a out) = V2.ABT.Term fv a (go out) V1.ABT.Var v -> V2.ABT.Var v V1.ABT.Tm tm -> V2.ABT.Tm (abt1to2 <$> tm) -rreference2to1 :: Applicative m => Hash -> V2.Reference' Text (Maybe V2.Hash) -> m V1.Reference +rreference2to1 :: Hash -> V2.Reference' Text (Maybe V2.Hash) -> V1.Reference rreference2to1 h = \case - V2.ReferenceBuiltin t -> pure $ V1.Reference.Builtin t - V2.ReferenceDerived i -> V1.Reference.DerivedId <$> rreferenceid2to1 h i + V2.ReferenceBuiltin t -> V1.Reference.Builtin t + V2.ReferenceDerived i -> V1.Reference.DerivedId $ rreferenceid2to1 h i rreference1to2 :: Hash -> V1.Reference -> V2.Reference' Text (Maybe V2.Hash) rreference1to2 h = \case @@ -318,7 +314,7 @@ referenceid2to1 (V2.Reference.Id h i) = V1.Reference.Id sh i rreferent2to1 :: Applicative m => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.ReferentH -> m V1.Referent rreferent2to1 h lookupCT = \case - V2.Ref r -> V1.Ref <$> rreference2to1 h r + V2.Ref r -> pure . V1.Ref $ rreference2to1 h r V2.Con r i -> V1.Con (reference2to1 r) (fromIntegral i) <$> lookupCT r rreferent1to2 :: Hash -> V1.Referent -> V2.ReferentH @@ -351,29 +347,29 @@ causalHash2to1 = V1.Causal.RawHash . hash2to1 . V2.unCausalHash causalHash1to2 :: V1.Causal.RawHash V1.Branch.Raw -> V2.CausalHash causalHash1to2 = V2.CausalHash . hash1to2 . V1.Causal.unRawHash -ttype2to1 :: Monad m => (Hash -> m V1.Reference.Size) -> V2.Term.Type V2.Symbol -> m (V1.Type.Type V1.Symbol Ann) -ttype2to1 lookupSize = type2to1' (reference2to1 lookupSize) +ttype2to1 :: V2.Term.Type V2.Symbol -> V1.Type.Type V1.Symbol Ann +ttype2to1 = type2to1' reference2to1 -dtype2to1 :: Monad m => Hash -> (Hash -> m V1.Reference.Size) -> V2.Decl.Type V2.Symbol -> m (V1.Type.Type V1.Symbol Ann) -dtype2to1 h lookupSize = type2to1' (rreference2to1 h lookupSize) +dtype2to1 :: Hash -> V2.Decl.Type V2.Symbol -> V1.Type.Type V1.Symbol Ann +dtype2to1 h = type2to1' (rreference2to1 h) -type2to1' :: Monad m => (r -> m V1.Reference) -> V2.Type.TypeR r V2.Symbol -> m (V1.Type.Type V1.Symbol Ann) +type2to1' :: (r -> V1.Reference) -> V2.Type.TypeR r V2.Symbol -> V1.Type.Type V1.Symbol Ann type2to1' convertRef = - V1.ABT.transformM (typeF2to1 convertRef) + V1.ABT.transform (typeF2to1 convertRef) . V1.ABT.vmap symbol2to1 . V1.ABT.amap (const Ann.External) . abt2to1 where - typeF2to1 :: Applicative m => (r -> m V1.Reference) -> V2.Type.F' r a -> m (V1.Type.F a) + typeF2to1 :: (r -> V1.Reference) -> V2.Type.F' r a -> (V1.Type.F a) typeF2to1 convertRef = \case - V2.Type.Ref r -> V1.Type.Ref <$> convertRef r - V2.Type.Arrow i o -> pure $ V1.Type.Arrow i o - V2.Type.Ann a k -> pure $ V1.Type.Ann a (convertKind k) - V2.Type.App f x -> pure $ V1.Type.App f x - V2.Type.Effect e b -> pure $ V1.Type.Effect e b - V2.Type.Effects as -> pure $ V1.Type.Effects as - V2.Type.Forall a -> pure $ V1.Type.Forall a - V2.Type.IntroOuter a -> pure $ V1.Type.IntroOuter a + V2.Type.Ref r -> V1.Type.Ref $ convertRef r + V2.Type.Arrow i o -> V1.Type.Arrow i o + V2.Type.Ann a k -> V1.Type.Ann a (convertKind k) + V2.Type.App f x -> V1.Type.App f x + V2.Type.Effect e b -> V1.Type.Effect e b + V2.Type.Effects as -> V1.Type.Effects as + V2.Type.Forall a -> V1.Type.Forall a + V2.Type.IntroOuter a -> V1.Type.IntroOuter a where convertKind = \case V2.Kind.Star -> V1.Kind.Star @@ -408,23 +404,23 @@ type1to2' convertRef = V1.Kind.Arrow i o -> V2.Kind.Arrow (convertKind i) (convertKind o) -- | forces loading v1 branches even if they may not exist -causalbranch2to1 :: Monad m => (String -> Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.Causal m -> m (V1.Branch.Branch m) -causalbranch2to1 lookupSize lookupCT = fmap V1.Branch.Branch . causalbranch2to1' lookupSize lookupCT +causalbranch2to1 :: Monad m => (V2.Reference -> m CT.ConstructorType) -> V2.Branch.Causal m -> m (V1.Branch.Branch m) +causalbranch2to1 lookupCT = fmap V1.Branch.Branch . causalbranch2to1' lookupCT -causalbranch2to1' :: Monad m => (String -> Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.Causal m -> m (V1.Branch.UnwrappedBranch m) -causalbranch2to1' lookupSize lookupCT (V2.Causal hc _he (Map.toList -> parents) me) = do +causalbranch2to1' :: Monad m => (V2.Reference -> m CT.ConstructorType) -> V2.Branch.Causal m -> m (V1.Branch.UnwrappedBranch m) +causalbranch2to1' lookupCT (V2.Causal hc _he (Map.toList -> parents) me) = do let currentHash = causalHash2to1 hc case parents of - [] -> V1.Causal.One currentHash <$> (me >>= branch2to1 lookupSize lookupCT) + [] -> V1.Causal.One currentHash <$> (me >>= branch2to1 lookupCT) [(hp, mp)] -> do let parentHash = causalHash2to1 hp V1.Causal.Cons currentHash - <$> (me >>= branch2to1 lookupSize lookupCT) - <*> pure (parentHash, causalbranch2to1' lookupSize lookupCT =<< mp) + <$> (me >>= branch2to1 lookupCT) + <*> pure (parentHash, causalbranch2to1' lookupCT =<< mp) merge -> do - let tailsList = map (bimap causalHash2to1 (causalbranch2to1' lookupSize lookupCT =<<)) merge + let tailsList = map (bimap causalHash2to1 (causalbranch2to1' lookupCT =<<)) merge e <- me - V1.Causal.Merge currentHash <$> branch2to1 lookupSize lookupCT e <*> pure (Map.fromList tailsList) + V1.Causal.Merge currentHash <$> branch2to1 lookupCT e <*> pure (Map.fromList tailsList) causalbranch1to2 :: forall m. Monad m => V1.Branch.Branch m -> V2.Branch.Causal m causalbranch1to2 (V1.Branch.Branch c) = causal1to2' hash1to2cb hash1to2c branch1to2 c @@ -492,32 +488,27 @@ causalbranch1to2 (V1.Branch.Branch c) = causal1to2' hash1to2cb hash1to2c branch1 doChildren :: Map V1.NameSegment (V1.Branch.Branch m) -> Map V2.Branch.NameSegment (V2.Branch.Causal m) doChildren = Map.bimap namesegment1to2 causalbranch1to2 -patch2to1 :: - forall m. - Monad m => - (String -> Hash -> m V1.Reference.Size) -> - V2.Branch.Patch -> - m V1.Patch -patch2to1 lookupSize (V2.Branch.Patch v2termedits v2typeedits) = do - termEdits <- Map.bitraverse referent2to1' (Set.traverse termedit2to1) v2termedits - typeEdits <- Map.bitraverse (reference2to1 (lookupSize "patch->old type")) (Set.traverse typeedit2to1) v2typeedits - pure $ V1.Patch (Relation.fromMultimap termEdits) (Relation.fromMultimap typeEdits) +patch2to1 :: V2.Branch.Patch -> V1.Patch +patch2to1 (V2.Branch.Patch v2termedits v2typeedits) = + V1.Patch (Relation.fromMultimap termEdits) (Relation.fromMultimap typeEdits) where - referent2to1' :: V2.Referent -> m V1.Reference + termEdits = Map.bimap referent2to1' (Set.map termedit2to1) v2termedits + typeEdits = Map.bimap reference2to1 (Set.map typeedit2to1) v2typeedits + referent2to1' :: V2.Referent -> V1.Reference referent2to1' = \case - V2.Referent.Ref r -> reference2to1 (lookupSize "patch->old term") r + V2.Referent.Ref r -> reference2to1 r V2.Referent.Con {} -> error "found referent on LHS when converting patch2to1" - termedit2to1 :: V2.TermEdit.TermEdit -> m V1.TermEdit.TermEdit + termedit2to1 :: V2.TermEdit.TermEdit -> V1.TermEdit.TermEdit termedit2to1 = \case V2.TermEdit.Replace (V2.Referent.Ref r) t -> - V1.TermEdit.Replace <$> reference2to1 (lookupSize "patch->new term") r <*> typing2to1 t + V1.TermEdit.Replace (reference2to1 r) (typing2to1 t) V2.TermEdit.Replace {} -> error "found referent on RHS when converting patch2to1" - V2.TermEdit.Deprecate -> pure V1.TermEdit.Deprecate - typeedit2to1 :: V2.TypeEdit.TypeEdit -> m V1.TypeEdit.TypeEdit + V2.TermEdit.Deprecate -> V1.TermEdit.Deprecate + typeedit2to1 :: V2.TypeEdit.TypeEdit -> V1.TypeEdit.TypeEdit typeedit2to1 = \case - V2.TypeEdit.Replace r -> V1.TypeEdit.Replace <$> reference2to1 (lookupSize "patch->new type") r - V2.TypeEdit.Deprecate -> pure V1.TypeEdit.Deprecate - typing2to1 t = pure $ case t of + V2.TypeEdit.Replace r -> V1.TypeEdit.Replace (reference2to1 r) + V2.TypeEdit.Deprecate -> V1.TypeEdit.Deprecate + typing2to1 t = case t of V2.TermEdit.Same -> V1.TermEdit.Same V2.TermEdit.Subtype -> V1.TermEdit.Subtype V2.TermEdit.Different -> V1.TermEdit.Different @@ -554,55 +545,26 @@ namesegment1to2 (V1.NameSegment t) = V2.Branch.NameSegment t branch2to1 :: Monad m => - (String -> Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.Branch m -> m (V1.Branch.Branch0 m) -branch2to1 lookupSize lookupCT (V2.Branch.Branch v2terms v2types v2patches v2children) = do - v1terms <- toStar (reference2to1 $ lookupSize "term metadata") =<< Map.bitraverse (pure . namesegment2to1) (Map.bitraverse (referent2to1 (lookupSize "term") lookupCT) id) v2terms - v1types <- toStar (reference2to1 $ lookupSize "type metadata") =<< Map.bitraverse (pure . namesegment2to1) (Map.bitraverse (reference2to1 (lookupSize "type")) id) v2types - v1patches <- Map.bitraverse (pure . namesegment2to1) (bitraverse (pure . edithash2to1) (fmap (patch2to1 lookupSize))) v2patches - v1children <- Map.bitraverse (pure . namesegment2to1) (causalbranch2to1 lookupSize lookupCT) v2children +branch2to1 lookupCT (V2.Branch.Branch v2terms v2types v2patches v2children) = do + v1terms <- toStar reference2to1 <$> Map.bitraverse (pure . namesegment2to1) (Map.bitraverse (referent2to1 lookupCT) id) v2terms + v1types <- toStar reference2to1 <$> Map.bitraverse (pure . namesegment2to1) (Map.bitraverse (pure . reference2to1) id) v2types + v1children <- Map.bitraverse (pure . namesegment2to1) (causalbranch2to1 lookupCT) v2children pure $ V1.Branch.branch0 v1terms v1types v1children v1patches where - toStar :: forall m name ref. (Monad m, Ord name, Ord ref) => (V2.Reference -> m V1.Reference) -> Map name (Map ref V2.Branch.MdValues) -> m (V1.Metadata.Star ref name) - toStar mdref2to1 m = foldM insert mempty (Map.toList m) + v1patches = Map.bimap namesegment2to1 (bimap edithash2to1 (fmap patch2to1)) v2patches + toStar :: forall name ref. (Ord name, Ord ref) => (V2.Reference -> V1.Reference) -> Map name (Map ref V2.Branch.MdValues) -> V1.Metadata.Star ref name + toStar mdref2to1 m = foldl' insert mempty (Map.toList m) where - insert star (name, m) = foldM (insert' name) star (Map.toList m) - insert' :: name -> V1.Metadata.Star ref name -> (ref, V2.Branch.MdValues) -> m (V1.Metadata.Star ref name) - insert' name star (ref, V2.Branch.MdValues mdvals) = do + insert star (name, m) = foldl' (insert' name) star (Map.toList m) + insert' :: name -> V1.Metadata.Star ref name -> (ref, V2.Branch.MdValues) -> V1.Metadata.Star ref name + insert' name star (ref, V2.Branch.MdValues mdvals) = let facts = Set.singleton ref names = Relation.singleton ref name - types :: Relation.Relation ref V1.Metadata.Type <- - Relation.insertManyRan ref <$> traverse mdref2to1 (Map.elems mdvals) <*> pure mempty - vals :: Relation.Relation ref (V1.Metadata.Type, V1.Metadata.Value) <- - Relation.insertManyRan ref <$> (traverse (\(t, v) -> (,) <$> mdref2to1 v <*> mdref2to1 t) (Map.toList mdvals)) <*> pure mempty - pure $ star <> V1.Star3.Star3 facts names types vals - --- V2.Branch0 should have the metadata types, could bulk load with relational operations --- type Star a n = Star3 a n Type (Type, Value) --- type Star a n = Star3 a n Type (Reference, Reference) --- MdValues is a Set V2.Reference - --- (Name, TermRef, Metadata Type, Metadata Value) <-- decided not this (because name was too long/repetitive?) --- (BranchId/Hash, TermRef, Metadata Type, Metadata Value) <-- what about this - --- data V2.Branch m = Branch --- { terms :: Map NameSegment (Map Referent (m MdValues)), --- types :: Map NameSegment (Map Reference (m MdValues)), --- patches :: Map NameSegment (PatchHash, m Patch), --- children :: Map NameSegment (Causal m) --- } --- branch0 :: Metadata.Star Referent NameSegment --- -> Metadata.Star Reference NameSegment --- -> Map NameSegment (Branch m) --- -> Map NameSegment (EditHash, m Patch) --- -> Branch0 m - --- type Metadata.Star a n = Star3 a n Type (Type, Value) - --- data Star3 fact d1 d2 d3 --- = Star3 { fact :: Set fact --- , d1 :: Relation fact d1 --- , d2 :: Relation fact d2 --- , d3 :: Relation fact d3 } deriving (Eq,Ord,Show) + types :: Relation.Relation ref V1.Metadata.Type = + Relation.insertManyRan ref (fmap mdref2to1 (Map.elems mdvals)) mempty + vals :: Relation.Relation ref (V1.Metadata.Type, V1.Metadata.Value) = + Relation.insertManyRan ref (fmap (\(v, t) -> (mdref2to1 t, mdref2to1 v)) (Map.toList mdvals)) mempty + in star <> V1.Star3.Star3 facts names types vals diff --git a/parser-typechecker/src/Unison/Codebase/Type.hs b/parser-typechecker/src/Unison/Codebase/Type.hs index e34f68ae05..f27bc0efe0 100644 --- a/parser-typechecker/src/Unison/Codebase/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Type.hs @@ -6,12 +6,15 @@ module Unison.Codebase.Type (Codebase (..), CodebasePath, GitError(..), GetRootB import Unison.Codebase.Branch (Branch) import qualified Unison.Codebase.Branch as Branch import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo) +import Unison.Codebase.GitError (GitCodebaseError, GitProtocolError) import Unison.Codebase.Patch (Patch) import qualified Unison.Codebase.Reflog as Reflog import Unison.Codebase.ShortBranchHash (ShortBranchHash) +import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError) import Unison.Codebase.SyncMode (SyncMode) import Unison.CodebasePath (CodebasePath) import Unison.DataDeclaration (Decl) +import Unison.Hash (Hash) import Unison.Prelude import Unison.Reference (Reference) import qualified Unison.Reference as Reference @@ -20,8 +23,6 @@ import Unison.ShortHash (ShortHash) import Unison.Term (Term) import Unison.Type (Type) import qualified Unison.WatchKind as WK -import Unison.Codebase.GitError (GitProtocolError, GitCodebaseError) -import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError) type SyncToDir m = CodebasePath -> -- dest codebase diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs b/parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs index 0ec9c1698f..66dc8a0102 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs @@ -2,7 +2,6 @@ module Unison.PrettyPrintEnv.Util (declarationPPE) where -import qualified Data.Set as Set import Unison.PrettyPrintEnv (PrettyPrintEnv (..)) import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (suffixifiedPPE, unsuffixifiedPPE)) import Unison.Reference (Reference) @@ -20,7 +19,6 @@ declarationPPE :: PrettyPrintEnvDecl -> Reference -> PrettyPrintEnv declarationPPE ppe ref = PrettyPrintEnv tm ty where rootH = hash ref - comp = Reference.members (Reference.componentFor rd n) hash Reference.Builtin {} = Nothing hash (Reference.Derived h _) = Just h tm r0@(Referent.Ref r) diff --git a/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs b/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs index 38864fe5bd..b0bfae20a2 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs @@ -767,17 +767,16 @@ putReference r = case r of Builtin name -> do putWord8 0 putText name - Derived hash i n -> do + Derived hash i-> do putWord8 1 putHash hash putLength i - putLength n getReference :: MonadGet m => m Reference getReference = do tag <- getWord8 case tag of 0 -> Builtin <$> getText - 1 -> DerivedId <$> (Id <$> getHash <*> getLength <*> getLength) + 1 -> DerivedId <$> (Id <$> getHash <*> getLength) _ -> unknownTag "Reference" tag From c409b433e9aad21323f8060c5468afad4e0b5ccf Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Tue, 31 Aug 2021 18:41:29 -0400 Subject: [PATCH 003/297] delete Sync12 Oops, this should have gone with #2363 --- .../src/Unison/Codebase/Conversion/Sync12.hs | 687 ------------------ .../unison-parser-typechecker.cabal | 1 - 2 files changed, 688 deletions(-) delete mode 100644 parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs diff --git a/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs b/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs deleted file mode 100644 index 5406c4dbdc..0000000000 --- a/parser-typechecker/src/Unison/Codebase/Conversion/Sync12.hs +++ /dev/null @@ -1,687 +0,0 @@ -{-# LANGUAGE ApplicativeDo #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE ExistentialQuantification #-} -{-# LANGUAGE PartialTypeSignatures #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE TypeOperators #-} -{-# LANGUAGE ViewPatterns #-} - -module Unison.Codebase.Conversion.Sync12 where - -import Control.Lens -import qualified Control.Lens as Lens -import Control.Monad.Except (MonadError, runExceptT) -import qualified Control.Monad.Except as Except -import Control.Monad.Extra ((&&^)) -import Control.Monad.Reader -import qualified Control.Monad.Reader as Reader -import Control.Monad.State (MonadState) -import qualified Control.Monad.State as State -import Control.Monad.Validate (MonadValidate, runValidateT) -import qualified Control.Monad.Validate as Validate -import Control.Natural (type (~>)) -import Data.Bifoldable (bitraverse_) -import qualified Data.Foldable as Foldable -import qualified Data.List as List -import qualified Data.Map as Map -import qualified Data.Set as Set -import System.IO (stdout) -import System.IO.Extra (hFlush) -import U.Codebase.Sqlite.Connection (Connection) -import U.Codebase.Sync (Sync (Sync), TrySyncResult) -import qualified U.Codebase.Sync as Sync -import qualified U.Util.Monoid as Monoid -import Unison.Codebase (Codebase) -import qualified Unison.Codebase as Codebase -import Unison.Codebase.Branch (UnwrappedBranch) -import qualified Unison.Codebase.Branch as Branch -import qualified Unison.Codebase.Causal as Causal -import qualified Unison.Codebase.Metadata as Metadata -import Unison.Codebase.Patch (Patch (..)) -import qualified Unison.Codebase.TermEdit as TermEdit -import qualified Unison.Codebase.TypeEdit as TypeEdit -import Unison.DataDeclaration (Decl) -import qualified Unison.DataDeclaration as DD -import Unison.Hash (Hash) -import qualified Unison.Hashable as H -import qualified Unison.LabeledDependency as LD -import Unison.NameSegment (NameSegment) -import Unison.Prelude -import qualified Unison.Reference as Reference -import Unison.Referent (Referent) -import qualified Unison.Referent as Referent -import Unison.Symbol (Symbol) -import Unison.Term (Term) -import qualified Unison.Term as Term -import Unison.Type (Type) -import qualified Unison.Type as Type -import Unison.Util.Relation (Relation) -import qualified Unison.Util.Relation as Relation -import Unison.Util.Star3 (Star3 (Star3)) -import Unison.WatchKind (WatchKind) - -debug :: Bool -debug = False - -data Env m a = Env - { srcCodebase :: Codebase m Symbol a, - destCodebase :: Codebase m Symbol a, - destConnection :: Connection - } - -data Entity m - = C Branch.Hash (m (UnwrappedBranch m)) - | T Hash - | W WatchKind Reference.Id - | D Hash - | P Branch.EditHash - -type V m n = MonadValidate (Set (Entity m)) n - -type E e n = MonadError e n - -type S m n = MonadState (Status m) n - -type R m n a = MonadReader (Env m a) n - -type RS m n a = (R m n a, S m n) - -data BranchStatus m - = BranchOk - | BranchReplaced Branch.Hash (UnwrappedBranch m) - -data TermStatus - = TermOk - | TermMissing - | TermMissingType - | TermMissingDependencies - deriving (Show) - -data WatchStatus - = WatchOk - | WatchNotCached - | WatchMissingDependencies - deriving (Show) - -data DeclStatus - = DeclOk - | DeclMissing - | DeclMissingDependencies - deriving (Show) - -data PatchStatus - = PatchOk - | PatchMissing - | PatchReplaced Branch.EditHash - deriving (Show) - -data Status m = Status - { _branchStatus :: Map Branch.Hash (BranchStatus m), - _termStatus :: Map Hash TermStatus, - _declStatus :: Map Hash DeclStatus, - _patchStatus :: Map Branch.EditHash PatchStatus, - _watchStatus :: Map (WatchKind, Reference.Id) WatchStatus - } - -emptyStatus :: Status m -emptyStatus = Status mempty mempty mempty mempty mempty - -makeLenses ''Status - -sync12 :: - (MonadIO f, MonadReader (Env p x) f, RS m n a, Applicative m) => - (m ~> n) -> - f (Sync n (Entity m)) -sync12 t = pure $ Sync (trySync t) - --- For each entity, we have to check to see --- a) if it exists (if not, mark as missing in Status) --- b) if any of its dependencies have not yet been synced --- (if so, note as validation) --- c) if any of its dependencies are missing from the source codebase --- (if so, then filter them if possible, otherwise give this entity an --- error Status) - -trySync :: - forall m n a. - (R m n a, S m n, Applicative m) => - (m ~> n) -> - Entity m -> - n (TrySyncResult (Entity m)) -trySync t e = do - Env _ dest _ <- Reader.ask - case e of - C h mc -> do - getBranchStatus h >>= \case - Just {} -> pure Sync.PreviouslyDone - Nothing -> t (Codebase.branchExists dest h) >>= \case - True -> setBranchStatus h BranchOk $> Sync.PreviouslyDone - False -> do - c <- t mc - runValidateT (repairBranch c) >>= \case - Left deps -> pure . Sync.Missing $ Foldable.toList deps - Right c' -> do - let h' = Causal.currentHash c' - t $ Codebase.putBranch dest (Branch.Branch c') - if h == h' - then do - setBranchStatus h BranchOk - pure Sync.Done - else do - setBranchStatus h (BranchReplaced h' c') - pure Sync.NonFatalError - T h -> - getTermStatus h >>= \case - Just {} -> pure Sync.PreviouslyDone - Nothing -> do - runExceptT (runValidateT (checkTermComponent (lift . lift . t) h)) >>= \case - Left status -> do - setTermStatus h status - pure Sync.NonFatalError - Right (Left deps) -> - pure . Sync.Missing $ Foldable.toList deps - Right (Right component) -> do - Foldable.for_ (zip component [0 ..]) \((term, typ), i) -> - t $ Codebase.putTerm dest (Reference.Id h i) term typ - setTermStatus h TermOk - pure Sync.Done - W k r -> - getWatchStatus k r >>= \case - Just {} -> pure Sync.PreviouslyDone - Nothing -> do - runExceptT (runValidateT (checkWatchComponent (lift . lift . t) k r)) >>= \case - Left status -> do - setWatchStatus k r status - pure Sync.NonFatalError - Right (Left deps) -> - pure . Sync.Missing $ Foldable.toList deps - Right (Right watchResult) -> do - t $ Codebase.putWatch dest k r watchResult - setWatchStatus k r WatchOk - pure Sync.Done - D h -> - getDeclStatus h >>= \case - Just {} -> pure Sync.PreviouslyDone - Nothing -> - runExceptT (runValidateT (checkDeclComponent (lift . lift . t) h)) >>= \case - Left status -> do - setDeclStatus h status - pure Sync.NonFatalError - Right (Left deps) -> - pure . Sync.Missing $ Foldable.toList deps - Right (Right component) -> do - Foldable.for_ (zip component [0 ..]) \(decl, i) -> - t $ Codebase.putTypeDeclaration dest (Reference.Id h i) decl - setDeclStatus h DeclOk - pure Sync.Done - P h -> - getPatchStatus h >>= \case - Just {} -> pure Sync.PreviouslyDone - Nothing -> - runExceptT (runValidateT (checkPatch (lift . lift . t) h)) >>= \case - Left status -> setPatchStatus h status >> pure Sync.NonFatalError - Right (Left deps) -> pure . Sync.Missing $ Foldable.toList deps - Right (Right (h', patch')) -> do - t $ Codebase.putPatch dest h' patch' - setPatchStatus h if h == h' then PatchOk else PatchReplaced h' - pure Sync.Done - -getBranchStatus :: S m n => Branch.Hash -> n (Maybe (BranchStatus m)) -getBranchStatus h = use (branchStatus . at h) - -getTermStatus :: S m n => Hash -> n (Maybe TermStatus) -getTermStatus h = use (termStatus . at h) - -getDeclStatus :: S m n => Hash -> n (Maybe DeclStatus) -getDeclStatus h = use (declStatus . at h) - -getPatchStatus :: S m n => Hash -> n (Maybe PatchStatus) -getPatchStatus h = use (patchStatus . at h) - -getWatchStatus :: S m n => WatchKind -> Reference.Id -> n (Maybe WatchStatus) -getWatchStatus w r = use (watchStatus . at (w, r)) - -setTermStatus :: S m n => Hash -> TermStatus -> n () -setTermStatus h s = do - when debug (traceM $ "setTermStatus " ++ take 10 (show h) ++ " " ++ show s) - termStatus . at h .= Just s - -setDeclStatus :: S m n => Hash -> DeclStatus -> n () -setDeclStatus h s = do - when debug (traceM $ "setDeclStatus " ++ take 10 (show h) ++ " " ++ show s) - declStatus . at h .= Just s - -setPatchStatus :: S m n => Hash -> PatchStatus -> n () -setPatchStatus h s = do - when debug (traceM $ "setPatchStatus " ++ take 10 (show h) ++ " " ++ show s) - patchStatus . at h .= Just s - -setBranchStatus :: S m n => Branch.Hash -> BranchStatus m -> n () -setBranchStatus h s = do - when debug (traceM $ "setBranchStatus " ++ take 10 (show h) ++ " " ++ show s) - branchStatus . at h .= Just s - -setWatchStatus :: S m n => WatchKind -> Reference.Id -> WatchStatus -> n () -setWatchStatus k r@(Reference.Id h i) s = do - when debug (traceM $ "setWatchStatus " ++ show k ++ " " ++ take 10 (show h) ++ " " ++ show i) - watchStatus . at (k, r) .= Just s - --- | verifies that the entire term component, the types-of-terms, and dependencies are available -checkTermComponent :: - forall m n a. - (RS m n a, V m n, E TermStatus n) => - (m ~> n) -> - Hash -> - n [(Term Symbol a, Type Symbol a)] -checkTermComponent t h = do - Env src _ _ <- Reader.ask - n <- t $ Codebase.getTermComponentLength src h - for [Reference.Id h i | i <- [0 .. n -1]] \r -> do - term <- t $ Codebase.getTerm src r - typ <- t $ Codebase.getTypeOfTermImpl src r - case (term, typ) of - (Nothing, _) -> Except.throwError TermMissing - (_, Nothing) -> Except.throwError TermMissingType - (Just term, Just typ) -> do - let termDeps = Term.labeledDependencies term - typeDeps = Type.dependencies typ - let checkDecl = \case - Reference.Builtin {} -> pure () - Reference.Derived h' _ -> - getDeclStatus h' >>= \case - Just DeclOk -> pure () - Just _ -> Except.throwError TermMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ D h' - checkTerm = \case - Reference.Builtin {} -> - pure () - Reference.Derived h' _ - | h == h' -> pure () -- ignore self-references - | otherwise -> getTermStatus h' >>= \case - Just TermOk -> pure () - Just _ -> Except.throwError TermMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ T h' - traverse_ (bitraverse_ checkDecl checkTerm . LD.toReference) termDeps - traverse_ checkDecl typeDeps - pure (term, typ) - -checkWatchComponent :: - forall m n a. - (RS m n a, V m n, E WatchStatus n) => - (m ~> n) -> - WatchKind -> - Reference.Id -> - n (Term Symbol a) -checkWatchComponent t k r@(Reference.Id h _) = do - Env src _ _ <- Reader.ask - (t $ Codebase.getWatch src k r) >>= \case - Nothing -> Except.throwError WatchNotCached - Just watchResult -> do - let deps = Term.labeledDependencies watchResult - let checkDecl = \case - Reference.Builtin {} -> pure () - Reference.Derived h' _ -> - getDeclStatus h' >>= \case - Just DeclOk -> pure () - Just _ -> Except.throwError WatchMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ D h' - checkTerm = \case - Reference.Builtin {} -> - pure () - Reference.Derived h' _ - | h == h' -> pure () -- ignore self-references - | otherwise -> getTermStatus h' >>= \case - Just TermOk -> pure () - Just _ -> Except.throwError WatchMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ T h' - traverse_ (bitraverse_ checkDecl checkTerm . LD.toReference) deps - pure watchResult - -checkDeclComponent :: - forall m n a. - (RS m n a, E DeclStatus n, V m n) => - (m ~> n) -> - Hash -> - n [Decl Symbol a] -checkDeclComponent t h = do - Env src _ _ <- Reader.ask - n <- t $ Codebase.getDeclComponentLength src h - for [Reference.Id h i | i <- [0 .. n - 1]] \r -> do - decl <- t $ Codebase.getTypeDeclaration src r - case decl of - Nothing -> Except.throwError DeclMissing - Just decl -> do - let deps = DD.declDependencies decl - checkDecl = \case - Reference.Builtin {} -> pure () - Reference.Derived h' _ -> - unless (h == h') $ getDeclStatus h' >>= \case - Just DeclOk -> pure () - Just _ -> Except.throwError DeclMissingDependencies - Nothing -> Validate.dispute . Set.singleton $ D h' - traverse_ checkDecl deps - pure decl - -checkPatch :: - forall m n a. - (RS m n a, E PatchStatus n, V m n) => - (m ~> n) -> - Branch.EditHash -> - n (Branch.EditHash, Patch) -checkPatch t h = do - Env src _ _ <- Reader.ask - t (Codebase.getPatch src h) >>= \case - Nothing -> Except.throwError PatchMissing - Just patch -> do - (h', patch) <- repairPatch patch - if h == h' - then setPatchStatus h PatchOk - else setPatchStatus h (PatchReplaced h') - pure (h', patch) - -repairBranch :: - forall m n. - (S m n, V m n, Applicative m) => - UnwrappedBranch m -> - n (UnwrappedBranch m) -repairBranch = \case - Causal.One _h e -> do - e' <- repairBranch0 e - pure $ Causal.one e' - Causal.Cons _h e (ht, mt) -> do - getBranchStatus ht >>= \case - Nothing -> Validate.refute . Set.singleton $ C ht mt - Just tailStatus -> do - e' <- repairBranch0 e - pure case tailStatus of - BranchOk -> Causal.cons' e' ht mt - BranchReplaced _ht' t' -> Causal.consDistinct e' t' - Causal.Merge _h e (Map.toList -> tails) -> do - tails' <- - Map.fromList <$> for tails \(ht, mt) -> - getBranchStatus ht >>= \case - Nothing -> Validate.refute . Set.singleton $ C ht mt - Just tailStatus -> - pure case tailStatus of - BranchOk -> (ht, mt) - BranchReplaced ht' t' -> (ht', pure t') - e' <- repairBranch0 e - let h' = Causal.RawHash $ Causal.hash (e', Map.keys tails') - pure $ Causal.Merge h' e' tails' - -repairBranch0 :: - forall m n. - (S m n, V m n, Applicative m) => - Branch.Branch0 m -> - n (Branch.Branch0 m) -repairBranch0 b = do - terms' <- filterBranchTermStar (view Branch.terms b) - types' <- filterBranchTypeStar (view Branch.types b) - children' <- filterBranchChildren (view Branch.children b) - edits' <- filterBranchEdits (view Branch.edits b) - pure @n $ Branch.branch0 terms' types' children' edits' - -repairPatch :: - forall m n. - (MonadState (Status m) n, MonadValidate (Set (Entity m)) n) => - Patch -> - n (Branch.EditHash, Patch) -repairPatch (Patch termEdits typeEdits) = do - termEdits' <- Relation.filterM (uncurry filterTermEdit) termEdits - typeEdits' <- Relation.filterM (uncurry filterTypeEdit) typeEdits - let patch = Patch termEdits' typeEdits' - pure (H.accumulate' patch, patch) - where - -- filtering `old` is part of a workaround for ucm currently - -- requiring the actual component in order to construct a - -- reference to it. See Sync22.syncPatchLocalIds - helpTermEdit = \case - Reference.Builtin _ -> pure True - Reference.Derived h _ -> - getTermStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ T h - Just TermOk -> pure True - Just _ -> pure False - helpTypeEdit = \case - Reference.Builtin _ -> pure True - Reference.Derived h _ -> - getDeclStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ D h - Just DeclOk -> pure True - Just _ -> pure False - filterTermEdit old new = do - oldOk <- helpTermEdit old - newOk <- case new of - TermEdit.Deprecate -> pure True - TermEdit.Replace new _typing -> helpTermEdit new - pure $ oldOk && newOk - filterTypeEdit old new = do - oldOk <- helpTypeEdit old - newOk <- case new of - TypeEdit.Deprecate -> pure True - TypeEdit.Replace new -> helpTypeEdit new - pure $ oldOk && newOk - -filterBranchTermStar :: (S m n, V m n) => Metadata.Star Referent NameSegment -> n (Metadata.Star Referent NameSegment) -filterBranchTermStar (Star3 _refs names _mdType md) = do - names' <- filterTermNames names - let refs' = Relation.dom names' - mdTypeValues' <- filterMetadata $ Relation.restrictDom refs' md - let mdType' = Relation.mapRan fst mdTypeValues' - pure $ Star3 refs' names' mdType' mdTypeValues' - -filterBranchTypeStar :: (S m n, V m n) => Metadata.Star Reference.Reference NameSegment -> n (Metadata.Star Reference.Reference NameSegment) -filterBranchTypeStar (Star3 _refs names _mdType md) = do - names' <- filterTypeNames names - let refs' = Relation.dom names' - mdTypeValues' <- filterMetadata $ Relation.restrictDom refs' md - let mdType' = Relation.mapRan fst mdTypeValues' - pure $ Star3 refs' names mdType' mdTypeValues' - -filterMetadata :: (S m n, V m n, Ord r) => Relation r (Metadata.Type, Metadata.Value) -> n (Relation r (Metadata.Type, Metadata.Value)) -filterMetadata = Relation.filterRanM \(t, v) -> - validateTypeReference t &&^ validateTermReference v - -filterTermNames :: (S m n, V m n) => Relation Referent NameSegment -> n (Relation Referent NameSegment) -filterTermNames = Relation.filterDomM validateTermReferent - -validateTermReferent :: (S m n, V m n) => Referent -> n Bool -validateTermReferent = \case - Referent.Ref r -> validateTermReference r - Referent.Con r _ _ -> validateTypeReference r - -validateTermReference :: (S m n, V m n) => Reference.Reference -> n Bool -validateTermReference = \case - Reference.Builtin {} -> pure True - Reference.Derived h _i -> - getTermStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ T h - Just TermOk -> pure True - Just _ -> pure False - -validateTypeReference :: (S m n, V m n) => Reference.Reference -> n Bool -validateTypeReference = \case - Reference.Builtin {} -> pure True - Reference.Derived h _i -> - getDeclStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ D h - Just DeclOk -> pure True - Just _ -> pure False - -filterTypeNames :: (S m n, V m n) => Relation Reference.Reference NameSegment -> n (Relation Reference.Reference NameSegment) -filterTypeNames = Relation.filterDomM validateTypeReference - -filterBranchChildren :: (S m n, V m n, Applicative m) => Map NameSegment (Branch.Branch m) -> n (Map NameSegment (Branch.Branch m)) -filterBranchChildren = fmap Map.fromList . traverse go . Map.toList - where - go orig@(ns, Branch.Branch c) = - getBranchStatus (Causal.currentHash c) >>= \case - Nothing -> Validate.refute . Set.singleton $ C (Causal.currentHash c) (pure c) - Just BranchOk -> pure orig - Just (BranchReplaced _h c) -> pure (ns, Branch.Branch c) - --- | if a dependency is missing, then remove the entry -filterBranchEdits :: (S m n, V m n) => Map NameSegment (Branch.EditHash, m Patch) -> n (Map NameSegment (Branch.EditHash, m Patch)) -filterBranchEdits = fmap (Map.fromList . catMaybes) . traverse go . Map.toList - where - go (ns, (h, _)) = - getPatchStatus h >>= \case - Nothing -> Validate.refute . Set.singleton $ P h - Just PatchOk -> rebuild h - Just PatchMissing -> pure Nothing - Just (PatchReplaced h) -> rebuild h - where - rebuild h = pure $ Just (ns, (h, err h)) - err h = error $ "expected to short-circuit already-synced patch " ++ show h - -runSrc, runDest :: R m n a => (Codebase m Symbol a -> x) -> n x -runSrc = (Reader.reader srcCodebase <&>) -runDest = (Reader.reader destCodebase <&>) - -runDest' :: R m n x => ReaderT Connection n a -> n a -runDest' ma = Reader.reader destConnection >>= flip runDB ma - -runDB :: Connection -> ReaderT Connection m a -> m a -runDB conn action = Reader.runReaderT action conn - -data DoneCount = DoneCount - { _doneBranches :: Int, - _doneTerms :: Int, - _doneDecls :: Int, - _donePatches :: Int, - _doneWatches :: Int - } - -data ErrorCount = ErrorCount - { _errorBranches :: Int, - _errorTerms :: Int, - _errorDecls :: Int, - _errorPatches :: Int, - _errorWatches :: Int - } - -emptyDoneCount :: DoneCount -emptyDoneCount = DoneCount 0 0 0 0 0 - -emptyErrorCount :: ErrorCount -emptyErrorCount = ErrorCount 0 0 0 0 0 - -makeLenses ''DoneCount -makeLenses ''ErrorCount - -type ProgressState m = (DoneCount, ErrorCount, Status m) - -simpleProgress :: MonadState (ProgressState m) n => MonadIO n => Sync.Progress n (Entity m) -simpleProgress = Sync.Progress need done error allDone - where - newlines = False - logEntities = False - -- ignore need - need e = - when logEntities $ liftIO $ putStrLn $ "need " ++ show e - - done e = do - when logEntities $ liftIO $ putStrLn $ "done " ++ show e - case e of - C {} -> _1 . doneBranches += 1 - T {} -> _1 . doneTerms += 1 - D {} -> _1 . doneDecls += 1 - P {} -> _1 . donePatches += 1 - W {} -> _1 . doneWatches += 1 - printProgress - - error e = do - when logEntities $ liftIO $ putStrLn $ "error " ++ show e - case e of - C {} -> _2 . errorBranches += 1 - T {} -> _2 . errorTerms += 1 - D {} -> _2 . errorDecls += 1 - P {} -> _2 . errorPatches += 1 - W {} -> _2 . errorWatches += 1 - printProgress - - allDone :: MonadState (DoneCount, ErrorCount, Status m) n => MonadIO n => n () - allDone = do - Status branches terms decls patches watches <- Lens.use Lens._3 - liftIO $ putStrLn "Finished." - Foldable.for_ (Map.toList decls) \(h, s) -> case s of - DeclOk -> pure () - DeclMissing -> liftIO . putStrLn $ "I couldn't find the decl " ++ show h ++ ", so I filtered it out of the sync." - DeclMissingDependencies -> liftIO . putStrLn $ "One or more dependencies of decl " ++ show h ++ " were missing, so I filtered it out of the sync." - Foldable.for_ (Map.toList terms) \(h, s) -> case s of - TermOk -> pure () - TermMissing -> liftIO . putStrLn $ "I couldn't find the term " ++ show h ++ "so I filtered it out of the sync." - TermMissingType -> liftIO . putStrLn $ "The type of term " ++ show h ++ " was missing, so I filtered it out of the sync." - TermMissingDependencies -> liftIO . putStrLn $ "One or more dependencies of term " ++ show h ++ " were missing, so I filtered it out of the sync." - Foldable.for_ (Map.toList watches) \((k, r), s) -> case s of - WatchOk -> pure () - WatchNotCached -> pure () - WatchMissingDependencies -> liftIO . putStrLn $ "One or more dependencies of watch expression " ++ show (k, r) ++ " were missing, so I skipped it." - Foldable.for_ (Map.toList patches) \(h, s) -> case s of - PatchOk -> pure () - PatchMissing -> liftIO . putStrLn $ "I couldn't find the patch " ++ show h ++ ", so I filtered it out of the sync." - PatchReplaced h' -> liftIO . putStrLn $ "I replaced the patch " ++ show h ++ " with the filtered version " ++ show h' ++ "." - Foldable.for_ (Map.toList branches) \(h, s) -> case s of - BranchOk -> pure () - BranchReplaced h' _ -> liftIO . putStrLn $ "I replaced the branch " ++ show h ++ " with the filtered version " ++ show h' ++ "." - - printProgress :: MonadState (ProgressState m) n => MonadIO n => n () - printProgress = do - (DoneCount b t d p w, ErrorCount b' t' d' p' w', _) <- State.get - let ways :: [Maybe String] = - [ Monoid.whenM (b > 0 || b' > 0) (Just $ show (b + b') ++ " branches" ++ Monoid.whenM (b' > 0) (" (" ++ show b' ++ " repaired)")), - Monoid.whenM (t > 0 || t' > 0) (Just $ show t ++ " terms" ++ Monoid.whenM (t' > 0) (" (" ++ show t' ++ " errors)")), - Monoid.whenM (w > 0 || w' > 0) (Just $ show w ++ " test results" ++ Monoid.whenM (w' > 0) (" (" ++ show w' ++ " errors)")), - Monoid.whenM (d > 0 || d' > 0) (Just $ show d ++ " types" ++ Monoid.whenM (d' > 0) (" (" ++ show d' ++ " errors)")), - Monoid.whenM (p > 0 || p' > 0) (Just $ show p ++ " patches" ++ Monoid.whenM (p' > 0) (" (" ++ show p' ++ " errors)")) - ] - liftIO do - putStr $ "\rSynced " ++ List.intercalate ", " (catMaybes ways) ++ Monoid.whenM newlines "\n" - hFlush stdout - -instance Show (Entity m) where - show = show . toEntity' - -data Entity' - = C' Branch.Hash - | T' Hash - | D' Hash - | P' Branch.EditHash - | W' WatchKind Reference.Id - deriving (Eq, Ord, Show) - -toEntity' :: Entity m -> Entity' -toEntity' = \case - C h _ -> C' h - T h -> T' h - D h -> D' h - P h -> P' h - W k r -> W' k r - -instance Eq (Entity m) where - x == y = toEntity' x == toEntity' y - -instance Ord (Entity m) where - x `compare` y = toEntity' x `compare` toEntity' y - -data BranchStatus' - = BranchOk' - | BranchReplaced' Branch.Hash - deriving (Eq, Ord, Show) - -toBranchStatus' :: BranchStatus m -> BranchStatus' -toBranchStatus' = \case - BranchOk -> BranchOk' - BranchReplaced h _ -> BranchReplaced' h - -instance Eq (BranchStatus m) where - x == y = toBranchStatus' x == toBranchStatus' y - -instance Ord (BranchStatus m) where - x `compare` y = toBranchStatus' x `compare` toBranchStatus' y - -instance Show (BranchStatus m) where - show = show . toBranchStatus' diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 8ee43dd277..012d6df7b5 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -37,7 +37,6 @@ library Unison.Codebase.Causal.FoldHistory Unison.Codebase.CodeLookup Unison.Codebase.CodeLookup.Util - Unison.Codebase.Conversion.Sync12 Unison.Codebase.Editor.AuthorInfo Unison.Codebase.Editor.Command Unison.Codebase.Editor.DisplayObject From c98f16e024f19a3f3116a009d2a40304d6330b3a Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Tue, 31 Aug 2021 21:59:47 -0400 Subject: [PATCH 004/297] builds and runs, but tests failing unexpectedly hash changes are expected due to the dropped field, we should factor that out still, per #2373; we could put the component lengths back into the hashing representation, if we wanted to put off a hash format update. --- .../src/Unison/Codebase/Editor/Command.hs | 7 +- .../src/Unison/Codebase/Editor/Propagate.hs | 80 +++++++++---------- .../src/Unison/Codebase/SqliteCodebase.hs | 10 +-- .../src/Unison/Codebase/Type.hs | 12 +-- .../tests/Unison/Test/DataDeclaration.hs | 12 +-- parser-typechecker/tests/Unison/Test/Term.hs | 6 +- unison-core/src/Unison/DataDeclaration.hs | 8 +- unison-core/src/Unison/Reference.hs | 8 +- unison-core/src/Unison/Term.hs | 12 +-- unison-core/src/Unison/Var/RefNamed.hs | 3 + 10 files changed, 83 insertions(+), 75 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs index 08d2bf3964..b13a97a87c 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs @@ -204,7 +204,7 @@ data Command m i v a where LoadReflog :: Command m i v [Reflog.Entry Branch.Hash] LoadTerm :: Reference.Id -> Command m i v (Maybe (Term v Ann)) - LoadTermComponent :: H.Hash -> Command m i v (Maybe [Term v Ann]) + -- LoadTermComponent :: H.Hash -> Command m i v (Maybe [Term v Ann]) LoadTermComponentWithType :: H.Hash -> Command m i v (Maybe [(Term v Ann, Type v Ann)]) -- todo: change this to take Reference and return DeclOrBuiltin @@ -221,8 +221,8 @@ data Command m i v a where -- (why, again? because we can know from the Reference?) IsTerm :: Reference -> Command m i v Bool IsType :: Reference -> Command m i v Bool - IsDerivedTerm :: H.Hash -> Command m i v Bool - IsDerivedType :: H.Hash -> Command m i v Bool + -- IsDerivedTerm :: H.Hash -> Command m i v Bool + -- IsDerivedType :: H.Hash -> Command m i v Bool -- Get the immediate (not transitive) dependents of the given reference -- This might include historical definitions not in any current path; these @@ -294,6 +294,7 @@ commandName = \case IsTerm{} -> "IsTerm" IsType{} -> "IsType" GetDependents{} -> "GetDependents" + GetDependentsOfComponent{} -> "GetDependentsOfComponent" GetTermsOfType{} -> "GetTermsOfType" GetTermsMentioningType{} -> "GetTermsMentioningType" Execute{} -> "Execute" diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs index 93adff6848..b366f5ad10 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs @@ -4,6 +4,7 @@ {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE ViewPatterns #-} module Unison.Codebase.Editor.Propagate (computeFrontier, propagateAndApply) where @@ -23,6 +24,7 @@ import Unison.Codebase.Patch ( Patch(..) ) import qualified Unison.Codebase.Patch as Patch import Unison.DataDeclaration ( Decl ) import qualified Unison.DataDeclaration as Decl +import Unison.Hash (Hash) import qualified Unison.Name as Name import Unison.Names3 ( Names0 ) import qualified Unison.Names2 as Names @@ -304,12 +306,9 @@ propagate rootNames patch b = case validatePatch patch of (Nothing , seen') -> collectEdits es seen' todo (Just edits', seen') -> do -- plan to update the dependents of this component too - dependents <- - fmap Set.unions - . traverse (eval . GetDependents) - . toList - . Reference.members - $ Reference.componentFor r + dependents <- case r of + Reference.Builtin{} -> eval $ GetDependents r + Reference.Derived h _i -> eval $ GetDependentsOfComponent h let todo' = todo <> getOrdered dependents collectEdits edits' seen' todo' @@ -459,6 +458,7 @@ propagate rootNames patch b = case validatePatch patch of :: Patch -> Maybe (Map Reference TermEdit, Map Reference TypeEdit) validatePatch p = (,) <$> R.toMap (Patch._termEdits p) <*> R.toMap (Patch._typeEdits p) + -- Turns a cycle of references into a term with free vars that we can edit -- and hash again. -- todo: Maybe this an others can be moved to HandleCommand, in the @@ -470,27 +470,29 @@ propagate rootNames patch b = case validatePatch patch of . (Applicative m, Var v) => Reference -> F m i v (Map v (Reference, Term v _, Type v _)) - unhashTermComponent ref = do - let component = Reference.members $ Reference.componentFor ref - termInfo - :: Reference -> F m i v (Maybe (Reference, (Term v Ann, Type v Ann))) - termInfo termRef = do - tpm <- eval $ LoadTypeOfTerm termRef - tp <- maybe (error $ "Missing type for term " <> show termRef) - pure - tpm - case termRef of - Reference.DerivedId id -> do - mtm <- eval $ LoadTerm id - tm <- maybe (error $ "Missing term with id " <> show id) pure mtm - pure $ Just (termRef, (tm, tp)) - Reference.Builtin{} -> pure Nothing - unhash m = - let f (_oldTm, oldTyp) (v, newTm) = (v, newTm, oldTyp) - m' = Map.intersectionWith f m (Term.unhashComponent (fst <$> m)) - in Map.fromList - [ (v, (r, tm, tp)) | (r, (v, tm, tp)) <- Map.toList m' ] - unhash . Map.fromList . catMaybes <$> traverse termInfo (toList component) + unhashTermComponent r = case Reference.toId r of + Nothing -> pure mempty + Just r -> do + unhashed <- unhashTermComponent' (Reference.idToHash r) + pure $ fmap (over _1 Reference.DerivedId) unhashed + + unhashTermComponent' + :: forall m v + . (Applicative m, Var v) + => Hash + -> F m i v (Map v (Reference.Id, Term v _, Type v _)) + unhashTermComponent' h = + eval (LoadTermComponentWithType h) <&> foldMap \termsWithTypes -> + unhash $ Map.fromList (Reference.componentFor' h termsWithTypes) + where + unhash m = + -- this grabs the corresponding input map values (with types) + -- and arranges them with the newly unhashed terms. + let f (_oldTm, typ) (v, newTm) = (v, newTm, typ) + m' = Map.intersectionWith f m (Term.unhashComponent (fst <$> m)) + in Map.fromList + [ (v, (r, tm, tp)) | (r, (v, tm, tp)) <- Map.toList m' ] + verifyTermComponent :: Map v (Reference, Term v _, a) -> Edits v @@ -524,22 +526,20 @@ propagate rootNames patch b = case validatePatch patch of >>= hush unhashTypeComponent :: Var v => Reference -> F m i v (Map v (Reference, Decl v Ann)) -unhashTypeComponent ref = do - let - component = Reference.members $ Reference.componentFor ref - typeInfo :: Reference -> F m i v (Maybe (Reference, Decl v Ann)) - typeInfo typeRef = case typeRef of - Reference.DerivedId id -> do - declm <- eval $ LoadType id - decl <- maybe (error $ "Missing type declaration " <> show typeRef) - pure - declm - pure $ Just (typeRef, decl) - Reference.Builtin{} -> pure Nothing +unhashTypeComponent r = case Reference.toId r of + Nothing -> pure mempty + Just id -> do + unhashed <- unhashTypeComponent' (Reference.idToHash id) + pure $ over _1 Reference.DerivedId <$> unhashed + +unhashTypeComponent' :: Var v => Hash -> F m i v (Map v (Reference.Id, Decl v Ann)) +unhashTypeComponent' h = + eval (LoadDeclComponent h) <&> foldMap \decls -> + unhash $ Map.fromList (Reference.componentFor' h decls) + where unhash = Map.fromList . map reshuffle . Map.toList . Decl.unhashComponent where reshuffle (r, (v, decl)) = (v, (r, decl)) - unhash . Map.fromList . catMaybes <$> traverse typeInfo (toList component) applyDeprecations :: Applicative m => Patch -> Branch0 m -> Branch0 m applyDeprecations patch = deleteDeprecatedTerms deprecatedTerms diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index d51eabde5c..fac515d759 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -740,11 +740,11 @@ sqliteCodebase debugName root = do (Cache.applyDefined declCache getTypeDeclaration) putTerm putTypeDeclaration - _getTermComponent - _getTermComponentWithTypes - _getTermComponentLength - _getDeclComponent - _getDeclComponentLength + -- _getTermComponent + -- _getTermComponentWithTypes + -- _getTermComponentLength + -- _getDeclComponent + -- _getDeclComponentLength (getRootBranch rootBranchCache) (putRootBranch rootBranchCache) (rootBranchUpdates rootBranchCache) diff --git a/parser-typechecker/src/Unison/Codebase/Type.hs b/parser-typechecker/src/Unison/Codebase/Type.hs index f27bc0efe0..0525b2bda8 100644 --- a/parser-typechecker/src/Unison/Codebase/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Type.hs @@ -14,7 +14,7 @@ import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError) import Unison.Codebase.SyncMode (SyncMode) import Unison.CodebasePath (CodebasePath) import Unison.DataDeclaration (Decl) -import Unison.Hash (Hash) +-- import Unison.Hash (Hash) import Unison.Prelude import Unison.Reference (Reference) import qualified Unison.Reference as Reference @@ -39,11 +39,11 @@ data Codebase m v a = Codebase getTypeDeclaration :: Reference.Id -> m (Maybe (Decl v a)), putTerm :: Reference.Id -> Term v a -> Type v a -> m (), putTypeDeclaration :: Reference.Id -> Decl v a -> m (), - getTermComponent :: Hash -> m (Maybe [Term v a]), - getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), - getTermComponentLength :: Hash -> m (Reference.CycleSize), - getDeclComponent :: Hash -> m (Maybe [Decl v a]), - getDeclComponentLength :: Hash -> m (Reference.CycleSize), + -- getTermComponent :: Hash -> m (Maybe [Term v a]), + -- getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), + -- getTermComponentLength :: Hash -> m (Reference.CycleSize), + -- getDeclComponent :: Hash -> m (Maybe [Decl v a]), + -- getDeclComponentLength :: Hash -> m (Reference.CycleSize), getRootBranch :: m (Either GetRootBranchError (Branch m)), putRootBranch :: Branch m -> m (), rootBranchUpdates :: m (IO (), IO (Set Branch.Hash)), diff --git a/parser-typechecker/tests/Unison/Test/DataDeclaration.hs b/parser-typechecker/tests/Unison/Test/DataDeclaration.hs index 613c549117..c00b502a4e 100644 --- a/parser-typechecker/tests/Unison/Test/DataDeclaration.hs +++ b/parser-typechecker/tests/Unison/Test/DataDeclaration.hs @@ -87,13 +87,13 @@ unhashComponentTest = tests forall = Type.forall () (-->) = Type.arrow () h = Hash.unsafeFromBase32Hex "abcd" - ref = R.Derived h 0 1 - a = Var.refNamed ref + ref = R.Id h 0 + a = Var.refIdNamed ref b = Var.named "b" nil = Var.named "Nil" - cons = Var.refNamed ref + cons = Var.refIdNamed ref listRef = ref - listType = Type.ref () listRef + listType = Type.refId () listRef listDecl = DataDeclaration { modifier = DD.Structural, annotation = (), @@ -103,9 +103,9 @@ unhashComponentTest = tests , ((), cons, forall b (var b --> listType `app` var b --> listType `app` var b)) ] } - component :: Map R.Reference (Decl Symbol ()) + component :: Map R.Id (Decl Symbol ()) component = Map.singleton listRef (Right listDecl) - component' :: Map R.Reference (Symbol, Decl Symbol ()) + component' :: Map R.Id (Symbol, Decl Symbol ()) component' = DD.unhashComponent component (listVar, Right listDecl') = component' ! listRef listType' = var listVar diff --git a/parser-typechecker/tests/Unison/Test/Term.hs b/parser-typechecker/tests/Unison/Test/Term.hs index 14b39e34df..fbc92b3f88 100644 --- a/parser-typechecker/tests/Unison/Test/Term.hs +++ b/parser-typechecker/tests/Unison/Test/Term.hs @@ -41,10 +41,10 @@ test = scope "term" $ tests ok , scope "Term.unhashComponent" $ let h = Hash.unsafeFromBase32Hex "abcd" - ref = R.Derived h 0 1 - v1 = Var.refNamed @Symbol ref + ref = R.Id h 0 + v1 = Var.refIdNamed @Symbol ref -- input component: `ref = \v1 -> ref` - component = Map.singleton ref (Term.lam () v1 (Term.ref () ref)) + component = Map.singleton ref (Term.lam () v1 (Term.refId () ref)) component' = Term.unhashComponent component -- expected unhashed component: `v2 = \v1 -> v2`, where `v2 /= v1`, -- i.e. `v2` cannot be just `ref` converted to a ref-named variable, diff --git a/unison-core/src/Unison/DataDeclaration.hs b/unison-core/src/Unison/DataDeclaration.hs index 90f6196402..f0ad59f7bf 100644 --- a/unison-core/src/Unison/DataDeclaration.hs +++ b/unison-core/src/Unison/DataDeclaration.hs @@ -287,16 +287,16 @@ updateDependencies typeUpdates decl = back $ dataDecl -- This converts `Reference`s it finds that are in the input `Map` -- back to free variables unhashComponent - :: forall v a. Var v => Map Reference (Decl v a) -> Map Reference (v, Decl v a) + :: forall v a. Var v => Map Reference.Id (Decl v a) -> Map Reference.Id (v, Decl v a) unhashComponent m = let usedVars = foldMap allVars' m - m' :: Map Reference (v, Decl v a) + m' :: Map Reference.Id (v, Decl v a) m' = evalState (Map.traverseWithKey assignVar m) usedVars where - assignVar r d = (,d) <$> ABT.freshenS (Var.refNamed r) + assignVar r d = (,d) <$> ABT.freshenS (Var.refIdNamed r) unhash1 = ABT.rebuildUp' go where - go e@(Type.Ref' r) = case Map.lookup r m' of + go e@(Type.Ref' (Reference.DerivedId r)) = case Map.lookup r m' of Nothing -> e Just (v,_) -> Type.var (ABT.annotation e) v go e = e diff --git a/unison-core/src/Unison/Reference.hs b/unison-core/src/Unison/Reference.hs index 5cc2346369..1b3a212277 100644 --- a/unison-core/src/Unison/Reference.hs +++ b/unison-core/src/Unison/Reference.hs @@ -29,6 +29,7 @@ module Unison.Reference toText, unsafeId, toShortHash, + idToHash, idToShortHash) where import Unison.Prelude @@ -68,6 +69,9 @@ unsafeId (Builtin b) = error $ "Tried to get the hash of builtin " <> Text.unpack b <> "." unsafeId (DerivedId x) = x +idToHash :: Id -> H.Hash +idToHash (Id h _) = h + idToShortHash :: Id -> ShortHash idToShortHash = toShortHash . DerivedId @@ -121,8 +125,8 @@ componentFor r n = case r of b@Builtin{} -> Component (Set.singleton b) Derived h _ -> Component . Set.fromList $ Derived h <$> [0 .. n] -componentFor' :: H.Hash -> [a] -> [(Reference, a)] -componentFor' h as = [ (Derived h i, a) | (fromIntegral -> i, a) <- zip [0..] as] +componentFor' :: H.Hash -> [a] -> [(Id, a)] +componentFor' h as = [ (Id h i, a) | (fromIntegral -> i, a) <- zip [0..] as] derivedBase32Hex :: Text -> Pos -> Reference derivedBase32Hex b32Hex i = DerivedId (Id (fromMaybe msg h) i) diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index 5d8df9915b..9f4333da71 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -984,21 +984,21 @@ etaReduceEtaVars tm = case tm of -- This converts `Reference`s it finds that are in the input `Map` -- back to free variables unhashComponent :: forall v a. Var v - => Map Reference (Term v a) - -> Map Reference (v, Term v a) + => Map Reference.Id (Term v a) + -> Map Reference.Id (v, Term v a) unhashComponent m = let usedVars = foldMap (Set.fromList . ABT.allVars) m - m' :: Map Reference (v, Term v a) + m' :: Map Reference.Id (v, Term v a) m' = evalState (Map.traverseWithKey assignVar m) usedVars where - assignVar r t = (,t) <$> ABT.freshenS (Var.refNamed r) + assignVar r t = (,t) <$> ABT.freshenS (Var.refIdNamed r) + unhash1 :: Term v a -> Term v a unhash1 = ABT.rebuildUp' go where - go e@(Ref' r) = case Map.lookup r m' of + go e@(Ref' (Reference.DerivedId r)) = case Map.lookup r m' of Nothing -> e Just (v, _) -> var (ABT.annotation e) v go e = e in second unhash1 <$> m' - hashComponents :: Var v => Map v (Term v a) -> Map v (Reference.Id, Term v a) hashComponents = ReferenceUtil.hashComponents $ refId () diff --git a/unison-core/src/Unison/Var/RefNamed.hs b/unison-core/src/Unison/Var/RefNamed.hs index 446359b20a..6b859239a3 100644 --- a/unison-core/src/Unison/Var/RefNamed.hs +++ b/unison-core/src/Unison/Var/RefNamed.hs @@ -11,3 +11,6 @@ import qualified Unison.Var as Var refNamed :: Var v => Reference -> v refNamed ref = Var.named ("ℍ" <> Reference.toText ref) + +refIdNamed :: Var v => Reference.Id -> v +refIdNamed = refNamed . Reference.DerivedId From 4bb607858c5304e78c549dcd7ac94f39f35dc008 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Tue, 31 Aug 2021 23:14:51 -0400 Subject: [PATCH 005/297] minor cleanup --- .../src/Unison/Codebase/Editor/Propagate.hs | 4 ++-- .../src/Unison/PrettyPrintEnv/Util.hs | 2 +- unison-core/src/Unison/Reference.hs | 16 +++------------- 3 files changed, 6 insertions(+), 16 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs index b366f5ad10..b232ba716f 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs @@ -483,7 +483,7 @@ propagate rootNames patch b = case validatePatch patch of -> F m i v (Map v (Reference.Id, Term v _, Type v _)) unhashTermComponent' h = eval (LoadTermComponentWithType h) <&> foldMap \termsWithTypes -> - unhash $ Map.fromList (Reference.componentFor' h termsWithTypes) + unhash $ Map.fromList (Reference.componentFor h termsWithTypes) where unhash m = -- this grabs the corresponding input map values (with types) @@ -535,7 +535,7 @@ unhashTypeComponent r = case Reference.toId r of unhashTypeComponent' :: Var v => Hash -> F m i v (Map v (Reference.Id, Decl v Ann)) unhashTypeComponent' h = eval (LoadDeclComponent h) <&> foldMap \decls -> - unhash $ Map.fromList (Reference.componentFor' h decls) + unhash $ Map.fromList (Reference.componentFor h decls) where unhash = Map.fromList . map reshuffle . Map.toList . Decl.unhashComponent diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs b/parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs index 66dc8a0102..cc7445d0ea 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs @@ -27,4 +27,4 @@ declarationPPE ppe ref = PrettyPrintEnv tm ty tm r = terms (suffixifiedPPE ppe) r ty r | hash r == rootH = types (unsuffixifiedPPE ppe) r - | otherwise = types (suffixifiedPPE ppe) r \ No newline at end of file + | otherwise = types (suffixifiedPPE ppe) r diff --git a/unison-core/src/Unison/Reference.hs b/unison-core/src/Unison/Reference.hs index 1b3a212277..84a8c16b41 100644 --- a/unison-core/src/Unison/Reference.hs +++ b/unison-core/src/Unison/Reference.hs @@ -12,11 +12,9 @@ module Unison.Reference Pos, CycleSize, Size, derivedBase32Hex, - Component, members, components, groupByComponent, componentFor, - componentFor', unsafeFromText, idFromText, isPrefixOf, @@ -35,7 +33,6 @@ module Unison.Reference import Unison.Prelude import qualified Data.Map as Map -import qualified Data.Set as Set import qualified Data.Text as Text import qualified Unison.Hash as H import Unison.Hashable as Hashable @@ -117,16 +114,9 @@ type Pos = Word64 type Size = CycleSize type CycleSize = Word64 -newtype Component = Component { members :: Set Reference } - --- Gives the component (dependency cycle) that the reference is a part of -componentFor :: Reference -> CycleSize -> Component -componentFor r n = case r of - b@Builtin{} -> Component (Set.singleton b) - Derived h _ -> Component . Set.fromList $ Derived h <$> [0 .. n] - -componentFor' :: H.Hash -> [a] -> [(Id, a)] -componentFor' h as = [ (Id h i, a) | (fromIntegral -> i, a) <- zip [0..] as] +-- enumerate the `a`s and associates them with corresponding `Reference.Id`s +componentFor :: H.Hash -> [a] -> [(Id, a)] +componentFor h as = [ (Id h i, a) | (fromIntegral -> i, a) <- zip [0..] as] derivedBase32Hex :: Text -> Pos -> Reference derivedBase32Hex b32Hex i = DerivedId (Id (fromMaybe msg h) i) From e3ca0c4415e36e8c2a22a4ca6fcab56765132277 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 2 Sep 2021 10:56:34 -0400 Subject: [PATCH 006/297] Handle LoadTermComponentWithTypes --- .../U/Codebase/Sqlite/Operations.hs | 22 ++++++++++++++++++- .../src/Unison/Codebase/Editor/Command.hs | 3 ++- .../Unison/Codebase/Editor/HandleCommand.hs | 2 ++ .../src/Unison/Codebase/Editor/Propagate.hs | 2 +- .../src/Unison/Codebase/SqliteCodebase.hs | 9 +++++++- .../src/Unison/Codebase/Type.hs | 4 ++-- 6 files changed, 36 insertions(+), 6 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 5df2360b64..534982af1f 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -143,7 +143,8 @@ type EDB m = (Err m, DB m) type ErrString = String data DecodeError - = ErrTermElement Word64 + = ErrTermComponent + | ErrTermElement Word64 | ErrDeclElement Word64 | ErrFramedArrayLen | ErrTypeOfTerm C.Reference.Id @@ -200,6 +201,12 @@ primaryHashToMaybeObjectId h = do Just hashId -> Q.maybeObjectIdForPrimaryHashId hashId Nothing -> pure Nothing +anyHashToMaybeObjectId :: DB m => H.Hash -> m (Maybe Db.ObjectId) +anyHashToMaybeObjectId h = do + (Q.loadHashId . H.toBase32Hex) h >>= \case + Just hashId -> Q.maybeObjectIdForAnyHashId hashId + Nothing -> pure Nothing + primaryHashToMaybePatchObjectId :: EDB m => PatchHash -> m (Maybe Db.PatchObjectId) primaryHashToMaybePatchObjectId = (fmap . fmap) Db.PatchObjectId . primaryHashToMaybeObjectId . unPatchHash @@ -381,6 +388,9 @@ diffPatch (S.Patch fullTerms fullTypes) (S.Patch refTerms refTypes) = -- * Deserialization helpers +decodeTermComponent :: Err m => ByteString -> m S.Term.TermFormat +decodeTermComponent = getFromBytesOr ErrTermComponent S.getTermFormat + decodeComponentLengthOnly :: Err m => ByteString -> m Word64 decodeComponentLengthOnly = getFromBytesOr ErrFramedArrayLen (Get.skip 1 >> S.lengthFramedArray) @@ -427,6 +437,16 @@ componentByObjectId id = do -- * Codebase operations -- ** Saving & loading terms +loadTermComponent :: EDB m => H.Hash -> MaybeT m [(C.Term Symbol, C.Term.Type Symbol)] +loadTermComponent h = do + MaybeT (anyHashToMaybeObjectId h) + >>= liftQ . Q.loadObjectById + -- retrieve and deserialize the blob + >>= decodeTermComponent + >>= \case + S.Term.Term (S.Term.LocallyIndexedComponent elements) -> + lift . traverse (uncurry3 s2cTermWithType) $ + Foldable.toList elements saveTermComponent :: EDB m => H.Hash -> [(C.Term Symbol, C.Term.Type Symbol)] -> m Db.ObjectId saveTermComponent h terms = do diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs index b13a97a87c..5ec2862f5f 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs @@ -205,7 +205,7 @@ data Command m i v a where LoadTerm :: Reference.Id -> Command m i v (Maybe (Term v Ann)) -- LoadTermComponent :: H.Hash -> Command m i v (Maybe [Term v Ann]) - LoadTermComponentWithType :: H.Hash -> Command m i v (Maybe [(Term v Ann, Type v Ann)]) + LoadTermComponentWithTypes :: H.Hash -> Command m i v (Maybe [(Term v Ann, Type v Ann)]) -- todo: change this to take Reference and return DeclOrBuiltin -- todo: change this to LoadDecl @@ -287,6 +287,7 @@ commandName = \case AppendToReflog{} -> "AppendToReflog" LoadReflog -> "LoadReflog" LoadTerm{} -> "LoadTerm" + LoadTermComponentWithTypes{} -> "LoadTermComponentWithTypes" LoadType{} -> "LoadType" LoadTypeOfTerm{} -> "LoadTypeOfTerm" PutTerm{} -> "PutTerm" diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs index cd8e82fdda..cd8bda5f3d 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs @@ -137,6 +137,8 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour SyncRemoteRootBranch repo branch syncMode -> lift $ Codebase.pushGitRootBranch codebase branch repo syncMode LoadTerm r -> lift $ Codebase.getTerm codebase r + -- LoadTermComponentWithType :: H.Hash -> Command m i v (Maybe [(Term v Ann, Type v Ann)]) + LoadTermComponentWithTypes h -> lift $ Codebase.getTermComponentWithTypes codebase h LoadType r -> lift $ Codebase.getTypeDeclaration codebase r LoadTypeOfTerm r -> lift $ Codebase.getTypeOfTerm codebase r PutTerm r tm tp -> lift $ Codebase.putTerm codebase r tm tp diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs index b232ba716f..f31acf2ad0 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs @@ -482,7 +482,7 @@ propagate rootNames patch b = case validatePatch patch of => Hash -> F m i v (Map v (Reference.Id, Term v _, Type v _)) unhashTermComponent' h = - eval (LoadTermComponentWithType h) <&> foldMap \termsWithTypes -> + eval (LoadTermComponentWithTypes h) <&> foldMap \termsWithTypes -> unhash $ Map.fromList (Reference.componentFor h termsWithTypes) where unhash m = diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index fac515d759..5ded1ce324 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -27,6 +27,7 @@ import qualified Control.Monad.State as State import Control.Monad.Trans (MonadTrans (lift)) import Control.Monad.Trans.Maybe (MaybeT (MaybeT)) import Data.Bifunctor (Bifunctor (bimap, first), second) +import Data.Bitraversable (bitraverse) import qualified Data.Char as Char import qualified Data.Either.Combinators as Either import Data.Foldable (Foldable (toList), for_, traverse_) @@ -313,6 +314,12 @@ sqliteCodebase debugName root = do type2 <- Ops.loadTypeOfTermByTermReference (C.Reference.Id h2 i) pure $ Cv.ttype2to1 type2 + getTermComponentWithTypes :: MonadIO m => Hash -> m (Maybe [(Term Symbol Ann, Type Symbol Ann)]) + getTermComponentWithTypes h1@(Cv.hash1to2 -> h2) = + runDB' conn $ do + tms <- Ops.loadTermComponent h2 + for tms (bitraverse (Cv.term2to1 h1 getDeclType) (pure . Cv.ttype2to1)) + getTypeDeclaration :: MonadIO m => Reference.Id -> m (Maybe (Decl Symbol Ann)) getTypeDeclaration (Reference.Id h1@(Cv.hash1to2 -> h2) i) = runDB' conn do @@ -741,7 +748,7 @@ sqliteCodebase debugName root = do putTerm putTypeDeclaration -- _getTermComponent - -- _getTermComponentWithTypes + getTermComponentWithTypes -- _getTermComponentLength -- _getDeclComponent -- _getDeclComponentLength diff --git a/parser-typechecker/src/Unison/Codebase/Type.hs b/parser-typechecker/src/Unison/Codebase/Type.hs index 0525b2bda8..7b7d9712f9 100644 --- a/parser-typechecker/src/Unison/Codebase/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Type.hs @@ -14,7 +14,7 @@ import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError) import Unison.Codebase.SyncMode (SyncMode) import Unison.CodebasePath (CodebasePath) import Unison.DataDeclaration (Decl) --- import Unison.Hash (Hash) +import Unison.Hash (Hash) import Unison.Prelude import Unison.Reference (Reference) import qualified Unison.Reference as Reference @@ -40,7 +40,7 @@ data Codebase m v a = Codebase putTerm :: Reference.Id -> Term v a -> Type v a -> m (), putTypeDeclaration :: Reference.Id -> Decl v a -> m (), -- getTermComponent :: Hash -> m (Maybe [Term v a]), - -- getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), + getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), -- getTermComponentLength :: Hash -> m (Reference.CycleSize), -- getDeclComponent :: Hash -> m (Maybe [Decl v a]), -- getDeclComponentLength :: Hash -> m (Reference.CycleSize), From 2108234e2ca9dcc013decc94189c7369cae5fc4b Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 2 Sep 2021 12:09:46 -0400 Subject: [PATCH 007/297] handle LoadDeclComponent --- .../U/Codebase/Sqlite/Operations.hs | 47 ++++++++++++------- .../src/Unison/Codebase/Editor/Command.hs | 1 + .../Unison/Codebase/Editor/HandleCommand.hs | 3 +- .../src/Unison/Codebase/SqliteCodebase.hs | 6 ++- .../src/Unison/Codebase/Type.hs | 2 +- 5 files changed, 38 insertions(+), 21 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 534982af1f..fa1c465358 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -144,6 +144,7 @@ type ErrString = String data DecodeError = ErrTermComponent + | ErrDeclComponent | ErrTermElement Word64 | ErrDeclElement Word64 | ErrFramedArrayLen @@ -403,6 +404,9 @@ decodeTermElementDiscardingTerm i = getFromBytesOr (ErrTermElement i) (S.lookupT decodeTermElementDiscardingType :: Err m => C.Reference.Pos -> ByteString -> m (LocalIds, S.Term.Term) decodeTermElementDiscardingType i = getFromBytesOr (ErrTermElement i) (S.lookupTermElementDiscardingType i) +decodeDeclComponent :: Err m => ByteString -> m S.Decl.DeclFormat +decodeDeclComponent = getFromBytesOr ErrDeclComponent S.getDeclFormat + decodeDeclElement :: Err m => Word64 -> ByteString -> m (LocalIds, S.Decl.Decl Symbol) decodeDeclElement i = getFromBytesOr (ErrDeclElement i) (S.lookupDeclElement i) @@ -646,7 +650,7 @@ s2cTypeOfTerm ids tp = do (substText, substHash) <- localIdsToLookups loadTextById loadHashByObjectId ids pure $ x2cTType substText substHash tp --- | implementation detail of {s,w}2c*Term* +-- | implementation detail of {s,w}2c*Term* & s2cDecl localIdsToLookups :: Monad m => (t -> m Text) -> (d -> m H.Hash) -> LocalIds' t d -> m (LocalTextId -> Text, LocalDefnId -> H.Hash) localIdsToLookups loadText loadHash localIds = do texts <- traverse loadText $ LocalIds.textLookup localIds @@ -655,6 +659,11 @@ localIdsToLookups loadText loadHash localIds = do substHash (LocalDefnId w) = hashes Vector.! fromIntegral w pure (substText, substHash) +localIdsToTypeRefLookup :: EDB m => LocalIds -> m (S.Decl.TypeRef -> C.Decl.TypeRef) +localIdsToTypeRefLookup localIds = do + (substText, substHash) <- localIdsToLookups loadTextById loadHashByObjectId localIds + pure $ bimap substText (fmap substHash) + -- | implementation detail of {s,w}2c*Term* x2cTerm :: (LocalTextId -> Text) -> (LocalDefnId -> H.Hash) -> S.Term.Term -> C.Term Symbol x2cTerm substText substHash = @@ -750,6 +759,15 @@ w2cTerm ids tm = do -- ** Saving & loading type decls +loadDeclComponent :: EDB m => H.Hash -> MaybeT m [C.Decl Symbol] +loadDeclComponent h = do + MaybeT (anyHashToMaybeObjectId h) + >>= liftQ . Q.loadObjectById + >>= decodeDeclComponent + >>= \case + S.Decl.Decl (S.Decl.LocallyIndexedComponent elements) -> + lift . traverse (uncurry s2cDecl) $ Foldable.toList elements + saveDeclComponent :: EDB m => H.Hash -> [C.Decl Symbol] -> m Db.ObjectId saveDeclComponent h decls = do when debug . traceM $ "Operations.saveDeclComponent " ++ show h @@ -817,26 +835,19 @@ c2sDecl saveText saveDefn (C.Decl.DataDeclaration dt m b cts) = do (Vector.fromList (Foldable.toList defnIds)) pure (ids, decl) +s2cDecl :: EDB m => LocalIds -> S.Decl.Decl Symbol -> m (C.Decl Symbol) +s2cDecl ids (C.Decl.DataDeclaration dt m b ct) = do + substTypeRef <- localIdsToTypeRefLookup ids + pure (C.Decl.DataDeclaration dt m b (C.Type.rmap substTypeRef <$> ct)) + loadDeclByReference :: EDB m => C.Reference.Id -> MaybeT m (C.Decl Symbol) loadDeclByReference r@(C.Reference.Id h i) = do when debug . traceM $ "loadDeclByReference " ++ show r - -- retrieve the blob - (localIds, C.Decl.DataDeclaration dt m b ct) <- - MaybeT (primaryHashToMaybeObjectId h) - >>= liftQ . Q.loadObjectWithTypeById - >>= \case (OT.DeclComponent, blob) -> pure blob; _ -> mzero - >>= decodeDeclElement i - - -- look up the text and hashes that are used by the term - texts <- traverse loadTextById $ LocalIds.textLookup localIds - hashes <- traverse loadHashByObjectId $ LocalIds.defnLookup localIds - - -- substitute the text and hashes back into the term - let substText tIdx = texts Vector.! fromIntegral tIdx - substHash hIdx = hashes Vector.! fromIntegral hIdx - substTypeRef :: S.Decl.TypeRef -> C.Decl.TypeRef - substTypeRef = bimap substText (fmap substHash) - pure (C.Decl.DataDeclaration dt m b (C.Type.rmap substTypeRef <$> ct)) -- lens might be nice here + MaybeT (primaryHashToMaybeObjectId h) + >>= liftQ . Q.loadObjectWithTypeById + >>= \case (OT.DeclComponent, blob) -> pure blob; _ -> mzero + >>= decodeDeclElement i + >>= uncurry s2cDecl -- * Branch transformation diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs index 5ec2862f5f..b8f40983f7 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Command.hs @@ -290,6 +290,7 @@ commandName = \case LoadTermComponentWithTypes{} -> "LoadTermComponentWithTypes" LoadType{} -> "LoadType" LoadTypeOfTerm{} -> "LoadTypeOfTerm" + LoadDeclComponent{} -> "LoadDeclComponent" PutTerm{} -> "PutTerm" PutDecl{} -> "PutDecl" IsTerm{} -> "IsTerm" diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs index cd8bda5f3d..8a6024e9e6 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs @@ -138,9 +138,10 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour lift $ Codebase.pushGitRootBranch codebase branch repo syncMode LoadTerm r -> lift $ Codebase.getTerm codebase r -- LoadTermComponentWithType :: H.Hash -> Command m i v (Maybe [(Term v Ann, Type v Ann)]) + LoadTypeOfTerm r -> lift $ Codebase.getTypeOfTerm codebase r LoadTermComponentWithTypes h -> lift $ Codebase.getTermComponentWithTypes codebase h LoadType r -> lift $ Codebase.getTypeDeclaration codebase r - LoadTypeOfTerm r -> lift $ Codebase.getTypeOfTerm codebase r + LoadDeclComponent r -> lift $ Codebase.getDeclComponent codebase r PutTerm r tm tp -> lift $ Codebase.putTerm codebase r tm tp PutDecl r decl -> lift $ Codebase.putTypeDeclaration codebase r decl PutWatch kind r e -> lift $ Codebase.putWatch codebase kind r e diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 5ded1ce324..97b57194ae 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -326,6 +326,10 @@ sqliteCodebase debugName root = do decl2 <- Ops.loadDeclByReference (C.Reference.Id h2 i) pure $ Cv.decl2to1 h1 decl2 + getDeclComponent :: MonadIO m => Hash -> m (Maybe [Decl Symbol Ann]) + getDeclComponent h1@(Cv.hash1to2 -> h2) = + runDB' conn $ map (Cv.decl2to1 h1) <$> Ops.loadDeclComponent h2 + --putTermComponent :: MonadIO m => Hash -> [(Term Symbol Ann, Type Symbol Ann)] -> m () --putTerms :: MonadIO m => Map Reference.Id (Term Symbol Ann, Type Symbol Ann) -> m () -- dies horribly if missing dependencies? @@ -750,7 +754,7 @@ sqliteCodebase debugName root = do -- _getTermComponent getTermComponentWithTypes -- _getTermComponentLength - -- _getDeclComponent + getDeclComponent -- _getDeclComponentLength (getRootBranch rootBranchCache) (putRootBranch rootBranchCache) diff --git a/parser-typechecker/src/Unison/Codebase/Type.hs b/parser-typechecker/src/Unison/Codebase/Type.hs index 7b7d9712f9..eeccf09931 100644 --- a/parser-typechecker/src/Unison/Codebase/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Type.hs @@ -42,7 +42,7 @@ data Codebase m v a = Codebase -- getTermComponent :: Hash -> m (Maybe [Term v a]), getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), -- getTermComponentLength :: Hash -> m (Reference.CycleSize), - -- getDeclComponent :: Hash -> m (Maybe [Decl v a]), + getDeclComponent :: Hash -> m (Maybe [Decl v a]), -- getDeclComponentLength :: Hash -> m (Reference.CycleSize), getRootBranch :: m (Either GetRootBranchError (Branch m)), putRootBranch :: Branch m -> m (), From 68c0e9ed046425298f5abf48c5f3df1c59e581fe Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 24 Sep 2021 14:54:13 -0400 Subject: [PATCH 008/297] GetDependentsOfComponent, but did we actually need it? --- .../U/Codebase/Sqlite/Operations.hs | 18 ++++++++++++------ .../U/Codebase/Sqlite/Queries.hs | 8 ++++++++ parser-typechecker/src/Unison/Builtin.hs | 11 +++++++++++ parser-typechecker/src/Unison/Codebase.hs | 8 ++++++++ .../Unison/Codebase/Editor/HandleCommand.hs | 1 + .../src/Unison/Codebase/SqliteCodebase.hs | 7 +++++++ parser-typechecker/src/Unison/Codebase/Type.hs | 1 + 7 files changed, 48 insertions(+), 6 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index fa1c465358..978ab74d04 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -143,8 +143,8 @@ type EDB m = (Err m, DB m) type ErrString = String data DecodeError - = ErrTermComponent - | ErrDeclComponent + = ErrTermFormat + | ErrDeclFormat | ErrTermElement Word64 | ErrDeclElement Word64 | ErrFramedArrayLen @@ -390,7 +390,7 @@ diffPatch (S.Patch fullTerms fullTypes) (S.Patch refTerms refTypes) = -- * Deserialization helpers decodeTermComponent :: Err m => ByteString -> m S.Term.TermFormat -decodeTermComponent = getFromBytesOr ErrTermComponent S.getTermFormat +decodeTermComponent = getFromBytesOr ErrTermFormat S.getTermFormat decodeComponentLengthOnly :: Err m => ByteString -> m Word64 decodeComponentLengthOnly = getFromBytesOr ErrFramedArrayLen (Get.skip 1 >> S.lengthFramedArray) @@ -405,7 +405,7 @@ decodeTermElementDiscardingType :: Err m => C.Reference.Pos -> ByteString -> m ( decodeTermElementDiscardingType i = getFromBytesOr (ErrTermElement i) (S.lookupTermElementDiscardingType i) decodeDeclComponent :: Err m => ByteString -> m S.Decl.DeclFormat -decodeDeclComponent = getFromBytesOr ErrDeclComponent S.getDeclFormat +decodeDeclComponent = getFromBytesOr ErrDeclFormat S.getDeclFormat decodeDeclElement :: Err m => Word64 -> ByteString -> m (LocalIds, S.Decl.Decl Symbol) decodeDeclElement i = getFromBytesOr (ErrDeclElement i) (S.lookupDeclElement i) @@ -1378,8 +1378,14 @@ dependents :: EDB m => C.Reference -> m (Set C.Reference.Id) dependents r = do r' <- c2sReference r sIds :: [S.Reference.Id] <- Q.getDependentsForDependency r' - -- how will you convert this back to Unison.Reference if you - -- don't know the cycle size? + cIds <- traverse s2cReferenceId sIds + pure $ Set.fromList cIds + +-- | returns a list of known definitions referencing `r` +dependentsOfComponent :: EDB m => H.Hash -> m (Set C.Reference.Id) +dependentsOfComponent h = do + oId <- primaryHashToExistingObjectId h + sIds :: [S.Reference.Id] <- Q.getDependentsForDependencyComponent oId cIds <- traverse s2cReferenceId sIds pure $ Set.fromList cIds diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 48d1543b3b..7bd64d0f87 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -553,6 +553,14 @@ getDependentsForDependency dependency = query sql dependency where sql = [here| AND dependency_component_index IS ? |] +getDependentsForDependencyComponent :: DB m => ObjectId -> m [Reference.Id] +getDependentsForDependencyComponent dependency = query sql (Only dependency) where sql = [here| + SELECT dependent_object_id, dependent_component_index + FROM dependents_index + WHERE dependency_builtin IS NULL + AND dependency_object_id IS ? +|] + getDependenciesForDependent :: DB m => Reference.Id -> m [Reference.Reference] getDependenciesForDependent dependent = query sql dependent where sql = [here| SELECT dependency_builtin, dependency_object_id, dependency_component_index diff --git a/parser-typechecker/src/Unison/Builtin.hs b/parser-typechecker/src/Unison/Builtin.hs index e10a320803..af5e4a8647 100644 --- a/parser-typechecker/src/Unison/Builtin.hs +++ b/parser-typechecker/src/Unison/Builtin.hs @@ -10,6 +10,7 @@ module Unison.Builtin ,builtinEffectDecls ,builtinConstructorType ,builtinTypeDependents + ,builtinTypeDependentsOfComponent ,builtinTypes ,builtinTermsByType ,builtinTermsByTypeMention @@ -46,6 +47,7 @@ import Unison.Names3 (Names(Names), Names0) import qualified Unison.Names3 as Names3 import qualified Unison.Typechecker.TypeLookup as TL import qualified Unison.Util.Relation as Rel +import Unison.Hash (Hash) type DataDeclaration v = DD.DataDeclaration v Ann type EffectDeclaration v = DD.EffectDeclaration v Ann @@ -121,6 +123,15 @@ builtinTermsByTypeMention = builtinTypeDependents :: R.Reference -> Set R.Reference builtinTypeDependents r = Rel.lookupRan r builtinDependencies +builtinTypeDependentsOfComponent :: Hash -> Set R.Reference +builtinTypeDependentsOfComponent h0 = Rel.searchRan ord builtinDependencies + where + ord :: R.Reference -> Ordering + ord = \case + R.Derived h _i -> compare h h0 + r -> compare r r0 + r0 = R.Derived h0 0 + -- WARNING: -- As with the terms, we should avoid changing these references, even -- if we decide to change their names. diff --git a/parser-typechecker/src/Unison/Codebase.hs b/parser-typechecker/src/Unison/Codebase.hs index bf3f05022c..5d3b89cd14 100644 --- a/parser-typechecker/src/Unison/Codebase.hs +++ b/parser-typechecker/src/Unison/Codebase.hs @@ -21,6 +21,7 @@ module Unison.Codebase termsOfType, termsMentioningType, dependents, + dependentsOfComponent, isTerm, isType, ) @@ -61,6 +62,7 @@ import Unison.Codebase.Editor.Git (withStatus) import qualified Data.Set as Set import qualified Unison.Util.Relation as Rel import qualified Unison.Type as Type +import Unison.Hash (Hash) -- Attempt to find the Branch in the current codebase cache and root up to 3 levels deep -- If not found, attempt to find it in the Codebase (sqlite) @@ -181,6 +183,12 @@ dependents c r . Set.map Reference.DerivedId <$> dependentsImpl c r +dependentsOfComponent :: Functor f => Codebase f v a -> Hash -> f (Set Reference) +dependentsOfComponent c h = + Set.union (Builtin.builtinTypeDependentsOfComponent h) + . Set.map Reference.DerivedId + <$> dependentsOfComponentImpl c h + termsOfType :: (Var v, Functor m) => Codebase m v a -> Type v a -> m (Set Referent.Referent) termsOfType c ty = Set.union (Rel.lookupDom r Builtin.builtinTermsByType) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs index 8a6024e9e6..c8eb7c85b7 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs @@ -152,6 +152,7 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour IsTerm r -> lift $ Codebase.isTerm codebase r IsType r -> lift $ Codebase.isType codebase r GetDependents r -> lift $ Codebase.dependents codebase r + GetDependentsOfComponent h -> lift $ Codebase.dependentsOfComponent codebase h AddDefsToCodebase unisonFile -> lift $ Codebase.addDefsToCodebase codebase unisonFile GetTermsOfType ty -> lift $ Codebase.termsOfType codebase ty GetTermsMentioningType ty -> lift $ Codebase.termsMentioningType codebase ty diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 97b57194ae..24a2a14ef9 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -594,6 +594,12 @@ sqliteCodebase debugName root = do Set.map Cv.referenceid2to1 <$> Ops.dependents (Cv.reference1to2 r) + dependentsOfComponentImpl :: MonadIO m => Hash -> m (Set Reference.Id) + dependentsOfComponentImpl h = + runDB conn $ + Set.map Cv.referenceid2to1 + <$> Ops.dependentsOfComponent (Cv.hash1to2 h) + syncFromDirectory :: MonadIO m => Codebase1.CodebasePath -> SyncMode -> Branch m -> m () syncFromDirectory srcRoot _syncMode b = flip State.evalStateT emptySyncProgressState $ do @@ -766,6 +772,7 @@ sqliteCodebase debugName root = do putPatch patchExists dependentsImpl + dependentsOfComponentImpl syncFromDirectory syncToDirectory viewRemoteBranch' diff --git a/parser-typechecker/src/Unison/Codebase/Type.hs b/parser-typechecker/src/Unison/Codebase/Type.hs index eeccf09931..4db6499a04 100644 --- a/parser-typechecker/src/Unison/Codebase/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Type.hs @@ -54,6 +54,7 @@ data Codebase m v a = Codebase putPatch :: Branch.EditHash -> Patch -> m (), patchExists :: Branch.EditHash -> m Bool, dependentsImpl :: Reference -> m (Set Reference.Id), + dependentsOfComponentImpl :: Hash -> m (Set Reference.Id), -- This copies all the dependencies of `b` from the specified Codebase into this one syncFromDirectory :: CodebasePath -> SyncMode -> Branch m -> m (), -- This copies all the dependencies of `b` from this Codebase From fb43c7283d5f5717ad36ee70e6fbcbe4e8a511a4 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sat, 25 Sep 2021 21:45:21 -0400 Subject: [PATCH 009/297] pull ABT hashing code into Unison.Hashing.{V1,V2}.ABT & do weeding --- .../src/Unison/Hashing/V1/ABT.hs | 116 +++ .../src/Unison/Hashing/V1/Convert.hs | 20 +- .../src/Unison/Hashing/V1/DataDeclaration.hs | 41 +- .../Unison/Hashing/V1/LabeledDependency.hs | 56 -- .../src/Unison/Hashing/V1/Pattern.hs | 76 +- .../src/Unison/Hashing/V1/Reference.hs | 141 +-- .../src/Unison/Hashing/V1/Reference/Util.hs | 16 +- .../src/Unison/Hashing/V1/Referent.hs | 112 +-- .../src/Unison/Hashing/V1/Term.hs | 893 +---------------- .../src/Unison/Hashing/V1/Type.hs | 589 +----------- .../src/Unison/Hashing/V2/ABT.hs | 104 ++ .../src/Unison/Hashing/V2/Convert.hs | 4 +- .../src/Unison/Hashing/V2/DataDeclaration.hs | 34 +- .../Unison/Hashing/V2/LabeledDependency.hs | 56 -- .../src/Unison/Hashing/V2/Reference.hs | 170 +--- .../src/Unison/Hashing/V2/Reference/Util.hs | 4 +- .../src/Unison/Hashing/V2/Referent.hs | 100 +- .../src/Unison/Hashing/V2/Term.hs | 903 +----------------- .../src/Unison/Hashing/V2/Type.hs | 585 +----------- .../unison-parser-typechecker.cabal | 4 +- unison-core/src/Unison/ABT.hs | 111 +-- unison-core/src/Unison/Reference/Util.hs | 21 - unison-core/unison-core1.cabal | 3 +- 23 files changed, 405 insertions(+), 3754 deletions(-) create mode 100644 parser-typechecker/src/Unison/Hashing/V1/ABT.hs delete mode 100644 parser-typechecker/src/Unison/Hashing/V1/LabeledDependency.hs create mode 100644 parser-typechecker/src/Unison/Hashing/V2/ABT.hs delete mode 100644 parser-typechecker/src/Unison/Hashing/V2/LabeledDependency.hs delete mode 100644 unison-core/src/Unison/Reference/Util.hs diff --git a/parser-typechecker/src/Unison/Hashing/V1/ABT.hs b/parser-typechecker/src/Unison/Hashing/V1/ABT.hs new file mode 100644 index 0000000000..c98d3fa7b7 --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V1/ABT.hs @@ -0,0 +1,116 @@ +-- Based on: http://semantic-domain.blogspot.com/2015/03/abstract-binding-trees.html +{-# LANGUAGE DeriveFoldable #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE Rank2Types #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE UndecidableInstances #-} + +module Unison.Hashing.V1.ABT (hash, hashComponents) where + +import Unison.Prelude +import Unison.ABT + ( pattern AbsN', + absCycle, + components, + substsInheritAnnotation, + tm, + transform, + var, + ABT(Tm, Var, Cycle, Abs), + Term(Term, freeVars), + Var ) + +import Data.List hiding (cycle, find) +import Data.Vector ((!)) +import Prelude hiding (abs,cycle) +import Unison.Hashable (Accumulate,Hashable1,hash1) +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Data.Vector as Vector +import qualified Unison.Hashable as Hashable + +-- Hash a strongly connected component and sort its definitions into a canonical order. +hashComponent :: + (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Ord v, Ord h, Accumulate h) + => Map.Map v (Term f v a) -> (h, [(v, Term f v a)]) +hashComponent byName = let + ts = Map.toList byName + embeds = [ (v, void (transform Embed t)) | (v,t) <- ts ] + vs = fst <$> ts + tms = [ (v, absCycle vs (tm $ Component (snd <$> embeds) (var v))) | v <- vs ] + hashed = [ ((v,t), hash t) | (v,t) <- tms ] + sortedHashed = sortOn snd hashed + overallHash = Hashable.accumulate (Hashable.Hashed . snd <$> sortedHashed) + in (overallHash, [ (v, t) | ((v, _),_) <- sortedHashed, Just t <- [Map.lookup v byName] ]) + +-- Group the definitions into strongly connected components and hash +-- each component. Substitute the hash of each component into subsequent +-- components (using the `termFromHash` function). Requires that the +-- overall component has no free variables. +hashComponents + :: (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v, Ord h, Accumulate h) + => (h -> Word64 -> Word64 -> Term f v ()) + -> Map.Map v (Term f v a) + -> [(h, [(v, Term f v a)])] +hashComponents termFromHash termsByName = let + bound = Set.fromList (Map.keys termsByName) + escapedVars = Set.unions (freeVars <$> Map.elems termsByName) `Set.difference` bound + sccs = components (Map.toList termsByName) + go _ [] = [] + go prevHashes (component : rest) = let + sub = substsInheritAnnotation (Map.toList prevHashes) + (h, sortedComponent) = hashComponent $ Map.fromList [ (v, sub t) | (v, t) <- component ] + size = fromIntegral (length sortedComponent) + curHashes = Map.fromList [ (v, termFromHash h i size) | ((v, _),i) <- sortedComponent `zip` [0..]] + newHashes = prevHashes `Map.union` curHashes + newHashesL = Map.toList newHashes + sortedComponent' = [ (v, substsInheritAnnotation newHashesL t) | (v, t) <- sortedComponent ] + in (h, sortedComponent') : go newHashes rest + in if Set.null escapedVars then go Map.empty sccs + else error $ "can't hashComponents if bindings have free variables:\n " + ++ show (map show (Set.toList escapedVars)) + ++ "\n " ++ show (map show (Map.keys termsByName)) + +-- Implementation detail of hashComponent +data Component f a = Component [a] a | Embed (f a) deriving (Functor, Traversable, Foldable) + +instance (Hashable1 f, Functor f) => Hashable1 (Component f) where + hash1 hashCycle hash c = case c of + Component as a -> let + (hs, hash) = hashCycle as + toks = Hashable.Hashed <$> hs + in Hashable.accumulate $ (Hashable.Tag 1 : toks) ++ [Hashable.Hashed (hash a)] + Embed fa -> Hashable.hash1 hashCycle hash fa + +-- | We ignore annotations in the `Term`, as these should never affect the +-- meaning of the term. +hash :: forall f v a h . (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) + => Term f v a -> h +hash = hash' [] where + hash' :: [Either [v] v] -> Term f v a -> h + hash' env (Term _ _ t) = case t of + Var v -> maybe die hashInt ind + where lookup (Left cycle) = v `elem` cycle + lookup (Right v') = v == v' + ind = findIndex lookup env + hashInt :: Int -> h + hashInt i = Hashable.accumulate [Hashable.Nat $ fromIntegral i] + die = error $ "unknown var in environment: " ++ show v + ++ " environment = " ++ show env + Cycle (AbsN' vs t) -> hash' (Left vs : env) t + -- Cycle t -> hash' env t + Abs v t -> hash' (Right v : env) t + Tm t -> Hashable.hash1 (hashCycle env) (hash' env) t + + hashCycle :: [Either [v] v] -> [Term f v a] -> ([h], Term f v a -> h) + hashCycle env@(Left cycle : envTl) ts | length cycle == length ts = + let + permute p xs = case Vector.fromList xs of xs -> map (xs !) p + hashed = map (\(i,t) -> ((i,t), hash' env t)) (zip [0..] ts) + pt = fst <$> sortOn snd hashed + (p,ts') = unzip pt + in case map Right (permute p cycle) ++ envTl of + env -> (map (hash' env) ts', hash' env) + hashCycle env ts = (map (hash' env) ts, hash' env) diff --git a/parser-typechecker/src/Unison/Hashing/V1/Convert.hs b/parser-typechecker/src/Unison/Hashing/V1/Convert.hs index 3b7efb671c..5d0dbfe8da 100644 --- a/parser-typechecker/src/Unison/Hashing/V1/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V1/Convert.hs @@ -1,8 +1,8 @@ {-# LANGUAGE ViewPatterns #-} module Unison.Hashing.V1.Convert - ( HashingInfo(..), - ResolutionFailure(..), + ( HashingInfo (..), + ResolutionFailure (..), ResolutionResult, assumeSingletonComponent, hashDecls, @@ -22,6 +22,7 @@ import qualified Control.Monad.Validate as Validate import Data.Map (Map) import Data.Sequence (Seq) import Data.Set (Set) +import qualified Data.Set as Set import qualified Unison.ABT as ABT import qualified Unison.DataDeclaration as Memory.DD import Unison.Hash (Hash) @@ -38,7 +39,6 @@ import qualified Unison.Referent as Memory.Referent import qualified Unison.Term as Memory.Term import qualified Unison.Type as Memory.Type import Unison.Var (Var) -import qualified Data.Set as Set data ResolutionFailure v a = TermResolutionFailure v a (Set Memory.Referent.Referent) @@ -48,7 +48,7 @@ data ResolutionFailure v a type ResolutionResult v a r = Validate (Seq (ResolutionFailure v a)) r -newtype HashingInfo = HashingInfo (Hash -> Maybe Hashing.Reference.Size) +newtype HashingInfo = HashingInfo (Hash -> Maybe Hashing.Reference.CycleSize) convertResolutionResult :: Names.ResolutionResult v a r -> ResolutionResult v a r convertResolutionResult = \case @@ -154,9 +154,9 @@ m2hPattern f = \case m2hSequenceOp :: Memory.Pattern.SeqOp -> Hashing.Pattern.SeqOp m2hSequenceOp = \case - Memory.Pattern.Cons -> Hashing.Pattern.Cons - Memory.Pattern.Snoc -> Hashing.Pattern.Snoc - Memory.Pattern.Concat -> Hashing.Pattern.Concat + Memory.Pattern.Cons -> Hashing.Pattern.Cons + Memory.Pattern.Snoc -> Hashing.Pattern.Snoc + Memory.Pattern.Concat -> Hashing.Pattern.Concat m2hReferent :: HashingInfo -> Memory.Referent.Referent -> Validate (Seq Hash) Hashing.Referent.Referent m2hReferent f = \case @@ -242,7 +242,7 @@ m2hDecl f (Memory.DD.DataDeclaration mod ann bound ctors) = Hashing.DD.DataDeclaration (m2hModifier mod) ann bound <$> traverse (Lens.mapMOf _3 (m2hType f)) ctors -lookupHash :: HashingInfo -> Hash -> Validate (Seq Hash) Hashing.Reference.Size +lookupHash :: HashingInfo -> Hash -> Validate (Seq Hash) Hashing.Reference.CycleSize lookupHash (HashingInfo f) h = case f h of Just size -> pure size Nothing -> Validate.refute $ pure h @@ -274,7 +274,7 @@ m2hReferenceId :: HashingInfo -> Memory.Reference.Id -> Validate (Seq Hash) Hashing.Reference.Id -m2hReferenceId f (Memory.Reference.Id h i _n) = Hashing.Reference.Id h i <$> lookupHash f h +m2hReferenceId f (Memory.Reference.Id h i) = Hashing.Reference.Id h i <$> lookupHash f h h2mModifier :: Hashing.DD.Modifier -> Memory.DD.Modifier h2mModifier = \case @@ -307,4 +307,4 @@ h2mReference = \case Hashing.Reference.DerivedId d -> Memory.Reference.DerivedId (h2mReferenceId d) h2mReferenceId :: Hashing.Reference.Id -> Memory.Reference.Id -h2mReferenceId (Hashing.Reference.Id h i n) = Memory.Reference.Id h i n +h2mReferenceId (Hashing.Reference.Id h i _n) = Memory.Reference.Id h i diff --git a/parser-typechecker/src/Unison/Hashing/V1/DataDeclaration.hs b/parser-typechecker/src/Unison/Hashing/V1/DataDeclaration.hs index f153f8513c..860cf1d6b1 100644 --- a/parser-typechecker/src/Unison/Hashing/V1/DataDeclaration.hs +++ b/parser-typechecker/src/Unison/Hashing/V1/DataDeclaration.hs @@ -8,29 +8,23 @@ module Unison.Hashing.V1.DataDeclaration ( DataDeclaration (..), - EffectDeclaration (..), - Decl, Modifier (..), - asDataDecl, - constructorType, - constructorTypes, - declDependencies, - dependencies, - bindReferences, hashDecls, ) where +import Unison.Prelude +import Prelude hiding (cycle) + import Control.Lens (over, _3) import Data.Bifunctor (first, second) import qualified Data.Map as Map -import qualified Data.Set as Set import Prelude.Extras (Show1) import qualified Unison.ABT as ABT -import qualified Unison.ConstructorType as CT import Unison.Hash (Hash) import Unison.Hashable (Hashable1) import qualified Unison.Hashable as Hashable +import qualified Unison.Hashing.V1.ABT as ABT import Unison.Hashing.V1.Reference (Reference) import qualified Unison.Hashing.V1.Reference as Reference import qualified Unison.Hashing.V1.Reference.Util as Reference.Util @@ -38,26 +32,7 @@ import Unison.Hashing.V1.Type (Type) import qualified Unison.Hashing.V1.Type as Type import qualified Unison.Name as Name import qualified Unison.Names.ResolutionResult as Names -import Unison.Prelude import Unison.Var (Var) -import Prelude hiding (cycle) -type Decl v a = Either (EffectDeclaration v a) (DataDeclaration v a) - -data DeclOrBuiltin v a - = Builtin CT.ConstructorType - | Decl (Decl v a) - deriving (Eq, Show) - -asDataDecl :: Decl v a -> DataDeclaration v a -asDataDecl = either toDataDecl id - -declDependencies :: Ord v => Decl v a -> Set Reference -declDependencies = either (dependencies . toDataDecl) dependencies - -constructorType :: Decl v a -> CT.ConstructorType -constructorType = \case - Left {} -> CT.Effect - Right {} -> CT.Data data Modifier = Structural | Unique Text -- | Opaque (Set Reference) deriving (Eq, Ord, Show) @@ -81,11 +56,7 @@ constructorTypes = (snd <$>) . constructors constructors :: DataDeclaration v a -> [(v, Type v a)] constructors (DataDeclaration _ _ _ ctors) = [(v, t) | (_, v, t) <- ctors] -dependencies :: Ord v => DataDeclaration v a -> Set Reference -dependencies dd = - Set.unions (Type.dependencies <$> constructorTypes dd) - -toABT :: Var v => DataDeclaration v () -> ABT.Term F v () +toABT :: ABT.Var v => DataDeclaration v () -> ABT.Term F v () toABT dd = ABT.tm $ Modified (modifier dd) dd' where dd' = ABT.absChain (bound dd) $ ABT.cycle @@ -94,7 +65,7 @@ toABT dd = ABT.tm $ Modified (modifier dd) dd' (ABT.tm . Constructors $ ABT.transform Type <$> constructorTypes dd)) -- Implementation detail of `hashDecls`, works with unannotated data decls -hashDecls0 :: (Eq v, Var v, Show v) => Map v (DataDeclaration v ()) -> [(v, Reference.Id)] +hashDecls0 :: (Eq v, ABT.Var v, Show v) => Map v (DataDeclaration v ()) -> [(v, Reference.Id)] hashDecls0 decls = let abts = toABT <$> decls ref r = ABT.tm (Type (Type.Ref (Reference.DerivedId r))) diff --git a/parser-typechecker/src/Unison/Hashing/V1/LabeledDependency.hs b/parser-typechecker/src/Unison/Hashing/V1/LabeledDependency.hs deleted file mode 100644 index 8453208239..0000000000 --- a/parser-typechecker/src/Unison/Hashing/V1/LabeledDependency.hs +++ /dev/null @@ -1,56 +0,0 @@ -{-# LANGUAGE PatternSynonyms #-} - -module Unison.Hashing.V1.LabeledDependency - ( derivedTerm - , derivedType - , termRef - , typeRef - , referent - , dataConstructor - , effectConstructor - , fold - , referents - , toReference - , LabeledDependency - , partition - ) where - -import Unison.Prelude hiding (fold) - -import qualified Data.Set as Set -import Unison.Hashing.V1.Reference (Id, Reference (DerivedId)) -import Unison.Hashing.V1.Referent (ConstructorId, Referent, pattern Con, pattern Ref) -import Unison.ConstructorType (ConstructorType (Data, Effect)) - --- dumb constructor name is private -newtype LabeledDependency = X (Either Reference Referent) deriving (Eq, Ord, Show) - -derivedType, derivedTerm :: Id -> LabeledDependency -typeRef, termRef :: Reference -> LabeledDependency -referent :: Referent -> LabeledDependency -dataConstructor :: Reference -> ConstructorId -> LabeledDependency -effectConstructor :: Reference -> ConstructorId -> LabeledDependency - -derivedType = X . Left . DerivedId -derivedTerm = X . Right . Ref . DerivedId -typeRef = X . Left -termRef = X . Right . Ref -referent = X . Right -dataConstructor r cid = X . Right $ Con r cid Data -effectConstructor r cid = X . Right $ Con r cid Effect - -referents :: Foldable f => f Referent -> Set LabeledDependency -referents rs = Set.fromList (map referent $ toList rs) - -fold :: (Reference -> a) -> (Referent -> a) -> LabeledDependency -> a -fold f g (X e) = either f g e - -partition :: Foldable t => t LabeledDependency -> ([Reference], [Referent]) -partition = partitionEithers . map (\(X e) -> e) . toList - --- | Left TypeRef | Right TermRef -toReference :: LabeledDependency -> Either Reference Reference -toReference = \case - X (Left r) -> Left r - X (Right (Ref r)) -> Right r - X (Right (Con r _ _)) -> Left r diff --git a/parser-typechecker/src/Unison/Hashing/V1/Pattern.hs b/parser-typechecker/src/Unison/Hashing/V1/Pattern.hs index 8647a1cb91..0e2502623f 100644 --- a/parser-typechecker/src/Unison/Hashing/V1/Pattern.hs +++ b/parser-typechecker/src/Unison/Hashing/V1/Pattern.hs @@ -1,15 +1,12 @@ {-# Language DeriveTraversable, DeriveGeneric, PatternSynonyms, OverloadedStrings #-} -module Unison.Hashing.V1.Pattern where +module Unison.Hashing.V1.Pattern (Pattern(..), ConstructorId, SeqOp(..)) where import Unison.Prelude -import Data.Foldable as Foldable hiding (foldMap') import Data.List (intercalate) -import qualified Data.Set as Set -import Unison.Hashing.V1.Reference (Reference) -import qualified Unison.Hashing.V1.Type as Type import qualified Unison.Hashable as H +import Unison.Hashing.V1.Reference (Reference) type ConstructorId = Int @@ -49,32 +46,13 @@ instance Show (Pattern loc) where show (Float _ x) = "Float " <> show x show (Text _ t) = "Text " <> show t show (Char _ c) = "Char " <> show c - show (Constructor _ r i ps) = - "Constructor " <> unwords [show r, show i, show ps] + show (Constructor _ r i ps) = "Constructor " <> unwords [show r, show i, show ps] show (As _ p) = "As " <> show p show (EffectPure _ k) = "EffectPure " <> show k - show (EffectBind _ r i ps k) = - "EffectBind " <> unwords [show r, show i, show ps, show k] + show (EffectBind _ r i ps k) = "EffectBind " <> unwords [show r, show i, show ps, show k] show (SequenceLiteral _ ps) = "Sequence " <> intercalate ", " (fmap show ps) show (SequenceOp _ ph op pt) = "Sequence " <> show ph <> " " <> show op <> " " <> show pt -application :: Pattern loc -> Bool -application (Constructor _ _ _ (_ : _)) = True -application _ = False - -loc :: Pattern loc -> loc -loc p = head $ Foldable.toList p - -setLoc :: Pattern loc -> loc -> Pattern loc -setLoc p loc = case p of - EffectBind _ a b c d -> EffectBind loc a b c d - EffectPure _ a -> EffectPure loc a - As _ a -> As loc a - Constructor _ a b c -> Constructor loc a b c - SequenceLiteral _ ps -> SequenceLiteral loc ps - SequenceOp _ ph op pt -> SequenceOp loc ph op pt - x -> fmap (const loc) x - instance H.Hashable (Pattern p) where tokens (Unbound _) = [H.Tag 0] tokens (Var _) = [H.Tag 1] @@ -108,49 +86,3 @@ instance Eq (Pattern loc) where SequenceLiteral _ ps == SequenceLiteral _ ps2 = ps == ps2 SequenceOp _ ph op pt == SequenceOp _ ph2 op2 pt2 = ph == ph2 && op == op2 && pt == pt2 _ == _ = False - -foldMap' :: Monoid m => (Pattern loc -> m) -> Pattern loc -> m -foldMap' f p = case p of - Unbound _ -> f p - Var _ -> f p - Boolean _ _ -> f p - Int _ _ -> f p - Nat _ _ -> f p - Float _ _ -> f p - Text _ _ -> f p - Char _ _ -> f p - Constructor _ _ _ ps -> f p <> foldMap (foldMap' f) ps - As _ p' -> f p <> foldMap' f p' - EffectPure _ p' -> f p <> foldMap' f p' - EffectBind _ _ _ ps p' -> f p <> foldMap (foldMap' f) ps <> foldMap' f p' - SequenceLiteral _ ps -> f p <> foldMap (foldMap' f) ps - SequenceOp _ p1 _ p2 -> f p <> foldMap' f p1 <> foldMap' f p2 - -generalizedDependencies - :: Ord r - => (Reference -> r) - -> (Reference -> ConstructorId -> r) - -> (Reference -> r) - -> (Reference -> ConstructorId -> r) - -> (Reference -> r) - -> Pattern loc - -> Set r -generalizedDependencies literalType dataConstructor dataType effectConstructor effectType - = Set.fromList . foldMap' - (\case - Unbound _ -> mempty - Var _ -> mempty - As _ _ -> mempty - Constructor _ r cid _ -> [dataType r, dataConstructor r cid] - EffectPure _ _ -> [effectType Type.effectRef] - EffectBind _ r cid _ _ -> - [effectType Type.effectRef, effectType r, effectConstructor r cid] - SequenceLiteral _ _ -> [literalType Type.listRef] - SequenceOp {} -> [literalType Type.listRef] - Boolean _ _ -> [literalType Type.booleanRef] - Int _ _ -> [literalType Type.intRef] - Nat _ _ -> [literalType Type.natRef] - Float _ _ -> [literalType Type.floatRef] - Text _ _ -> [literalType Type.textRef] - Char _ _ -> [literalType Type.charRef] - ) diff --git a/parser-typechecker/src/Unison/Hashing/V1/Reference.hs b/parser-typechecker/src/Unison/Hashing/V1/Reference.hs index 0202b44f5d..ee908bd969 100644 --- a/parser-typechecker/src/Unison/Hashing/V1/Reference.hs +++ b/parser-typechecker/src/Unison/Hashing/V1/Reference.hs @@ -4,42 +4,21 @@ {-# LANGUAGE ViewPatterns #-} module Unison.Hashing.V1.Reference - (Reference, - pattern Builtin, - pattern Derived, - pattern DerivedId, - Id(..), - Pos, - Size, - derivedBase32Hex, - Component, members, - components, - groupByComponent, - componentFor, - unsafeFromText, - idFromText, - isPrefixOf, - fromShortHash, - fromText, - readSuffix, - showShort, - showSuffix, - toId, - toText, - unsafeId, - toShortHash, - idToShortHash) where + ( Reference (Builtin, Derived), + pattern DerivedId, + Id (Id), + CycleSize, + components, + ) +where import Unison.Prelude -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.Text as Text -import qualified Unison.Hash as H -import Unison.Hashable as Hashable +import qualified Data.Text as Text +import qualified Unison.Hash as H +import Unison.Hashable as Hashable (Hashable (..), Token (Bytes, Nat, Tag, Text)) import Unison.ShortHash (ShortHash) import qualified Unison.ShortHash as SH -import Data.Char (isDigit) -- | Either a builtin or a user defined (hashed) top-level declaration. -- @@ -54,21 +33,13 @@ data Reference -- Using an ugly name so no one tempted to use this | DerivedId Id deriving (Eq,Ord,Generic) -pattern Derived :: H.Hash -> Pos -> Size -> Reference +pattern Derived :: H.Hash -> Pos -> CycleSize -> Reference pattern Derived h i n = DerivedId (Id h i n) {-# COMPLETE Builtin, Derived #-} -- | @Pos@ is a position into a cycle of size @Size@, as cycles are hashed together. -data Id = Id H.Hash Pos Size deriving (Generic) - -unsafeId :: Reference -> Id -unsafeId (Builtin b) = - error $ "Tried to get the hash of builtin " <> Text.unpack b <> "." -unsafeId (DerivedId x) = x - -idToShortHash :: Id -> ShortHash -idToShortHash = toShortHash . DerivedId +data Id = Id H.Hash Pos CycleSize deriving (Generic) -- todo: move these to ShortHash module? -- but Show Reference currently depends on SH @@ -80,88 +51,13 @@ toShortHash (Derived h i n) = SH.ShortHash (H.base32Hex h) index Nothing -- todo: remove `n` parameter; must also update readSuffix index = Just $ showSuffix i n --- toShortHash . fromJust . fromShortHash == id and --- fromJust . fromShortHash . toShortHash == id --- but for arbitrary ShortHashes which may be broken at the wrong boundary, it --- may not be possible to base32Hex decode them. These will return Nothing. --- Also, ShortHashes that include constructor ids will return Nothing; --- try Referent.fromShortHash -fromShortHash :: ShortHash -> Maybe Reference -fromShortHash (SH.Builtin b) = Just (Builtin b) -fromShortHash (SH.ShortHash prefix cycle Nothing) = do - h <- H.fromBase32Hex prefix - case cycle of - Nothing -> Just (Derived h 0 1) - Just t -> case Text.splitOn "c" t of - [i,n] -> Derived h <$> readMay (Text.unpack i) <*> readMay (Text.unpack n) - _ -> Nothing -fromShortHash (SH.ShortHash _prefix _cycle (Just _cid)) = Nothing - -- (3,10) encoded as "3c10" -- (0,93) encoded as "0c93" -showSuffix :: Pos -> Size -> Text +showSuffix :: Pos -> CycleSize -> Text showSuffix i n = Text.pack $ show i <> "c" <> show n --- todo: don't read or return size; must also update showSuffix and fromText -readSuffix :: Text -> Either String (Pos, Size) -readSuffix t = case Text.breakOn "c" t of - (pos, Text.drop 1 -> size) | Text.all isDigit pos && Text.all isDigit size -> - Right (read (Text.unpack pos), read (Text.unpack size)) - _ -> Left "suffix decoding error" - -isPrefixOf :: ShortHash -> Reference -> Bool -isPrefixOf sh r = SH.isPrefixOf sh (toShortHash r) - -toText :: Reference -> Text -toText = SH.toText . toShortHash - -showShort :: Int -> Reference -> Text -showShort numHashChars = SH.toText . SH.take numHashChars . toShortHash - type Pos = Word64 -type Size = Word64 - -newtype Component = Component { members :: Set Reference } - --- Gives the component (dependency cycle) that the reference is a part of -componentFor :: Reference -> Component -componentFor b@Builtin {} = Component (Set.singleton b) -componentFor (Derived h _ n) = - Component $ Set.fromList [Derived h i n | i <- take (fromIntegral n) [0 ..]] - -derivedBase32Hex :: Text -> Pos -> Size -> Reference -derivedBase32Hex b32Hex i n = DerivedId (Id (fromMaybe msg h) i n) - where - msg = error $ "Reference.derivedBase32Hex " <> show h - h = H.fromBase32Hex b32Hex - -unsafeFromText :: Text -> Reference -unsafeFromText = either error id . fromText - -idFromText :: Text -> Maybe Id -idFromText s = case fromText s of - Left _ -> Nothing - Right (Builtin _) -> Nothing - Right (DerivedId id) -> pure id - -toId :: Reference -> Maybe Id -toId (DerivedId id) = Just id -toId Builtin{} = Nothing - --- examples: --- `##Text.take` — builtins don’t have cycles --- `#2tWjVAuc7` — derived, no cycle --- `#y9ycWkiC1.y9` — derived, part of cycle --- todo: take a (Reference -> CycleSize) so that `readSuffix` doesn't have to parse the size from the text. -fromText :: Text -> Either String Reference -fromText t = case Text.split (=='#') t of - [_, "", b] -> Right (Builtin b) - [_, h] -> case Text.split (=='.') h of - [hash] -> Right (derivedBase32Hex hash 0 1) - [hash, suffix] -> uncurry (derivedBase32Hex hash) <$> readSuffix suffix - _ -> bail - _ -> bail - where bail = Left $ "couldn't parse a Reference from " <> Text.unpack t +type CycleSize = Word64 component :: H.Hash -> [k] -> [(k, Id)] component h ks = let @@ -171,15 +67,6 @@ component h ks = let components :: [(H.Hash, [k])] -> [(k, Id)] components sccs = uncurry component =<< sccs -groupByComponent :: [(k, Reference)] -> [[(k, Reference)]] -groupByComponent refs = done $ foldl' insert Map.empty refs - where - insert m (k, r@(Derived h _ _)) = - Map.unionWith (<>) m (Map.fromList [(Right h, [(k,r)])]) - insert m (k, r) = - Map.unionWith (<>) m (Map.fromList [(Left r, [(k,r)])]) - done m = sortOn snd <$> toList m - instance Show Id where show = SH.toString . SH.take 5 . toShortHash . DerivedId instance Show Reference where show = SH.toString . SH.take 5 . toShortHash diff --git a/parser-typechecker/src/Unison/Hashing/V1/Reference/Util.hs b/parser-typechecker/src/Unison/Hashing/V1/Reference/Util.hs index e954492f44..2db033aea5 100644 --- a/parser-typechecker/src/Unison/Hashing/V1/Reference/Util.hs +++ b/parser-typechecker/src/Unison/Hashing/V1/Reference/Util.hs @@ -2,11 +2,12 @@ module Unison.Hashing.V1.Reference.Util where import Unison.Prelude -import qualified Unison.Hashing.V1.Reference as Reference +import qualified Data.Map as Map import Unison.Hashable (Hashable1) -import Unison.ABT (Var) +import qualified Unison.Hashing.V1.ABT as ABT import qualified Unison.ABT as ABT -import qualified Data.Map as Map +import qualified Unison.Hashing.V1.Reference as Reference +import Unison.ABT (Var) hashComponents :: (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v) @@ -14,8 +15,7 @@ hashComponents :: -> Map v (ABT.Term f v a) -> Map v (Reference.Id, ABT.Term f v a) hashComponents embedRef tms = - Map.fromList [ (v, (r,e)) | ((v,e), r) <- cs ] - where cs = Reference.components $ ABT.hashComponents ref tms - ref h i n = embedRef (Reference.Id h i n) - - + Map.fromList [(v, (r, e)) | ((v, e), r) <- cs] + where + cs = Reference.components $ ABT.hashComponents ref tms + ref h i n = embedRef (Reference.Id h i n) diff --git a/parser-typechecker/src/Unison/Hashing/V1/Referent.hs b/parser-typechecker/src/Unison/Hashing/V1/Referent.hs index b5a5035ebe..e053350a4c 100644 --- a/parser-typechecker/src/Unison/Hashing/V1/Referent.hs +++ b/parser-typechecker/src/Unison/Hashing/V1/Referent.hs @@ -1,123 +1,19 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PatternSynonyms #-} -module Unison.Hashing.V1.Referent where - -import Unison.Prelude -import Unison.Referent' ( Referent'(..), toReference' ) - -import qualified Data.Char as Char -import qualified Data.Text as Text -import Unison.Hashing.V1.Reference (Reference) -import qualified Unison.Hashing.V1.Reference as R -import Unison.ShortHash (ShortHash) -import qualified Unison.ShortHash as SH +module Unison.Hashing.V1.Referent (Referent, pattern Ref, pattern Con) where import Unison.ConstructorType (ConstructorType) -import qualified Unison.ConstructorType as CT +import Unison.Hashing.V1.Reference (Reference) +import Unison.Referent' (Referent' (..)) --- | Specifies a term. --- --- Either a term 'Reference', a data constructor, or an effect constructor. --- -- Slightly odd naming. This is the "referent of term name in the codebase", -- rather than the target of a Reference. type Referent = Referent' Reference type ConstructorId = Int + pattern Ref :: Reference -> Referent pattern Ref r = Ref' r pattern Con :: Reference -> ConstructorId -> ConstructorType -> Referent pattern Con r i t = Con' r i t {-# COMPLETE Ref, Con #-} - --- | Cannot be a builtin. -type Id = Referent' R.Id - --- referentToTerm moved to Term.fromReferent --- termToReferent moved to Term.toReferent - --- todo: move these to ShortHash module -toShortHash :: Referent -> ShortHash -toShortHash = \case - Ref r -> R.toShortHash r - Con r i _ -> patternShortHash r i - -toShortHashId :: Id -> ShortHash -toShortHashId = toShortHash . fromId - --- also used by HashQualified.fromPattern -patternShortHash :: Reference -> ConstructorId -> ShortHash -patternShortHash r i = (R.toShortHash r) { SH.cid = Just . Text.pack $ show i } - -showShort :: Int -> Referent -> Text -showShort numHashChars = SH.toText . SH.take numHashChars . toShortHash - -toText :: Referent -> Text -toText = \case - Ref r -> R.toText r - Con r cid ct -> R.toText r <> "#" <> ctorTypeText ct <> Text.pack (show cid) - -ctorTypeText :: CT.ConstructorType -> Text -ctorTypeText CT.Effect = EffectCtor -ctorTypeText CT.Data = DataCtor - -pattern EffectCtor = "a" -pattern DataCtor = "d" - -toString :: Referent -> String -toString = Text.unpack . toText - -isConstructor :: Referent -> Bool -isConstructor Con{} = True -isConstructor _ = False - -toTermReference :: Referent -> Maybe Reference -toTermReference = \case - Ref r -> Just r - _ -> Nothing - -toReference :: Referent -> Reference -toReference = toReference' - -fromId :: Id -> Referent -fromId = fmap R.DerivedId - -toTypeReference :: Referent -> Maybe Reference -toTypeReference = \case - Con r _i _t -> Just r - _ -> Nothing - -isPrefixOf :: ShortHash -> Referent -> Bool -isPrefixOf sh r = SH.isPrefixOf sh (toShortHash r) - -unsafeFromText :: Text -> Referent -unsafeFromText = fromMaybe (error "invalid referent") . fromText - --- #abc[.xy][#cid] -fromText :: Text -> Maybe Referent -fromText t = either (const Nothing) Just $ - -- if the string has just one hash at the start, it's just a reference - if Text.length refPart == 1 then - Ref <$> R.fromText t - else if Text.all Char.isDigit cidPart then do - r <- R.fromText (Text.dropEnd 1 refPart) - ctorType <- ctorType - let cid = read (Text.unpack cidPart) - pure $ Con r cid ctorType - else - Left ("invalid constructor id: " <> Text.unpack cidPart) - where - ctorType = case Text.take 1 cidPart' of - EffectCtor -> Right CT.Effect - DataCtor -> Right CT.Data - _otherwise -> - Left ("invalid constructor type (expected '" - <> EffectCtor <> "' or '" <> DataCtor <> "'): " <> Text.unpack cidPart') - refPart = Text.dropWhileEnd (/= '#') t - cidPart' = Text.takeWhileEnd (/= '#') t - cidPart = Text.drop 1 cidPart' - -fold :: (r -> a) -> (r -> ConstructorId -> ConstructorType -> a) -> Referent' r -> a -fold fr fc = \case - Ref' r -> fr r - Con' r i ct -> fc r i ct diff --git a/parser-typechecker/src/Unison/Hashing/V1/Term.hs b/parser-typechecker/src/Unison/Hashing/V1/Term.hs index 27ee4fdbb2..e4d98f07c8 100644 --- a/parser-typechecker/src/Unison/Hashing/V1/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V1/Term.hs @@ -7,42 +7,35 @@ {-# LANGUAGE UnicodeSyntax #-} {-# LANGUAGE ViewPatterns #-} -module Unison.Hashing.V1.Term where +module Unison.Hashing.V1.Term ( + Term, + F(..), + MatchCase(MatchCase), + hashClosedTerm, + hashComponents, +) where import Unison.Prelude - import Prelude hiding (and,or) -import Control.Monad.State (evalState) -import qualified Control.Monad.Writer.Strict as Writer -import Data.Bifunctor (second) -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.Text as Text + import qualified Data.Sequence as Sequence -import Prelude.Extras (Eq1(..), Show1(..)) -import Text.Show +import qualified Data.Text as Text +import Prelude.Extras (Eq1 (..), Show1 (..)) +import Text.Show import qualified Unison.ABT as ABT import qualified Unison.Blank as B import qualified Unison.Hash as Hash -import Unison.Hashable (Hashable1, accumulateToken) +import Unison.Hashable (Hashable1, accumulateToken) import qualified Unison.Hashable as Hashable -import Unison.Hashing.V1.Pattern (Pattern) +import qualified Unison.Hashing.V1.ABT as ABT +import Unison.Hashing.V1.Pattern (Pattern) import qualified Unison.Hashing.V1.Pattern as Pattern -import Unison.Hashing.V1.Reference (Reference, pattern Builtin) +import Unison.Hashing.V1.Reference (Reference) import qualified Unison.Hashing.V1.Reference as Reference import qualified Unison.Hashing.V1.Reference.Util as ReferenceUtil -import Unison.Hashing.V1.Referent (Referent) -import qualified Unison.Hashing.V1.Referent as Referent -import Unison.Hashing.V1.Type (Type) -import qualified Unison.Hashing.V1.Type as Type -import qualified Unison.ConstructorType as CT -import Unison.Util.List (multimap) -import Unison.Var (Var) -import qualified Unison.Var as Var -import Unsafe.Coerce -import Unison.Symbol (Symbol) -import qualified Unison.Hashing.V1.LabeledDependency as LD -import Unison.Hashing.V1.LabeledDependency (LabeledDependency) +import Unison.Hashing.V1.Referent (Referent) +import Unison.Hashing.V1.Type (Type) +import Unison.Var (Var) -- This gets reexported; should maybe live somewhere other than Pattern, though. type ConstructorId = Pattern.ConstructorId @@ -98,840 +91,16 @@ type IsTop = Bool -- | Like `Term v`, but with an annotation of type `a` at every level in the tree type Term v a = Term2 v a a v a --- | Allow type variables and term variables to differ -type Term' vt v a = Term2 vt a a v a -- | Allow type variables, term variables, type annotations and term annotations -- to all differ type Term2 vt at ap v a = ABT.Term (F vt at ap) v a --- | Like `Term v a`, but with only () for type and pattern annotations. -type Term3 v a = Term2 v () () v a - --- | Terms are represented as ABTs over the base functor F, with variables in `v` -type Term0 v = Term v () --- | Terms with type variables in `vt`, and term variables in `v` -type Term0' vt v = Term' vt v () - --- Prepare a term for type-directed name resolution by replacing --- any remaining free variables with blanks to be resolved by TDNR -prepareTDNR :: Var v => ABT.Term (F vt b ap) v b -> ABT.Term (F vt b ap) v b -prepareTDNR t = fmap fst . ABT.visitPure f $ ABT.annotateBound t - where f (ABT.Term _ (a, bound) (ABT.Var v)) | Set.notMember v bound = - Just $ resolve (a, bound) a (Text.unpack $ Var.name v) - f _ = Nothing - -amap :: Ord v => (a -> a2) -> Term v a -> Term v a2 -amap f = fmap f . patternMap (fmap f) . typeMap (fmap f) - -patternMap :: (Pattern ap -> Pattern ap2) -> Term2 vt at ap v a -> Term2 vt at ap2 v a -patternMap f = go where - go (ABT.Term fvs a t) = ABT.Term fvs a $ case t of - ABT.Abs v t -> ABT.Abs v (go t) - ABT.Var v -> ABT.Var v - ABT.Cycle t -> ABT.Cycle (go t) - ABT.Tm (Match e cases) -> ABT.Tm (Match (go e) [ - MatchCase (f p) (go <$> g) (go a) | MatchCase p g a <- cases ]) - -- Safe since `Match` is only ctor that has embedded `Pattern ap` arg - ABT.Tm ts -> unsafeCoerce $ ABT.Tm (fmap go ts) - -vmap :: Ord v2 => (v -> v2) -> Term v a -> Term v2 a -vmap f = ABT.vmap f . typeMap (ABT.vmap f) - -vtmap :: Ord vt2 => (vt -> vt2) -> Term' vt v a -> Term' vt2 v a -vtmap f = typeMap (ABT.vmap f) - -typeMap - :: Ord vt2 - => (Type vt at -> Type vt2 at2) - -> Term2 vt at ap v a - -> Term2 vt2 at2 ap v a -typeMap f = go - where - go (ABT.Term fvs a t) = ABT.Term fvs a $ case t of - ABT.Abs v t -> ABT.Abs v (go t) - ABT.Var v -> ABT.Var v - ABT.Cycle t -> ABT.Cycle (go t) - ABT.Tm (Ann e t) -> ABT.Tm (Ann (go e) (f t)) - -- Safe since `Ann` is only ctor that has embedded `Type v` arg - -- otherwise we'd have to manually match on every non-`Ann` ctor - ABT.Tm ts -> unsafeCoerce $ ABT.Tm (fmap go ts) - -extraMap' - :: (Ord vt, Ord vt') - => (vt -> vt') - -> (at -> at') - -> (ap -> ap') - -> Term2 vt at ap v a - -> Term2 vt' at' ap' v a -extraMap' vtf atf apf = ABT.extraMap (extraMap vtf atf apf) - -extraMap - :: (Ord vt, Ord vt') - => (vt -> vt') - -> (at -> at') - -> (ap -> ap') - -> F vt at ap a - -> F vt' at' ap' a -extraMap vtf atf apf = \case - Int x -> Int x - Nat x -> Nat x - Float x -> Float x - Boolean x -> Boolean x - Text x -> Text x - Char x -> Char x - Blank x -> Blank (fmap atf x) - Ref x -> Ref x - Constructor x y -> Constructor x y - Request x y -> Request x y - Handle x y -> Handle x y - App x y -> App x y - Ann tm x -> Ann tm (ABT.amap atf (ABT.vmap vtf x)) - List x -> List x - If x y z -> If x y z - And x y -> And x y - Or x y -> Or x y - Lam x -> Lam x - LetRec x y z -> LetRec x y z - Let x y z -> Let x y z - Match tm l -> Match tm (map (matchCaseExtraMap apf) l) - TermLink r -> TermLink r - TypeLink r -> TypeLink r - -matchCaseExtraMap :: (loc -> loc') -> MatchCase loc a -> MatchCase loc' a -matchCaseExtraMap f (MatchCase p x y) = MatchCase (fmap f p) x y - -unannotate - :: forall vt at ap v a . Ord v => Term2 vt at ap v a -> Term0' vt v -unannotate = go - where - go :: Term2 vt at ap v a -> Term0' vt v - go (ABT.out -> ABT.Abs v body) = ABT.abs v (go body) - go (ABT.out -> ABT.Cycle body) = ABT.cycle (go body) - go (ABT.Var' v ) = ABT.var v - go (ABT.Tm' f ) = case go <$> f of - Ann e t -> ABT.tm (Ann e (void t)) - Match scrutinee branches -> - let unann (MatchCase pat guard body) = MatchCase (void pat) guard body - in ABT.tm (Match scrutinee (unann <$> branches)) - f' -> ABT.tm (unsafeCoerce f') - go _ = error "unpossible" - -wrapV :: Ord v => Term v a -> Term (ABT.V v) a -wrapV = vmap ABT.Bound - --- | All variables mentioned in the given term. --- Includes both term and type variables, both free and bound. -allVars :: Ord v => Term v a -> Set v -allVars tm = Set.fromList $ - ABT.allVars tm ++ [ v | tp <- allTypes tm, v <- ABT.allVars tp ] - where - allTypes tm = case tm of - Ann' e tp -> tp : allTypes e - _ -> foldMap allTypes $ ABT.out tm - -freeVars :: Term' vt v a -> Set v -freeVars = ABT.freeVars - -freeTypeVars :: Ord vt => Term' vt v a -> Set vt -freeTypeVars t = Map.keysSet $ freeTypeVarAnnotations t - -freeTypeVarAnnotations :: Ord vt => Term' vt v a -> Map vt [a] -freeTypeVarAnnotations e = multimap $ go Set.empty e where - go bound tm = case tm of - Var' _ -> mempty - Ann' e (Type.stripIntroOuters -> t1) -> let - bound' = case t1 of Type.ForallsNamed' vs _ -> bound <> Set.fromList vs - _ -> bound - in go bound' e <> ABT.freeVarOccurrences bound t1 - ABT.Tm' f -> foldMap (go bound) f - (ABT.out -> ABT.Abs _ body) -> go bound body - (ABT.out -> ABT.Cycle body) -> go bound body - _ -> error "unpossible" - -substTypeVars :: (Ord v, Var vt) - => [(vt, Type vt b)] - -> Term' vt v a - -> Term' vt v a -substTypeVars subs e = foldl' go e subs where - go e (vt, t) = substTypeVar vt t e - --- Capture-avoiding substitution of a type variable inside a term. This --- will replace that type variable wherever it appears in type signatures of --- the term, avoiding capture by renaming ∀-binders. -substTypeVar - :: (Ord v, ABT.Var vt) - => vt - -> Type vt b - -> Term' vt v a - -> Term' vt v a -substTypeVar vt ty = go Set.empty where - go bound tm | Set.member vt bound = tm - go bound tm = let loc = ABT.annotation tm in case tm of - Var' _ -> tm - Ann' e t -> uncapture [] e (Type.stripIntroOuters t) where - fvs = ABT.freeVars ty - -- if the ∀ introduces a variable, v, which is free in `ty`, we pick a new - -- variable name for v which is unique, v', and rename v to v' in e. - uncapture vs e t@(Type.Forall' body) | Set.member (ABT.variable body) fvs = let - v = ABT.variable body - v2 = Var.freshIn (ABT.freeVars t) . Var.freshIn (Set.insert vt fvs) $ v - t2 = ABT.bindInheritAnnotation body (Type.var() v2) - in uncapture ((ABT.annotation t, v2):vs) (renameTypeVar v v2 e) t2 - uncapture vs e t0 = let - t = foldl (\body (loc,v) -> Type.forall loc v body) t0 vs - bound' = case Type.unForalls (Type.stripIntroOuters t) of - Nothing -> bound - Just (vs, _) -> bound <> Set.fromList vs - t' = ABT.substInheritAnnotation vt ty (Type.stripIntroOuters t) - in ann loc (go bound' e) (Type.freeVarsToOuters bound t') - ABT.Tm' f -> ABT.tm' loc (go bound <$> f) - (ABT.out -> ABT.Abs v body) -> ABT.abs' loc v (go bound body) - (ABT.out -> ABT.Cycle body) -> ABT.cycle' loc (go bound body) - _ -> error "unpossible" - -renameTypeVar :: (Ord v, ABT.Var vt) => vt -> vt -> Term' vt v a -> Term' vt v a -renameTypeVar old new = go Set.empty where - go bound tm | Set.member old bound = tm - go bound tm = let loc = ABT.annotation tm in case tm of - Var' _ -> tm - Ann' e t -> let - bound' = case Type.unForalls (Type.stripIntroOuters t) of - Nothing -> bound - Just (vs, _) -> bound <> Set.fromList vs - t' = ABT.rename old new (Type.stripIntroOuters t) - in ann loc (go bound' e) (Type.freeVarsToOuters bound t') - ABT.Tm' f -> ABT.tm' loc (go bound <$> f) - (ABT.out -> ABT.Abs v body) -> ABT.abs' loc v (go bound body) - (ABT.out -> ABT.Cycle body) -> ABT.cycle' loc (go bound body) - _ -> error "unpossible" - --- Converts free variables to bound variables using forall or introOuter. Example: --- --- foo : x -> x --- foo a = --- r : x --- r = a --- r --- --- This becomes: --- --- foo : ∀ x . x -> x --- foo a = --- r : outer x . x -- FYI, not valid syntax --- r = a --- r --- --- More specifically: in the expression `e : t`, unbound lowercase variables in `t` --- are bound with foralls, and any ∀-quantified type variables are made bound in --- `e` and its subexpressions. The result is a term with no lowercase free --- variables in any of its type signatures, with outer references represented --- with explicit `introOuter` binders. The resulting term may have uppercase --- free variables that are still unbound. -generalizeTypeSignatures :: (Var vt, Var v) => Term' vt v a -> Term' vt v a -generalizeTypeSignatures = go Set.empty where - go bound tm = let loc = ABT.annotation tm in case tm of - Var' _ -> tm - Ann' e (Type.generalizeLowercase bound -> t) -> let - bound' = case Type.unForalls t of - Nothing -> bound - Just (vs, _) -> bound <> Set.fromList vs - in ann loc (go bound' e) (Type.freeVarsToOuters bound t) - ABT.Tm' f -> ABT.tm' loc (go bound <$> f) - (ABT.out -> ABT.Abs v body) -> ABT.abs' loc v (go bound body) - (ABT.out -> ABT.Cycle body) -> ABT.cycle' loc (go bound body) - _ -> error "unpossible" - --- nicer pattern syntax - -pattern Var' v <- ABT.Var' v -pattern Cycle' xs t <- ABT.Cycle' xs t -pattern Abs' subst <- ABT.Abs' subst -pattern Int' n <- (ABT.out -> ABT.Tm (Int n)) -pattern Nat' n <- (ABT.out -> ABT.Tm (Nat n)) -pattern Float' n <- (ABT.out -> ABT.Tm (Float n)) -pattern Boolean' b <- (ABT.out -> ABT.Tm (Boolean b)) -pattern Text' s <- (ABT.out -> ABT.Tm (Text s)) -pattern Char' c <- (ABT.out -> ABT.Tm (Char c)) -pattern Blank' b <- (ABT.out -> ABT.Tm (Blank b)) -pattern Ref' r <- (ABT.out -> ABT.Tm (Ref r)) -pattern TermLink' r <- (ABT.out -> ABT.Tm (TermLink r)) -pattern TypeLink' r <- (ABT.out -> ABT.Tm (TypeLink r)) -pattern Builtin' r <- (ABT.out -> ABT.Tm (Ref (Builtin r))) -pattern App' f x <- (ABT.out -> ABT.Tm (App f x)) -pattern Match' scrutinee branches <- (ABT.out -> ABT.Tm (Match scrutinee branches)) -pattern Constructor' ref n <- (ABT.out -> ABT.Tm (Constructor ref n)) -pattern Request' ref n <- (ABT.out -> ABT.Tm (Request ref n)) -pattern RequestOrCtor' ref n <- (unReqOrCtor -> Just (ref, n)) -pattern If' cond t f <- (ABT.out -> ABT.Tm (If cond t f)) -pattern And' x y <- (ABT.out -> ABT.Tm (And x y)) -pattern Or' x y <- (ABT.out -> ABT.Tm (Or x y)) -pattern Handle' h body <- (ABT.out -> ABT.Tm (Handle h body)) -pattern Apps' f args <- (unApps -> Just (f, args)) --- begin pretty-printer helper patterns -pattern AppsPred' f args <- (unAppsPred -> Just (f, args)) -pattern BinaryApp' f arg1 arg2 <- (unBinaryApp -> Just (f, arg1, arg2)) -pattern BinaryApps' apps lastArg <- (unBinaryApps -> Just (apps, lastArg)) -pattern BinaryAppsPred' apps lastArg <- (unBinaryAppsPred -> Just (apps, lastArg)) --- end pretty-printer helper patterns -pattern Ann' x t <- (ABT.out -> ABT.Tm (Ann x t)) -pattern List' xs <- (ABT.out -> ABT.Tm (List xs)) -pattern Lam' subst <- ABT.Tm' (Lam (ABT.Abs' subst)) - -pattern Delay' body <- (unDelay -> Just body) -unDelay :: Ord v => Term2 vt at ap v a -> Maybe (Term2 vt at ap v a) -unDelay tm = case ABT.out tm of - ABT.Tm (Lam (ABT.Term _ _ (ABT.Abs v body))) - | Set.notMember v (ABT.freeVars body) - -> Just body - _ -> Nothing - -pattern LamNamed' v body <- (ABT.out -> ABT.Tm (Lam (ABT.Term _ _ (ABT.Abs v body)))) -pattern LamsNamed' vs body <- (unLams' -> Just (vs, body)) -pattern LamsNamedOpt' vs body <- (unLamsOpt' -> Just (vs, body)) -pattern LamsNamedPred' vs body <- (unLamsPred' -> Just (vs, body)) -pattern LamsNamedOrDelay' vs body <- (unLamsUntilDelay' -> Just (vs, body)) -pattern Let1' b subst <- (unLet1 -> Just (_, b, subst)) -pattern Let1Top' top b subst <- (unLet1 -> Just (top, b, subst)) -pattern Let1Named' v b e <- (ABT.Tm' (Let _ b (ABT.out -> ABT.Abs v e))) -pattern Let1NamedTop' top v b e <- (ABT.Tm' (Let top b (ABT.out -> ABT.Abs v e))) -pattern Lets' bs e <- (unLet -> Just (bs, e)) -pattern LetRecNamed' bs e <- (unLetRecNamed -> Just (_,bs,e)) -pattern LetRecNamedTop' top bs e <- (unLetRecNamed -> Just (top,bs,e)) -pattern LetRec' subst <- (unLetRec -> Just (_, subst)) -pattern LetRecTop' top subst <- (unLetRec -> Just (top, subst)) -pattern LetRecNamedAnnotated' ann bs e <- (unLetRecNamedAnnotated -> Just (_, ann, bs,e)) -pattern LetRecNamedAnnotatedTop' top ann bs e <- - (unLetRecNamedAnnotated -> Just (top, ann, bs,e)) - -fresh :: Var v => Term0 v -> v -> v -fresh = ABT.fresh - --- some smart constructors - -var :: a -> v -> Term2 vt at ap v a -var = ABT.annotatedVar - -var' :: Var v => Text -> Term0' vt v -var' = var() . Var.named ref :: Ord v => a -> Reference -> Term2 vt at ap v a ref a r = ABT.tm' a (Ref r) -pattern Referent' r <- (unReferent -> Just r) - -unReferent :: Term2 vt at ap v a -> Maybe Referent -unReferent (Ref' r) = Just $ Referent.Ref r -unReferent (Constructor' r cid) = Just $ Referent.Con r cid CT.Data -unReferent (Request' r cid) = Just $ Referent.Con r cid CT.Effect -unReferent _ = Nothing - refId :: Ord v => a -> Reference.Id -> Term2 vt at ap v a refId a = ref a . Reference.DerivedId -termLink :: Ord v => a -> Referent -> Term2 vt at ap v a -termLink a r = ABT.tm' a (TermLink r) - -typeLink :: Ord v => a -> Reference -> Term2 vt at ap v a -typeLink a r = ABT.tm' a (TypeLink r) - -builtin :: Ord v => a -> Text -> Term2 vt at ap v a -builtin a n = ref a (Reference.Builtin n) - -float :: Ord v => a -> Double -> Term2 vt at ap v a -float a d = ABT.tm' a (Float d) - -boolean :: Ord v => a -> Bool -> Term2 vt at ap v a -boolean a b = ABT.tm' a (Boolean b) - -int :: Ord v => a -> Int64 -> Term2 vt at ap v a -int a d = ABT.tm' a (Int d) - -nat :: Ord v => a -> Word64 -> Term2 vt at ap v a -nat a d = ABT.tm' a (Nat d) - -text :: Ord v => a -> Text -> Term2 vt at ap v a -text a = ABT.tm' a . Text - -char :: Ord v => a -> Char -> Term2 vt at ap v a -char a = ABT.tm' a . Char - -watch :: (Var v, Semigroup a) => a -> String -> Term v a -> Term v a -watch a note e = - apps' (builtin a "Debug.watch") [text a (Text.pack note), e] - -watchMaybe :: (Var v, Semigroup a) => Maybe String -> Term v a -> Term v a -watchMaybe Nothing e = e -watchMaybe (Just note) e = watch (ABT.annotation e) note e - -blank :: Ord v => a -> Term2 vt at ap v a -blank a = ABT.tm' a (Blank B.Blank) - -placeholder :: Ord v => a -> String -> Term2 vt a ap v a -placeholder a s = ABT.tm' a . Blank $ B.Recorded (B.Placeholder a s) - -resolve :: Ord v => at -> ab -> String -> Term2 vt ab ap v at -resolve at ab s = ABT.tm' at . Blank $ B.Recorded (B.Resolve ab s) - -constructor :: Ord v => a -> Reference -> ConstructorId -> Term2 vt at ap v a -constructor a ref n = ABT.tm' a (Constructor ref n) - -request :: Ord v => a -> Reference -> ConstructorId -> Term2 vt at ap v a -request a ref n = ABT.tm' a (Request ref n) - --- todo: delete and rename app' to app -app_ :: Ord v => Term0' vt v -> Term0' vt v -> Term0' vt v -app_ f arg = ABT.tm (App f arg) - -app :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -app a f arg = ABT.tm' a (App f arg) - -match :: Ord v => a -> Term2 vt at a v a -> [MatchCase a (Term2 vt at a v a)] -> Term2 vt at a v a -match a scrutinee branches = ABT.tm' a (Match scrutinee branches) - -handle :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -handle a h block = ABT.tm' a (Handle h block) - -and :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -and a x y = ABT.tm' a (And x y) - -or :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -or a x y = ABT.tm' a (Or x y) - -list :: Ord v => a -> [Term2 vt at ap v a] -> Term2 vt at ap v a -list a es = list' a (Sequence.fromList es) - -list' :: Ord v => a -> Seq (Term2 vt at ap v a) -> Term2 vt at ap v a -list' a es = ABT.tm' a (List es) - -apps - :: Ord v - => Term2 vt at ap v a - -> [(a, Term2 vt at ap v a)] - -> Term2 vt at ap v a -apps = foldl' (\f (a, t) -> app a f t) - -apps' - :: (Ord v, Semigroup a) - => Term2 vt at ap v a - -> [Term2 vt at ap v a] - -> Term2 vt at ap v a -apps' = foldl' (\f t -> app (ABT.annotation f <> ABT.annotation t) f t) - -iff :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -iff a cond t f = ABT.tm' a (If cond t f) - -ann_ :: Ord v => Term0' vt v -> Type vt () -> Term0' vt v -ann_ e t = ABT.tm (Ann e t) - -ann :: Ord v - => a - -> Term2 vt at ap v a - -> Type vt at - -> Term2 vt at ap v a -ann a e t = ABT.tm' a (Ann e t) - --- arya: are we sure we want the two annotations to be the same? -lam :: Ord v => a -> v -> Term2 vt at ap v a -> Term2 vt at ap v a -lam a v body = ABT.tm' a (Lam (ABT.abs' a v body)) - -delay :: Var v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -delay a body = - ABT.tm' a (Lam (ABT.abs' a (ABT.freshIn (ABT.freeVars body) (Var.named "_")) body)) - -lam' :: Ord v => a -> [v] -> Term2 vt at ap v a -> Term2 vt at ap v a -lam' a vs body = foldr (lam a) body vs - -lam'' :: Ord v => [(a,v)] -> Term2 vt at ap v a -> Term2 vt at ap v a -lam'' vs body = foldr (uncurry lam) body vs - -isLam :: Term2 vt at ap v a -> Bool -isLam t = arity t > 0 - -arity :: Term2 vt at ap v a -> Int -arity (LamNamed' _ body) = 1 + arity body -arity (Ann' e _) = arity e -arity _ = 0 - -unLetRecNamedAnnotated - :: Term' vt v a - -> Maybe - (IsTop, a, [((a, v), Term' vt v a)], Term' vt v a) -unLetRecNamedAnnotated (ABT.CycleA' ann avs (ABT.Tm' (LetRec isTop bs e))) = - Just (isTop, ann, avs `zip` bs, e) -unLetRecNamedAnnotated _ = Nothing - -letRec' - :: (Ord v, Monoid a) - => Bool - -> [(v, Term' vt v a)] - -> Term' vt v a - -> Term' vt v a -letRec' isTop bindings body = - letRec isTop - (foldMap (ABT.annotation . snd) bindings <> ABT.annotation body) - [ ((ABT.annotation b, v), b) | (v,b) <- bindings ] - body - --- Prepend a binding to form a (bigger) let rec. Useful when --- building up a block incrementally using a right fold. --- --- For example: --- consLetRec (x = 42) "hi" --- => --- let rec x = 42 in "hi" --- --- consLetRec (x = 42) (let rec y = "hi" in (x,y)) --- => --- let rec x = 42; y = "hi" in (x,y) -consLetRec - :: Ord v - => Bool -- isTop parameter - -> a -- annotation for overall let rec - -> (a, v, Term' vt v a) -- the binding - -> Term' vt v a -- the body - -> Term' vt v a -consLetRec isTop a (ab, vb, b) body = case body of - LetRecNamedAnnotated' _ bs body -> letRec isTop a (((ab,vb), b) : bs) body - _ -> letRec isTop a [((ab,vb),b)] body - -letRec - :: Ord v - => Bool - -> a - -> [((a, v), Term' vt v a)] - -> Term' vt v a - -> Term' vt v a -letRec _ _ [] e = e -letRec isTop a bindings e = ABT.cycle' - a - (foldr (uncurry ABT.abs' . fst) z bindings) - where z = ABT.tm' a (LetRec isTop (map snd bindings) e) - - --- | Smart constructor for let rec blocks. Each binding in the block may --- reference any other binding in the block in its body (including itself), --- and the output expression may also reference any binding in the block. -letRec_ :: Ord v => IsTop -> [(v, Term0' vt v)] -> Term0' vt v -> Term0' vt v -letRec_ _ [] e = e -letRec_ isTop bindings e = ABT.cycle (foldr (ABT.abs . fst) z bindings) - where - z = ABT.tm (LetRec isTop (map snd bindings) e) - --- | Smart constructor for let blocks. Each binding in the block may --- reference only previous bindings in the block, not including itself. --- The output expression may reference any binding in the block. --- todo: delete me -let1_ :: Ord v => IsTop -> [(v,Term0' vt v)] -> Term0' vt v -> Term0' vt v -let1_ isTop bindings e = foldr f e bindings - where - f (v,b) body = ABT.tm (Let isTop b (ABT.abs v body)) - --- | annotations are applied to each nested Let expression -let1 - :: Ord v - => IsTop - -> [((a, v), Term2 vt at ap v a)] - -> Term2 vt at ap v a - -> Term2 vt at ap v a -let1 isTop bindings e = foldr f e bindings - where f ((ann, v), b) body = ABT.tm' ann (Let isTop b (ABT.abs' ann v body)) - -let1' - :: (Semigroup a, Ord v) - => IsTop - -> [(v, Term2 vt at ap v a)] - -> Term2 vt at ap v a - -> Term2 vt at ap v a -let1' isTop bindings e = foldr f e bindings - where - ann = ABT.annotation - f (v, b) body = ABT.tm' a (Let isTop b (ABT.abs' a v body)) - where a = ann b <> ann body - --- let1' :: Var v => [(Text, Term0 vt v)] -> Term0 vt v -> Term0 vt v --- let1' bs e = let1 [(ABT.v' name, b) | (name,b) <- bs ] e - -unLet1 - :: Var v - => Term' vt v a - -> Maybe (IsTop, Term' vt v a, ABT.Subst (F vt a a) v a) -unLet1 (ABT.Tm' (Let isTop b (ABT.Abs' subst))) = Just (isTop, b, subst) -unLet1 _ = Nothing - --- | Satisfies `unLet (let' bs e) == Just (bs, e)` -unLet - :: Term2 vt at ap v a - -> Maybe ([(IsTop, v, Term2 vt at ap v a)], Term2 vt at ap v a) -unLet t = fixup (go t) - where - go (ABT.Tm' (Let isTop b (ABT.out -> ABT.Abs v t))) = case go t of - (env, t) -> ((isTop, v, b) : env, t) - go t = ([], t) - fixup ([], _) = Nothing - fixup bst = Just bst - --- | Satisfies `unLetRec (letRec bs e) == Just (bs, e)` -unLetRecNamed - :: Term2 vt at ap v a - -> Maybe - ( IsTop - , [(v, Term2 vt at ap v a)] - , Term2 vt at ap v a - ) -unLetRecNamed (ABT.Cycle' vs (ABT.Tm' (LetRec isTop bs e))) - | length vs == length bs = Just (isTop, zip vs bs, e) -unLetRecNamed _ = Nothing - -unLetRec - :: (Monad m, Var v) - => Term2 vt at ap v a - -> Maybe - ( IsTop - , (v -> m v) - -> m - ( [(v, Term2 vt at ap v a)] - , Term2 vt at ap v a - ) - ) -unLetRec (unLetRecNamed -> Just (isTop, bs, e)) = Just - ( isTop - , \freshen -> do - vs <- sequence [ freshen v | (v, _) <- bs ] - let sub = ABT.substsInheritAnnotation (map fst bs `zip` map ABT.var vs) - pure (vs `zip` [ sub b | (_, b) <- bs ], sub e) - ) -unLetRec _ = Nothing - -unApps - :: Term2 vt at ap v a - -> Maybe (Term2 vt at ap v a, [Term2 vt at ap v a]) -unApps t = unAppsPred (t, const True) - --- Same as unApps but taking a predicate controlling whether we match on a given function argument. -unAppsPred :: (Term2 vt at ap v a, Term2 vt at ap v a -> Bool) -> - Maybe (Term2 vt at ap v a, [Term2 vt at ap v a]) -unAppsPred (t, pred) = case go t [] of [] -> Nothing; f:args -> Just (f,args) - where - go (App' i o) acc | pred o = go i (o:acc) - go _ [] = [] - go fn args = fn:args - -unBinaryApp :: Term2 vt at ap v a - -> Maybe (Term2 vt at ap v a, - Term2 vt at ap v a, - Term2 vt at ap v a) -unBinaryApp t = case unApps t of - Just (f, [arg1, arg2]) -> Just (f, arg1, arg2) - _ -> Nothing - --- "((a1 `f1` a2) `f2` a3)" becomes "Just ([(a2, f2), (a1, f1)], a3)" -unBinaryApps - :: Term2 vt at ap v a - -> Maybe - ( [(Term2 vt at ap v a, Term2 vt at ap v a)] - , Term2 vt at ap v a - ) -unBinaryApps t = unBinaryAppsPred (t, const True) - --- Same as unBinaryApps but taking a predicate controlling whether we match on a given binary function. -unBinaryAppsPred :: (Term2 vt at ap v a - ,Term2 vt at ap v a -> Bool) - -> Maybe ([(Term2 vt at ap v a, - Term2 vt at ap v a)], - Term2 vt at ap v a) -unBinaryAppsPred (t, pred) = case unBinaryApp t of - Just (f, x, y) | pred f -> case unBinaryAppsPred (x, pred) of - Just (as, xLast) -> Just ((xLast, f) : as, y) - Nothing -> Just ([(x, f)], y) - _ -> Nothing - -unLams' - :: Term2 vt at ap v a -> Maybe ([v], Term2 vt at ap v a) -unLams' t = unLamsPred' (t, const True) - --- Same as unLams', but always matches. Returns an empty [v] if the term doesn't start with a --- lambda extraction. -unLamsOpt' :: Term2 vt at ap v a -> Maybe ([v], Term2 vt at ap v a) -unLamsOpt' t = case unLams' t of - r@(Just _) -> r - Nothing -> Just ([], t) - --- Same as unLams', but stops at any variable named `()`, which indicates a --- delay (`'`) annotation which we want to preserve. -unLamsUntilDelay' - :: Var v - => Term2 vt at ap v a - -> Maybe ([v], Term2 vt at ap v a) -unLamsUntilDelay' t = case unLamsPred' (t, (/=) $ Var.named "()") of - r@(Just _) -> r - Nothing -> Just ([], t) - --- Same as unLams' but taking a predicate controlling whether we match on a given binary function. -unLamsPred' :: (Term2 vt at ap v a, v -> Bool) -> - Maybe ([v], Term2 vt at ap v a) -unLamsPred' (LamNamed' v body, pred) | pred v = case unLamsPred' (body, pred) of - Nothing -> Just ([v], body) - Just (vs, body) -> Just (v:vs, body) -unLamsPred' _ = Nothing - -unReqOrCtor :: Term2 vt at ap v a -> Maybe (Reference, ConstructorId) -unReqOrCtor (Constructor' r cid) = Just (r, cid) -unReqOrCtor (Request' r cid) = Just (r, cid) -unReqOrCtor _ = Nothing - --- Dependencies including referenced data and effect decls -dependencies :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set Reference -dependencies t = Set.map (LD.fold id Referent.toReference) (labeledDependencies t) - -termDependencies :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set Reference -termDependencies = - Set.fromList - . mapMaybe - ( LD.fold - (\_typeRef -> Nothing) - ( Referent.fold - (\termRef -> Just termRef) - (\_typeConRef _i _ct -> Nothing) - ) - ) - . toList - . labeledDependencies - --- gets types from annotations and constructors -typeDependencies :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set Reference -typeDependencies = - Set.fromList . mapMaybe (LD.fold Just (const Nothing)) . toList . labeledDependencies - --- Gets the types to which this term contains references via patterns and --- data constructors. -constructorDependencies - :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set Reference -constructorDependencies = - Set.unions - . generalizedDependencies (const mempty) - (const mempty) - Set.singleton - (const . Set.singleton) - Set.singleton - (const . Set.singleton) - Set.singleton - -generalizedDependencies - :: (Ord v, Ord vt, Ord r) - => (Reference -> r) - -> (Reference -> r) - -> (Reference -> r) - -> (Reference -> ConstructorId -> r) - -> (Reference -> r) - -> (Reference -> ConstructorId -> r) - -> (Reference -> r) - -> Term2 vt at ap v a - -> Set r -generalizedDependencies termRef typeRef literalType dataConstructor dataType effectConstructor effectType - = Set.fromList . Writer.execWriter . ABT.visit' f where - f t@(Ref r) = Writer.tell [termRef r] $> t - f t@(TermLink r) = case r of - Referent.Ref r -> Writer.tell [termRef r] $> t - Referent.Con r id CT.Data -> Writer.tell [dataConstructor r id] $> t - Referent.Con r id CT.Effect -> Writer.tell [effectConstructor r id] $> t - f t@(TypeLink r) = Writer.tell [typeRef r] $> t - f t@(Ann _ typ) = - Writer.tell (map typeRef . toList $ Type.dependencies typ) $> t - f t@(Nat _) = Writer.tell [literalType Type.natRef] $> t - f t@(Int _) = Writer.tell [literalType Type.intRef] $> t - f t@(Float _) = Writer.tell [literalType Type.floatRef] $> t - f t@(Boolean _) = Writer.tell [literalType Type.booleanRef] $> t - f t@(Text _) = Writer.tell [literalType Type.textRef] $> t - f t@(List _) = Writer.tell [literalType Type.listRef] $> t - f t@(Constructor r cid) = - Writer.tell [dataType r, dataConstructor r cid] $> t - f t@(Request r cid) = - Writer.tell [effectType r, effectConstructor r cid] $> t - f t@(Match _ cases) = traverse_ goPat cases $> t - f t = pure t - goPat (MatchCase pat _ _) = - Writer.tell . toList $ Pattern.generalizedDependencies literalType - dataConstructor - dataType - effectConstructor - effectType - pat - -labeledDependencies - :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set LabeledDependency -labeledDependencies = generalizedDependencies LD.termRef - LD.typeRef - LD.typeRef - LD.dataConstructor - LD.typeRef - LD.effectConstructor - LD.typeRef - -updateDependencies - :: Ord v - => Map Reference Reference - -> Map Reference Reference - -> Term v a - -> Term v a -updateDependencies termUpdates typeUpdates = ABT.rebuildUp go - where - -- todo: this function might need tweaking if we ever allow type replacements - -- would need to look inside pattern matching and constructor calls - go (Ref r ) = Ref (Map.findWithDefault r r termUpdates) - go (TermLink (Referent.Ref r)) = TermLink (Referent.Ref $ Map.findWithDefault r r termUpdates) - go (TypeLink r) = TypeLink (Map.findWithDefault r r typeUpdates) - go (Ann tm tp) = Ann tm $ Type.updateDependencies typeUpdates tp - go f = f - --- | If the outermost term is a function application, --- perform substitution of the argument into the body -betaReduce :: Var v => Term0 v -> Term0 v -betaReduce (App' (Lam' f) arg) = ABT.bind f arg -betaReduce e = e - -betaNormalForm :: Var v => Term0 v -> Term0 v -betaNormalForm (App' f a) = betaNormalForm (betaReduce (app() (betaNormalForm f) a)) -betaNormalForm e = e - --- x -> f x => f -etaNormalForm :: Ord v => Term0 v -> Term0 v -etaNormalForm tm = case tm of - LamNamed' v body -> step . lam (ABT.annotation tm) v $ etaNormalForm body - where - step (LamNamed' v (App' f (Var' v'))) | v == v' = f - step tm = tm - _ -> tm - --- x -> f x => f as long as `x` is a variable of type `Var.Eta` -etaReduceEtaVars :: Var v => Term0 v -> Term0 v -etaReduceEtaVars tm = case tm of - LamNamed' v body -> step . lam (ABT.annotation tm) v $ etaReduceEtaVars body - where - ok v v' = v == v' && Var.typeOf v == Var.Eta - step (LamNamed' v (App' f (Var' v'))) | ok v v' = f - step tm = tm - _ -> tm - --- This converts `Reference`s it finds that are in the input `Map` --- back to free variables -unhashComponent :: forall v a. Var v - => Map Reference (Term v a) - -> Map Reference (v, Term v a) -unhashComponent m = let - usedVars = foldMap (Set.fromList . ABT.allVars) m - m' :: Map Reference (v, Term v a) - m' = evalState (Map.traverseWithKey assignVar m) usedVars where - assignVar r t = (,t) <$> ABT.freshenS (refNamed r) - unhash1 = ABT.rebuildUp' go where - go e@(Ref' r) = case Map.lookup r m' of - Nothing -> e - Just (v, _) -> var (ABT.annotation e) v - go e = e - in second unhash1 <$> m' - where - -- Variable whose name is derived from the given reference. - refNamed :: Var v => Reference -> v - refNamed ref = Var.named ("ℍ" <> Reference.toText ref) - hashComponents :: Var v => Map v (Term v a) -> Map v (Reference.Id, Term v a) hashComponents = ReferenceUtil.hashComponents $ refId () @@ -940,32 +109,6 @@ hashClosedTerm :: Var v => Term v a -> Reference.Id hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 1 -- The hash for a constructor -hashConstructor' - :: (Reference -> ConstructorId -> Term0 Symbol) -> Reference -> ConstructorId -> Reference -hashConstructor' f r cid = - let --- this is a bit circuitous, but defining everything in terms of hashComponents --- ensure the hashing is always done in the same way - m = hashComponents (Map.fromList [(Var.named "_" :: Symbol, f r cid)]) - in case toList m of - [(r, _)] -> Reference.DerivedId r - _ -> error "unpossible" - -hashConstructor :: Reference -> ConstructorId -> Reference -hashConstructor = hashConstructor' $ constructor () - -hashRequest :: Reference -> ConstructorId -> Reference -hashRequest = hashConstructor' $ request () - -fromReferent :: Ord v - => a - -> Referent - -> Term2 vt at ap v a -fromReferent a = \case - Referent.Ref r -> ref a r - Referent.Con r i ct -> case ct of - CT.Data -> constructor a r i - CT.Effect -> request a r i instance Var v => Hashable1 (F v a p) where hash1 hashCycle hash e diff --git a/parser-typechecker/src/Unison/Hashing/V1/Type.hs b/parser-typechecker/src/Unison/Hashing/V1/Type.hs index da4b183c73..946ebe5829 100644 --- a/parser-typechecker/src/Unison/Hashing/V1/Type.hs +++ b/parser-typechecker/src/Unison/Hashing/V1/Type.hs @@ -1,34 +1,35 @@ -{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE Rank2Types #-} {-# LANGUAGE ViewPatterns #-} -module Unison.Hashing.V1.Type where +module Unison.Hashing.V1.Type + ( Type, + F (..), + bindExternal, + bindReferences, + toReference, + toReferenceMentions, + hashComponents, + ) +where import Unison.Prelude -import qualified Control.Monad.Writer.Strict as Writer -import Data.Functor.Identity (runIdentity) -import Data.Monoid (Any(..)) -import Data.List.Extra (nubOrd) import qualified Data.Map as Map import qualified Data.Set as Set -import Prelude.Extras (Eq1(..),Show1(..),Ord1(..)) +import Prelude.Extras (Eq1 (..), Ord1 (..), Show1 (..)) import qualified Unison.ABT as ABT -import Unison.Hashable (Hashable1) +import Unison.Hashable (Hashable1) import qualified Unison.Hashable as Hashable -import qualified Unison.Kind as K -import Unison.Hashing.V1.Reference (Reference) +import qualified Unison.Hashing.V1.ABT as ABT +import Unison.Hashing.V1.Reference (Reference) import qualified Unison.Hashing.V1.Reference as Reference import qualified Unison.Hashing.V1.Reference.Util as ReferenceUtil -import Unison.Var (Var) -import qualified Unison.Var as Var -import qualified Unison.Settings as Settings -import qualified Unison.Names.ResolutionResult as Names +import qualified Unison.Kind as K import qualified Unison.Name as Name +import qualified Unison.Names.ResolutionResult as Names import qualified Unison.Util.List as List +import Unison.Var (Var) -- | Base functor for types in the Unison language data F a @@ -42,7 +43,7 @@ data F a | IntroOuter a -- binder like ∀, used to introduce variables that are -- bound by outer type signatures, to support scoped type -- variables - deriving (Foldable,Functor,Generic,Generic1,Eq,Ord,Traversable) + deriving (Foldable,Functor,Eq,Ord,Traversable) instance Eq1 F where (==#) = (==) instance Ord1 F where compare1 = compare @@ -51,9 +52,6 @@ instance Show1 F where showsPrec1 = showsPrec -- | Types are represented as ABTs over the base functor F, with variables in `v` type Type v a = ABT.Term F v a -wrapV :: Ord v => Type v a -> Type (ABT.V v) a -wrapV = ABT.vmap ABT.Bound - freeVars :: Type v a -> Set v freeVars = ABT.freeVars @@ -74,107 +72,15 @@ bindReferences keepFree ns t = let ok (v, a, Nothing) = Left (pure (Names.TypeResolutionFailure v a mempty)) in List.validate ok rs <&> \es -> bindExternal es t -bindNames - :: Var v - => Set v - -> Map Name.Name Reference - -> Type v a - -> Names.ResolutionResult v a (Type v a) -bindNames keepFree ns t = let - fvs = ABT.freeVarOccurrences keepFree t - rs = [(v, a, Map.lookup (Name.fromVar v) ns) | (v, a) <- fvs] - ok (v, _a, Just r) = pure (v, r) - ok (v, a, Nothing) = Left (pure (Names.TypeResolutionFailure v a mempty)) - in List.validate ok rs <&> \es -> bindExternal es t - newtype Monotype v a = Monotype { getPolytype :: Type v a } deriving Eq instance (Show v) => Show (Monotype v a) where show = show . getPolytype --- Smart constructor which checks if a `Type` has no `Forall` quantifiers. -monotype :: ABT.Var v => Type v a -> Maybe (Monotype v a) -monotype t = Monotype <$> ABT.visit isMono t where - isMono (Forall' _) = Just Nothing - isMono _ = Nothing - -arity :: Type v a -> Int -arity (ForallNamed' _ body) = arity body -arity (Arrow' _ o) = 1 + arity o -arity (Ann' a _) = arity a -arity _ = 0 - -- some smart patterns pattern Ref' r <- ABT.Tm' (Ref r) -pattern Arrow' i o <- ABT.Tm' (Arrow i o) -pattern Arrow'' i es o <- Arrow' i (Effect'' es o) -pattern Arrows' spine <- (unArrows -> Just spine) -pattern EffectfulArrows' fst rest <- (unEffectfulArrows -> Just (fst, rest)) -pattern Ann' t k <- ABT.Tm' (Ann t k) -pattern App' f x <- ABT.Tm' (App f x) -pattern Apps' f args <- (unApps -> Just (f, args)) -pattern Pure' t <- (unPure -> Just t) -pattern Effects' es <- ABT.Tm' (Effects es) --- Effect1' must match at least one effect -pattern Effect1' e t <- ABT.Tm' (Effect e t) -pattern Effect' es t <- (unEffects1 -> Just (es, t)) -pattern Effect'' es t <- (unEffect0 -> (es, t)) --- Effect0' may match zero effects -pattern Effect0' es t <- (unEffect0 -> (es, t)) -pattern Forall' subst <- ABT.Tm' (Forall (ABT.Abs' subst)) -pattern IntroOuter' subst <- ABT.Tm' (IntroOuter (ABT.Abs' subst)) -pattern IntroOuterNamed' v body <- ABT.Tm' (IntroOuter (ABT.out -> ABT.Abs v body)) pattern ForallsNamed' vs body <- (unForalls -> Just (vs, body)) pattern ForallNamed' v body <- ABT.Tm' (Forall (ABT.out -> ABT.Abs v body)) -pattern Var' v <- ABT.Var' v -pattern Cycle' xs t <- ABT.Cycle' xs t -pattern Abs' subst <- ABT.Abs' subst - -unPure :: Ord v => Type v a -> Maybe (Type v a) -unPure (Effect'' [] t) = Just t -unPure (Effect'' _ _) = Nothing -unPure t = Just t - -unArrows :: Type v a -> Maybe [Type v a] -unArrows t = - case go t of [_] -> Nothing; l -> Just l - where go (Arrow' i o) = i : go o - go o = [o] - -unEffectfulArrows - :: Type v a -> Maybe (Type v a, [(Maybe [Type v a], Type v a)]) -unEffectfulArrows t = case t of - Arrow' i o -> Just (i, go o) - _ -> Nothing - where - go (Effect1' (Effects' es) (Arrow' i o)) = - (Just $ es >>= flattenEffects, i) : go o - go (Effect1' (Effects' es) t) = [(Just $ es >>= flattenEffects, t)] - go (Arrow' i o) = (Nothing, i) : go o - go t = [(Nothing, t)] - -unApps :: Type v a -> Maybe (Type v a, [Type v a]) -unApps t = case go t [] of - [] -> Nothing - [ _ ] -> Nothing - f : args -> Just (f, args) - where - go (App' i o) acc = go i (o : acc) - go fn args = fn : args - -unIntroOuters :: Type v a -> Maybe ([v], Type v a) -unIntroOuters t = go t [] - where go (IntroOuterNamed' v body) vs = go body (v:vs) - go _body [] = Nothing - go body vs = Just (reverse vs, body) - --- Most code doesn't care about `introOuter` binders and is fine dealing with the --- these outer variable references as free variables. This function strips out --- one or more `introOuter` binders, so `outer a b . (a, b)` becomes `(a, b)`. -stripIntroOuters :: Type v a -> Type v a -stripIntroOuters t = case unIntroOuters t of - Just (_, t) -> t - Nothing -> t unForalls :: Type v a -> Maybe ([v], Type v a) unForalls t = go t [] @@ -182,262 +88,16 @@ unForalls t = go t [] go _body [] = Nothing go body vs = Just(reverse vs, body) -unEffect0 :: Ord v => Type v a -> ([Type v a], Type v a) -unEffect0 (Effect1' e a) = (flattenEffects e, a) -unEffect0 t = ([], t) - -unEffects1 :: Ord v => Type v a -> Maybe ([Type v a], Type v a) -unEffects1 (Effect1' (Effects' es) a) = Just (es, a) -unEffects1 _ = Nothing - --- | True if the given type is a function, possibly quantified -isArrow :: ABT.Var v => Type v a -> Bool -isArrow (ForallNamed' _ t) = isArrow t -isArrow (Arrow' _ _) = True -isArrow _ = False - -- some smart constructors - ref :: Ord v => a -> Reference -> Type v a ref a = ABT.tm' a . Ref refId :: Ord v => a -> Reference.Id -> Type v a refId a = ref a . Reference.DerivedId -termLink :: Ord v => a -> Type v a -termLink a = ABT.tm' a . Ref $ termLinkRef - -typeLink :: Ord v => a -> Type v a -typeLink a = ABT.tm' a . Ref $ typeLinkRef - -derivedBase32Hex :: Ord v => Reference -> a -> Type v a -derivedBase32Hex r a = ref a r - -intRef, natRef, floatRef, booleanRef, textRef, charRef, listRef, bytesRef, effectRef, termLinkRef, typeLinkRef :: Reference -intRef = Reference.Builtin "Int" -natRef = Reference.Builtin "Nat" -floatRef = Reference.Builtin "Float" -booleanRef = Reference.Builtin "Boolean" -textRef = Reference.Builtin "Text" -charRef = Reference.Builtin "Char" -listRef = Reference.Builtin "Sequence" -bytesRef = Reference.Builtin "Bytes" -effectRef = Reference.Builtin "Effect" -termLinkRef = Reference.Builtin "Link.Term" -typeLinkRef = Reference.Builtin "Link.Type" - -builtinIORef, fileHandleRef, filePathRef, threadIdRef, socketRef :: Reference -builtinIORef = Reference.Builtin "IO" -fileHandleRef = Reference.Builtin "Handle" -filePathRef = Reference.Builtin "FilePath" -threadIdRef = Reference.Builtin "ThreadId" -socketRef = Reference.Builtin "Socket" - -mvarRef, tvarRef :: Reference -mvarRef = Reference.Builtin "MVar" -tvarRef = Reference.Builtin "TVar" - -tlsRef :: Reference -tlsRef = Reference.Builtin "Tls" - -stmRef :: Reference -stmRef = Reference.Builtin "STM" - -tlsClientConfigRef :: Reference -tlsClientConfigRef = Reference.Builtin "Tls.ClientConfig" - -tlsServerConfigRef :: Reference -tlsServerConfigRef = Reference.Builtin "Tls.ServerConfig" - -tlsSignedCertRef :: Reference -tlsSignedCertRef = Reference.Builtin "Tls.SignedCert" - -tlsPrivateKeyRef :: Reference -tlsPrivateKeyRef = Reference.Builtin "Tls.PrivateKey" - -tlsCipherRef :: Reference -tlsCipherRef = Reference.Builtin "Tls.Cipher" - -tlsVersionRef :: Reference -tlsVersionRef = Reference.Builtin "Tls.Version" - -hashAlgorithmRef :: Reference -hashAlgorithmRef = Reference.Builtin "crypto.HashAlgorithm" - -codeRef, valueRef :: Reference -codeRef = Reference.Builtin "Code" -valueRef = Reference.Builtin "Value" - -anyRef :: Reference -anyRef = Reference.Builtin "Any" - -any :: Ord v => a -> Type v a -any a = ref a anyRef - -builtin :: Ord v => a -> Text -> Type v a -builtin a = ref a . Reference.Builtin - -int :: Ord v => a -> Type v a -int a = ref a intRef - -nat :: Ord v => a -> Type v a -nat a = ref a natRef - -float :: Ord v => a -> Type v a -float a = ref a floatRef - -boolean :: Ord v => a -> Type v a -boolean a = ref a booleanRef - -text :: Ord v => a -> Type v a -text a = ref a textRef - -char :: Ord v => a -> Type v a -char a = ref a charRef - -fileHandle :: Ord v => a -> Type v a -fileHandle a = ref a fileHandleRef - -threadId :: Ord v => a -> Type v a -threadId a = ref a threadIdRef - -builtinIO :: Ord v => a -> Type v a -builtinIO a = ref a builtinIORef - -socket :: Ord v => a -> Type v a -socket a = ref a socketRef - -list :: Ord v => a -> Type v a -list a = ref a listRef - -bytes :: Ord v => a -> Type v a -bytes a = ref a bytesRef - -effectType :: Ord v => a -> Type v a -effectType a = ref a $ effectRef - -code, value :: Ord v => a -> Type v a -code a = ref a codeRef -value a = ref a valueRef - -app :: Ord v => a -> Type v a -> Type v a -> Type v a -app a f arg = ABT.tm' a (App f arg) - --- `f x y z` means `((f x) y) z` and the annotation paired with `y` is the one --- meant for `app (f x) y` -apps :: Ord v => Type v a -> [(a, Type v a)] -> Type v a -apps = foldl' go where go f (a, t) = app a f t - -app' :: (Ord v, Semigroup a) => Type v a -> Type v a -> Type v a -app' f arg = app (ABT.annotation f <> ABT.annotation arg) f arg - -apps' :: (Semigroup a, Ord v) => Type v a -> [Type v a] -> Type v a -apps' = foldl app' - -arrow :: Ord v => a -> Type v a -> Type v a -> Type v a -arrow a i o = ABT.tm' a (Arrow i o) - -arrow' :: (Semigroup a, Ord v) => Type v a -> Type v a -> Type v a -arrow' i o = arrow (ABT.annotation i <> ABT.annotation o) i o - -ann :: Ord v => a -> Type v a -> K.Kind -> Type v a -ann a e t = ABT.tm' a (Ann e t) - forall :: Ord v => a -> v -> Type v a -> Type v a forall a v body = ABT.tm' a (Forall (ABT.abs' a v body)) -introOuter :: Ord v => a -> v -> Type v a -> Type v a -introOuter a v body = ABT.tm' a (IntroOuter (ABT.abs' a v body)) - -iff :: Var v => Type v () -iff = forall () aa $ arrows (f <$> [boolean(), a, a]) a - where aa = Var.named "a" - a = var () aa - f x = ((), x) - -iff' :: Var v => a -> Type v a -iff' loc = forall loc aa $ arrows (f <$> [boolean loc, a, a]) a - where aa = Var.named "a" - a = var loc aa - f x = (loc, x) - -iff2 :: Var v => a -> Type v a -iff2 loc = forall loc aa $ arrows (f <$> [a, a]) a - where aa = Var.named "a" - a = var loc aa - f x = (loc, x) - -andor :: Ord v => Type v () -andor = arrows (f <$> [boolean(), boolean()]) $ boolean() - where f x = ((), x) - -andor' :: Ord v => a -> Type v a -andor' a = arrows (f <$> [boolean a, boolean a]) $ boolean a - where f x = (a, x) - -var :: Ord v => a -> v -> Type v a -var = ABT.annotatedVar - -v' :: Var v => Text -> Type v () -v' s = ABT.var (Var.named s) - --- Like `v'`, but creates an annotated variable given an annotation -av' :: Var v => a -> Text -> Type v a -av' a s = ABT.annotatedVar a (Var.named s) - -forall' :: Var v => a -> [Text] -> Type v a -> Type v a -forall' a vs body = foldr (forall a) body (Var.named <$> vs) - -foralls :: Ord v => a -> [v] -> Type v a -> Type v a -foralls a vs body = foldr (forall a) body vs - --- Note: `a -> b -> c` parses as `a -> (b -> c)` --- the annotation associated with `b` will be the annotation for the `b -> c` --- node -arrows :: Ord v => [(a, Type v a)] -> Type v a -> Type v a -arrows ts result = foldr go result ts where - go = uncurry arrow - --- The types of effectful computations -effect :: Ord v => a -> [Type v a] -> Type v a -> Type v a -effect a es (Effect1' fs t) = - let es' = (es >>= flattenEffects) ++ flattenEffects fs - in ABT.tm' a (Effect (ABT.tm' a (Effects es')) t) -effect a es t = ABT.tm' a (Effect (ABT.tm' a (Effects es)) t) - -effects :: Ord v => a -> [Type v a] -> Type v a -effects a es = ABT.tm' a (Effects $ es >>= flattenEffects) - -effect1 :: Ord v => a -> Type v a -> Type v a -> Type v a -effect1 a es (Effect1' fs t) = - let es' = flattenEffects es ++ flattenEffects fs - in ABT.tm' a (Effect (ABT.tm' a (Effects es')) t) -effect1 a es t = ABT.tm' a (Effect es t) - -flattenEffects :: Type v a -> [Type v a] -flattenEffects (Effects' es) = es >>= flattenEffects -flattenEffects es = [es] - --- The types of first-class effect values --- which get deconstructed in effect handlers. -effectV :: Ord v => a -> (a, Type v a) -> (a, Type v a) -> Type v a -effectV builtinA e t = apps (builtin builtinA "Effect") [e, t] - --- Strips effects from a type. E.g. `{e} a` becomes `a`. -stripEffect :: Ord v => Type v a -> ([Type v a], Type v a) -stripEffect (Effect' e t) = case stripEffect t of (ei, t) -> (e ++ ei, t) -stripEffect t = ([], t) - --- The type of the flipped function application operator: --- `(a -> (a -> b) -> b)` -flipApply :: Var v => Type v () -> Type v () -flipApply t = forall() b $ arrow() (arrow() t (var() b)) (var() b) - where b = ABT.fresh t (Var.named "b") - -generalize' :: Var v => Var.Type -> Type v a -> Type v a -generalize' k t = generalize vsk t where - vsk = [ v | v <- Set.toList (freeVars t), Var.typeOf v == k ] - -- | Bind the given variables with an outer `forall`, if they are used in `t`. generalize :: Ord v => [v] -> Type v a -> Type v a generalize vs t = foldr f t vs @@ -445,221 +105,10 @@ generalize vs t = foldr f t vs f v t = if Set.member v (ABT.freeVars t) then forall (ABT.annotation t) v t else t -unforall :: Type v a -> Type v a -unforall (ForallsNamed' _ t) = t -unforall t = t - unforall' :: Type v a -> ([v], Type v a) unforall' (ForallsNamed' vs t) = (vs, t) unforall' t = ([], t) -dependencies :: Ord v => Type v a -> Set Reference -dependencies t = Set.fromList . Writer.execWriter $ ABT.visit' f t - where f t@(Ref r) = Writer.tell [r] $> t - f t = pure t - -updateDependencies :: Ord v => Map Reference Reference -> Type v a -> Type v a -updateDependencies typeUpdates = ABT.rebuildUp go - where - go (Ref r) = Ref (Map.findWithDefault r r typeUpdates) - go f = f - -usesEffects :: Ord v => Type v a -> Bool -usesEffects t = getAny . getConst $ ABT.visit go t where - go (Effect1' _ _) = Just (Const (Any True)) - go _ = Nothing - --- Returns free effect variables in the given type, for instance, in: --- --- ∀ e3 . a ->{e,e2} b ->{e3} c --- --- This function would return the set {e, e2}, but not `e3` since `e3` --- is bound by the enclosing forall. -freeEffectVars :: Ord v => Type v a -> Set v -freeEffectVars t = - Set.fromList . join . runIdentity $ - ABT.foreachSubterm go (snd <$> ABT.annotateBound t) - where - go t@(Effects' es) = - let frees = Set.fromList [ v | Var' v <- es >>= flattenEffects ] - in pure . Set.toList $ frees `Set.difference` ABT.annotation t - go t@(Effect1' e _) = - let frees = Set.fromList [ v | Var' v <- flattenEffects e ] - in pure . Set.toList $ frees `Set.difference` ABT.annotation t - go _ = pure [] - --- Converts all unadorned arrows in a type to have fresh --- existential ability requirements. For example: --- --- (a -> b) -> [a] -> [b] --- --- Becomes --- --- (a ->{e1} b) ->{e2} [a] ->{e3} [b] -existentializeArrows :: (Ord v, Monad m) => m v -> Type v a -> m (Type v a) -existentializeArrows newVar t = ABT.visit go t - where - go t@(Arrow' a b) = case b of - -- If an arrow already has attached abilities, - -- leave it alone. Ex: `a ->{e} b` is kept as is. - Effect1' _ _ -> Just $ do - a <- existentializeArrows newVar a - b <- existentializeArrows newVar b - pure $ arrow (ABT.annotation t) a b - -- For unadorned arrows, make up a fresh variable. - -- So `a -> b` becomes `a ->{e} b`, using the - -- `newVar` variable generator. - _ -> Just $ do - e <- newVar - a <- existentializeArrows newVar a - b <- existentializeArrows newVar b - let ann = ABT.annotation t - pure $ arrow ann a (effect ann [var ann e] b) - go _ = Nothing - -purifyArrows :: (Ord v) => Type v a -> Type v a -purifyArrows = ABT.visitPure go - where - go t@(Arrow' a b) = case b of - Effect1' _ _ -> Nothing - _ -> Just $ arrow ann a (effect ann [] b) - where ann = ABT.annotation t - go _ = Nothing - --- Remove free effect variables from the type that are in the set -removeEffectVars :: ABT.Var v => Set v -> Type v a -> Type v a -removeEffectVars removals t = - let z = effects () [] - t' = ABT.substsInheritAnnotation ((,z) <$> Set.toList removals) t - -- leave explicitly empty `{}` alone - removeEmpty (Effect1' (Effects' []) v) = Just (ABT.visitPure removeEmpty v) - removeEmpty t@(Effect1' e v) = - case flattenEffects e of - [] -> Just (ABT.visitPure removeEmpty v) - es -> Just (effect (ABT.annotation t) es $ ABT.visitPure removeEmpty v) - removeEmpty t@(Effects' es) = - Just $ effects (ABT.annotation t) (es >>= flattenEffects) - removeEmpty _ = Nothing - in ABT.visitPure removeEmpty t' - --- Remove all effect variables from the type. --- Used for type-based search, we apply this transformation to both the --- indexed type and the query type, so the user can supply `a -> b` that will --- match `a ->{e} b` (but not `a ->{IO} b`). -removeAllEffectVars :: ABT.Var v => Type v a -> Type v a -removeAllEffectVars t = let - allEffectVars = foldMap go (ABT.subterms t) - go (Effects' vs) = Set.fromList [ v | Var' v <- vs] - go (Effect1' (Var' v) _) = Set.singleton v - go _ = mempty - (vs, tu) = unforall' t - in generalize vs (removeEffectVars allEffectVars tu) - -removePureEffects :: ABT.Var v => Type v a -> Type v a -removePureEffects t | not Settings.removePureEffects = t - | otherwise = - generalize vs $ removeEffectVars (Set.filter isPure fvs) tu - where - (vs, tu) = unforall' t - fvs = freeEffectVars tu `Set.difference` ABT.freeVars t - -- If an effect variable is mentioned only once, it is on - -- an arrow `a ->{e} b`. Generalizing this to - -- `∀ e . a ->{e} b` gives us the pure arrow `a -> b`. - isPure v = ABT.occurrences v tu <= 1 - -editFunctionResult - :: forall v a - . Ord v - => (Type v a -> Type v a) - -> Type v a - -> Type v a -editFunctionResult f = go - where - go :: Type v a -> Type v a - go (ABT.Term s a t) = case t of - ABT.Tm (Forall t) -> - (\x -> ABT.Term (s <> freeVars x) a . ABT.Tm $ Forall x) $ go t - ABT.Tm (Arrow i o) -> - (\x -> ABT.Term (s <> freeVars x) a . ABT.Tm $ Arrow i x) $ go o - ABT.Abs v r -> - (\x -> ABT.Term (s <> freeVars x) a $ ABT.Abs v x) $ go r - _ -> f (ABT.Term s a t) - -functionResult :: Type v a -> Maybe (Type v a) -functionResult = go False - where - go inArr (ForallNamed' _ body) = go inArr body - go _inArr (Arrow' _i o ) = go True o - go inArr t = if inArr then Just t else Nothing - - --- | Bind all free variables (not in `except`) that start with a lowercase --- letter and are unqualified with an outer `forall`. --- `a -> a` becomes `∀ a . a -> a` --- `B -> B` becomes `B -> B` (not changed) --- `.foo -> .foo` becomes `.foo -> .foo` (not changed) --- `.foo.bar -> blarrg.woot` becomes `.foo.bar -> blarrg.woot` (unchanged) -generalizeLowercase :: Var v => Set v -> Type v a -> Type v a -generalizeLowercase except t = foldr (forall (ABT.annotation t)) t vars - where - vars = - [ v | v <- Set.toList (ABT.freeVars t `Set.difference` except), Var.universallyQuantifyIfFree v ] - --- Convert all free variables in `allowed` to variables bound by an `introOuter`. -freeVarsToOuters :: Ord v => Set v -> Type v a -> Type v a -freeVarsToOuters allowed t = foldr (introOuter (ABT.annotation t)) t vars - where vars = Set.toList $ ABT.freeVars t `Set.intersection` allowed - --- | This function removes all variable shadowing from the types and reduces --- fresh ids to the minimum possible to avoid ambiguity. Useful when showing --- two different types. -cleanupVars :: Var v => [Type v a] -> [Type v a] -cleanupVars ts | not Settings.cleanupTypes = ts -cleanupVars ts = let - changedVars = cleanupVarsMap ts - in cleanupVars1' changedVars <$> ts - --- Compute a variable replacement map from a collection of types, which --- can be passed to `cleanupVars1'`. This is used to cleanup variable ids --- for multiple related types, like when reporting a type error. -cleanupVarsMap :: Var v => [Type v a] -> Map.Map v v -cleanupVarsMap ts = let - varsByName = foldl' step Map.empty (ts >>= ABT.allVars) - step m v = Map.insertWith (++) (Var.name $ Var.reset v) [v] m - changedVars = Map.fromList [ (v, Var.freshenId i v) - | (_, vs) <- Map.toList varsByName - , (v,i) <- nubOrd vs `zip` [0..]] - in changedVars - -cleanupVars1' :: Var v => Map.Map v v -> Type v a -> Type v a -cleanupVars1' = ABT.changeVars - --- | This function removes all variable shadowing from the type and reduces --- fresh ids to the minimum possible to avoid ambiguity. -cleanupVars1 :: Var v => Type v a -> Type v a -cleanupVars1 t | not Settings.cleanupTypes = t -cleanupVars1 t = let [t'] = cleanupVars [t] in t' - --- This removes duplicates and normalizes the order of ability lists -cleanupAbilityLists :: Var v => Type v a -> Type v a -cleanupAbilityLists = ABT.visitPure go - where - -- leave explicitly empty `{}` alone - go (Effect1' (Effects' []) _v) = Nothing - go t@(Effect1' e v) = - let es = Set.toList . Set.fromList $ flattenEffects e - in case es of - [] -> Just (ABT.visitPure go v) - _ -> Just (effect (ABT.annotation t) es $ ABT.visitPure go v) - go _ = Nothing - -cleanups :: Var v => [Type v a] -> [Type v a] -cleanups ts = cleanupVars $ map cleanupAbilityLists ts - -cleanup :: Var v => Type v a -> Type v a -cleanup t | not Settings.cleanupTypes = t -cleanup t = cleanupVars1 . cleanupAbilityLists $ t - toReference :: (ABT.Var v, Show v) => Type v a -> Reference toReference (Ref' r) = r -- a bit of normalization - any unused type parameters aren't part of the hash @@ -673,7 +122,7 @@ toReferenceMentions ty = in Set.fromList $ toReference . gen <$> ABT.subterms ty hashComponents - :: Var v => Map v (Type v a) -> Map v (Reference.Id, Type v a) + :: (ABT.Var v, Show v) => Map v (Type v a) -> Map v (Reference.Id, Type v a) hashComponents = ReferenceUtil.hashComponents $ refId () instance Hashable1 F where diff --git a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs new file mode 100644 index 0000000000..54bba0b4f0 --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs @@ -0,0 +1,104 @@ +-- Based on: http://semantic-domain.blogspot.com/2015/03/abstract-binding-trees.html +{-# LANGUAGE DeriveFoldable #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE Rank2Types #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE UndecidableInstances #-} + +module Unison.Hashing.V2.ABT (Unison.ABT.Term, hash, hashComponents) where +import Unison.Prelude +import Unison.ABT + +import Data.List hiding (cycle, find) +import Data.Vector ((!)) +import Prelude hiding (abs,cycle) +import Unison.Hashable (Accumulate,Hashable1,hash1) +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Data.Vector as Vector +import qualified Unison.Hashable as Hashable + +-- Hash a strongly connected component and sort its definitions into a canonical order. +hashComponent :: + (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Ord v, Ord h, Accumulate h) + => Map.Map v (Term f v a) -> (h, [(v, Term f v a)]) +hashComponent byName = let + ts = Map.toList byName + embeds = [ (v, void (transform Embed t)) | (v,t) <- ts ] + vs = fst <$> ts + tms = [ (v, absCycle vs (tm $ Component (snd <$> embeds) (var v))) | v <- vs ] + hashed = [ ((v,t), hash t) | (v,t) <- tms ] + sortedHashed = sortOn snd hashed + overallHash = Hashable.accumulate (Hashable.Hashed . snd <$> sortedHashed) + in (overallHash, [ (v, t) | ((v, _),_) <- sortedHashed, Just t <- [Map.lookup v byName] ]) + +-- Group the definitions into strongly connected components and hash +-- each component. Substitute the hash of each component into subsequent +-- components (using the `termFromHash` function). Requires that the +-- overall component has no free variables. +hashComponents + :: (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v, Ord h, Accumulate h) + => (h -> Word64 -> Term f v ()) + -> Map.Map v (Term f v a) + -> [(h, [(v, Term f v a)])] +hashComponents termFromHash termsByName = let + bound = Set.fromList (Map.keys termsByName) + escapedVars = Set.unions (freeVars <$> Map.elems termsByName) `Set.difference` bound + sccs = components (Map.toList termsByName) + go _ [] = [] + go prevHashes (component : rest) = let + sub = substsInheritAnnotation (Map.toList prevHashes) + (h, sortedComponent) = hashComponent $ Map.fromList [ (v, sub t) | (v, t) <- component ] + curHashes = Map.fromList [ (v, termFromHash h i) | ((v, _),i) <- sortedComponent `zip` [0..]] + newHashes = prevHashes `Map.union` curHashes + newHashesL = Map.toList newHashes + sortedComponent' = [ (v, substsInheritAnnotation newHashesL t) | (v, t) <- sortedComponent ] + in (h, sortedComponent') : go newHashes rest + in if Set.null escapedVars then go Map.empty sccs + else error $ "can't hashComponents if bindings have free variables:\n " + ++ show (map show (Set.toList escapedVars)) + ++ "\n " ++ show (map show (Map.keys termsByName)) + +-- Implementation detail of hashComponent +data Component f a = Component [a] a | Embed (f a) deriving (Functor, Traversable, Foldable) + +instance (Hashable1 f, Functor f) => Hashable1 (Component f) where + hash1 hashCycle hash c = case c of + Component as a -> let + (hs, hash) = hashCycle as + toks = Hashable.Hashed <$> hs + in Hashable.accumulate $ (Hashable.Tag 1 : toks) ++ [Hashable.Hashed (hash a)] + Embed fa -> Hashable.hash1 hashCycle hash fa + +-- | We ignore annotations in the `Term`, as these should never affect the +-- meaning of the term. +hash :: forall f v a h . (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) + => Term f v a -> h +hash = hash' [] where + hash' :: [Either [v] v] -> Term f v a -> h + hash' env (Term _ _ t) = case t of + Var v -> maybe die hashInt ind + where lookup (Left cycle) = v `elem` cycle + lookup (Right v') = v == v' + ind = findIndex lookup env + hashInt :: Int -> h + hashInt i = Hashable.accumulate [Hashable.Nat $ fromIntegral i] + die = error $ "unknown var in environment: " ++ show v + ++ " environment = " ++ show env + Cycle (AbsN' vs t) -> hash' (Left vs : env) t + -- Cycle t -> hash' env t + Abs v t -> hash' (Right v : env) t + Tm t -> Hashable.hash1 (hashCycle env) (hash' env) t + + hashCycle :: [Either [v] v] -> [Term f v a] -> ([h], Term f v a -> h) + hashCycle env@(Left cycle : envTl) ts | length cycle == length ts = + let + permute p xs = case Vector.fromList xs of xs -> map (xs !) p + hashed = map (\(i,t) -> ((i,t), hash' env t)) (zip [0..] ts) + pt = fst <$> sortOn snd hashed + (p,ts') = unzip pt + in case map Right (permute p cycle) ++ envTl of + env -> (map (hash' env) ts', hash' env) + hashCycle env ts = (map (hash' env) ts, hash' env) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 8f642ab84f..c2b781edb5 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -200,7 +200,7 @@ m2hReference = \case Memory.Reference.DerivedId d -> Hashing.Reference.DerivedId (m2hReferenceId d) m2hReferenceId :: Memory.Reference.Id -> Hashing.Reference.Id -m2hReferenceId (Memory.Reference.Id h i _n) = Hashing.Reference.Id h i _n +m2hReferenceId (Memory.Reference.Id h i) = Hashing.Reference.Id h i h2mModifier :: Hashing.DD.Modifier -> Memory.DD.Modifier h2mModifier = \case @@ -233,4 +233,4 @@ h2mReference = \case Hashing.Reference.DerivedId d -> Memory.Reference.DerivedId (h2mReferenceId d) h2mReferenceId :: Hashing.Reference.Id -> Memory.Reference.Id -h2mReferenceId (Hashing.Reference.Id h i n) = Memory.Reference.Id h i n +h2mReferenceId (Hashing.Reference.Id h i) = Memory.Reference.Id h i diff --git a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs index eab303bcb6..0077ed9ca0 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs @@ -11,23 +11,20 @@ module Unison.Hashing.V2.DataDeclaration EffectDeclaration (..), Decl, Modifier (..), - asDataDecl, - constructorType, - constructorTypes, - declDependencies, - dependencies, - bindReferences, hashDecls, ) where +import Unison.Prelude +import Prelude hiding (cycle) + import Control.Lens (over, _3) import Data.Bifunctor (first, second) import qualified Data.Map as Map -import qualified Data.Set as Set import Prelude.Extras (Show1) import Unison.Var (Var) import qualified Unison.ABT as ABT +import qualified Unison.Hashing.V2.ABT as ABT import qualified Unison.ConstructorType as CT import Unison.Hash (Hash) import Unison.Hashable (Hashable1) @@ -39,10 +36,6 @@ import Unison.Hashing.V2.Type (Type) import qualified Unison.Hashing.V2.Type as Type import qualified Unison.Name as Name import qualified Unison.Names.ResolutionResult as Names -import Unison.Prelude --- import qualified Unison.Referent as Referent --- import qualified Unison.Referent' as Referent' -import Prelude hiding (cycle) type Decl v a = Either (EffectDeclaration v a) (DataDeclaration v a) @@ -51,17 +44,6 @@ data DeclOrBuiltin v a | Decl (Decl v a) deriving (Eq, Show) -asDataDecl :: Decl v a -> DataDeclaration v a -asDataDecl = either toDataDecl id - -declDependencies :: Ord v => Decl v a -> Set Reference -declDependencies = either (dependencies . toDataDecl) dependencies - -constructorType :: Decl v a -> CT.ConstructorType -constructorType = \case - Left {} -> CT.Effect - Right {} -> CT.Data - data Modifier = Structural | Unique Text -- | Opaque (Set Reference) deriving (Eq, Ord, Show) @@ -84,11 +66,7 @@ constructorTypes = (snd <$>) . constructors constructors :: DataDeclaration v a -> [(v, Type v a)] constructors (DataDeclaration _ _ _ ctors) = [(v, t) | (_, v, t) <- ctors] -dependencies :: Ord v => DataDeclaration v a -> Set Reference -dependencies dd = - Set.unions (Type.dependencies <$> constructorTypes dd) - -toABT :: Var v => DataDeclaration v () -> ABT.Term F v () +toABT :: ABT.Var v => DataDeclaration v () -> ABT.Term F v () toABT dd = ABT.tm $ Modified (modifier dd) dd' where dd' = ABT.absChain (bound dd) $ ABT.cycle @@ -97,7 +75,7 @@ toABT dd = ABT.tm $ Modified (modifier dd) dd' (ABT.tm . Constructors $ ABT.transform Type <$> constructorTypes dd)) -- Implementation detail of `hashDecls`, works with unannotated data decls -hashDecls0 :: (Eq v, Var v, Show v) => Map v (DataDeclaration v ()) -> [(v, Reference.Id)] +hashDecls0 :: (Eq v, ABT.Var v, Show v) => Map v (DataDeclaration v ()) -> [(v, Reference.Id)] hashDecls0 decls = let abts = toABT <$> decls ref r = ABT.tm (Type (Type.Ref (Reference.DerivedId r))) diff --git a/parser-typechecker/src/Unison/Hashing/V2/LabeledDependency.hs b/parser-typechecker/src/Unison/Hashing/V2/LabeledDependency.hs deleted file mode 100644 index 8a00577122..0000000000 --- a/parser-typechecker/src/Unison/Hashing/V2/LabeledDependency.hs +++ /dev/null @@ -1,56 +0,0 @@ -{-# LANGUAGE PatternSynonyms #-} - -module Unison.Hashing.V2.LabeledDependency - ( derivedTerm - , derivedType - , termRef - , typeRef - , referent - , dataConstructor - , effectConstructor - , fold - , referents - , toReference - , LabeledDependency - , partition - ) where - -import Unison.Prelude hiding (fold) - -import qualified Data.Set as Set -import Unison.Hashing.V2.Reference (Id, Reference (DerivedId)) -import Unison.Hashing.V2.Referent (ConstructorId, Referent, pattern Con, pattern Ref) -import Unison.ConstructorType (ConstructorType (Data, Effect)) - --- dumb constructor name is private -newtype LabeledDependency = X (Either Reference Referent) deriving (Eq, Ord, Show) - -derivedType, derivedTerm :: Id -> LabeledDependency -typeRef, termRef :: Reference -> LabeledDependency -referent :: Referent -> LabeledDependency -dataConstructor :: Reference -> ConstructorId -> LabeledDependency -effectConstructor :: Reference -> ConstructorId -> LabeledDependency - -derivedType = X . Left . DerivedId -derivedTerm = X . Right . Ref . DerivedId -typeRef = X . Left -termRef = X . Right . Ref -referent = X . Right -dataConstructor r cid = X . Right $ Con r cid Data -effectConstructor r cid = X . Right $ Con r cid Effect - -referents :: Foldable f => f Referent -> Set LabeledDependency -referents rs = Set.fromList (map referent $ toList rs) - -fold :: (Reference -> a) -> (Referent -> a) -> LabeledDependency -> a -fold f g (X e) = either f g e - -partition :: Foldable t => t LabeledDependency -> ([Reference], [Referent]) -partition = partitionEithers . map (\(X e) -> e) . toList - --- | Left TypeRef | Right TermRef -toReference :: LabeledDependency -> Either Reference Reference -toReference = \case - X (Left r) -> Left r - X (Right (Ref r)) -> Right r - X (Right (Con r _ _)) -> Left r diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs index 75e9641bea..6ccfdd4a01 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs @@ -4,42 +4,24 @@ {-# LANGUAGE ViewPatterns #-} module Unison.Hashing.V2.Reference - (Reference, - pattern Builtin, - pattern Derived, - pattern DerivedId, - Id(..), - Pos, - Size, - derivedBase32Hex, - Component, members, - components, - groupByComponent, - componentFor, - unsafeFromText, - idFromText, - isPrefixOf, - fromShortHash, - fromText, - readSuffix, - showShort, - showSuffix, - toId, - toText, - unsafeId, - toShortHash, - idToShortHash) where + ( Reference, + pattern Builtin, + pattern Derived, + pattern DerivedId, + Id (..), + components, + toText, + ) +where import Unison.Prelude -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.Text as Text -import qualified Unison.Hash as H -import Unison.Hashable as Hashable +import qualified Data.Text as Text +import qualified Unison.Hash as H +import Unison.Hashable (Hashable) +import qualified Unison.Hashable as Hashable import Unison.ShortHash (ShortHash) import qualified Unison.ShortHash as SH -import Data.Char (isDigit) -- | Either a builtin or a user defined (hashed) top-level declaration. -- @@ -52,141 +34,41 @@ data Reference -- The `Pos` refers to a particular element of the component -- and the `Size` is the number of elements in the component. -- Using an ugly name so no one tempted to use this - | DerivedId Id deriving (Eq,Ord,Generic) + | DerivedId Id deriving (Eq, Ord) -pattern Derived :: H.Hash -> Pos -> Size -> Reference -pattern Derived h i n = DerivedId (Id h i n) +type Pos = Word64 + +pattern Derived :: H.Hash -> Pos -> Reference +pattern Derived h i = DerivedId (Id h i) {-# COMPLETE Builtin, Derived #-} -- | @Pos@ is a position into a cycle of size @Size@, as cycles are hashed together. -data Id = Id H.Hash Pos Size deriving (Generic) - -unsafeId :: Reference -> Id -unsafeId (Builtin b) = - error $ "Tried to get the hash of builtin " <> Text.unpack b <> "." -unsafeId (DerivedId x) = x - -idToShortHash :: Id -> ShortHash -idToShortHash = toShortHash . DerivedId +data Id = Id H.Hash Pos deriving (Eq, Ord) -- todo: move these to ShortHash module? -- but Show Reference currently depends on SH toShortHash :: Reference -> ShortHash toShortHash (Builtin b) = SH.Builtin b -toShortHash (Derived h _ 1) = SH.ShortHash (H.base32Hex h) Nothing Nothing -toShortHash (Derived h i n) = SH.ShortHash (H.base32Hex h) index Nothing - where - -- todo: remove `n` parameter; must also update readSuffix - index = Just $ showSuffix i n - --- toShortHash . fromJust . fromShortHash == id and --- fromJust . fromShortHash . toShortHash == id --- but for arbitrary ShortHashes which may be broken at the wrong boundary, it --- may not be possible to base32Hex decode them. These will return Nothing. --- Also, ShortHashes that include constructor ids will return Nothing; --- try Referent.fromShortHash -fromShortHash :: ShortHash -> Maybe Reference -fromShortHash (SH.Builtin b) = Just (Builtin b) -fromShortHash (SH.ShortHash prefix cycle Nothing) = do - h <- H.fromBase32Hex prefix - case cycle of - Nothing -> Just (Derived h 0 1) - Just t -> case Text.splitOn "c" t of - [i,n] -> Derived h <$> readMay (Text.unpack i) <*> readMay (Text.unpack n) - _ -> Nothing -fromShortHash (SH.ShortHash _prefix _cycle (Just _cid)) = Nothing +toShortHash (Derived h 0) = SH.ShortHash (H.base32Hex h) Nothing Nothing +toShortHash (Derived h i) = SH.ShortHash (H.base32Hex h) (Just $ showSuffix i) Nothing --- (3,10) encoded as "3c10" --- (0,93) encoded as "0c93" -showSuffix :: Pos -> Size -> Text -showSuffix i n = Text.pack $ show i <> "c" <> show n - --- todo: don't read or return size; must also update showSuffix and fromText -readSuffix :: Text -> Either String (Pos, Size) -readSuffix t = case Text.breakOn "c" t of - (pos, Text.drop 1 -> size) | Text.all isDigit pos && Text.all isDigit size -> - Right (read (Text.unpack pos), read (Text.unpack size)) - _ -> Left "suffix decoding error" - -isPrefixOf :: ShortHash -> Reference -> Bool -isPrefixOf sh r = SH.isPrefixOf sh (toShortHash r) +showSuffix :: Pos -> Text +showSuffix = Text.pack . show toText :: Reference -> Text toText = SH.toText . toShortHash -showShort :: Int -> Reference -> Text -showShort numHashChars = SH.toText . SH.take numHashChars . toShortHash - -type Pos = Word64 -type Size = Word64 - -newtype Component = Component { members :: Set Reference } - --- Gives the component (dependency cycle) that the reference is a part of -componentFor :: Reference -> Component -componentFor b@Builtin {} = Component (Set.singleton b) -componentFor (Derived h _ n) = - Component $ Set.fromList [Derived h i n | i <- take (fromIntegral n) [0 ..]] - -derivedBase32Hex :: Text -> Pos -> Size -> Reference -derivedBase32Hex b32Hex i n = DerivedId (Id (fromMaybe msg h) i n) - where - msg = error $ "Reference.derivedBase32Hex " <> show h - h = H.fromBase32Hex b32Hex - -unsafeFromText :: Text -> Reference -unsafeFromText = either error id . fromText - -idFromText :: Text -> Maybe Id -idFromText s = case fromText s of - Left _ -> Nothing - Right (Builtin _) -> Nothing - Right (DerivedId id) -> pure id - -toId :: Reference -> Maybe Id -toId (DerivedId id) = Just id -toId Builtin{} = Nothing - --- examples: --- `##Text.take` — builtins don’t have cycles --- `#2tWjVAuc7` — derived, no cycle --- `#y9ycWkiC1.y9` — derived, part of cycle --- todo: take a (Reference -> CycleSize) so that `readSuffix` doesn't have to parse the size from the text. -fromText :: Text -> Either String Reference -fromText t = case Text.split (=='#') t of - [_, "", b] -> Right (Builtin b) - [_, h] -> case Text.split (=='.') h of - [hash] -> Right (derivedBase32Hex hash 0 1) - [hash, suffix] -> uncurry (derivedBase32Hex hash) <$> readSuffix suffix - _ -> bail - _ -> bail - where bail = Left $ "couldn't parse a Reference from " <> Text.unpack t - component :: H.Hash -> [k] -> [(k, Id)] component h ks = let - size = fromIntegral (length ks) - in [ (k, (Id h i size)) | (k, i) <- ks `zip` [0..]] + in [ (k, (Id h i)) | (k, i) <- ks `zip` [0..]] components :: [(H.Hash, [k])] -> [(k, Id)] components sccs = uncurry component =<< sccs -groupByComponent :: [(k, Reference)] -> [[(k, Reference)]] -groupByComponent refs = done $ foldl' insert Map.empty refs - where - insert m (k, r@(Derived h _ _)) = - Map.unionWith (<>) m (Map.fromList [(Right h, [(k,r)])]) - insert m (k, r) = - Map.unionWith (<>) m (Map.fromList [(Left r, [(k,r)])]) - done m = sortOn snd <$> toList m - instance Show Id where show = SH.toString . SH.take 5 . toShortHash . DerivedId instance Show Reference where show = SH.toString . SH.take 5 . toShortHash -instance Hashable.Hashable Reference where +instance Hashable Reference where tokens (Builtin txt) = [Hashable.Tag 0, Hashable.Text txt] - tokens (DerivedId (Id h i n)) = [Hashable.Tag 1, Hashable.Bytes (H.toBytes h), Hashable.Nat i, Hashable.Nat n] - --- | Two references mustn't differ in cycle length only. -instance Eq Id where x == y = compare x y == EQ -instance Ord Id where Id h i _ `compare` Id h2 i2 _ = compare h h2 <> compare i i2 + tokens (DerivedId (Id h i)) = [Hashable.Tag 1, Hashable.Bytes (H.toBytes h), Hashable.Nat i] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs index 817da14efe..0e7f6376cd 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs @@ -5,7 +5,7 @@ import Unison.Prelude import qualified Unison.Hashing.V2.Reference as Reference import Unison.Hashable (Hashable1) import Unison.ABT (Var) -import qualified Unison.ABT as ABT +import qualified Unison.Hashing.V2.ABT as ABT import qualified Data.Map as Map hashComponents :: @@ -16,4 +16,4 @@ hashComponents :: hashComponents embedRef tms = Map.fromList [ (v, (r,e)) | ((v,e), r) <- cs ] where cs = Reference.components $ ABT.hashComponents ref tms - ref h i n = embedRef (Reference.Id h i n) + ref h i = embedRef (Reference.Id h i) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs index af9a00fc11..76aeedd1c5 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs @@ -1,20 +1,19 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PatternSynonyms #-} -module Unison.Hashing.V2.Referent where +module Unison.Hashing.V2.Referent + ( Referent, + pattern Ref, + pattern Con, + ConstructorId, + toReference, + Unison.Hashing.V2.Referent.fold, + ) +where -import Unison.Prelude import Unison.Referent' ( Referent'(..), toReference' ) - -import qualified Data.Char as Char -import qualified Data.Text as Text import Unison.Hashing.V2.Reference (Reference) -import qualified Unison.Hashing.V2.Reference as R -import Unison.ShortHash (ShortHash) -import qualified Unison.ShortHash as SH - import Unison.ConstructorType (ConstructorType) -import qualified Unison.ConstructorType as CT -- | Specifies a term. -- @@ -30,90 +29,9 @@ pattern Con :: Reference -> ConstructorId -> ConstructorType -> Referent pattern Con r i t = Con' r i t {-# COMPLETE Ref, Con #-} --- | Cannot be a builtin. -type Id = Referent' R.Id - --- todo: move these to ShortHash module -toShortHash :: Referent -> ShortHash -toShortHash = \case - Ref r -> R.toShortHash r - Con r i _ -> patternShortHash r i - -toShortHashId :: Id -> ShortHash -toShortHashId = toShortHash . fromId - --- also used by HashQualified.fromPattern -patternShortHash :: Reference -> ConstructorId -> ShortHash -patternShortHash r i = (R.toShortHash r) { SH.cid = Just . Text.pack $ show i } - -showShort :: Int -> Referent -> Text -showShort numHashChars = SH.toText . SH.take numHashChars . toShortHash - -toText :: Referent -> Text -toText = \case - Ref r -> R.toText r - Con r cid ct -> R.toText r <> "#" <> ctorTypeText ct <> Text.pack (show cid) - -ctorTypeText :: CT.ConstructorType -> Text -ctorTypeText CT.Effect = EffectCtor -ctorTypeText CT.Data = DataCtor - -pattern EffectCtor = "a" -pattern DataCtor = "d" - -toString :: Referent -> String -toString = Text.unpack . toText - -isConstructor :: Referent -> Bool -isConstructor Con{} = True -isConstructor _ = False - -toTermReference :: Referent -> Maybe Reference -toTermReference = \case - Ref r -> Just r - _ -> Nothing - toReference :: Referent -> Reference toReference = toReference' -fromId :: Id -> Referent -fromId = fmap R.DerivedId - -toTypeReference :: Referent -> Maybe Reference -toTypeReference = \case - Con r _i _t -> Just r - _ -> Nothing - -isPrefixOf :: ShortHash -> Referent -> Bool -isPrefixOf sh r = SH.isPrefixOf sh (toShortHash r) - -unsafeFromText :: Text -> Referent -unsafeFromText = fromMaybe (error "invalid referent") . fromText - --- #abc[.xy][#cid] -fromText :: Text -> Maybe Referent -fromText t = either (const Nothing) Just $ - -- if the string has just one hash at the start, it's just a reference - if Text.length refPart == 1 then - Ref <$> R.fromText t - else if Text.all Char.isDigit cidPart then do - r <- R.fromText (Text.dropEnd 1 refPart) - ctorType <- ctorType - let cid = read (Text.unpack cidPart) - pure $ Con r cid ctorType - else - Left ("invalid constructor id: " <> Text.unpack cidPart) - where - ctorType = case Text.take 1 cidPart' of - EffectCtor -> Right CT.Effect - DataCtor -> Right CT.Data - _otherwise -> - Left ("invalid constructor type (expected '" - <> EffectCtor <> "' or '" <> DataCtor <> "'): " <> Text.unpack cidPart') - refPart = Text.dropWhileEnd (/= '#') t - cidPart' = Text.takeWhileEnd (/= '#') t - cidPart = Text.drop 1 cidPart' - fold :: (r -> a) -> (r -> ConstructorId -> ConstructorType -> a) -> Referent' r -> a fold fr fc = \case Ref' r -> fr r diff --git a/parser-typechecker/src/Unison/Hashing/V2/Term.hs b/parser-typechecker/src/Unison/Hashing/V2/Term.hs index 4d0eeb907a..168524e476 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Term.hs @@ -7,42 +7,35 @@ {-# LANGUAGE UnicodeSyntax #-} {-# LANGUAGE ViewPatterns #-} -module Unison.Hashing.V2.Term where +module Unison.Hashing.V2.Term ( + Term, + F(..), + MatchCase(..), + hashClosedTerm, + hashComponents +) where import Unison.Prelude - import Prelude hiding (and,or) -import Control.Monad.State (evalState) -import qualified Control.Monad.Writer.Strict as Writer -import Data.Bifunctor (second) -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.Text as Text + import qualified Data.Sequence as Sequence -import Prelude.Extras (Eq1(..), Show1(..)) -import Text.Show +import qualified Data.Text as Text +import Prelude.Extras (Eq1 (..), Show1 (..)) +import Text.Show import qualified Unison.ABT as ABT import qualified Unison.Blank as B import qualified Unison.Hash as Hash -import Unison.Hashable (Hashable1, accumulateToken) +import Unison.Hashable (Hashable1, accumulateToken) import qualified Unison.Hashable as Hashable -import Unison.Hashing.V2.Pattern (Pattern) +import qualified Unison.Hashing.V2.ABT as ABT +import Unison.Hashing.V2.Pattern (Pattern) import qualified Unison.Hashing.V2.Pattern as Pattern -import Unison.Hashing.V2.Reference (Reference, pattern Builtin) +import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference import qualified Unison.Hashing.V2.Reference.Util as ReferenceUtil -import Unison.Hashing.V2.Referent (Referent) -import qualified Unison.Hashing.V2.Referent as Referent -import Unison.Hashing.V2.Type (Type) -import qualified Unison.Hashing.V2.Type as Type -import qualified Unison.ConstructorType as CT -import Unison.Util.List (multimap) -import Unison.Var (Var) -import qualified Unison.Var as Var -import Unsafe.Coerce -import Unison.Symbol (Symbol) -import qualified Unison.Hashing.V2.LabeledDependency as LD -import Unison.Hashing.V2.LabeledDependency (LabeledDependency) +import Unison.Hashing.V2.Referent (Referent) +import Unison.Hashing.V2.Type (Type) +import Unison.Var (Var) -- This gets reexported; should maybe live somewhere other than Pattern, though. type ConstructorId = Pattern.ConstructorId @@ -98,874 +91,23 @@ type IsTop = Bool -- | Like `Term v`, but with an annotation of type `a` at every level in the tree type Term v a = Term2 v a a v a --- | Allow type variables and term variables to differ -type Term' vt v a = Term2 vt a a v a + -- | Allow type variables, term variables, type annotations and term annotations -- to all differ type Term2 vt at ap v a = ABT.Term (F vt at ap) v a --- | Like `Term v a`, but with only () for type and pattern annotations. -type Term3 v a = Term2 v () () v a - --- | Terms are represented as ABTs over the base functor F, with variables in `v` -type Term0 v = Term v () --- | Terms with type variables in `vt`, and term variables in `v` -type Term0' vt v = Term' vt v () - --- Prepare a term for type-directed name resolution by replacing --- any remaining free variables with blanks to be resolved by TDNR -prepareTDNR :: Var v => ABT.Term (F vt b ap) v b -> ABT.Term (F vt b ap) v b -prepareTDNR t = fmap fst . ABT.visitPure f $ ABT.annotateBound t - where f (ABT.Term _ (a, bound) (ABT.Var v)) | Set.notMember v bound = - Just $ resolve (a, bound) a (Text.unpack $ Var.name v) - f _ = Nothing - -amap :: Ord v => (a -> a2) -> Term v a -> Term v a2 -amap f = fmap f . patternMap (fmap f) . typeMap (fmap f) - -patternMap :: (Pattern ap -> Pattern ap2) -> Term2 vt at ap v a -> Term2 vt at ap2 v a -patternMap f = go where - go (ABT.Term fvs a t) = ABT.Term fvs a $ case t of - ABT.Abs v t -> ABT.Abs v (go t) - ABT.Var v -> ABT.Var v - ABT.Cycle t -> ABT.Cycle (go t) - ABT.Tm (Match e cases) -> ABT.Tm (Match (go e) [ - MatchCase (f p) (go <$> g) (go a) | MatchCase p g a <- cases ]) - -- Safe since `Match` is only ctor that has embedded `Pattern ap` arg - ABT.Tm ts -> unsafeCoerce $ ABT.Tm (fmap go ts) - -vmap :: Ord v2 => (v -> v2) -> Term v a -> Term v2 a -vmap f = ABT.vmap f . typeMap (ABT.vmap f) - -vtmap :: Ord vt2 => (vt -> vt2) -> Term' vt v a -> Term' vt2 v a -vtmap f = typeMap (ABT.vmap f) - -typeMap - :: Ord vt2 - => (Type vt at -> Type vt2 at2) - -> Term2 vt at ap v a - -> Term2 vt2 at2 ap v a -typeMap f = go - where - go (ABT.Term fvs a t) = ABT.Term fvs a $ case t of - ABT.Abs v t -> ABT.Abs v (go t) - ABT.Var v -> ABT.Var v - ABT.Cycle t -> ABT.Cycle (go t) - ABT.Tm (Ann e t) -> ABT.Tm (Ann (go e) (f t)) - -- Safe since `Ann` is only ctor that has embedded `Type v` arg - -- otherwise we'd have to manually match on every non-`Ann` ctor - ABT.Tm ts -> unsafeCoerce $ ABT.Tm (fmap go ts) - -extraMap' - :: (Ord vt, Ord vt') - => (vt -> vt') - -> (at -> at') - -> (ap -> ap') - -> Term2 vt at ap v a - -> Term2 vt' at' ap' v a -extraMap' vtf atf apf = ABT.extraMap (extraMap vtf atf apf) - -extraMap - :: (Ord vt, Ord vt') - => (vt -> vt') - -> (at -> at') - -> (ap -> ap') - -> F vt at ap a - -> F vt' at' ap' a -extraMap vtf atf apf = \case - Int x -> Int x - Nat x -> Nat x - Float x -> Float x - Boolean x -> Boolean x - Text x -> Text x - Char x -> Char x - Blank x -> Blank (fmap atf x) - Ref x -> Ref x - Constructor x y -> Constructor x y - Request x y -> Request x y - Handle x y -> Handle x y - App x y -> App x y - Ann tm x -> Ann tm (ABT.amap atf (ABT.vmap vtf x)) - List x -> List x - If x y z -> If x y z - And x y -> And x y - Or x y -> Or x y - Lam x -> Lam x - LetRec x y z -> LetRec x y z - Let x y z -> Let x y z - Match tm l -> Match tm (map (matchCaseExtraMap apf) l) - TermLink r -> TermLink r - TypeLink r -> TypeLink r - -matchCaseExtraMap :: (loc -> loc') -> MatchCase loc a -> MatchCase loc' a -matchCaseExtraMap f (MatchCase p x y) = MatchCase (fmap f p) x y - -unannotate - :: forall vt at ap v a . Ord v => Term2 vt at ap v a -> Term0' vt v -unannotate = go - where - go :: Term2 vt at ap v a -> Term0' vt v - go (ABT.out -> ABT.Abs v body) = ABT.abs v (go body) - go (ABT.out -> ABT.Cycle body) = ABT.cycle (go body) - go (ABT.Var' v ) = ABT.var v - go (ABT.Tm' f ) = case go <$> f of - Ann e t -> ABT.tm (Ann e (void t)) - Match scrutinee branches -> - let unann (MatchCase pat guard body) = MatchCase (void pat) guard body - in ABT.tm (Match scrutinee (unann <$> branches)) - f' -> ABT.tm (unsafeCoerce f') - go _ = error "unpossible" - -wrapV :: Ord v => Term v a -> Term (ABT.V v) a -wrapV = vmap ABT.Bound - --- | All variables mentioned in the given term. --- Includes both term and type variables, both free and bound. -allVars :: Ord v => Term v a -> Set v -allVars tm = Set.fromList $ - ABT.allVars tm ++ [ v | tp <- allTypes tm, v <- ABT.allVars tp ] - where - allTypes tm = case tm of - Ann' e tp -> tp : allTypes e - _ -> foldMap allTypes $ ABT.out tm - -freeVars :: Term' vt v a -> Set v -freeVars = ABT.freeVars - -freeTypeVars :: Ord vt => Term' vt v a -> Set vt -freeTypeVars t = Map.keysSet $ freeTypeVarAnnotations t - -freeTypeVarAnnotations :: Ord vt => Term' vt v a -> Map vt [a] -freeTypeVarAnnotations e = multimap $ go Set.empty e where - go bound tm = case tm of - Var' _ -> mempty - Ann' e (Type.stripIntroOuters -> t1) -> let - bound' = case t1 of Type.ForallsNamed' vs _ -> bound <> Set.fromList vs - _ -> bound - in go bound' e <> ABT.freeVarOccurrences bound t1 - ABT.Tm' f -> foldMap (go bound) f - (ABT.out -> ABT.Abs _ body) -> go bound body - (ABT.out -> ABT.Cycle body) -> go bound body - _ -> error "unpossible" - -substTypeVars :: (Ord v, Var vt) - => [(vt, Type vt b)] - -> Term' vt v a - -> Term' vt v a -substTypeVars subs e = foldl' go e subs where - go e (vt, t) = substTypeVar vt t e - --- Capture-avoiding substitution of a type variable inside a term. This --- will replace that type variable wherever it appears in type signatures of --- the term, avoiding capture by renaming ∀-binders. -substTypeVar - :: (Ord v, ABT.Var vt) - => vt - -> Type vt b - -> Term' vt v a - -> Term' vt v a -substTypeVar vt ty = go Set.empty where - go bound tm | Set.member vt bound = tm - go bound tm = let loc = ABT.annotation tm in case tm of - Var' _ -> tm - Ann' e t -> uncapture [] e (Type.stripIntroOuters t) where - fvs = ABT.freeVars ty - -- if the ∀ introduces a variable, v, which is free in `ty`, we pick a new - -- variable name for v which is unique, v', and rename v to v' in e. - uncapture vs e t@(Type.Forall' body) | Set.member (ABT.variable body) fvs = let - v = ABT.variable body - v2 = Var.freshIn (ABT.freeVars t) . Var.freshIn (Set.insert vt fvs) $ v - t2 = ABT.bindInheritAnnotation body (Type.var() v2) - in uncapture ((ABT.annotation t, v2):vs) (renameTypeVar v v2 e) t2 - uncapture vs e t0 = let - t = foldl (\body (loc,v) -> Type.forall loc v body) t0 vs - bound' = case Type.unForalls (Type.stripIntroOuters t) of - Nothing -> bound - Just (vs, _) -> bound <> Set.fromList vs - t' = ABT.substInheritAnnotation vt ty (Type.stripIntroOuters t) - in ann loc (go bound' e) (Type.freeVarsToOuters bound t') - ABT.Tm' f -> ABT.tm' loc (go bound <$> f) - (ABT.out -> ABT.Abs v body) -> ABT.abs' loc v (go bound body) - (ABT.out -> ABT.Cycle body) -> ABT.cycle' loc (go bound body) - _ -> error "unpossible" - -renameTypeVar :: (Ord v, ABT.Var vt) => vt -> vt -> Term' vt v a -> Term' vt v a -renameTypeVar old new = go Set.empty where - go bound tm | Set.member old bound = tm - go bound tm = let loc = ABT.annotation tm in case tm of - Var' _ -> tm - Ann' e t -> let - bound' = case Type.unForalls (Type.stripIntroOuters t) of - Nothing -> bound - Just (vs, _) -> bound <> Set.fromList vs - t' = ABT.rename old new (Type.stripIntroOuters t) - in ann loc (go bound' e) (Type.freeVarsToOuters bound t') - ABT.Tm' f -> ABT.tm' loc (go bound <$> f) - (ABT.out -> ABT.Abs v body) -> ABT.abs' loc v (go bound body) - (ABT.out -> ABT.Cycle body) -> ABT.cycle' loc (go bound body) - _ -> error "unpossible" - --- Converts free variables to bound variables using forall or introOuter. Example: --- --- foo : x -> x --- foo a = --- r : x --- r = a --- r --- --- This becomes: --- --- foo : ∀ x . x -> x --- foo a = --- r : outer x . x -- FYI, not valid syntax --- r = a --- r --- --- More specifically: in the expression `e : t`, unbound lowercase variables in `t` --- are bound with foralls, and any ∀-quantified type variables are made bound in --- `e` and its subexpressions. The result is a term with no lowercase free --- variables in any of its type signatures, with outer references represented --- with explicit `introOuter` binders. The resulting term may have uppercase --- free variables that are still unbound. -generalizeTypeSignatures :: (Var vt, Var v) => Term' vt v a -> Term' vt v a -generalizeTypeSignatures = go Set.empty where - go bound tm = let loc = ABT.annotation tm in case tm of - Var' _ -> tm - Ann' e (Type.generalizeLowercase bound -> t) -> let - bound' = case Type.unForalls t of - Nothing -> bound - Just (vs, _) -> bound <> Set.fromList vs - in ann loc (go bound' e) (Type.freeVarsToOuters bound t) - ABT.Tm' f -> ABT.tm' loc (go bound <$> f) - (ABT.out -> ABT.Abs v body) -> ABT.abs' loc v (go bound body) - (ABT.out -> ABT.Cycle body) -> ABT.cycle' loc (go bound body) - _ -> error "unpossible" - --- nicer pattern syntax - -pattern Var' v <- ABT.Var' v -pattern Cycle' xs t <- ABT.Cycle' xs t -pattern Abs' subst <- ABT.Abs' subst -pattern Int' n <- (ABT.out -> ABT.Tm (Int n)) -pattern Nat' n <- (ABT.out -> ABT.Tm (Nat n)) -pattern Float' n <- (ABT.out -> ABT.Tm (Float n)) -pattern Boolean' b <- (ABT.out -> ABT.Tm (Boolean b)) -pattern Text' s <- (ABT.out -> ABT.Tm (Text s)) -pattern Char' c <- (ABT.out -> ABT.Tm (Char c)) -pattern Blank' b <- (ABT.out -> ABT.Tm (Blank b)) -pattern Ref' r <- (ABT.out -> ABT.Tm (Ref r)) -pattern TermLink' r <- (ABT.out -> ABT.Tm (TermLink r)) -pattern TypeLink' r <- (ABT.out -> ABT.Tm (TypeLink r)) -pattern Builtin' r <- (ABT.out -> ABT.Tm (Ref (Builtin r))) -pattern App' f x <- (ABT.out -> ABT.Tm (App f x)) -pattern Match' scrutinee branches <- (ABT.out -> ABT.Tm (Match scrutinee branches)) -pattern Constructor' ref n <- (ABT.out -> ABT.Tm (Constructor ref n)) -pattern Request' ref n <- (ABT.out -> ABT.Tm (Request ref n)) -pattern RequestOrCtor' ref n <- (unReqOrCtor -> Just (ref, n)) -pattern If' cond t f <- (ABT.out -> ABT.Tm (If cond t f)) -pattern And' x y <- (ABT.out -> ABT.Tm (And x y)) -pattern Or' x y <- (ABT.out -> ABT.Tm (Or x y)) -pattern Handle' h body <- (ABT.out -> ABT.Tm (Handle h body)) -pattern Apps' f args <- (unApps -> Just (f, args)) --- begin pretty-printer helper patterns -pattern AppsPred' f args <- (unAppsPred -> Just (f, args)) -pattern BinaryApp' f arg1 arg2 <- (unBinaryApp -> Just (f, arg1, arg2)) -pattern BinaryApps' apps lastArg <- (unBinaryApps -> Just (apps, lastArg)) -pattern BinaryAppsPred' apps lastArg <- (unBinaryAppsPred -> Just (apps, lastArg)) --- end pretty-printer helper patterns -pattern Ann' x t <- (ABT.out -> ABT.Tm (Ann x t)) -pattern List' xs <- (ABT.out -> ABT.Tm (List xs)) -pattern Lam' subst <- ABT.Tm' (Lam (ABT.Abs' subst)) - -pattern Delay' body <- (unDelay -> Just body) -unDelay :: Ord v => Term2 vt at ap v a -> Maybe (Term2 vt at ap v a) -unDelay tm = case ABT.out tm of - ABT.Tm (Lam (ABT.Term _ _ (ABT.Abs v body))) - | Set.notMember v (ABT.freeVars body) - -> Just body - _ -> Nothing - -pattern LamNamed' v body <- (ABT.out -> ABT.Tm (Lam (ABT.Term _ _ (ABT.Abs v body)))) -pattern LamsNamed' vs body <- (unLams' -> Just (vs, body)) -pattern LamsNamedOpt' vs body <- (unLamsOpt' -> Just (vs, body)) -pattern LamsNamedPred' vs body <- (unLamsPred' -> Just (vs, body)) -pattern LamsNamedOrDelay' vs body <- (unLamsUntilDelay' -> Just (vs, body)) -pattern Let1' b subst <- (unLet1 -> Just (_, b, subst)) -pattern Let1Top' top b subst <- (unLet1 -> Just (top, b, subst)) -pattern Let1Named' v b e <- (ABT.Tm' (Let _ b (ABT.out -> ABT.Abs v e))) -pattern Let1NamedTop' top v b e <- (ABT.Tm' (Let top b (ABT.out -> ABT.Abs v e))) -pattern Lets' bs e <- (unLet -> Just (bs, e)) -pattern LetRecNamed' bs e <- (unLetRecNamed -> Just (_,bs,e)) -pattern LetRecNamedTop' top bs e <- (unLetRecNamed -> Just (top,bs,e)) -pattern LetRec' subst <- (unLetRec -> Just (_, subst)) -pattern LetRecTop' top subst <- (unLetRec -> Just (top, subst)) -pattern LetRecNamedAnnotated' ann bs e <- (unLetRecNamedAnnotated -> Just (_, ann, bs,e)) -pattern LetRecNamedAnnotatedTop' top ann bs e <- - (unLetRecNamedAnnotated -> Just (top, ann, bs,e)) - -fresh :: Var v => Term0 v -> v -> v -fresh = ABT.fresh - --- some smart constructors - -var :: a -> v -> Term2 vt at ap v a -var = ABT.annotatedVar - -var' :: Var v => Text -> Term0' vt v -var' = var() . Var.named ref :: Ord v => a -> Reference -> Term2 vt at ap v a ref a r = ABT.tm' a (Ref r) -pattern Referent' r <- (unReferent -> Just r) - -unReferent :: Term2 vt at ap v a -> Maybe Referent -unReferent (Ref' r) = Just $ Referent.Ref r -unReferent (Constructor' r cid) = Just $ Referent.Con r cid CT.Data -unReferent (Request' r cid) = Just $ Referent.Con r cid CT.Effect -unReferent _ = Nothing - refId :: Ord v => a -> Reference.Id -> Term2 vt at ap v a refId a = ref a . Reference.DerivedId -termLink :: Ord v => a -> Referent -> Term2 vt at ap v a -termLink a r = ABT.tm' a (TermLink r) - -typeLink :: Ord v => a -> Reference -> Term2 vt at ap v a -typeLink a r = ABT.tm' a (TypeLink r) - -builtin :: Ord v => a -> Text -> Term2 vt at ap v a -builtin a n = ref a (Reference.Builtin n) - -float :: Ord v => a -> Double -> Term2 vt at ap v a -float a d = ABT.tm' a (Float d) - -boolean :: Ord v => a -> Bool -> Term2 vt at ap v a -boolean a b = ABT.tm' a (Boolean b) - -int :: Ord v => a -> Int64 -> Term2 vt at ap v a -int a d = ABT.tm' a (Int d) - -nat :: Ord v => a -> Word64 -> Term2 vt at ap v a -nat a d = ABT.tm' a (Nat d) - -text :: Ord v => a -> Text -> Term2 vt at ap v a -text a = ABT.tm' a . Text - -char :: Ord v => a -> Char -> Term2 vt at ap v a -char a = ABT.tm' a . Char - -watch :: (Var v, Semigroup a) => a -> String -> Term v a -> Term v a -watch a note e = - apps' (builtin a "Debug.watch") [text a (Text.pack note), e] - -watchMaybe :: (Var v, Semigroup a) => Maybe String -> Term v a -> Term v a -watchMaybe Nothing e = e -watchMaybe (Just note) e = watch (ABT.annotation e) note e - -blank :: Ord v => a -> Term2 vt at ap v a -blank a = ABT.tm' a (Blank B.Blank) - -placeholder :: Ord v => a -> String -> Term2 vt a ap v a -placeholder a s = ABT.tm' a . Blank $ B.Recorded (B.Placeholder a s) - -resolve :: Ord v => at -> ab -> String -> Term2 vt ab ap v at -resolve at ab s = ABT.tm' at . Blank $ B.Recorded (B.Resolve ab s) - -constructor :: Ord v => a -> Reference -> ConstructorId -> Term2 vt at ap v a -constructor a ref n = ABT.tm' a (Constructor ref n) - -request :: Ord v => a -> Reference -> ConstructorId -> Term2 vt at ap v a -request a ref n = ABT.tm' a (Request ref n) - --- todo: delete and rename app' to app -app_ :: Ord v => Term0' vt v -> Term0' vt v -> Term0' vt v -app_ f arg = ABT.tm (App f arg) - -app :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -app a f arg = ABT.tm' a (App f arg) - -match :: Ord v => a -> Term2 vt at a v a -> [MatchCase a (Term2 vt at a v a)] -> Term2 vt at a v a -match a scrutinee branches = ABT.tm' a (Match scrutinee branches) - -handle :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -handle a h block = ABT.tm' a (Handle h block) - -and :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -and a x y = ABT.tm' a (And x y) - -or :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -or a x y = ABT.tm' a (Or x y) - -list :: Ord v => a -> [Term2 vt at ap v a] -> Term2 vt at ap v a -list a es = list' a (Sequence.fromList es) - -list' :: Ord v => a -> Seq (Term2 vt at ap v a) -> Term2 vt at ap v a -list' a es = ABT.tm' a (List es) - -apps - :: Ord v - => Term2 vt at ap v a - -> [(a, Term2 vt at ap v a)] - -> Term2 vt at ap v a -apps = foldl' (\f (a, t) -> app a f t) - -apps' - :: (Ord v, Semigroup a) - => Term2 vt at ap v a - -> [Term2 vt at ap v a] - -> Term2 vt at ap v a -apps' = foldl' (\f t -> app (ABT.annotation f <> ABT.annotation t) f t) - -iff :: Ord v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -> Term2 vt at ap v a -iff a cond t f = ABT.tm' a (If cond t f) - -ann_ :: Ord v => Term0' vt v -> Type vt () -> Term0' vt v -ann_ e t = ABT.tm (Ann e t) - -ann :: Ord v - => a - -> Term2 vt at ap v a - -> Type vt at - -> Term2 vt at ap v a -ann a e t = ABT.tm' a (Ann e t) - --- arya: are we sure we want the two annotations to be the same? -lam :: Ord v => a -> v -> Term2 vt at ap v a -> Term2 vt at ap v a -lam a v body = ABT.tm' a (Lam (ABT.abs' a v body)) - -delay :: Var v => a -> Term2 vt at ap v a -> Term2 vt at ap v a -delay a body = - ABT.tm' a (Lam (ABT.abs' a (ABT.freshIn (ABT.freeVars body) (Var.named "_")) body)) - -lam' :: Ord v => a -> [v] -> Term2 vt at ap v a -> Term2 vt at ap v a -lam' a vs body = foldr (lam a) body vs - -lam'' :: Ord v => [(a,v)] -> Term2 vt at ap v a -> Term2 vt at ap v a -lam'' vs body = foldr (uncurry lam) body vs - -isLam :: Term2 vt at ap v a -> Bool -isLam t = arity t > 0 - -arity :: Term2 vt at ap v a -> Int -arity (LamNamed' _ body) = 1 + arity body -arity (Ann' e _) = arity e -arity _ = 0 - -unLetRecNamedAnnotated - :: Term' vt v a - -> Maybe - (IsTop, a, [((a, v), Term' vt v a)], Term' vt v a) -unLetRecNamedAnnotated (ABT.CycleA' ann avs (ABT.Tm' (LetRec isTop bs e))) = - Just (isTop, ann, avs `zip` bs, e) -unLetRecNamedAnnotated _ = Nothing - -letRec' - :: (Ord v, Monoid a) - => Bool - -> [(v, Term' vt v a)] - -> Term' vt v a - -> Term' vt v a -letRec' isTop bindings body = - letRec isTop - (foldMap (ABT.annotation . snd) bindings <> ABT.annotation body) - [ ((ABT.annotation b, v), b) | (v,b) <- bindings ] - body - --- Prepend a binding to form a (bigger) let rec. Useful when --- building up a block incrementally using a right fold. --- --- For example: --- consLetRec (x = 42) "hi" --- => --- let rec x = 42 in "hi" --- --- consLetRec (x = 42) (let rec y = "hi" in (x,y)) --- => --- let rec x = 42; y = "hi" in (x,y) -consLetRec - :: Ord v - => Bool -- isTop parameter - -> a -- annotation for overall let rec - -> (a, v, Term' vt v a) -- the binding - -> Term' vt v a -- the body - -> Term' vt v a -consLetRec isTop a (ab, vb, b) body = case body of - LetRecNamedAnnotated' _ bs body -> letRec isTop a (((ab,vb), b) : bs) body - _ -> letRec isTop a [((ab,vb),b)] body - -letRec - :: Ord v - => Bool - -> a - -> [((a, v), Term' vt v a)] - -> Term' vt v a - -> Term' vt v a -letRec _ _ [] e = e -letRec isTop a bindings e = ABT.cycle' - a - (foldr (uncurry ABT.abs' . fst) z bindings) - where z = ABT.tm' a (LetRec isTop (map snd bindings) e) - - --- | Smart constructor for let rec blocks. Each binding in the block may --- reference any other binding in the block in its body (including itself), --- and the output expression may also reference any binding in the block. -letRec_ :: Ord v => IsTop -> [(v, Term0' vt v)] -> Term0' vt v -> Term0' vt v -letRec_ _ [] e = e -letRec_ isTop bindings e = ABT.cycle (foldr (ABT.abs . fst) z bindings) - where - z = ABT.tm (LetRec isTop (map snd bindings) e) - --- | Smart constructor for let blocks. Each binding in the block may --- reference only previous bindings in the block, not including itself. --- The output expression may reference any binding in the block. --- todo: delete me -let1_ :: Ord v => IsTop -> [(v,Term0' vt v)] -> Term0' vt v -> Term0' vt v -let1_ isTop bindings e = foldr f e bindings - where - f (v,b) body = ABT.tm (Let isTop b (ABT.abs v body)) - --- | annotations are applied to each nested Let expression -let1 - :: Ord v - => IsTop - -> [((a, v), Term2 vt at ap v a)] - -> Term2 vt at ap v a - -> Term2 vt at ap v a -let1 isTop bindings e = foldr f e bindings - where f ((ann, v), b) body = ABT.tm' ann (Let isTop b (ABT.abs' ann v body)) - -let1' - :: (Semigroup a, Ord v) - => IsTop - -> [(v, Term2 vt at ap v a)] - -> Term2 vt at ap v a - -> Term2 vt at ap v a -let1' isTop bindings e = foldr f e bindings - where - ann = ABT.annotation - f (v, b) body = ABT.tm' a (Let isTop b (ABT.abs' a v body)) - where a = ann b <> ann body - --- let1' :: Var v => [(Text, Term0 vt v)] -> Term0 vt v -> Term0 vt v --- let1' bs e = let1 [(ABT.v' name, b) | (name,b) <- bs ] e - -unLet1 - :: Var v - => Term' vt v a - -> Maybe (IsTop, Term' vt v a, ABT.Subst (F vt a a) v a) -unLet1 (ABT.Tm' (Let isTop b (ABT.Abs' subst))) = Just (isTop, b, subst) -unLet1 _ = Nothing - --- | Satisfies `unLet (let' bs e) == Just (bs, e)` -unLet - :: Term2 vt at ap v a - -> Maybe ([(IsTop, v, Term2 vt at ap v a)], Term2 vt at ap v a) -unLet t = fixup (go t) - where - go (ABT.Tm' (Let isTop b (ABT.out -> ABT.Abs v t))) = case go t of - (env, t) -> ((isTop, v, b) : env, t) - go t = ([], t) - fixup ([], _) = Nothing - fixup bst = Just bst - --- | Satisfies `unLetRec (letRec bs e) == Just (bs, e)` -unLetRecNamed - :: Term2 vt at ap v a - -> Maybe - ( IsTop - , [(v, Term2 vt at ap v a)] - , Term2 vt at ap v a - ) -unLetRecNamed (ABT.Cycle' vs (ABT.Tm' (LetRec isTop bs e))) - | length vs == length bs = Just (isTop, zip vs bs, e) -unLetRecNamed _ = Nothing - -unLetRec - :: (Monad m, Var v) - => Term2 vt at ap v a - -> Maybe - ( IsTop - , (v -> m v) - -> m - ( [(v, Term2 vt at ap v a)] - , Term2 vt at ap v a - ) - ) -unLetRec (unLetRecNamed -> Just (isTop, bs, e)) = Just - ( isTop - , \freshen -> do - vs <- sequence [ freshen v | (v, _) <- bs ] - let sub = ABT.substsInheritAnnotation (map fst bs `zip` map ABT.var vs) - pure (vs `zip` [ sub b | (_, b) <- bs ], sub e) - ) -unLetRec _ = Nothing - -unApps - :: Term2 vt at ap v a - -> Maybe (Term2 vt at ap v a, [Term2 vt at ap v a]) -unApps t = unAppsPred (t, const True) - --- Same as unApps but taking a predicate controlling whether we match on a given function argument. -unAppsPred :: (Term2 vt at ap v a, Term2 vt at ap v a -> Bool) -> - Maybe (Term2 vt at ap v a, [Term2 vt at ap v a]) -unAppsPred (t, pred) = case go t [] of [] -> Nothing; f:args -> Just (f,args) - where - go (App' i o) acc | pred o = go i (o:acc) - go _ [] = [] - go fn args = fn:args - -unBinaryApp :: Term2 vt at ap v a - -> Maybe (Term2 vt at ap v a, - Term2 vt at ap v a, - Term2 vt at ap v a) -unBinaryApp t = case unApps t of - Just (f, [arg1, arg2]) -> Just (f, arg1, arg2) - _ -> Nothing - --- "((a1 `f1` a2) `f2` a3)" becomes "Just ([(a2, f2), (a1, f1)], a3)" -unBinaryApps - :: Term2 vt at ap v a - -> Maybe - ( [(Term2 vt at ap v a, Term2 vt at ap v a)] - , Term2 vt at ap v a - ) -unBinaryApps t = unBinaryAppsPred (t, const True) - --- Same as unBinaryApps but taking a predicate controlling whether we match on a given binary function. -unBinaryAppsPred :: (Term2 vt at ap v a - ,Term2 vt at ap v a -> Bool) - -> Maybe ([(Term2 vt at ap v a, - Term2 vt at ap v a)], - Term2 vt at ap v a) -unBinaryAppsPred (t, pred) = case unBinaryApp t of - Just (f, x, y) | pred f -> case unBinaryAppsPred (x, pred) of - Just (as, xLast) -> Just ((xLast, f) : as, y) - Nothing -> Just ([(x, f)], y) - _ -> Nothing - -unLams' - :: Term2 vt at ap v a -> Maybe ([v], Term2 vt at ap v a) -unLams' t = unLamsPred' (t, const True) - --- Same as unLams', but always matches. Returns an empty [v] if the term doesn't start with a --- lambda extraction. -unLamsOpt' :: Term2 vt at ap v a -> Maybe ([v], Term2 vt at ap v a) -unLamsOpt' t = case unLams' t of - r@(Just _) -> r - Nothing -> Just ([], t) - --- Same as unLams', but stops at any variable named `()`, which indicates a --- delay (`'`) annotation which we want to preserve. -unLamsUntilDelay' - :: Var v - => Term2 vt at ap v a - -> Maybe ([v], Term2 vt at ap v a) -unLamsUntilDelay' t = case unLamsPred' (t, (/=) $ Var.named "()") of - r@(Just _) -> r - Nothing -> Just ([], t) - --- Same as unLams' but taking a predicate controlling whether we match on a given binary function. -unLamsPred' :: (Term2 vt at ap v a, v -> Bool) -> - Maybe ([v], Term2 vt at ap v a) -unLamsPred' (LamNamed' v body, pred) | pred v = case unLamsPred' (body, pred) of - Nothing -> Just ([v], body) - Just (vs, body) -> Just (v:vs, body) -unLamsPred' _ = Nothing - -unReqOrCtor :: Term2 vt at ap v a -> Maybe (Reference, ConstructorId) -unReqOrCtor (Constructor' r cid) = Just (r, cid) -unReqOrCtor (Request' r cid) = Just (r, cid) -unReqOrCtor _ = Nothing - --- Dependencies including referenced data and effect decls -dependencies :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set Reference -dependencies t = Set.map (LD.fold id Referent.toReference) (labeledDependencies t) - -termDependencies :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set Reference -termDependencies = - Set.fromList - . mapMaybe - ( LD.fold - (\_typeRef -> Nothing) - ( Referent.fold - (\termRef -> Just termRef) - (\_typeConRef _i _ct -> Nothing) - ) - ) - . toList - . labeledDependencies - --- gets types from annotations and constructors -typeDependencies :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set Reference -typeDependencies = - Set.fromList . mapMaybe (LD.fold Just (const Nothing)) . toList . labeledDependencies - --- Gets the types to which this term contains references via patterns and --- data constructors. -constructorDependencies - :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set Reference -constructorDependencies = - Set.unions - . generalizedDependencies (const mempty) - (const mempty) - Set.singleton - (const . Set.singleton) - Set.singleton - (const . Set.singleton) - Set.singleton - -generalizedDependencies - :: (Ord v, Ord vt, Ord r) - => (Reference -> r) - -> (Reference -> r) - -> (Reference -> r) - -> (Reference -> ConstructorId -> r) - -> (Reference -> r) - -> (Reference -> ConstructorId -> r) - -> (Reference -> r) - -> Term2 vt at ap v a - -> Set r -generalizedDependencies termRef typeRef literalType dataConstructor dataType effectConstructor effectType - = Set.fromList . Writer.execWriter . ABT.visit' f where - f t@(Ref r) = Writer.tell [termRef r] $> t - f t@(TermLink r) = case r of - Referent.Ref r -> Writer.tell [termRef r] $> t - Referent.Con r id CT.Data -> Writer.tell [dataConstructor r id] $> t - Referent.Con r id CT.Effect -> Writer.tell [effectConstructor r id] $> t - f t@(TypeLink r) = Writer.tell [typeRef r] $> t - f t@(Ann _ typ) = - Writer.tell (map typeRef . toList $ Type.dependencies typ) $> t - f t@(Nat _) = Writer.tell [literalType Type.natRef] $> t - f t@(Int _) = Writer.tell [literalType Type.intRef] $> t - f t@(Float _) = Writer.tell [literalType Type.floatRef] $> t - f t@(Boolean _) = Writer.tell [literalType Type.booleanRef] $> t - f t@(Text _) = Writer.tell [literalType Type.textRef] $> t - f t@(List _) = Writer.tell [literalType Type.listRef] $> t - f t@(Constructor r cid) = - Writer.tell [dataType r, dataConstructor r cid] $> t - f t@(Request r cid) = - Writer.tell [effectType r, effectConstructor r cid] $> t - f t@(Match _ cases) = traverse_ goPat cases $> t - f t = pure t - goPat (MatchCase pat _ _) = - Writer.tell . toList $ Pattern.generalizedDependencies literalType - dataConstructor - dataType - effectConstructor - effectType - pat - -labeledDependencies - :: (Ord v, Ord vt) => Term2 vt at ap v a -> Set LabeledDependency -labeledDependencies = generalizedDependencies LD.termRef - LD.typeRef - LD.typeRef - LD.dataConstructor - LD.typeRef - LD.effectConstructor - LD.typeRef - -updateDependencies - :: Ord v - => Map Reference Reference - -> Map Reference Reference - -> Term v a - -> Term v a -updateDependencies termUpdates typeUpdates = ABT.rebuildUp go - where - -- todo: this function might need tweaking if we ever allow type replacements - -- would need to look inside pattern matching and constructor calls - go (Ref r ) = Ref (Map.findWithDefault r r termUpdates) - go (TermLink (Referent.Ref r)) = TermLink (Referent.Ref $ Map.findWithDefault r r termUpdates) - go (TypeLink r) = TypeLink (Map.findWithDefault r r typeUpdates) - go (Ann tm tp) = Ann tm $ Type.updateDependencies typeUpdates tp - go f = f - --- | If the outermost term is a function application, --- perform substitution of the argument into the body -betaReduce :: Var v => Term0 v -> Term0 v -betaReduce (App' (Lam' f) arg) = ABT.bind f arg -betaReduce e = e - -betaNormalForm :: Var v => Term0 v -> Term0 v -betaNormalForm (App' f a) = betaNormalForm (betaReduce (app() (betaNormalForm f) a)) -betaNormalForm e = e - --- x -> f x => f -etaNormalForm :: Ord v => Term0 v -> Term0 v -etaNormalForm tm = case tm of - LamNamed' v body -> step . lam (ABT.annotation tm) v $ etaNormalForm body - where - step (LamNamed' v (App' f (Var' v'))) | v == v' = f - step tm = tm - _ -> tm - --- x -> f x => f as long as `x` is a variable of type `Var.Eta` -etaReduceEtaVars :: Var v => Term0 v -> Term0 v -etaReduceEtaVars tm = case tm of - LamNamed' v body -> step . lam (ABT.annotation tm) v $ etaReduceEtaVars body - where - ok v v' = v == v' && Var.typeOf v == Var.Eta - step (LamNamed' v (App' f (Var' v'))) | ok v v' = f - step tm = tm - _ -> tm - --- This converts `Reference`s it finds that are in the input `Map` --- back to free variables -unhashComponent :: forall v a. Var v - => Map Reference (Term v a) - -> Map Reference (v, Term v a) -unhashComponent m = let - usedVars = foldMap (Set.fromList . ABT.allVars) m - m' :: Map Reference (v, Term v a) - m' = evalState (Map.traverseWithKey assignVar m) usedVars where - assignVar r t = (,t) <$> ABT.freshenS (refNamed r) - unhash1 = ABT.rebuildUp' go where - go e@(Ref' r) = case Map.lookup r m' of - Nothing -> e - Just (v, _) -> var (ABT.annotation e) v - go e = e - in second unhash1 <$> m' - where - -- Variable whose name is derived from the given reference. - refNamed :: Var v => Reference -> v - refNamed ref = Var.named ("ℍ" <> Reference.toText ref) - hashComponents :: Var v => Map v (Term v a) -> Map v (Reference.Id, Term v a) hashComponents = ReferenceUtil.hashComponents $ refId () hashClosedTerm :: Var v => Term v a -> Reference.Id -hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 1 - --- The hash for a constructor -hashConstructor' - :: (Reference -> ConstructorId -> Term0 Symbol) -> Reference -> ConstructorId -> Reference -hashConstructor' f r cid = - let --- this is a bit circuitous, but defining everything in terms of hashComponents --- ensure the hashing is always done in the same way - m = hashComponents (Map.fromList [(Var.named "_" :: Symbol, f r cid)]) - in case toList m of - [(r, _)] -> Reference.DerivedId r - _ -> error "unpossible" - -hashConstructor :: Reference -> ConstructorId -> Reference -hashConstructor = hashConstructor' $ constructor () - -hashRequest :: Reference -> ConstructorId -> Reference -hashRequest = hashConstructor' $ request () - -fromReferent :: Ord v - => a - -> Referent - -> Term2 vt at ap v a -fromReferent a = \case - Referent.Ref r -> ref a r - Referent.Con r i ct -> case ct of - CT.Data -> constructor a r i - CT.Effect -> request a r i +hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 instance Var v => Hashable1 (F v a p) where hash1 hashCycle hash e @@ -978,12 +120,11 @@ instance Var v => Hashable1 (F v a p) where -- are 'transparent' wrt hash and hashing is unaffected by whether -- expressions are linked. So for example `x = 1 + 1` and `y = x` hash -- the same. - Ref (Reference.Derived h 0 1) -> Hashable.fromBytes (Hash.toBytes h) - Ref (Reference.Derived h i n) -> Hashable.accumulate + Ref (Reference.Derived h 0) -> Hashable.fromBytes (Hash.toBytes h) + Ref (Reference.Derived h i) -> Hashable.accumulate [ tag 1 , hashed $ Hashable.fromBytes (Hash.toBytes h) , Hashable.Nat i - , Hashable.Nat n ] -- Note: start each layer with leading `1` byte, to avoid collisions -- with types, which start each layer with leading `0`. diff --git a/parser-typechecker/src/Unison/Hashing/V2/Type.hs b/parser-typechecker/src/Unison/Hashing/V2/Type.hs index cc2a6e0dc4..3b5438aeaf 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Type.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Type.hs @@ -5,30 +5,42 @@ {-# LANGUAGE Rank2Types #-} {-# LANGUAGE ViewPatterns #-} -module Unison.Hashing.V2.Type where +module Unison.Hashing.V2.Type ( + Type, + F(..), + bindExternal, + bindReferences, + hashComponents, + toReference, + toReferenceMentions, + -- * builtin type references + effectRef, + listRef, + booleanRef, + intRef, + natRef, + floatRef, + charRef, + textRef, +) where import Unison.Prelude -import qualified Control.Monad.Writer.Strict as Writer -import Data.Functor.Identity (runIdentity) -import Data.Monoid (Any(..)) -import Data.List.Extra (nubOrd) import qualified Data.Map as Map import qualified Data.Set as Set -import Prelude.Extras (Eq1(..),Show1(..),Ord1(..)) +import Prelude.Extras (Eq1 (..), Ord1 (..), Show1 (..)) import qualified Unison.ABT as ABT -import Unison.Hashable (Hashable1) +import Unison.Hashable (Hashable1) import qualified Unison.Hashable as Hashable -import qualified Unison.Kind as K -import Unison.Hashing.V2.Reference (Reference) +import qualified Unison.Hashing.V2.ABT as ABT +import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference import qualified Unison.Hashing.V2.Reference.Util as ReferenceUtil -import Unison.Var (Var) -import qualified Unison.Var as Var -import qualified Unison.Settings as Settings -import qualified Unison.Names.ResolutionResult as Names +import qualified Unison.Kind as K import qualified Unison.Name as Name +import qualified Unison.Names.ResolutionResult as Names import qualified Unison.Util.List as List +import Unison.Var (Var) -- | Base functor for types in the Unison language data F a @@ -42,7 +54,7 @@ data F a | IntroOuter a -- binder like ∀, used to introduce variables that are -- bound by outer type signatures, to support scoped type -- variables - deriving (Foldable,Functor,Generic,Generic1,Eq,Ord,Traversable) + deriving (Foldable,Functor,Eq,Ord,Traversable) instance Eq1 F where (==#) = (==) instance Ord1 F where compare1 = compare @@ -51,9 +63,6 @@ instance Show1 F where showsPrec1 = showsPrec -- | Types are represented as ABTs over the base functor F, with variables in `v` type Type v a = ABT.Term F v a -wrapV :: Ord v => Type v a -> Type (ABT.V v) a -wrapV = ABT.vmap ABT.Bound - freeVars :: Type v a -> Set v freeVars = ABT.freeVars @@ -74,107 +83,15 @@ bindReferences keepFree ns t = let ok (v, a, Nothing) = Left (pure (Names.TypeResolutionFailure v a mempty)) in List.validate ok rs <&> \es -> bindExternal es t -bindNames - :: Var v - => Set v - -> Map Name.Name Reference - -> Type v a - -> Names.ResolutionResult v a (Type v a) -bindNames keepFree ns t = let - fvs = ABT.freeVarOccurrences keepFree t - rs = [(v, a, Map.lookup (Name.fromVar v) ns) | (v, a) <- fvs] - ok (v, _a, Just r) = pure (v, r) - ok (v, a, Nothing) = Left (pure (Names.TypeResolutionFailure v a mempty)) - in List.validate ok rs <&> \es -> bindExternal es t - newtype Monotype v a = Monotype { getPolytype :: Type v a } deriving Eq instance (Show v) => Show (Monotype v a) where show = show . getPolytype --- Smart constructor which checks if a `Type` has no `Forall` quantifiers. -monotype :: ABT.Var v => Type v a -> Maybe (Monotype v a) -monotype t = Monotype <$> ABT.visit isMono t where - isMono (Forall' _) = Just Nothing - isMono _ = Nothing - -arity :: Type v a -> Int -arity (ForallNamed' _ body) = arity body -arity (Arrow' _ o) = 1 + arity o -arity (Ann' a _) = arity a -arity _ = 0 - -- some smart patterns pattern Ref' r <- ABT.Tm' (Ref r) -pattern Arrow' i o <- ABT.Tm' (Arrow i o) -pattern Arrow'' i es o <- Arrow' i (Effect'' es o) -pattern Arrows' spine <- (unArrows -> Just spine) -pattern EffectfulArrows' fst rest <- (unEffectfulArrows -> Just (fst, rest)) -pattern Ann' t k <- ABT.Tm' (Ann t k) -pattern App' f x <- ABT.Tm' (App f x) -pattern Apps' f args <- (unApps -> Just (f, args)) -pattern Pure' t <- (unPure -> Just t) -pattern Effects' es <- ABT.Tm' (Effects es) --- Effect1' must match at least one effect -pattern Effect1' e t <- ABT.Tm' (Effect e t) -pattern Effect' es t <- (unEffects1 -> Just (es, t)) -pattern Effect'' es t <- (unEffect0 -> (es, t)) --- Effect0' may match zero effects -pattern Effect0' es t <- (unEffect0 -> (es, t)) -pattern Forall' subst <- ABT.Tm' (Forall (ABT.Abs' subst)) -pattern IntroOuter' subst <- ABT.Tm' (IntroOuter (ABT.Abs' subst)) -pattern IntroOuterNamed' v body <- ABT.Tm' (IntroOuter (ABT.out -> ABT.Abs v body)) pattern ForallsNamed' vs body <- (unForalls -> Just (vs, body)) pattern ForallNamed' v body <- ABT.Tm' (Forall (ABT.out -> ABT.Abs v body)) -pattern Var' v <- ABT.Var' v -pattern Cycle' xs t <- ABT.Cycle' xs t -pattern Abs' subst <- ABT.Abs' subst - -unPure :: Ord v => Type v a -> Maybe (Type v a) -unPure (Effect'' [] t) = Just t -unPure (Effect'' _ _) = Nothing -unPure t = Just t - -unArrows :: Type v a -> Maybe [Type v a] -unArrows t = - case go t of [_] -> Nothing; l -> Just l - where go (Arrow' i o) = i : go o - go o = [o] - -unEffectfulArrows - :: Type v a -> Maybe (Type v a, [(Maybe [Type v a], Type v a)]) -unEffectfulArrows t = case t of - Arrow' i o -> Just (i, go o) - _ -> Nothing - where - go (Effect1' (Effects' es) (Arrow' i o)) = - (Just $ es >>= flattenEffects, i) : go o - go (Effect1' (Effects' es) t) = [(Just $ es >>= flattenEffects, t)] - go (Arrow' i o) = (Nothing, i) : go o - go t = [(Nothing, t)] - -unApps :: Type v a -> Maybe (Type v a, [Type v a]) -unApps t = case go t [] of - [] -> Nothing - [ _ ] -> Nothing - f : args -> Just (f, args) - where - go (App' i o) acc = go i (o : acc) - go fn args = fn : args - -unIntroOuters :: Type v a -> Maybe ([v], Type v a) -unIntroOuters t = go t [] - where go (IntroOuterNamed' v body) vs = go body (v:vs) - go _body [] = Nothing - go body vs = Just (reverse vs, body) - --- Most code doesn't care about `introOuter` binders and is fine dealing with the --- these outer variable references as free variables. This function strips out --- one or more `introOuter` binders, so `outer a b . (a, b)` becomes `(a, b)`. -stripIntroOuters :: Type v a -> Type v a -stripIntroOuters t = case unIntroOuters t of - Just (_, t) -> t - Nothing -> t unForalls :: Type v a -> Maybe ([v], Type v a) unForalls t = go t [] @@ -182,20 +99,6 @@ unForalls t = go t [] go _body [] = Nothing go body vs = Just(reverse vs, body) -unEffect0 :: Ord v => Type v a -> ([Type v a], Type v a) -unEffect0 (Effect1' e a) = (flattenEffects e, a) -unEffect0 t = ([], t) - -unEffects1 :: Ord v => Type v a -> Maybe ([Type v a], Type v a) -unEffects1 (Effect1' (Effects' es) a) = Just (es, a) -unEffects1 _ = Nothing - --- | True if the given type is a function, possibly quantified -isArrow :: ABT.Var v => Type v a -> Bool -isArrow (ForallNamed' _ t) = isArrow t -isArrow (Arrow' _ _) = True -isArrow _ = False - -- some smart constructors ref :: Ord v => a -> Reference -> Type v a @@ -204,16 +107,7 @@ ref a = ABT.tm' a . Ref refId :: Ord v => a -> Reference.Id -> Type v a refId a = ref a . Reference.DerivedId -termLink :: Ord v => a -> Type v a -termLink a = ABT.tm' a . Ref $ termLinkRef - -typeLink :: Ord v => a -> Type v a -typeLink a = ABT.tm' a . Ref $ typeLinkRef - -derivedBase32Hex :: Ord v => Reference -> a -> Type v a -derivedBase32Hex r a = ref a r - -intRef, natRef, floatRef, booleanRef, textRef, charRef, listRef, bytesRef, effectRef, termLinkRef, typeLinkRef :: Reference +intRef, natRef, floatRef, booleanRef, textRef, charRef, listRef, effectRef :: Reference intRef = Reference.Builtin "Int" natRef = Reference.Builtin "Nat" floatRef = Reference.Builtin "Float" @@ -221,223 +115,11 @@ booleanRef = Reference.Builtin "Boolean" textRef = Reference.Builtin "Text" charRef = Reference.Builtin "Char" listRef = Reference.Builtin "Sequence" -bytesRef = Reference.Builtin "Bytes" effectRef = Reference.Builtin "Effect" -termLinkRef = Reference.Builtin "Link.Term" -typeLinkRef = Reference.Builtin "Link.Type" - -builtinIORef, fileHandleRef, filePathRef, threadIdRef, socketRef :: Reference -builtinIORef = Reference.Builtin "IO" -fileHandleRef = Reference.Builtin "Handle" -filePathRef = Reference.Builtin "FilePath" -threadIdRef = Reference.Builtin "ThreadId" -socketRef = Reference.Builtin "Socket" - -mvarRef, tvarRef :: Reference -mvarRef = Reference.Builtin "MVar" -tvarRef = Reference.Builtin "TVar" - -tlsRef :: Reference -tlsRef = Reference.Builtin "Tls" - -stmRef :: Reference -stmRef = Reference.Builtin "STM" - -tlsClientConfigRef :: Reference -tlsClientConfigRef = Reference.Builtin "Tls.ClientConfig" - -tlsServerConfigRef :: Reference -tlsServerConfigRef = Reference.Builtin "Tls.ServerConfig" - -tlsSignedCertRef :: Reference -tlsSignedCertRef = Reference.Builtin "Tls.SignedCert" - -tlsPrivateKeyRef :: Reference -tlsPrivateKeyRef = Reference.Builtin "Tls.PrivateKey" - -tlsCipherRef :: Reference -tlsCipherRef = Reference.Builtin "Tls.Cipher" - -tlsVersionRef :: Reference -tlsVersionRef = Reference.Builtin "Tls.Version" - -hashAlgorithmRef :: Reference -hashAlgorithmRef = Reference.Builtin "crypto.HashAlgorithm" - -codeRef, valueRef :: Reference -codeRef = Reference.Builtin "Code" -valueRef = Reference.Builtin "Value" - -anyRef :: Reference -anyRef = Reference.Builtin "Any" - -any :: Ord v => a -> Type v a -any a = ref a anyRef - -builtin :: Ord v => a -> Text -> Type v a -builtin a = ref a . Reference.Builtin - -int :: Ord v => a -> Type v a -int a = ref a intRef - -nat :: Ord v => a -> Type v a -nat a = ref a natRef - -float :: Ord v => a -> Type v a -float a = ref a floatRef - -boolean :: Ord v => a -> Type v a -boolean a = ref a booleanRef - -text :: Ord v => a -> Type v a -text a = ref a textRef - -char :: Ord v => a -> Type v a -char a = ref a charRef - -fileHandle :: Ord v => a -> Type v a -fileHandle a = ref a fileHandleRef - -threadId :: Ord v => a -> Type v a -threadId a = ref a threadIdRef - -builtinIO :: Ord v => a -> Type v a -builtinIO a = ref a builtinIORef - -socket :: Ord v => a -> Type v a -socket a = ref a socketRef - -list :: Ord v => a -> Type v a -list a = ref a listRef - -bytes :: Ord v => a -> Type v a -bytes a = ref a bytesRef - -effectType :: Ord v => a -> Type v a -effectType a = ref a $ effectRef - -code, value :: Ord v => a -> Type v a -code a = ref a codeRef -value a = ref a valueRef - -app :: Ord v => a -> Type v a -> Type v a -> Type v a -app a f arg = ABT.tm' a (App f arg) - --- `f x y z` means `((f x) y) z` and the annotation paired with `y` is the one --- meant for `app (f x) y` -apps :: Ord v => Type v a -> [(a, Type v a)] -> Type v a -apps = foldl' go where go f (a, t) = app a f t - -app' :: (Ord v, Semigroup a) => Type v a -> Type v a -> Type v a -app' f arg = app (ABT.annotation f <> ABT.annotation arg) f arg - -apps' :: (Semigroup a, Ord v) => Type v a -> [Type v a] -> Type v a -apps' = foldl app' - -arrow :: Ord v => a -> Type v a -> Type v a -> Type v a -arrow a i o = ABT.tm' a (Arrow i o) - -arrow' :: (Semigroup a, Ord v) => Type v a -> Type v a -> Type v a -arrow' i o = arrow (ABT.annotation i <> ABT.annotation o) i o - -ann :: Ord v => a -> Type v a -> K.Kind -> Type v a -ann a e t = ABT.tm' a (Ann e t) forall :: Ord v => a -> v -> Type v a -> Type v a forall a v body = ABT.tm' a (Forall (ABT.abs' a v body)) -introOuter :: Ord v => a -> v -> Type v a -> Type v a -introOuter a v body = ABT.tm' a (IntroOuter (ABT.abs' a v body)) - -iff :: Var v => Type v () -iff = forall () aa $ arrows (f <$> [boolean(), a, a]) a - where aa = Var.named "a" - a = var () aa - f x = ((), x) - -iff' :: Var v => a -> Type v a -iff' loc = forall loc aa $ arrows (f <$> [boolean loc, a, a]) a - where aa = Var.named "a" - a = var loc aa - f x = (loc, x) - -iff2 :: Var v => a -> Type v a -iff2 loc = forall loc aa $ arrows (f <$> [a, a]) a - where aa = Var.named "a" - a = var loc aa - f x = (loc, x) - -andor :: Ord v => Type v () -andor = arrows (f <$> [boolean(), boolean()]) $ boolean() - where f x = ((), x) - -andor' :: Ord v => a -> Type v a -andor' a = arrows (f <$> [boolean a, boolean a]) $ boolean a - where f x = (a, x) - -var :: Ord v => a -> v -> Type v a -var = ABT.annotatedVar - -v' :: Var v => Text -> Type v () -v' s = ABT.var (Var.named s) - --- Like `v'`, but creates an annotated variable given an annotation -av' :: Var v => a -> Text -> Type v a -av' a s = ABT.annotatedVar a (Var.named s) - -forall' :: Var v => a -> [Text] -> Type v a -> Type v a -forall' a vs body = foldr (forall a) body (Var.named <$> vs) - -foralls :: Ord v => a -> [v] -> Type v a -> Type v a -foralls a vs body = foldr (forall a) body vs - --- Note: `a -> b -> c` parses as `a -> (b -> c)` --- the annotation associated with `b` will be the annotation for the `b -> c` --- node -arrows :: Ord v => [(a, Type v a)] -> Type v a -> Type v a -arrows ts result = foldr go result ts where - go = uncurry arrow - --- The types of effectful computations -effect :: Ord v => a -> [Type v a] -> Type v a -> Type v a -effect a es (Effect1' fs t) = - let es' = (es >>= flattenEffects) ++ flattenEffects fs - in ABT.tm' a (Effect (ABT.tm' a (Effects es')) t) -effect a es t = ABT.tm' a (Effect (ABT.tm' a (Effects es)) t) - -effects :: Ord v => a -> [Type v a] -> Type v a -effects a es = ABT.tm' a (Effects $ es >>= flattenEffects) - -effect1 :: Ord v => a -> Type v a -> Type v a -> Type v a -effect1 a es (Effect1' fs t) = - let es' = flattenEffects es ++ flattenEffects fs - in ABT.tm' a (Effect (ABT.tm' a (Effects es')) t) -effect1 a es t = ABT.tm' a (Effect es t) - -flattenEffects :: Type v a -> [Type v a] -flattenEffects (Effects' es) = es >>= flattenEffects -flattenEffects es = [es] - --- The types of first-class effect values --- which get deconstructed in effect handlers. -effectV :: Ord v => a -> (a, Type v a) -> (a, Type v a) -> Type v a -effectV builtinA e t = apps (builtin builtinA "Effect") [e, t] - --- Strips effects from a type. E.g. `{e} a` becomes `a`. -stripEffect :: Ord v => Type v a -> ([Type v a], Type v a) -stripEffect (Effect' e t) = case stripEffect t of (ei, t) -> (e ++ ei, t) -stripEffect t = ([], t) - --- The type of the flipped function application operator: --- `(a -> (a -> b) -> b)` -flipApply :: Var v => Type v () -> Type v () -flipApply t = forall() b $ arrow() (arrow() t (var() b)) (var() b) - where b = ABT.fresh t (Var.named "b") - -generalize' :: Var v => Var.Type -> Type v a -> Type v a -generalize' k t = generalize vsk t where - vsk = [ v | v <- Set.toList (freeVars t), Var.typeOf v == k ] - -- | Bind the given variables with an outer `forall`, if they are used in `t`. generalize :: Ord v => [v] -> Type v a -> Type v a generalize vs t = foldr f t vs @@ -445,226 +127,15 @@ generalize vs t = foldr f t vs f v t = if Set.member v (ABT.freeVars t) then forall (ABT.annotation t) v t else t -unforall :: Type v a -> Type v a -unforall (ForallsNamed' _ t) = t -unforall t = t - unforall' :: Type v a -> ([v], Type v a) unforall' (ForallsNamed' vs t) = (vs, t) unforall' t = ([], t) -dependencies :: Ord v => Type v a -> Set Reference -dependencies t = Set.fromList . Writer.execWriter $ ABT.visit' f t - where f t@(Ref r) = Writer.tell [r] $> t - f t = pure t - -updateDependencies :: Ord v => Map Reference Reference -> Type v a -> Type v a -updateDependencies typeUpdates = ABT.rebuildUp go - where - go (Ref r) = Ref (Map.findWithDefault r r typeUpdates) - go f = f - -usesEffects :: Ord v => Type v a -> Bool -usesEffects t = getAny . getConst $ ABT.visit go t where - go (Effect1' _ _) = Just (Const (Any True)) - go _ = Nothing - --- Returns free effect variables in the given type, for instance, in: --- --- ∀ e3 . a ->{e,e2} b ->{e3} c --- --- This function would return the set {e, e2}, but not `e3` since `e3` --- is bound by the enclosing forall. -freeEffectVars :: Ord v => Type v a -> Set v -freeEffectVars t = - Set.fromList . join . runIdentity $ - ABT.foreachSubterm go (snd <$> ABT.annotateBound t) - where - go t@(Effects' es) = - let frees = Set.fromList [ v | Var' v <- es >>= flattenEffects ] - in pure . Set.toList $ frees `Set.difference` ABT.annotation t - go t@(Effect1' e _) = - let frees = Set.fromList [ v | Var' v <- flattenEffects e ] - in pure . Set.toList $ frees `Set.difference` ABT.annotation t - go _ = pure [] - --- Converts all unadorned arrows in a type to have fresh --- existential ability requirements. For example: --- --- (a -> b) -> [a] -> [b] --- --- Becomes --- --- (a ->{e1} b) ->{e2} [a] ->{e3} [b] -existentializeArrows :: (Ord v, Monad m) => m v -> Type v a -> m (Type v a) -existentializeArrows newVar t = ABT.visit go t - where - go t@(Arrow' a b) = case b of - -- If an arrow already has attached abilities, - -- leave it alone. Ex: `a ->{e} b` is kept as is. - Effect1' _ _ -> Just $ do - a <- existentializeArrows newVar a - b <- existentializeArrows newVar b - pure $ arrow (ABT.annotation t) a b - -- For unadorned arrows, make up a fresh variable. - -- So `a -> b` becomes `a ->{e} b`, using the - -- `newVar` variable generator. - _ -> Just $ do - e <- newVar - a <- existentializeArrows newVar a - b <- existentializeArrows newVar b - let ann = ABT.annotation t - pure $ arrow ann a (effect ann [var ann e] b) - go _ = Nothing - -purifyArrows :: (Ord v) => Type v a -> Type v a -purifyArrows = ABT.visitPure go - where - go t@(Arrow' a b) = case b of - Effect1' _ _ -> Nothing - _ -> Just $ arrow ann a (effect ann [] b) - where ann = ABT.annotation t - go _ = Nothing - --- Remove free effect variables from the type that are in the set -removeEffectVars :: ABT.Var v => Set v -> Type v a -> Type v a -removeEffectVars removals t = - let z = effects () [] - t' = ABT.substsInheritAnnotation ((,z) <$> Set.toList removals) t - -- leave explicitly empty `{}` alone - removeEmpty (Effect1' (Effects' []) v) = Just (ABT.visitPure removeEmpty v) - removeEmpty t@(Effect1' e v) = - case flattenEffects e of - [] -> Just (ABT.visitPure removeEmpty v) - es -> Just (effect (ABT.annotation t) es $ ABT.visitPure removeEmpty v) - removeEmpty t@(Effects' es) = - Just $ effects (ABT.annotation t) (es >>= flattenEffects) - removeEmpty _ = Nothing - in ABT.visitPure removeEmpty t' - --- Remove all effect variables from the type. --- Used for type-based search, we apply this transformation to both the --- indexed type and the query type, so the user can supply `a -> b` that will --- match `a ->{e} b` (but not `a ->{IO} b`). -removeAllEffectVars :: ABT.Var v => Type v a -> Type v a -removeAllEffectVars t = let - allEffectVars = foldMap go (ABT.subterms t) - go (Effects' vs) = Set.fromList [ v | Var' v <- vs] - go (Effect1' (Var' v) _) = Set.singleton v - go _ = mempty - (vs, tu) = unforall' t - in generalize vs (removeEffectVars allEffectVars tu) - -removePureEffects :: ABT.Var v => Type v a -> Type v a -removePureEffects t | not Settings.removePureEffects = t - | otherwise = - generalize vs $ removeEffectVars (Set.filter isPure fvs) tu - where - (vs, tu) = unforall' t - fvs = freeEffectVars tu `Set.difference` ABT.freeVars t - -- If an effect variable is mentioned only once, it is on - -- an arrow `a ->{e} b`. Generalizing this to - -- `∀ e . a ->{e} b` gives us the pure arrow `a -> b`. - isPure v = ABT.occurrences v tu <= 1 - -editFunctionResult - :: forall v a - . Ord v - => (Type v a -> Type v a) - -> Type v a - -> Type v a -editFunctionResult f = go - where - go :: Type v a -> Type v a - go (ABT.Term s a t) = case t of - ABT.Tm (Forall t) -> - (\x -> ABT.Term (s <> freeVars x) a . ABT.Tm $ Forall x) $ go t - ABT.Tm (Arrow i o) -> - (\x -> ABT.Term (s <> freeVars x) a . ABT.Tm $ Arrow i x) $ go o - ABT.Abs v r -> - (\x -> ABT.Term (s <> freeVars x) a $ ABT.Abs v x) $ go r - _ -> f (ABT.Term s a t) - -functionResult :: Type v a -> Maybe (Type v a) -functionResult = go False - where - go inArr (ForallNamed' _ body) = go inArr body - go _inArr (Arrow' _i o ) = go True o - go inArr t = if inArr then Just t else Nothing - - --- | Bind all free variables (not in `except`) that start with a lowercase --- letter and are unqualified with an outer `forall`. --- `a -> a` becomes `∀ a . a -> a` --- `B -> B` becomes `B -> B` (not changed) --- `.foo -> .foo` becomes `.foo -> .foo` (not changed) --- `.foo.bar -> blarrg.woot` becomes `.foo.bar -> blarrg.woot` (unchanged) -generalizeLowercase :: Var v => Set v -> Type v a -> Type v a -generalizeLowercase except t = foldr (forall (ABT.annotation t)) t vars - where - vars = - [ v | v <- Set.toList (ABT.freeVars t `Set.difference` except), Var.universallyQuantifyIfFree v ] - --- Convert all free variables in `allowed` to variables bound by an `introOuter`. -freeVarsToOuters :: Ord v => Set v -> Type v a -> Type v a -freeVarsToOuters allowed t = foldr (introOuter (ABT.annotation t)) t vars - where vars = Set.toList $ ABT.freeVars t `Set.intersection` allowed - --- | This function removes all variable shadowing from the types and reduces --- fresh ids to the minimum possible to avoid ambiguity. Useful when showing --- two different types. -cleanupVars :: Var v => [Type v a] -> [Type v a] -cleanupVars ts | not Settings.cleanupTypes = ts -cleanupVars ts = let - changedVars = cleanupVarsMap ts - in cleanupVars1' changedVars <$> ts - --- Compute a variable replacement map from a collection of types, which --- can be passed to `cleanupVars1'`. This is used to cleanup variable ids --- for multiple related types, like when reporting a type error. -cleanupVarsMap :: Var v => [Type v a] -> Map.Map v v -cleanupVarsMap ts = let - varsByName = foldl' step Map.empty (ts >>= ABT.allVars) - step m v = Map.insertWith (++) (Var.name $ Var.reset v) [v] m - changedVars = Map.fromList [ (v, Var.freshenId i v) - | (_, vs) <- Map.toList varsByName - , (v,i) <- nubOrd vs `zip` [0..]] - in changedVars - -cleanupVars1' :: Var v => Map.Map v v -> Type v a -> Type v a -cleanupVars1' = ABT.changeVars - --- | This function removes all variable shadowing from the type and reduces --- fresh ids to the minimum possible to avoid ambiguity. -cleanupVars1 :: Var v => Type v a -> Type v a -cleanupVars1 t | not Settings.cleanupTypes = t -cleanupVars1 t = let [t'] = cleanupVars [t] in t' - --- This removes duplicates and normalizes the order of ability lists -cleanupAbilityLists :: Var v => Type v a -> Type v a -cleanupAbilityLists = ABT.visitPure go - where - -- leave explicitly empty `{}` alone - go (Effect1' (Effects' []) _v) = Nothing - go t@(Effect1' e v) = - let es = Set.toList . Set.fromList $ flattenEffects e - in case es of - [] -> Just (ABT.visitPure go v) - _ -> Just (effect (ABT.annotation t) es $ ABT.visitPure go v) - go _ = Nothing - -cleanups :: Var v => [Type v a] -> [Type v a] -cleanups ts = cleanupVars $ map cleanupAbilityLists ts - -cleanup :: Var v => Type v a -> Type v a -cleanup t | not Settings.cleanupTypes = t -cleanup t = cleanupVars1 . cleanupAbilityLists $ t - toReference :: (ABT.Var v, Show v) => Type v a -> Reference toReference (Ref' r) = r -- a bit of normalization - any unused type parameters aren't part of the hash toReference (ForallNamed' v body) | not (Set.member v (ABT.freeVars body)) = toReference body -toReference t = Reference.Derived (ABT.hash t) 0 1 +toReference t = Reference.Derived (ABT.hash t) 0 toReferenceMentions :: (ABT.Var v, Show v) => Type v a -> Set Reference toReferenceMentions ty = diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 819a8ce5da..d49851fb88 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -92,18 +92,18 @@ library Unison.DeclPrinter Unison.FileParser Unison.FileParsers + Unison.Hashing.V1.ABT Unison.Hashing.V1.Convert Unison.Hashing.V1.DataDeclaration - Unison.Hashing.V1.LabeledDependency Unison.Hashing.V1.Pattern Unison.Hashing.V1.Reference Unison.Hashing.V1.Reference.Util Unison.Hashing.V1.Referent Unison.Hashing.V1.Term Unison.Hashing.V1.Type + Unison.Hashing.V2.ABT Unison.Hashing.V2.Convert Unison.Hashing.V2.DataDeclaration - Unison.Hashing.V2.LabeledDependency Unison.Hashing.V2.Pattern Unison.Hashing.V2.Reference Unison.Hashing.V2.Reference.Util diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index d3aa5232a1..00ad0613fa 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -9,20 +9,16 @@ module Unison.ABT where import Unison.Prelude +import Prelude hiding (abs, cycle) import Control.Lens (Lens', use, (.=)) -import Control.Monad.State (MonadState,evalState) +import Control.Monad.State (MonadState, evalState) +import qualified Data.Foldable as Foldable import Data.Functor.Identity (runIdentity) import Data.List hiding (cycle) -import Data.Vector ((!)) -import Prelude hiding (abs,cycle) -import Prelude.Extras (Eq1(..), Show1(..), Ord1(..)) -import Unison.Hashable (Accumulate,Hashable1,hash1) -import qualified Data.Foldable as Foldable import qualified Data.Map as Map import qualified Data.Set as Set -import qualified Data.Vector as Vector -import qualified Unison.Hashable as Hashable +import Prelude.Extras (Eq1 (..), Ord1 (..), Show1 (..)) import qualified Unison.Util.Components as Components data ABT f v r @@ -651,105 +647,6 @@ orderedComponents bs0 = tweak =<< orderedComponents' bs0 where isCyclic [(v,b)] = Set.member v (freeVars b) isCyclic bs = length bs > 1 --- Hash a strongly connected component and sort its definitions into a canonical order. -hashComponent :: - (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Ord v, Ord h, Accumulate h) - => Map.Map v (Term f v a) -> (h, [(v, Term f v a)]) -hashComponent byName = let - ts = Map.toList byName - embeds = [ (v, void (transform Embed t)) | (v,t) <- ts ] - vs = fst <$> ts - tms = [ (v, absCycle vs (tm $ Component (snd <$> embeds) (var v))) | v <- vs ] - hashed = [ ((v,t), hash t) | (v,t) <- tms ] - sortedHashed = sortOn snd hashed - overallHash = Hashable.accumulate (Hashable.Hashed . snd <$> sortedHashed) - in (overallHash, [ (v, t) | ((v, _),_) <- sortedHashed, Just t <- [Map.lookup v byName] ]) - --- Group the definitions into strongly connected components and hash --- each component. Substitute the hash of each component into subsequent --- components (using the `termFromHash` function). Requires that the --- overall component has no free variables. -hashComponents - :: (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v, Ord h, Accumulate h) - => (h -> Word64 -> Term f v ()) - -> Map.Map v (Term f v a) - -> [(h, [(v, Term f v a)])] -hashComponents termFromHash termsByName = let - bound = Set.fromList (Map.keys termsByName) - escapedVars = Set.unions (freeVars <$> Map.elems termsByName) `Set.difference` bound - sccs = components (Map.toList termsByName) - go _ [] = [] - go prevHashes (component : rest) = let - sub = substsInheritAnnotation (Map.toList prevHashes) - (h, sortedComponent) = hashComponent $ Map.fromList [ (v, sub t) | (v, t) <- component ] - curHashes = Map.fromList [ (v, termFromHash h i) | ((v, _),i) <- sortedComponent `zip` [0..]] - newHashes = prevHashes `Map.union` curHashes - newHashesL = Map.toList newHashes - sortedComponent' = [ (v, substsInheritAnnotation newHashesL t) | (v, t) <- sortedComponent ] - in (h, sortedComponent') : go newHashes rest - in if Set.null escapedVars then go Map.empty sccs - else error $ "can't hashComponents if bindings have free variables:\n " - ++ show (map show (Set.toList escapedVars)) - ++ "\n " ++ show (map show (Map.keys termsByName)) - --- Implementation detail of hashComponent -data Component f a = Component [a] a | Embed (f a) deriving (Functor, Traversable, Foldable) - -instance (Hashable1 f, Functor f) => Hashable1 (Component f) where - hash1 hashCycle hash c = case c of - Component as a -> let - (hs, hash) = hashCycle as - toks = Hashable.Hashed <$> hs - in Hashable.accumulate $ (Hashable.Tag 1 : toks) ++ [Hashable.Hashed (hash a)] - Embed fa -> Hashable.hash1 hashCycle hash fa - --- | We ignore annotations in the `Term`, as these should never affect the --- meaning of the term. -hash :: forall f v a h . (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) - => Term f v a -> h -hash = hash' [] where - hash' :: [Either [v] v] -> Term f v a -> h - hash' env (Term _ _ t) = case t of - Var v -> maybe die hashInt ind - where lookup (Left cycle) = v `elem` cycle - lookup (Right v') = v == v' - ind = findIndex lookup env - hashInt :: Int -> h - hashInt i = Hashable.accumulate [Hashable.Nat $ fromIntegral i] - die = error $ "unknown var in environment: " ++ show v - ++ " environment = " ++ show env - Cycle (AbsN' vs t) -> hash' (Left vs : env) t - -- Cycle t -> hash' env t - Abs v t -> hash' (Right v : env) t - Tm t -> Hashable.hash1 (hashCycle env) (hash' env) t - - hashCycle :: [Either [v] v] -> [Term f v a] -> ([h], Term f v a -> h) - hashCycle env@(Left cycle : envTl) ts | length cycle == length ts = - let - permute p xs = case Vector.fromList xs of xs -> map (xs !) p - hashed = map (\(i,t) -> ((i,t), hash' env t)) (zip [0..] ts) - pt = fst <$> sortOn snd hashed - (p,ts') = unzip pt - in case map Right (permute p cycle) ++ envTl of - env -> (map (hash' env) ts', hash' env) - hashCycle env ts = (map (hash' env) ts, hash' env) - --- | Use the `hash` function to efficiently remove duplicates from the list, preserving order. -distinct :: forall f v h a proxy . (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) - => proxy h - -> [Term f v a] -> [Term f v a] -distinct _ ts = fst <$> sortOn snd m - where m = Map.elems (Map.fromList (hashes `zip` (ts `zip` [0 :: Int .. 1]))) - hashes = map hash ts :: [h] - --- | Use the `hash` function to remove elements from `t1s` that exist in `t2s`, preserving order. -subtract :: forall f v h a proxy . (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) - => proxy h - -> [Term f v a] -> [Term f v a] -> [Term f v a] -subtract _ t1s t2s = - let skips = Set.fromList (map hash t2s :: [h]) - in filter (\t -> Set.notMember (hash t) skips) t1s - instance (Show1 f, Show v) => Show (Term f v a) where -- annotations not shown showsPrec p (Term _ _ out) = case out of diff --git a/unison-core/src/Unison/Reference/Util.hs b/unison-core/src/Unison/Reference/Util.hs deleted file mode 100644 index 69a663a5a6..0000000000 --- a/unison-core/src/Unison/Reference/Util.hs +++ /dev/null @@ -1,21 +0,0 @@ -module Unison.Reference.Util where - -import Unison.Prelude - -import qualified Unison.Reference as Reference -import Unison.Hashable (Hashable1) -import Unison.ABT (Var) -import qualified Unison.ABT as ABT -import qualified Data.Map as Map - -hashComponents :: - (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v) - => (Reference.Id -> ABT.Term f v ()) - -> Map v (ABT.Term f v a) - -> Map v (Reference.Id, ABT.Term f v a) -hashComponents embedRef tms = - Map.fromList [ (v, (r,e)) | ((v,e), r) <- cs ] - where cs = Reference.components $ ABT.hashComponents ref tms - ref h i = embedRef (Reference.Id h i) - - diff --git a/unison-core/unison-core1.cabal b/unison-core/unison-core1.cabal index 5b2c5e5958..04fd60fcb3 100644 --- a/unison-core/unison-core1.cabal +++ b/unison-core/unison-core1.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -- --- hash: 3dc00080efb41dcfb41dd8f03bea8ab3e2550a41a92fe1962b9210b52393ce88 +-- hash: 376639c26bffa766c92695ee10dfb8e173ff944dd0483f4aa41b3a6a79887099 name: unison-core1 version: 0.0.0 @@ -47,7 +47,6 @@ library Unison.Pattern Unison.Prelude Unison.Reference - Unison.Reference.Util Unison.Referent Unison.Referent' Unison.Settings From 2139e6959d71a3e54cba3c594e93df1d83eb321f Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sat, 25 Sep 2021 22:50:06 -0400 Subject: [PATCH 010/297] add constants for optional/either/seqView ctor ids # Conflicts: # parser-typechecker/src/Unison/Runtime/Builtin.hs --- .../src/Unison/Builtin/Decls.hs | 3 + .../src/Unison/Runtime/Builtin.hs | 117 ++++++++++-------- 2 files changed, 71 insertions(+), 49 deletions(-) diff --git a/parser-typechecker/src/Unison/Builtin/Decls.hs b/parser-typechecker/src/Unison/Builtin/Decls.hs index b24ebf9fad..292b830098 100644 --- a/parser-typechecker/src/Unison/Builtin/Decls.hs +++ b/parser-typechecker/src/Unison/Builtin/Decls.hs @@ -86,6 +86,7 @@ constructorId ref name = do noneId, someId, okConstructorId, failConstructorId, docBlobId, docLinkId, docSignatureId, docSourceId, docEvaluateId, docJoinId, linkTermId, linkTypeId, eitherRightId, eitherLeftId :: ConstructorId isPropagatedConstructorId, isTestConstructorId, bufferModeNoBufferingId, bufferModeLineBufferingId, bufferModeBlockBufferingId, bufferModeSizedBlockBufferingId :: ConstructorId +seqViewEmpty, seqViewElem :: ConstructorId Just noneId = constructorId optionalRef "Optional.None" Just someId = constructorId optionalRef "Optional.Some" Just isPropagatedConstructorId = constructorId isPropagatedRef "IsPropagated.IsPropagated" @@ -102,6 +103,8 @@ Just linkTermId = constructorId linkRef "Link.Term" Just linkTypeId = constructorId linkRef "Link.Type" Just eitherRightId = constructorId eitherRef "Either.Right" Just eitherLeftId = constructorId eitherRef "Either.Left" +Just seqViewEmpty = constructorId seqViewRef "SeqView.VEmpty" +Just seqViewElem = constructorId seqViewRef "SeqView.VElem" Just bufferModeNoBufferingId = constructorId bufferModeRef "io2.BufferMode.NoBuffering" Just bufferModeLineBufferingId = constructorId bufferModeRef "io2.BufferMode.LineBuffering" diff --git a/parser-typechecker/src/Unison/Runtime/Builtin.hs b/parser-typechecker/src/Unison/Runtime/Builtin.hs index a0bcec728c..34adb943d0 100644 --- a/parser-typechecker/src/Unison/Runtime/Builtin.hs +++ b/parser-typechecker/src/Unison/Runtime/Builtin.hs @@ -32,7 +32,6 @@ import Unison.Runtime.Foreign ( Foreign(Wrap), HashAlgorithm(..), pattern Failure) import qualified Unison.Runtime.Foreign as F import Unison.Runtime.Foreign.Function -import Unison.Runtime.IOSource (eitherReference) import qualified Unison.Type as Ty import qualified Unison.Builtin as Ty (builtinTypes) @@ -58,7 +57,7 @@ import Data.PEM (pemContent, pemParseLBS, PEM) import Data.Set (insert) import qualified Data.Map as Map -import Unison.Prelude +import Unison.Prelude hiding (some) import qualified Unison.Util.Bytes as Bytes import Network.Socket as SYS ( accept @@ -182,6 +181,17 @@ fls, tru :: Var v => ANormal v fls = TCon Ty.booleanRef 0 [] tru = TCon Ty.booleanRef 1 [] +none :: Var v => ANormal v +none = TCon Ty.optionalRef (toEnum Ty.noneId) [] +some, left, right :: Var v => v -> ANormal v +some a = TCon Ty.optionalRef (toEnum Ty.someId) [a] +left x = TCon Ty.eitherRef (toEnum Ty.eitherLeftId) [x] +right x = TCon Ty.eitherRef (toEnum Ty.eitherRightId) [x] +seqViewEmpty :: Var v => ANormal v +seqViewEmpty = TCon Ty.seqViewRef (toEnum Ty.seqViewEmpty) [] +seqViewElem :: Var v => v -> v -> ANormal v +seqViewElem l r = TCon Ty.seqViewRef (toEnum Ty.seqViewElem) [l,r] + boolift :: Var v => v -> ANormal v boolift v = TMatch v $ MatchIntegral (mapFromList [(0,fls), (1,tru)]) Nothing @@ -440,24 +450,24 @@ sizet = unop0 1 $ \[x,r] unconst = unop0 7 $ \[x,t,c0,c,y,p,u,yp] -> TLetD t UN (TPrm UCNS [x]) . TMatch t . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) + [ (0, ([], none)) , (1, ([UN,BX], TAbss [c0,y] . TLetD u BX (TCon Ty.unitRef 0 []) . TLetD yp BX (TCon Ty.pairRef 0 [y,u]) . TLetD c BX (TCon Ty.charRef 0 [c0]) . TLetD p BX (TCon Ty.pairRef 0 [c,yp]) - $ TCon Ty.optionalRef 1 [p])) + $ some p)) ] unsnoct = unop0 7 $ \[x,t,c0,c,y,p,u,cp] -> TLetD t UN (TPrm USNC [x]) . TMatch t . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) + [ (0, ([], none)) , (1, ([BX,UN], TAbss [y,c0] . TLetD u BX (TCon Ty.unitRef 0 []) . TLetD c BX (TCon Ty.charRef 0 [c0]) . TLetD cp BX (TCon Ty.pairRef 0 [c,u]) . TLetD p BX (TCon Ty.pairRef 0 [y,cp]) - $ TCon Ty.optionalRef 1 [p])) + $ some p)) ] appends, conss, snocs :: Var v => SuperNormal v @@ -484,8 +494,8 @@ ats = binop0 3 $ \[x0,y,x,t,r] -> unbox x0 Ty.natRef x . TLetD t UN (TPrm IDXS [x,y]) . TMatch t . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) - , (1, ([BX], TAbs r $ TCon Ty.optionalRef 1 [r])) + [ (0, ([], none)) + , (1, ([BX], TAbs r $ some r)) ] emptys = Lambda [] $ TPrm BLDS [] @@ -493,14 +503,14 @@ viewls, viewrs :: Var v => SuperNormal v viewls = unop0 3 $ \[s,u,h,t] -> TLetD u UN (TPrm VWLS [s]) . TMatch u . MatchSum $ mapFromList - [ (0, ([], TCon Ty.seqViewRef 0 [])) - , (1, ([BX,BX], TAbss [h,t] $ TCon Ty.seqViewRef 1 [h,t])) + [ (0, ([], seqViewEmpty)) + , (1, ([BX,BX], TAbss [h,t] $ seqViewElem h t)) ] viewrs = unop0 3 $ \[s,u,i,l] -> TLetD u UN (TPrm VWRS [s]) . TMatch u . MatchSum $ mapFromList - [ (0, ([], TCon Ty.seqViewRef 0 [])) - , (1, ([BX,BX], TAbss [i,l] $ TCon Ty.seqViewRef 1 [i,l])) + [ (0, ([], seqViewEmpty)) + , (1, ([BX,BX], TAbss [i,l] $ seqViewElem i l)) ] eqt, neqt, leqt, geqt, lesst, great :: Var v => SuperNormal v @@ -551,10 +561,10 @@ atb = binop0 4 $ \[n0,b,n,t,r0,r] -> unbox n0 Ty.natRef n . TLetD t UN (TPrm IDXB [n,b]) . TMatch t . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) + [ (0, ([], none)) , (1, ([UN], TAbs r0 . TLetD r BX (TCon Ty.natRef 0 [r0]) - $ TCon Ty.optionalRef 1 [r])) + $ some r)) ] sizeb = unop0 1 $ \[b,n] @@ -578,26 +588,26 @@ t2i, t2n, t2f :: Var v => SuperNormal v t2i = unop0 3 $ \[x,t,n0,n] -> TLetD t UN (TPrm TTOI [x]) . TMatch t . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) + [ (0, ([], none)) , (1, ([UN], TAbs n0 . TLetD n BX (TCon Ty.intRef 0 [n0]) - $ TCon Ty.optionalRef 1 [n])) + $ some n)) ] t2n = unop0 3 $ \[x,t,n0,n] -> TLetD t UN (TPrm TTON [x]) . TMatch t . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) + [ (0, ([], none)) , (1, ([UN], TAbs n0 . TLetD n BX (TCon Ty.natRef 0 [n0]) - $ TCon Ty.optionalRef 1 [n])) + $ some n)) ] t2f = unop0 3 $ \[x,t,f0,f] -> TLetD t UN (TPrm TTOF [x]) . TMatch t . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) + [ (0, ([], none)) , (1, ([UN], TAbs f0 . TLetD f BX (TCon Ty.floatRef 0 [f0]) - $ TCon Ty.optionalRef 1 [f])) + $ some f)) ] equ :: Var v => SuperNormal v @@ -734,8 +744,8 @@ code'lookup = unop0 2 $ \[link,t,r] -> TLetD t UN (TPrm LKUP [link]) . TMatch t . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) - , (1, ([BX], TAbs r $ TCon Ty.optionalRef 1 [r])) + [ (0, ([], none)) + , (1, ([BX], TAbs r $ some r)) ] code'validate :: Var v => SuperNormal v @@ -747,9 +757,9 @@ code'validate [ (1, ([BX, BX, BX],) . TAbss [ref, msg, extra] . TLetD fail BX (TCon Ty.failureRef 0 [ref, msg, extra]) - $ TCon Ty.optionalRef 1 [fail]) + $ some fail) , (0, ([],) - $ TCon Ty.optionalRef 0 []) + $ none) ] term'link'to'text :: Var v => SuperNormal v @@ -761,8 +771,8 @@ value'load = unop0 2 $ \[vlu,t,r] -> TLetD t UN (TPrm LOAD [vlu]) . TMatch t . MatchSum $ mapFromList - [ (0, ([BX], TAbs r $ TCon Ty.eitherRef 0 [r])) - , (1, ([BX], TAbs r $ TCon Ty.eitherRef 1 [r])) + [ (0, ([BX], TAbs r $ left r)) + , (1, ([BX], TAbs r $ right r)) ] value'create :: Var v => SuperNormal v @@ -792,7 +802,7 @@ standard'handle instr any'construct :: Var v => SuperNormal v any'construct = unop0 0 $ \[v] - -> TCon Ty.anyRef 0 [v] + -> TCon Ty.anyRef 0 [v] any'extract :: Var v => SuperNormal v any'extract @@ -967,20 +977,20 @@ inBxIomr arg1 arg2 fm result cont instr outMaybe :: forall v. Var v => v -> v -> ANormal v outMaybe maybe result = TMatch result . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) - , (1, ([BX], TAbs maybe $ TCon Ty.optionalRef 1 [maybe])) + [ (0, ([], none)) + , (1, ([BX], TAbs maybe $ some maybe)) ] outMaybeTup :: forall v. Var v => v -> v -> v -> v -> v -> v -> v -> ANormal v outMaybeTup a b n u bp p result = TMatch result . MatchSum $ mapFromList - [ (0, ([], TCon Ty.optionalRef 0 [])) + [ (0, ([], none)) , (1, ([UN,BX], TAbss [a,b] . TLetD u BX (TCon Ty.unitRef 0 []) . TLetD bp BX (TCon Ty.pairRef 0 [b,u]) . TLetD n BX (TCon Ty.natRef 0 [a]) . TLetD p BX (TCon Ty.pairRef 0 [n,bp]) - $ TCon Ty.optionalRef 1 [p])) + $ some p)) ] outIoFail :: forall v. Var v => v -> v -> v -> v -> ANormal v @@ -989,8 +999,8 @@ outIoFail stack1 stack2 fail result = [ (0, ([BX, BX],) . TAbss [stack1, stack2] . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2]) - $ TCon eitherReference 0 [fail]) - , (1, ([BX], TAbs stack1 $ TCon eitherReference 1 [stack1])) + $ left fail) + , (1, ([BX], TAbs stack1 $ right stack1)) ] outIoFailNat :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v @@ -999,11 +1009,11 @@ outIoFailNat stack1 stack2 stack3 fail nat result = [ (0, ([BX, BX],) . TAbss [stack1, stack2] . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2]) - $ TCon eitherReference 0 [fail]) + $ left fail) , (1, ([UN],) . TAbs stack3 . TLetD nat BX (TCon Ty.natRef 0 [stack3]) - $ TCon eitherReference 1 [nat]) + $ right nat) ] outIoFailBox :: forall v. Var v => v -> v -> v -> v -> ANormal v @@ -1012,10 +1022,10 @@ outIoFailBox stack1 stack2 fail result = [ (0, ([BX, BX],) . TAbss [stack1, stack2] . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2]) - $ TCon eitherReference 0 [fail]) + $ left fail) , (1, ([BX],) . TAbs stack1 - $ TCon eitherReference 1 [stack1]) + $ right stack1) ] outIoFailUnit :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v @@ -1025,11 +1035,11 @@ outIoFailUnit stack1 stack2 stack3 unit fail result = [ (0, ([BX, BX],) . TAbss [stack1, stack2] . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2]) - $ TCon eitherReference 0 [fail]) + $ left fail) , (1, ([BX],) . TAbss [stack3] . TLetD unit BX (TCon Ty.unitRef 0 []) - $ TCon eitherReference 1 [unit]) + $ right unit) ] outIoFailBool :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v @@ -1039,11 +1049,11 @@ outIoFailBool stack1 stack2 stack3 bool fail result = [ (0, ([BX, BX],) . TAbss [stack1, stack2] . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2]) - $ TCon eitherReference 0 [fail]) + $ left fail) , (1, ([UN],) . TAbs stack3 . TLet (Indirect 1) bool BX (boolift stack3) - $ TCon eitherReference 1 [bool]) + $ right bool) ] outIoFailG @@ -1055,9 +1065,9 @@ outIoFailG stack1 stack2 fail result output k [ (0, ([BX, BX],) . TAbss [stack1, stack2] . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2]) - $ TCon eitherReference 0 [fail]) + $ left fail) , (1, k $ \t -> TLetD output BX t - $ TCon eitherReference 1 [output]) + $ right output) ] -- Input / Output glue @@ -1090,6 +1100,12 @@ unitToEFNat = inUnit unit result $ outIoFailNat stack1 stack2 stack3 fail nat result where (unit, stack1, stack2, stack3, fail, nat, result) = fresh7 +-- () -> Int +unitToInt :: ForeignOp +unitToInt = inUnit unit result + $ TCon Ty.intRef 0 [result] + where (unit, result) = fresh2 + -- () -> Either Failure a unitToEFBox :: ForeignOp unitToEFBox = inUnit unit result @@ -1190,8 +1206,8 @@ boxToEFMBox = inBx arg result . outIoFailG stack1 stack2 fail result output $ \k -> ([UN], TAbs stack3 . TMatch stack3 . MatchSum $ mapFromList - [ (0, ([], k $ TCon Ty.optionalRef 0 [])) - , (1, ([BX], TAbs stack4 . k $ TCon Ty.optionalRef 1 [stack4])) + [ (0, ([], k $ none)) + , (1, ([BX], TAbs stack4 . k $ some stack4)) ]) where (arg, result, stack1, stack2, stack3, stack4, fail, output) = fresh8 @@ -1276,10 +1292,10 @@ natToEFUnit [ (0, ([BX, BX],) . TAbss [stack1, stack2] . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2]) - $ TCon eitherReference 0 [fail]) + $ left fail) , (1, ([],) . TLetD unit BX (TCon Ty.unitRef 0 []) - $ TCon eitherReference 1 [unit]) + $ right unit) ] where @@ -1293,8 +1309,8 @@ boxToEBoxBox instr . TLetD e UN (TFOp instr [b]) . TMatch e . MatchSum $ mapFromList - [ (0, ([BX], TAbs ev $ TCon eitherReference 0 [ev])) - , (1, ([BX], TAbs ev $ TCon eitherReference 1 [ev])) + [ (0, ([BX], TAbs ev $ left ev)) + , (1, ([BX], TAbs ev $ right ev)) ] where (e,b,ev) = fresh3 @@ -1565,6 +1581,9 @@ declareForeigns = do declareForeign "IO.systemTime.impl.v3" unitToEFNat $ mkForeignIOF $ \() -> getPOSIXTime + declareForeign "IO.systemTimeMicroseconds.v1" unitToInt + $ mkForeign $ \() -> fmap (1e6 *) getPOSIXTime + declareForeign "IO.getTempDirectory.impl.v3" unitToEFBox $ mkForeignIOF $ \() -> getTemporaryDirectory From d50ab2f739a86ebd5a55f073d40a820ae365307b Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sat, 25 Sep 2021 23:48:44 -0400 Subject: [PATCH 011/297] some renames in Sqlite.Operations --- .../U/Codebase/Sqlite/Operations.hs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index fa1c465358..dd1408c140 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -143,8 +143,8 @@ type EDB m = (Err m, DB m) type ErrString = String data DecodeError - = ErrTermComponent - | ErrDeclComponent + = ErrTermFormat + | ErrDeclFormat | ErrTermElement Word64 | ErrDeclElement Word64 | ErrFramedArrayLen @@ -389,8 +389,8 @@ diffPatch (S.Patch fullTerms fullTypes) (S.Patch refTerms refTypes) = -- * Deserialization helpers -decodeTermComponent :: Err m => ByteString -> m S.Term.TermFormat -decodeTermComponent = getFromBytesOr ErrTermComponent S.getTermFormat +decodeTermFormat :: Err m => ByteString -> m S.Term.TermFormat +decodeTermFormat = getFromBytesOr ErrTermFormat S.getTermFormat decodeComponentLengthOnly :: Err m => ByteString -> m Word64 decodeComponentLengthOnly = getFromBytesOr ErrFramedArrayLen (Get.skip 1 >> S.lengthFramedArray) @@ -404,8 +404,8 @@ decodeTermElementDiscardingTerm i = getFromBytesOr (ErrTermElement i) (S.lookupT decodeTermElementDiscardingType :: Err m => C.Reference.Pos -> ByteString -> m (LocalIds, S.Term.Term) decodeTermElementDiscardingType i = getFromBytesOr (ErrTermElement i) (S.lookupTermElementDiscardingType i) -decodeDeclComponent :: Err m => ByteString -> m S.Decl.DeclFormat -decodeDeclComponent = getFromBytesOr ErrDeclComponent S.getDeclFormat +decodeDeclFormat :: Err m => ByteString -> m S.Decl.DeclFormat +decodeDeclFormat = getFromBytesOr ErrDeclFormat S.getDeclFormat decodeDeclElement :: Err m => Word64 -> ByteString -> m (LocalIds, S.Decl.Decl Symbol) decodeDeclElement i = getFromBytesOr (ErrDeclElement i) (S.lookupDeclElement i) @@ -446,7 +446,7 @@ loadTermComponent h = do MaybeT (anyHashToMaybeObjectId h) >>= liftQ . Q.loadObjectById -- retrieve and deserialize the blob - >>= decodeTermComponent + >>= decodeTermFormat >>= \case S.Term.Term (S.Term.LocallyIndexedComponent elements) -> lift . traverse (uncurry3 s2cTermWithType) $ @@ -763,7 +763,7 @@ loadDeclComponent :: EDB m => H.Hash -> MaybeT m [C.Decl Symbol] loadDeclComponent h = do MaybeT (anyHashToMaybeObjectId h) >>= liftQ . Q.loadObjectById - >>= decodeDeclComponent + >>= decodeDeclFormat >>= \case S.Decl.Decl (S.Decl.LocallyIndexedComponent elements) -> lift . traverse (uncurry s2cDecl) $ Foldable.toList elements From 1b928c516d46cfae120620652131e2e11e11ae39 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sun, 26 Sep 2021 00:03:37 -0400 Subject: [PATCH 012/297] superfluous read in U.Util.Serialization.getFramedArray --- codebase2/util-serialization/U/Util/Serialization.hs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/codebase2/util-serialization/U/Util/Serialization.hs b/codebase2/util-serialization/U/Util/Serialization.hs index 7354037556..77831146b0 100644 --- a/codebase2/util-serialization/U/Util/Serialization.hs +++ b/codebase2/util-serialization/U/Util/Serialization.hs @@ -217,8 +217,7 @@ putFramedArray put (toList -> as) = do getFramedArray :: MonadGet m => m a -> m (Vector a) getFramedArray getA = do offsets :: [Int] <- getList getVarInt - _end <- getVarInt @_ @Int - let count = length offsets + let count = length offsets - 1 Vector.replicateM count getA -- | Look up a 0-based index in a framed array, O(num array elements), From dd73011846c6c931e5827021757a10344904f571 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sun, 26 Sep 2021 00:12:28 -0400 Subject: [PATCH 013/297] update hash in gitsync22.sc.history test --- parser-typechecker/tests/Unison/Test/GitSync.hs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/parser-typechecker/tests/Unison/Test/GitSync.hs b/parser-typechecker/tests/Unison/Test/GitSync.hs index 1b81613e21..532d4e8fa9 100644 --- a/parser-typechecker/tests/Unison/Test/GitSync.hs +++ b/parser-typechecker/tests/Unison/Test/GitSync.hs @@ -194,10 +194,12 @@ test = scope "gitsync22" . tests $ ```ucm .> pull ${repo} .> history - .> reset-root #dsh + .> reset-root #t21 .> history ``` - |]) + |]) -- Not sure why this hash is here. + -- Is it to test `reset-root`? + -- Or to notice a change in hashing? , pushPullTest "one-term" fmt From 581a481ea0b1bd8656e6c06c090c095bad2e69b9 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 27 Sep 2021 13:12:33 -0400 Subject: [PATCH 014/297] some pattern-matching-related(?) fixes --- parser-typechecker/src/Unison/Runtime/ANF.hs | 6 +++--- parser-typechecker/src/Unison/Runtime/Builtin.hs | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/parser-typechecker/src/Unison/Runtime/ANF.hs b/parser-typechecker/src/Unison/Runtime/ANF.hs index 09fb839024..163f6cd14e 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF.hs @@ -103,7 +103,7 @@ import qualified Unison.Pattern as P import Unison.Reference (Reference(..)) import Unison.Referent (Referent) --- For internal errors +-- For internal errors data CompileExn = CE CallStack (Pretty.Pretty Pretty.ColorText) deriving (Show) instance Exception CompileExn @@ -1022,8 +1022,8 @@ anfBlock (Match' scrut cas) = do , pure . TMatch r $ MatchDataCover Ty.seqViewRef (EC.mapFromList - [ (0, ([], em)) - , (1, ([BX,BX], bd)) + [ (toEnum Ty.seqViewEmpty, ([], em)) + , (toEnum Ty.seqViewElem, ([BX,BX], bd)) ] ) ) diff --git a/parser-typechecker/src/Unison/Runtime/Builtin.hs b/parser-typechecker/src/Unison/Runtime/Builtin.hs index 34adb943d0..347cc0c8dc 100644 --- a/parser-typechecker/src/Unison/Runtime/Builtin.hs +++ b/parser-typechecker/src/Unison/Runtime/Builtin.hs @@ -935,9 +935,9 @@ inMaybeBx arg1 arg2 arg3 mb result cont instr = . TAbss [arg1, arg2] . TMatch arg1 . flip (MatchData Ty.optionalRef) Nothing $ mapFromList - [ (0, ([], TLetD mb UN (TLit $ I 0) + [ (toEnum Ty.noneId, ([], TLetD mb UN (TLit $ I 0) $ TLetD result UN (TFOp instr [mb, arg2]) cont)) - , (1, ([BX], TAbs arg3 . TLetD mb UN (TLit $ I 1) $ TLetD result UN (TFOp instr [mb, arg3, arg2]) cont)) + , (toEnum Ty.someId, ([BX], TAbs arg3 . TLetD mb UN (TLit $ I 1) $ TLetD result UN (TFOp instr [mb, arg3, arg2]) cont)) ] -- a -> b -> ... From 31f9f56b30a196b1dbb22dcde5b4b8c2b05d5d56 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 27 Sep 2021 17:25:23 -0400 Subject: [PATCH 015/297] update transcript output --- .../transcripts-using-base/codeops.output.md | 2 +- .../transcripts-using-base/doc.output.md | 8 +-- .../transcripts-using-base/hashing.output.md | 4 +- .../transcripts/ambiguous-metadata.output.md | 8 +-- .../transcripts/bug-strange-closure.output.md | 16 +++--- ...ependents-dependencies-debugfile.output.md | 4 +- unison-src/transcripts/docs.output.md | 4 +- .../fix-1381-excess-propagate.output.md | 2 +- unison-src/transcripts/fix1356.md | 19 ++++--- unison-src/transcripts/fix1356.output.md | 29 +++++++---- unison-src/transcripts/fix2053.output.md | 2 +- unison-src/transcripts/fix2254.output.md | 10 ++-- .../transcripts/isPropagated-exists.output.md | 6 +-- unison-src/transcripts/link.output.md | 8 +-- unison-src/transcripts/mergeloop.output.md | 6 +-- unison-src/transcripts/merges.output.md | 12 ++--- unison-src/transcripts/names.output.md | 6 +-- unison-src/transcripts/propagate.output.md | 2 +- unison-src/transcripts/reflog.output.md | 10 ++-- unison-src/transcripts/squash.output.md | 52 +++++++++---------- unison-src/transcripts/suffixes.output.md | 2 +- 21 files changed, 113 insertions(+), 99 deletions(-) diff --git a/unison-src/transcripts-using-base/codeops.output.md b/unison-src/transcripts-using-base/codeops.output.md index 8f8b2477f2..812b79360e 100644 --- a/unison-src/transcripts-using-base/codeops.output.md +++ b/unison-src/transcripts-using-base/codeops.output.md @@ -274,7 +274,7 @@ to actual show that the serialization works. New test results: - ◉ badLoad serialized78 + ◉ badLoad serialized77 ✅ 1 test(s) passing diff --git a/unison-src/transcripts-using-base/doc.output.md b/unison-src/transcripts-using-base/doc.output.md index d8699b61f2..14c7f69e4b 100644 --- a/unison-src/transcripts-using-base/doc.output.md +++ b/unison-src/transcripts-using-base/doc.output.md @@ -335,7 +335,7 @@ and the rendered output using `display`: Unison definitions can be included in docs. For instance: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -343,7 +343,7 @@ and the rendered output using `display`: Some rendering targets also support folded source: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -637,7 +637,7 @@ Lastly, it's common to build longer documents including subdocuments via `{{ sub Unison definitions can be included in docs. For instance: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -645,7 +645,7 @@ Lastly, it's common to build longer documents including subdocuments via `{{ sub Some rendering targets also support folded source: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * diff --git a/unison-src/transcripts-using-base/hashing.output.md b/unison-src/transcripts-using-base/hashing.output.md index 2f8c322478..8c40ac1eb2 100644 --- a/unison-src/transcripts-using-base/hashing.output.md +++ b/unison-src/transcripts-using-base/hashing.output.md @@ -104,11 +104,11 @@ ex5 = crypto.hmac Sha2_256 mysecret f |> hex 25 | > ex4 ⧩ - "84012f06fbedb0f12ff27cf749927e6316aa2d5998efce7da7666cd16bdae204" + "c9adb8fbda582aeab113379dbd8f6af3ea450df1782780d61d44ad1ef7bff76e" 26 | > ex5 ⧩ - "36a41b5fa0898be800d740dea2091fb1ed8987400239d4863e44a995750bc901" + "b198a72da9e6c11536c0a2118002760a9eac57db7f9a0ce9ebda8cd8a806bd06" ``` And here's the full API: diff --git a/unison-src/transcripts/ambiguous-metadata.output.md b/unison-src/transcripts/ambiguous-metadata.output.md index 6b5f26b1db..b04fb87c43 100644 --- a/unison-src/transcripts/ambiguous-metadata.output.md +++ b/unison-src/transcripts/ambiguous-metadata.output.md @@ -14,10 +14,10 @@ x = 1 New name conflicts: - 1. doc#tj3gfqdnje : #v00j3buk6m + 1. doc#mqm91b53vp : #qpp8bgqet0 ↓ - 2. ┌ doc#d4ormokpf9 : #v00j3buk6m - 3. └ doc#tj3gfqdnje : #v00j3buk6m + 2. ┌ doc#a09ch66esd : #qpp8bgqet0 + 3. └ doc#mqm91b53vp : #qpp8bgqet0 Tip: You can use `todo` to see if this generated any work to do in this namespace and `test` to run the tests. Or you @@ -32,7 +32,7 @@ x = 1 there are multiple matches: foo.doc - doc#tj3gfqdnje + doc#mqm91b53vp Tip: Try again and supply one of the above definitions explicitly. diff --git a/unison-src/transcripts/bug-strange-closure.output.md b/unison-src/transcripts/bug-strange-closure.output.md index 1dfe955f0a..3b1adc4add 100644 --- a/unison-src/transcripts/bug-strange-closure.output.md +++ b/unison-src/transcripts/bug-strange-closure.output.md @@ -88,7 +88,7 @@ We can display the guide before and after adding it to the codebase: Unison definitions can be included in docs. For instance: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -96,7 +96,7 @@ We can display the guide before and after adding it to the codebase: Some rendering targets also support folded source: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -289,7 +289,7 @@ We can display the guide before and after adding it to the codebase: Unison definitions can be included in docs. For instance: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -297,7 +297,7 @@ We can display the guide before and after adding it to the codebase: Some rendering targets also support folded source: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -496,7 +496,7 @@ rendered = Pretty.get (docFormatConsole doc.guide) Unison definitions can be included in docs. For instance: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -504,7 +504,7 @@ rendered = Pretty.get (docFormatConsole doc.guide) Some rendering targets also support folded source: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -690,7 +690,7 @@ rendered = Pretty.get (docFormatConsole doc.guide) Unison definitions can be included in docs. For instance: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * @@ -698,7 +698,7 @@ rendered = Pretty.get (docFormatConsole doc.guide) Some rendering targets also support folded source: - structural type Optional a = None | Some a + structural type Optional a = Some a | None sqr x = use Nat * diff --git a/unison-src/transcripts/dependents-dependencies-debugfile.output.md b/unison-src/transcripts/dependents-dependencies-debugfile.output.md index 63b00362b3..baa8a2172e 100644 --- a/unison-src/transcripts/dependents-dependencies-debugfile.output.md +++ b/unison-src/transcripts/dependents-dependencies-debugfile.output.md @@ -17,8 +17,8 @@ inside.r = d ```ucm .> debug.file - type inside.M#4idrjau939 - type outside.A#0n4pbd0q9u + type inside.M#ld0okei52l + type outside.A#e6mpjfecmg type outside.B#muulibntaq inside.p#fiupm7pl7o inside.q#l5pndeifuh diff --git a/unison-src/transcripts/docs.output.md b/unison-src/transcripts/docs.output.md index a78489a956..3b09535dac 100644 --- a/unison-src/transcripts/docs.output.md +++ b/unison-src/transcripts/docs.output.md @@ -6,10 +6,10 @@ Unison documentation is written in Unison. Documentation is a value of the follo .> view builtin.Doc unique type builtin.Doc - = Link Link + = Join [builtin.Doc] + | Link Link | Source Link | Blob Text - | Join [builtin.Doc] | Signature Term | Evaluate Term diff --git a/unison-src/transcripts/fix-1381-excess-propagate.output.md b/unison-src/transcripts/fix-1381-excess-propagate.output.md index ab71290c24..c1c5ebd306 100644 --- a/unison-src/transcripts/fix-1381-excess-propagate.output.md +++ b/unison-src/transcripts/fix-1381-excess-propagate.output.md @@ -37,7 +37,7 @@ As of the time of this writing, the history for `X` should be a single node, `#4 - □ #gqprjtld8b (start of history) + □ #kgp7jlhvdu (start of history) ``` however, as of release/M1i, we saw an extraneous node appear. If your `ucm` is fixed, you won't see it below: diff --git a/unison-src/transcripts/fix1356.md b/unison-src/transcripts/fix1356.md index f932e8b4f2..abe31b10a1 100644 --- a/unison-src/transcripts/fix1356.md +++ b/unison-src/transcripts/fix1356.md @@ -12,9 +12,14 @@ x.doc = [: I am the documentation for x:] Step 2: add term and documentation, link, and check the documentation ```ucm -.> add -.> link x.doc x -.> docs x +.trunk> add +.trunk> link x.doc x +.trunk> docs x +``` + +Step 2.5: We'll save this for later for some reason. +```ucm +.trunk> alias.term x.doc .backup.x.doc ``` Step 3: Oops I don't like the doc, so I will re-code it! @@ -24,18 +29,18 @@ x.doc = [: I am the documentation for x, and I now look better:] Step 4: I add it and expect to see it ```ucm -.> update -.> docs x +.trunk> update +.trunk> docs x ``` That works great. Let's relink the old doc too. ```ucm -.> link #v8f1hhvs57 x +.trunk> link .backup.x.doc x ``` Let's check that we see both docs: ```ucm -.> docs x +.trunk> docs x ``` diff --git a/unison-src/transcripts/fix1356.output.md b/unison-src/transcripts/fix1356.output.md index 6a322ba061..f5368b7aa1 100644 --- a/unison-src/transcripts/fix1356.output.md +++ b/unison-src/transcripts/fix1356.output.md @@ -20,24 +20,33 @@ x.doc = [: I am the documentation for x:] ``` Step 2: add term and documentation, link, and check the documentation ```ucm -.> add + ☝️ The namespace .trunk is empty. + +.trunk> add ⍟ I've added these definitions: x : Nat x.doc : Doc -.> link x.doc x +.trunk> link x.doc x Updates: - 1. x : Nat + 1. trunk.x : Nat + 2. doc : Doc -.> docs x +.trunk> docs x I am the documentation for x +``` +Step 2.5: We'll save this for later for some reason. +```ucm +.trunk> alias.term x.doc .backup.x.doc + + Done. + ``` Step 3: Oops I don't like the doc, so I will re-code it! ```unison @@ -58,13 +67,13 @@ x.doc = [: I am the documentation for x, and I now look better:] ``` Step 4: I add it and expect to see it ```ucm -.> update +.trunk> update ⍟ I've updated these names to your new definition: x.doc : Doc -.> docs x +.trunk> docs x I am the documentation for x, and I now look better @@ -72,18 +81,18 @@ Step 4: I add it and expect to see it That works great. Let's relink the old doc too. ```ucm -.> link #v8f1hhvs57 x +.trunk> link .backup.x.doc x Updates: - 1. x : Nat - + 2. #v8f1hhvs57 : Doc + 1. trunk.x : Nat + + 2. backup.x.doc : Doc ``` Let's check that we see both docs: ```ucm -.> docs x +.trunk> docs x I am the documentation for x, and I now look better diff --git a/unison-src/transcripts/fix2053.output.md b/unison-src/transcripts/fix2053.output.md index 60fe87aa41..02ccfc0ac4 100644 --- a/unison-src/transcripts/fix2053.output.md +++ b/unison-src/transcripts/fix2053.output.md @@ -4,10 +4,10 @@ go f i as acc = _pattern = List.at i as match _pattern with - None -> acc Some _pattern1 -> use Nat + go f (i + 1) as (acc :+ f _pattern) + None -> acc f a -> go f 0 a [] ``` diff --git a/unison-src/transcripts/fix2254.output.md b/unison-src/transcripts/fix2254.output.md index 21b13d2d98..78ba1d91ee 100644 --- a/unison-src/transcripts/fix2254.output.md +++ b/unison-src/transcripts/fix2254.output.md @@ -75,15 +75,15 @@ Let's do the update now, and verify that the definitions all look good and there .a2> view A NeedsA f f2 f3 g unique type A a b c d - = E a d - | C c + = D d | A a | B b - | D d + | C c + | E a d structural type NeedsA a b - = Zoink Text - | NeedsA (A a b Nat Nat) + = NeedsA (A a b Nat Nat) + | Zoink Text f : A Nat Nat Nat Nat -> Nat f = cases diff --git a/unison-src/transcripts/isPropagated-exists.output.md b/unison-src/transcripts/isPropagated-exists.output.md index 95d751b6dc..845609246a 100644 --- a/unison-src/transcripts/isPropagated-exists.output.md +++ b/unison-src/transcripts/isPropagated-exists.output.md @@ -32,15 +32,15 @@ x = 4 .> links y - 1. #uqdd5t2fgn : #ffb7g9cull + 1. #kea5380m2n : #ffb7g9cull Tip: Try using `display 1` to display the first result or `view 1` to view its source. .> view 1 - #uqdd5t2fgn : #ffb7g9cull - #uqdd5t2fgn = #ffb7g9cull#0 + #kea5380m2n : #ffb7g9cull + #kea5380m2n = #ffb7g9cull#0 ``` Well, it's hard to tell from those hashes, but those are right. We can confirm diff --git a/unison-src/transcripts/link.output.md b/unison-src/transcripts/link.output.md index e507b82102..ed5dbb9111 100644 --- a/unison-src/transcripts/link.output.md +++ b/unison-src/transcripts/link.output.md @@ -101,8 +101,8 @@ We can look at the links we have: ```ucm .> links coolFunction - 1. alice : Author - 2. coolFunction.license : License + 1. coolFunction.license : License + 2. alice : Author 3. coolFunction.doc : Doc Tip: Try using `display 1` to display the first result or @@ -194,11 +194,11 @@ myLibrary.h x = x + 3 Note: The most recent namespace hash is immediately below this message. - ⊙ #o2uud8au0e + ⊙ #da67sk9qv4 - □ #7rksc58cce (start of history) + □ #jil43gmsb8 (start of history) .> unlink coolFunction.doc coolFunction diff --git a/unison-src/transcripts/mergeloop.output.md b/unison-src/transcripts/mergeloop.output.md index f488e3e562..2ecc271ed6 100644 --- a/unison-src/transcripts/mergeloop.output.md +++ b/unison-src/transcripts/mergeloop.output.md @@ -135,9 +135,9 @@ b = 2 `history #som3n4m3space` to view history starting from a given namespace hash. - ⊙ #fe1rlmq8a3 + ⊙ #m0kphpti4f ⑃ - #4pts1280cu - #piag7k4n15 + #qdrtol3ouc + #tc4r914dk6 ``` diff --git a/unison-src/transcripts/merges.output.md b/unison-src/transcripts/merges.output.md index b183f4c8f3..8cb3d02289 100644 --- a/unison-src/transcripts/merges.output.md +++ b/unison-src/transcripts/merges.output.md @@ -112,13 +112,13 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Note: The most recent namespace hash is immediately below this message. - ⊙ #8bi2fepmci + ⊙ #h5hh8vjjin - Deletes: feature1.y - ⊙ #0toe0ni06d + ⊙ #vtedbsulj7 + Adds / updates: @@ -129,26 +129,26 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Original name New name(s) feature1.y master.y - ⊙ #8hp71hs6bk + ⊙ #82rbe388k1 + Adds / updates: feature1.y - ⊙ #lujhuhd7it + ⊙ #c8e5rj8gen > Moves: Original name New name x master.x - ⊙ #hl2puv6t7v + ⊙ #p0jm4b99qm + Adds / updates: x - □ #rmafm3f1ih (start of history) + □ #5iqf8j8pam (start of history) ``` To resurrect an old version of a namespace, you can learn its hash via the `history` command, then use `fork #namespacehash .newname`. diff --git a/unison-src/transcripts/names.output.md b/unison-src/transcripts/names.output.md index 4e4caa9ae6..66cab3a516 100644 --- a/unison-src/transcripts/names.output.md +++ b/unison-src/transcripts/names.output.md @@ -16,17 +16,17 @@ intTriple = IntTriple(+1, +1, +1) .> names IntTriple Type - Hash: #170h4ackk7 + Hash: #p1iakck1ol Names: IntTriple namespc.another.TripleInt Term - Hash: #170h4ackk7#0 + Hash: #p1iakck1ol#0 Names: IntTriple.IntTriple .> names intTriple Term - Hash: #uif14vd2oj + Hash: #2quul9e9bo Names: intTriple namespc.another.tripleInt ``` diff --git a/unison-src/transcripts/propagate.output.md b/unison-src/transcripts/propagate.output.md index b51c3beb90..c279fe922d 100644 --- a/unison-src/transcripts/propagate.output.md +++ b/unison-src/transcripts/propagate.output.md @@ -43,7 +43,7 @@ And then we add it. 2. -- #v4a90flt15t54qnjbvbdtj42ouqo8dktu5da8g6q30l4frc6l81ttjtov42r1nbj5jq3hh98snlb64tkbb1mc5dk8les96v71b4qr6g#0 Foo.Foo : Foo - 3. -- #31g7t8qcmqqdtpe4bdo1591egqh1q0ltnt69u345gdrdur0n8flfu1ohpjasauc9k81msvi2a4q4b03tp1018sac9esd8d3qmbq4b2g + 3. -- #5k9rns49vrtujrpbiegajeja9qjjs77fju3usg1i1dpeo44kefkbce776u1kvqhvtutk6a6f178kovr422ocsd4fdsbsg7fprf4o0dg fooToInt : Foo -> Int diff --git a/unison-src/transcripts/reflog.output.md b/unison-src/transcripts/reflog.output.md index d79279afb8..2fcad1653a 100644 --- a/unison-src/transcripts/reflog.output.md +++ b/unison-src/transcripts/reflog.output.md @@ -59,16 +59,16 @@ y = 2 most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #12c7nag7oi .old` to make an old namespace + `fork #dh7ar0q7k3 .old` to make an old namespace accessible again, - `reset-root #12c7nag7oi` to reset the root namespace and + `reset-root #dh7ar0q7k3` to reset the root namespace and its history to that of the specified namespace. - 1. #sjer10g2l4 : add - 2. #12c7nag7oi : add - 3. #rmafm3f1ih : builtins.merge + 1. #p3t83odhnd : add + 2. #dh7ar0q7k3 : add + 3. #5iqf8j8pam : builtins.merge 4. #sjg2v58vn2 : (initial reflogged namespace) ``` diff --git a/unison-src/transcripts/squash.output.md b/unison-src/transcripts/squash.output.md index d4af31bcc5..c5e8d67acd 100644 --- a/unison-src/transcripts/squash.output.md +++ b/unison-src/transcripts/squash.output.md @@ -13,7 +13,7 @@ Let's look at some examples. We'll start with a namespace with just the builtins - □ #7ogvf7kc1m (start of history) + □ #l37iis22pp (start of history) .> fork builtin builtin2 @@ -42,21 +42,21 @@ Now suppose we `fork` a copy of builtin, then rename `Nat.+` to `frobnicate`, th Note: The most recent namespace hash is immediately below this message. - ⊙ #b4nitt6goc + ⊙ #q14lnbgbmf > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #s92i00andp + ⊙ #t5kpt7ps8p > Moves: Original name New name Nat.+ Nat.frobnicate - □ #7ogvf7kc1m (start of history) + □ #l37iis22pp (start of history) ``` If we merge that back into `builtin`, we get that same chain of history: @@ -71,21 +71,21 @@ If we merge that back into `builtin`, we get that same chain of history: Note: The most recent namespace hash is immediately below this message. - ⊙ #b4nitt6goc + ⊙ #q14lnbgbmf > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #s92i00andp + ⊙ #t5kpt7ps8p > Moves: Original name New name Nat.+ Nat.frobnicate - □ #7ogvf7kc1m (start of history) + □ #l37iis22pp (start of history) ``` Let's try again, but using a `merge.squash` (or just `squash`) instead. The history will be unchanged: @@ -106,7 +106,7 @@ Let's try again, but using a `merge.squash` (or just `squash`) instead. The hist - □ #7ogvf7kc1m (start of history) + □ #l37iis22pp (start of history) ``` The churn that happened in `mybuiltin` namespace ended up back in the same spot, so the squash merge of that namespace with our original namespace had no effect. @@ -191,47 +191,47 @@ At this point, Alice and Bob both have some history beyond what's in trunk: - □ #hkrqt3tm05 (start of history) + □ #md499voko1 (start of history) .> history alice Note: The most recent namespace hash is immediately below this message. - ⊙ #uollchacf2 + ⊙ #4suqgqt42i > Moves: Original name New name neatoFun productionReadyId - ⊙ #7b6lii2lmc + ⊙ #gvn274aiis > Moves: Original name New name radNumber superRadNumber - ⊙ #1l7bsgu3om + ⊙ #mg2d7kkbck + Adds / updates: bodaciousNumero neatoFun radNumber - □ #hkrqt3tm05 (start of history) + □ #md499voko1 (start of history) .> history bob Note: The most recent namespace hash is immediately below this message. - ⊙ #aicts31vr6 + ⊙ #gvaau8vv11 + Adds / updates: babyDon'tHurtMe no whatIsLove - □ #hkrqt3tm05 (start of history) + □ #md499voko1 (start of history) ``` Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob both made their changes in one single commit. @@ -257,13 +257,13 @@ Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob bot Note: The most recent namespace hash is immediately below this message. - ⊙ #gjfd096e1s + ⊙ #n6ps07cufq + Adds / updates: bodaciousNumero productionReadyId superRadNumber - □ #hkrqt3tm05 (start of history) + □ #md499voko1 (start of history) .> merge.squash bob trunk @@ -285,19 +285,19 @@ Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob bot Note: The most recent namespace hash is immediately below this message. - ⊙ #k7bfk3l7uv + ⊙ #qksvuct0ji + Adds / updates: babyDon'tHurtMe no whatIsLove - ⊙ #gjfd096e1s + ⊙ #n6ps07cufq + Adds / updates: bodaciousNumero productionReadyId superRadNumber - □ #hkrqt3tm05 (start of history) + □ #md499voko1 (start of history) ``` Since squash merges don't produce any merge nodes, we can `undo` a couple times to get back to our starting state: @@ -336,7 +336,7 @@ Since squash merges don't produce any merge nodes, we can `undo` a couple times - □ #hkrqt3tm05 (start of history) + □ #md499voko1 (start of history) ``` This time, we'll first squash Alice and Bob's changes together before squashing their combined changes into `trunk`. The resulting `trunk` will have just a single entry in it, combining both Alice and Bob's changes: @@ -380,14 +380,14 @@ This time, we'll first squash Alice and Bob's changes together before squashing Note: The most recent namespace hash is immediately below this message. - ⊙ #ka70nifphh + ⊙ #vlsmrlnl0o + Adds / updates: babyDon'tHurtMe bodaciousNumero no productionReadyId superRadNumber whatIsLove - □ #hkrqt3tm05 (start of history) + □ #md499voko1 (start of history) ``` So, there you have it. With squashing, you can control the granularity of your history. @@ -420,7 +420,7 @@ Another thing we can do is `squash` into an empty namespace. This effectively ma - □ #sui24env59 (start of history) + □ #hqtl9h3ldp (start of history) ``` There's nothing really special here, `squash src dest` discards `src` history that comes after the LCA of `src` and `dest`, it's just that in the case of an empty namespace, that LCA is the beginning of time (the empty namespace), so all the history of `src` is discarded. @@ -485,13 +485,13 @@ This checks to see that squashing correctly preserves deletions: Note: The most recent namespace hash is immediately below this message. - ⊙ #ct8sk813ij + ⊙ #auri54jlhn - Deletes: Nat.* Nat.+ - □ #7ogvf7kc1m (start of history) + □ #l37iis22pp (start of history) ``` Notice that `Nat.+` and `Nat.*` are deleted by the squash, and we see them deleted in one atomic step in the history. diff --git a/unison-src/transcripts/suffixes.output.md b/unison-src/transcripts/suffixes.output.md index 12c6e31513..033b333413 100644 --- a/unison-src/transcripts/suffixes.output.md +++ b/unison-src/transcripts/suffixes.output.md @@ -38,7 +38,7 @@ The `view` and `display` commands also benefit from this: ```ucm .> view List.drop - builtin builtin.List.drop : Nat -> [a] -> [a] + builtin builtin.List.drop : builtin.Nat -> [a] -> [a] .> display bar.a From 60eac682eb06ffd002a829aebc03671ca8e92b5e Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sat, 2 Oct 2021 12:39:52 -0400 Subject: [PATCH 016/297] typo in comment --- codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 946f779249..106b2ad283 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -1381,7 +1381,7 @@ dependents r = do cIds <- traverse s2cReferenceId sIds pure $ Set.fromList cIds --- | returns a list of known definitions referencing `r` +-- | returns a list of known definitions referencing `h` dependentsOfComponent :: EDB m => H.Hash -> m (Set C.Reference.Id) dependentsOfComponent h = do oId <- primaryHashToExistingObjectId h From 60174c7da1f6b99fc171d2ebb42415617c325840 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sat, 2 Oct 2021 13:21:14 -0400 Subject: [PATCH 017/297] more re-org MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - move Unison.Codebase.Branch.BranchDiff into its own module (not to be confused with Unison.Codebase.BranchDiff) 😥 - move Unison.Codebase.Branch.Raw into its own module - move various types from Unison.Codebase.Branch into Unicon.Codebase.Branch.Type to avoid cyclical dependencies with conversion code - temporarily, orphan Hashable Branch0 instance which delegates to the Convert module - use H.hashPatch instead of Hashable for hashing patches - remove unused V1 Caching and serialization code from Unison.Codebase.Branch - dropped `Typing` from the hash function for TermEdit, because it is info that is implied by the hashes. I forget: do we even need these stored in the codebase? later: - audit lenses for Branch (basically, reorg to minimize recomputing deep*) - use canned hashing for Causal and Branch too — and also consider switching to the separate branch and causal hashes thing --- .../src/Unison/Codebase/Branch.hs | 231 ++---------------- .../src/Unison/Codebase/Branch/BranchDiff.hs | 61 +++++ .../src/Unison/Codebase/Branch/Merge.hs | 26 +- .../src/Unison/Codebase/Branch/Raw.hs | 21 ++ .../src/Unison/Codebase/Branch/Type.hs | 71 ++++++ .../src/Unison/Codebase/Patch.hs | 6 - .../Codebase/SqliteCodebase/Conversions.hs | 14 +- .../src/Unison/Codebase/TermEdit.hs | 11 - .../src/Unison/Codebase/TypeEdit.hs | 6 - .../src/Unison/Hashing/V2/Branch.hs | 37 +++ .../src/Unison/Hashing/V2/Causal.hs | 15 ++ .../src/Unison/Hashing/V2/Convert.hs | 108 ++++++++ .../src/Unison/Hashing/V2/Patch.hs | 26 ++ .../src/Unison/Hashing/V2/TermEdit.hs | 12 + .../src/Unison/Hashing/V2/TypeEdit.hs | 12 + .../tests/Unison/Test/GitSync.hs | 6 +- .../unison-parser-typechecker.cabal | 8 + unison-core/src/Unison/Pattern.hs | 24 -- unison-core/src/Unison/Reference.hs | 5 - 19 files changed, 422 insertions(+), 278 deletions(-) create mode 100644 parser-typechecker/src/Unison/Codebase/Branch/BranchDiff.hs create mode 100644 parser-typechecker/src/Unison/Codebase/Branch/Raw.hs create mode 100644 parser-typechecker/src/Unison/Codebase/Branch/Type.hs create mode 100644 parser-typechecker/src/Unison/Hashing/V2/Branch.hs create mode 100644 parser-typechecker/src/Unison/Hashing/V2/Causal.hs create mode 100644 parser-typechecker/src/Unison/Hashing/V2/Patch.hs create mode 100644 parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs create mode 100644 parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs diff --git a/parser-typechecker/src/Unison/Codebase/Branch.hs b/parser-typechecker/src/Unison/Codebase/Branch.hs index 5fbb1d5e99..1eae01ffd9 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch.hs @@ -3,11 +3,11 @@ {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE RankNTypes #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Unison.Codebase.Branch ( -- * Branch types Branch(..) - , BranchDiff(..) , UnwrappedBranch , Branch0(..) , Raw(..) @@ -23,7 +23,6 @@ module Unison.Codebase.Branch , empty , empty0 , discardHistory0 - , toCausalRaw , transform -- * Branch tests , isEmpty @@ -31,8 +30,6 @@ module Unison.Codebase.Branch , isOne , before , lca - -- * diff - , diff0 -- * properties , head , headHash @@ -69,10 +66,6 @@ module Unison.Codebase.Branch -- ** Term/type queries , deepReferents , deepTypeReferences - -- * Branch serialization - , cachedRead - , Cache - , sync ) where import Unison.Prelude hiding (empty) @@ -80,111 +73,43 @@ import Unison.Prelude hiding (empty) import Prelude hiding (head,read,subtract) import Control.Lens hiding ( children, cons, transform, uncons ) -import qualified Control.Monad.State as State -import Control.Monad.State ( StateT ) import Data.Bifunctor ( second ) import qualified Data.Map as Map -import qualified Data.Map.Merge.Lazy as Map import qualified Data.Set as Set +import Unison.Codebase.Branch.Raw (Raw (Raw)) +import Unison.Codebase.Branch.Type + ( Branch (..), + Branch0 (..), + EditHash, + Hash, + Star, + UnwrappedBranch, + edits, + head, + headHash, + history, + ) import qualified Unison.Codebase.Patch as Patch import Unison.Codebase.Patch ( Patch ) import qualified Unison.Codebase.Causal as Causal -import Unison.Codebase.Causal ( Causal - , pattern RawOne - , pattern RawCons - , pattern RawMerge - ) +import Unison.Codebase.Causal (Causal) import Unison.Codebase.Path ( Path(..) ) import qualified Unison.Codebase.Path as Path import Unison.NameSegment ( NameSegment ) import qualified Unison.NameSegment as NameSegment import qualified Unison.Codebase.Metadata as Metadata -import qualified Unison.Hash as Hash import Unison.Hashable ( Hashable ) import qualified Unison.Hashable as H +import qualified Unison.Hashing.V2.Convert as H import Unison.Name ( Name(..) ) import qualified Unison.Name as Name import Unison.Reference ( Reference ) import Unison.Referent ( Referent ) - -import qualified U.Util.Cache as Cache import qualified Unison.Util.Relation as R -import Unison.Util.Relation ( Relation ) import qualified Unison.Util.Relation4 as R4 import qualified Unison.Util.Star3 as Star3 import qualified Unison.Util.List as List --- | A node in the Unison namespace hierarchy --- along with its history. -newtype Branch m = Branch { _history :: UnwrappedBranch m } - deriving (Eq, Ord) -type UnwrappedBranch m = Causal m Raw (Branch0 m) - -type Hash = Causal.RawHash Raw -type EditHash = Hash.Hash - -type Star r n = Metadata.Star r n - --- | A node in the Unison namespace hierarchy. --- --- '_terms' and '_types' are the declarations at this level. --- '_children' are the nodes one level below us. --- '_edits' are the 'Patch's stored at this node in the code. --- --- The @deep*@ fields are derived from the four above. -data Branch0 m = Branch0 - { _terms :: Star Referent NameSegment - , _types :: Star Reference NameSegment - , _children :: Map NameSegment (Branch m) - -- ^ Note the 'Branch' here, not 'Branch0'. - -- Every level in the tree has a history. - , _edits :: Map NameSegment (EditHash, m Patch) - -- names and metadata for this branch and its children - -- (ref, (name, value)) iff ref has metadata `value` at name `name` - , deepTerms :: Relation Referent Name - , deepTypes :: Relation Reference Name - , deepTermMetadata :: Metadata.R4 Referent Name - , deepTypeMetadata :: Metadata.R4 Reference Name - , deepPaths :: Set Path - , deepEdits :: Map Name EditHash - } - --- Represents a shallow diff of a Branch0. --- Each of these `Star`s contain metadata as well, so an entry in --- `added` or `removed` could be an update to the metadata. -data BranchDiff = BranchDiff - { addedTerms :: Star Referent NameSegment - , removedTerms :: Star Referent NameSegment - , addedTypes :: Star Reference NameSegment - , removedTypes :: Star Reference NameSegment - , changedPatches :: Map NameSegment Patch.PatchDiff - } deriving (Eq, Ord, Show) - -instance Semigroup BranchDiff where - left <> right = BranchDiff - { addedTerms = addedTerms left <> addedTerms right - , removedTerms = removedTerms left <> removedTerms right - , addedTypes = addedTypes left <> addedTypes right - , removedTypes = removedTypes left <> removedTypes right - , changedPatches = - Map.unionWith (<>) (changedPatches left) (changedPatches right) - } - -instance Monoid BranchDiff where - mappend = (<>) - mempty = BranchDiff mempty mempty mempty mempty mempty - --- The raw Branch -data Raw = Raw - { _termsR :: Star Referent NameSegment - , _typesR :: Star Reference NameSegment - , _childrenR :: Map NameSegment Hash - , _editsR :: Map NameSegment EditHash - } - -makeLenses ''Branch -makeLensesFor [("_edits", "edits")] ''Branch0 - deepReferents :: Branch0 m -> Set Referent deepReferents = R.dom . deepTerms @@ -242,12 +167,6 @@ branch0 terms types children edits = go (nameSeg, b) = Map.mapKeys (nameSegToName nameSeg `Name.joinDot`) . deepEdits $ head b -head :: Branch m -> Branch0 m -head (Branch c) = Causal.head c - -headHash :: Branch m -> Hash -headHash (Branch c) = Causal.currentHash c - -- | a version of `deepEdits` that returns the `m Patch` as well. deepEdits' :: Branch0 m -> Map Name (EditHash, m Patch) deepEdits' b = go id b where @@ -277,96 +196,6 @@ toList0 = go Path.empty where go p b = (p, b) : (Map.toList (_children b) >>= (\(seg, cb) -> go (Path.snoc p seg) (head cb) )) -instance Eq (Branch0 m) where - a == b = view terms a == view terms b - && view types a == view types b - && view children a == view children b - && (fmap fst . view edits) a == (fmap fst . view edits) b - --- This type is a little ugly, so we wrap it up with a nice type alias for --- use outside this module. -type Cache m = Cache.Cache (Causal.RawHash Raw) (UnwrappedBranch m) - --- Can use `Cache.nullCache` to disable caching if needed -cachedRead :: forall m . MonadIO m - => Cache m - -> Causal.Deserialize m Raw Raw - -> (EditHash -> m Patch) - -> Hash - -> m (Branch m) -cachedRead cache deserializeRaw deserializeEdits h = - Branch <$> Causal.cachedRead cache d h - where - fromRaw :: Raw -> m (Branch0 m) - fromRaw Raw {..} = do - children <- traverse go _childrenR - edits <- for _editsR $ \hash -> (hash,) . pure <$> deserializeEdits hash - pure $ branch0 _termsR _typesR children edits - go = cachedRead cache deserializeRaw deserializeEdits - d :: Causal.Deserialize m Raw (Branch0 m) - d h = deserializeRaw h >>= \case - RawOne raw -> RawOne <$> fromRaw raw - RawCons raw h -> flip RawCons h <$> fromRaw raw - RawMerge raw hs -> flip RawMerge hs <$> fromRaw raw - -sync - :: Monad m - => (Hash -> m Bool) - -> Causal.Serialize m Raw Raw - -> (EditHash -> m Patch -> m ()) - -> Branch m - -> m () -sync exists serializeRaw serializeEdits b = do - _written <- State.execStateT (sync' exists serializeRaw serializeEdits b) mempty - -- traceM $ "Branch.sync wrote " <> show (Set.size written) <> " namespace files." - pure () - --- serialize a `Branch m` indexed by the hash of its corresponding Raw -sync' - :: forall m - . Monad m - => (Hash -> m Bool) - -> Causal.Serialize m Raw Raw - -> (EditHash -> m Patch -> m ()) - -> Branch m - -> StateT (Set Hash) m () -sync' exists serializeRaw serializeEdits b = Causal.sync exists - serialize0 - (view history b) - where - serialize0 :: Causal.Serialize (StateT (Set Hash) m) Raw (Branch0 m) - serialize0 h b0 = case b0 of - RawOne b0 -> do - writeB0 b0 - lift $ serializeRaw h $ RawOne (toRaw b0) - RawCons b0 ht -> do - writeB0 b0 - lift $ serializeRaw h $ RawCons (toRaw b0) ht - RawMerge b0 hs -> do - writeB0 b0 - lift $ serializeRaw h $ RawMerge (toRaw b0) hs - where - writeB0 :: Branch0 m -> StateT (Set Hash) m () - writeB0 b0 = do - for_ (view children b0) $ \c -> do - queued <- State.get - when (Set.notMember (headHash c) queued) $ - sync' exists serializeRaw serializeEdits c - for_ (view edits b0) (lift . uncurry serializeEdits) - - -- this has to serialize the branch0 and its descendants in the tree, - -- and then serialize the rest of the history of the branch as well - -toRaw :: Branch0 m -> Raw -toRaw Branch0 {..} = - Raw _terms _types (headHash <$> _children) (fst <$> _edits) - -toCausalRaw :: Branch m -> Causal.Raw Raw Raw -toCausalRaw = \case - Branch (Causal.One _h e) -> RawOne (toRaw e) - Branch (Causal.Cons _h e (ht, _m)) -> RawCons (toRaw e) ht - Branch (Causal.Merge _h e tls) -> RawMerge (toRaw e) (Map.keysSet tls) - -- returns `Nothing` if no Branch at `path` or if Branch is empty at `path` getAt :: Path -> Branch m @@ -466,11 +295,11 @@ modifyPatches seg f = mapMOf edits update p' <- case Map.lookup seg m of Nothing -> pure $ f Patch.empty Just (_, p) -> f <$> p - let h = H.accumulate' p' + let h = H.hashPatch p' pure $ Map.insert seg (h, pure p') m replacePatch :: Applicative m => NameSegment -> Patch -> Branch0 m -> Branch0 m -replacePatch n p = over edits (Map.insert n (H.accumulate' p, pure p)) +replacePatch n p = over edits (Map.insert n (H.hashPatch p, pure p)) deletePatch :: NameSegment -> Branch0 m -> Branch0 m deletePatch n = over edits (Map.delete n) @@ -544,12 +373,7 @@ stepManyAt0M actions b = go (toList actions) b where currentAction (set children c2 b) instance Hashable (Branch0 m) where - tokens b = - [ H.accumulateToken (_terms b) - , H.accumulateToken (_types b) - , H.accumulateToken (headHash <$> _children b) - , H.accumulateToken (fst <$> _edits b) - ] + tokens = H.tokensBranch0 -- todo: consider inlining these into Actions2 addTermName @@ -575,23 +399,6 @@ deleteTypeName _ _ b = b lca :: Monad m => Branch m -> Branch m -> m (Maybe (Branch m)) lca (Branch a) (Branch b) = fmap Branch <$> Causal.lca a b -diff0 :: Monad m => Branch0 m -> Branch0 m -> m BranchDiff -diff0 old new = do - newEdits <- sequenceA $ snd <$> _edits new - oldEdits <- sequenceA $ snd <$> _edits old - let diffEdits = Map.merge (Map.mapMissing $ \_ p -> Patch.diff p mempty) - (Map.mapMissing $ \_ p -> Patch.diff mempty p) - (Map.zipWithMatched (const Patch.diff)) - newEdits - oldEdits - pure $ BranchDiff - { addedTerms = Star3.difference (_terms new) (_terms old) - , removedTerms = Star3.difference (_terms old) (_terms new) - , addedTypes = Star3.difference (_types new) (_types old) - , removedTypes = Star3.difference (_types old) (_types new) - , changedPatches = diffEdits - } - transform :: Functor m => (forall a . m a -> n a) -> Branch m -> Branch n transform f b = case _history b of causal -> Branch . Causal.transform f $ transformB0s f causal diff --git a/parser-typechecker/src/Unison/Codebase/Branch/BranchDiff.hs b/parser-typechecker/src/Unison/Codebase/Branch/BranchDiff.hs new file mode 100644 index 0000000000..5be4fd2cf8 --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/Branch/BranchDiff.hs @@ -0,0 +1,61 @@ +module Unison.Codebase.Branch.BranchDiff where + +import Data.Map (Map) +import qualified Data.Map as Map +import qualified Data.Map.Merge.Lazy as MapMerge +import Unison.Codebase.Branch.Type (Branch0(_types,_terms,_edits)) +import qualified Unison.Codebase.Metadata as Metadata +import qualified Unison.Codebase.Patch as Patch +import Unison.NameSegment (NameSegment) +import Unison.Reference (Reference) +import Unison.Referent (Referent) +import qualified Unison.Util.Star3 as Star3 + +type Star r n = Metadata.Star r n + +-- Represents a shallow diff of a Branch0. +-- Each of these `Star`s contain metadata as well, so an entry in +-- `added` or `removed` could be an update to the metadata. +data BranchDiff = BranchDiff + { addedTerms :: Star Referent NameSegment, + removedTerms :: Star Referent NameSegment, + addedTypes :: Star Reference NameSegment, + removedTypes :: Star Reference NameSegment, + changedPatches :: Map NameSegment Patch.PatchDiff + } + deriving (Eq, Ord, Show) + +diff0 :: Monad m => Branch0 m -> Branch0 m -> m BranchDiff +diff0 old new = do + newEdits <- sequenceA $ snd <$> _edits new + oldEdits <- sequenceA $ snd <$> _edits old + let diffEdits = + MapMerge.merge + (MapMerge.mapMissing $ \_ p -> Patch.diff p mempty) + (MapMerge.mapMissing $ \_ p -> Patch.diff mempty p) + (MapMerge.zipWithMatched (const Patch.diff)) + newEdits + oldEdits + pure $ + BranchDiff + { addedTerms = Star3.difference (_terms new) (_terms old), + removedTerms = Star3.difference (_terms old) (_terms new), + addedTypes = Star3.difference (_types new) (_types old), + removedTypes = Star3.difference (_types old) (_types new), + changedPatches = diffEdits + } + +instance Semigroup BranchDiff where + left <> right = + BranchDiff + { addedTerms = addedTerms left <> addedTerms right, + removedTerms = removedTerms left <> removedTerms right, + addedTypes = addedTypes left <> addedTypes right, + removedTypes = removedTypes left <> removedTypes right, + changedPatches = + Map.unionWith (<>) (changedPatches left) (changedPatches right) + } + +instance Monoid BranchDiff where + mappend = (<>) + mempty = BranchDiff mempty mempty mempty mempty mempty diff --git a/parser-typechecker/src/Unison/Codebase/Branch/Merge.hs b/parser-typechecker/src/Unison/Codebase/Branch/Merge.hs index 99e9462ae4..2a5b09e9aa 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch/Merge.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch/Merge.hs @@ -11,6 +11,16 @@ module Unison.Codebase.Branch.Merge import Unison.Prelude hiding (empty) import Unison.Codebase.Branch + ( head, + isEmpty0, + isEmpty, + discardHistory0, + empty0, + cons, + branch0, + Branch0(_children, _edits, _terms, _types), + EditHash, + Branch(..) ) import Prelude hiding (head, read, subtract) import qualified Data.Map as Map @@ -18,10 +28,12 @@ import qualified Data.Map.Merge.Lazy as Map import qualified Unison.Codebase.Causal as Causal import Unison.Codebase.Patch (Patch) import qualified Unison.Codebase.Patch as Patch -import qualified Unison.Hashable as H +import qualified Unison.Hashing.V2.Convert as H import Unison.Util.Map (unionWithM) import qualified Unison.Util.Relation as R import qualified Unison.Util.Star3 as Star3 +import qualified Unison.Codebase.Branch.BranchDiff as BDiff +import Unison.Codebase.Branch.BranchDiff (BranchDiff (BranchDiff)) data MergeMode = RegularMerge | SquashMerge deriving (Eq,Ord,Show) @@ -44,8 +56,8 @@ merge'' lca mode (Branch x) (Branch y) = combine :: Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m) combine Nothing l r = merge0 lca mode l r combine (Just ca) l r = do - dl <- diff0 ca l - dr <- diff0 ca r + dl <- BDiff.diff0 ca l + dr <- BDiff.diff0 ca r head0 <- apply ca (dl <> dr) children <- Map.mergeA (Map.traverseMaybeMissing $ combineMissing ca) @@ -64,13 +76,13 @@ merge'' lca mode (Branch x) (Branch y) = else pure $ Just nw apply :: Branch0 m -> BranchDiff -> m (Branch0 m) - apply b0 BranchDiff {..} = do + apply b0 (BranchDiff addedTerms removedTerms addedTypes removedTypes changedPatches) = do patches <- sequenceA $ Map.differenceWith patchMerge (pure @m <$> _edits b0) changedPatches let newPatches = makePatch <$> Map.difference changedPatches (_edits b0) makePatch Patch.PatchDiff {..} = let p = Patch.Patch _addedTermEdits _addedTypeEdits - in (H.accumulate' p, pure p) + in (H.hashPatch p, pure p) pure $ branch0 (Star3.difference (_terms b0) removedTerms <> addedTerms) (Star3.difference (_types b0) removedTypes <> addedTypes) (_children b0) @@ -84,7 +96,7 @@ merge'' lca mode (Branch x) (Branch y) = , _typeEdits = R.difference (Patch._typeEdits p) _removedTypeEdits <> _addedTypeEdits } - pure (H.accumulate' np, pure np) + pure (H.hashPatch np, pure np) merge0 :: forall m. Monad m => (Branch m -> Branch m -> m (Maybe (Branch m))) -> MergeMode -> Branch0 m -> Branch0 m -> m (Branch0 m) @@ -102,4 +114,4 @@ merge0 lca mode b1 b2 = do e1 <- m1 e2 <- m2 let e3 = e1 <> e2 - pure (H.accumulate' e3, pure e3) + pure (H.hashPatch e3, pure e3) diff --git a/parser-typechecker/src/Unison/Codebase/Branch/Raw.hs b/parser-typechecker/src/Unison/Codebase/Branch/Raw.hs new file mode 100644 index 0000000000..333605e9c8 --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/Branch/Raw.hs @@ -0,0 +1,21 @@ +module Unison.Codebase.Branch.Raw where + +import Data.Map (Map) +import qualified Unison.Codebase.Metadata as Metadata +import Unison.Hash (Hash) +import qualified Unison.Hash as Hash +import Unison.NameSegment (NameSegment) +import Unison.Reference (Reference) +import Unison.Referent (Referent) + +type Star r n = Metadata.Star r n + +type EditHash = Hash.Hash + +-- The raw Branch +data Raw = Raw + { _termsR :: Star Referent NameSegment, + _typesR :: Star Reference NameSegment, + _childrenR :: Map NameSegment Hash, + _editsR :: Map NameSegment EditHash + } \ No newline at end of file diff --git a/parser-typechecker/src/Unison/Codebase/Branch/Type.hs b/parser-typechecker/src/Unison/Codebase/Branch/Type.hs new file mode 100644 index 0000000000..393d2f9a34 --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/Branch/Type.hs @@ -0,0 +1,71 @@ +{-# LANGUAGE TemplateHaskell #-} + +module Unison.Codebase.Branch.Type where + +import Control.Lens (makeLenses, makeLensesFor) +import Data.Map (Map) +import Data.Set (Set) +import Unison.Codebase.Branch.Raw (Raw) +import Unison.Codebase.Causal (Causal) +import qualified Unison.Codebase.Causal as Causal +import qualified Unison.Codebase.Metadata as Metadata +import Unison.Codebase.Patch (Patch) +import Unison.Codebase.Path (Path) +import qualified Unison.Hash as Hash +import Unison.Name (Name) +import Unison.NameSegment (NameSegment) +import Unison.Reference (Reference) +import Unison.Referent (Referent) +import Unison.Util.Relation (Relation) + +-- | A node in the Unison namespace hierarchy +-- along with its history. +newtype Branch m = Branch {_history :: UnwrappedBranch m} + deriving (Eq, Ord) + +type UnwrappedBranch m = Causal m Raw (Branch0 m) + +type Hash = Causal.RawHash Raw + +type EditHash = Hash.Hash + +type Star r n = Metadata.Star r n + +head :: Branch m -> Branch0 m +head (Branch c) = Causal.head c + +headHash :: Branch m -> Hash +headHash (Branch c) = Causal.currentHash c + +-- | A node in the Unison namespace hierarchy. +-- +-- '_terms' and '_types' are the declarations at this level. +-- '_children' are the nodes one level below us. +-- '_edits' are the 'Patch's stored at this node in the code. +-- +-- The @deep*@ fields are derived from the four above. +data Branch0 m = Branch0 + { _terms :: Star Referent NameSegment, + _types :: Star Reference NameSegment, + -- | Note the 'Branch' here, not 'Branch0'. + -- Every level in the tree has a history. + _children :: Map NameSegment (Branch m), + _edits :: Map NameSegment (EditHash, m Patch), + -- names and metadata for this branch and its children + -- (ref, (name, value)) iff ref has metadata `value` at name `name` + deepTerms :: Relation Referent Name, + deepTypes :: Relation Reference Name, + deepTermMetadata :: Metadata.R4 Referent Name, + deepTypeMetadata :: Metadata.R4 Reference Name, + deepPaths :: Set Path, + deepEdits :: Map Name EditHash + } + +makeLenses ''Branch +makeLensesFor [("_edits", "edits")] ''Branch0 + +instance Eq (Branch0 m) where + a == b = _terms a == _terms b + && _types a == _types b + && _children a == _children b + && (fmap fst . _edits) a == (fmap fst . _edits) b \ No newline at end of file diff --git a/parser-typechecker/src/Unison/Codebase/Patch.hs b/parser-typechecker/src/Unison/Codebase/Patch.hs index a5cbdd5902..6b7a55fac2 100644 --- a/parser-typechecker/src/Unison/Codebase/Patch.hs +++ b/parser-typechecker/src/Unison/Codebase/Patch.hs @@ -14,8 +14,6 @@ import Unison.Codebase.TermEdit ( TermEdit, Typing(Same) ) import qualified Unison.Codebase.TermEdit as TermEdit import Unison.Codebase.TypeEdit ( TypeEdit ) import qualified Unison.Codebase.TypeEdit as TypeEdit -import Unison.Hashable ( Hashable ) -import qualified Unison.Hashable as H import Unison.Reference ( Reference ) import qualified Unison.Util.Relation as R import Unison.Util.Relation ( Relation ) @@ -119,10 +117,6 @@ instance Monoid Patch where mappend = (<>) mempty = Patch mempty mempty -instance Hashable Patch where - tokens e = [ H.Hashed (H.accumulate (H.tokens (_termEdits e))), - H.Hashed (H.accumulate (H.tokens (_typeEdits e))) ] - instance Semigroup PatchDiff where a <> b = PatchDiff { _addedTermEdits = _addedTermEdits a <> _addedTermEdits b diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index e819a2ff22..5eb37f8bc4 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -443,13 +443,15 @@ causalbranch1to2 (V1.Branch.Branch c) = causal1to2' hash1to2cb hash1to2c branch1 V1.Causal.Cons (h1to22 -> (hc, hb)) e (ht, mt) -> V2.Causal hc hb (Map.singleton (h1to2 ht) (causal1to2 h1to22 h1to2 e1to2 <$> mt)) (e1to2 e) V1.Causal.Merge (h1to22 -> (hc, hb)) e parents -> V2.Causal hc hb (Map.bimap h1to2 (causal1to2 h1to22 h1to2 e1to2 <$>) parents) (e1to2 e) + -- todo: this could be a pure function branch1to2 :: forall m. Monad m => V1.Branch.Branch0 m -> m (V2.Branch.Branch m) - branch1to2 b = do - terms <- pure $ doTerms (V1.Branch._terms b) - types <- pure $ doTypes (V1.Branch._types b) - patches <- pure $ doPatches (V1.Branch._edits b) - children <- pure $ doChildren (V1.Branch._children b) - pure $ V2.Branch.Branch terms types patches children + branch1to2 b = + pure $ + V2.Branch.Branch + (doTerms (V1.Branch._terms b)) + (doTypes (V1.Branch._types b)) + (doPatches (V1.Branch._edits b)) + (doChildren (V1.Branch._children b)) where -- is there a more readable way to structure these that's also linear? doTerms :: V1.Branch.Star V1.Referent.Referent V1.NameSegment -> Map V2.Branch.NameSegment (Map V2.Referent.Referent (m V2.Branch.MdValues)) diff --git a/parser-typechecker/src/Unison/Codebase/TermEdit.hs b/parser-typechecker/src/Unison/Codebase/TermEdit.hs index df753c84c5..39a24a7009 100644 --- a/parser-typechecker/src/Unison/Codebase/TermEdit.hs +++ b/parser-typechecker/src/Unison/Codebase/TermEdit.hs @@ -1,7 +1,5 @@ module Unison.Codebase.TermEdit where -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as H import Unison.Reference (Reference) data TermEdit = Replace Reference Typing | Deprecate @@ -17,15 +15,6 @@ references Deprecate = [] data Typing = Same | Subtype | Different deriving (Eq, Ord, Show) -instance Hashable Typing where - tokens Same = [H.Tag 0] - tokens Subtype = [H.Tag 1] - tokens Different = [H.Tag 2] - -instance Hashable TermEdit where - tokens (Replace r t) = [H.Tag 0] ++ H.tokens r ++ H.tokens t - tokens Deprecate = [H.Tag 1] - toReference :: TermEdit -> Maybe Reference toReference (Replace r _) = Just r toReference Deprecate = Nothing diff --git a/parser-typechecker/src/Unison/Codebase/TypeEdit.hs b/parser-typechecker/src/Unison/Codebase/TypeEdit.hs index a6d2cd665c..de3599db6a 100644 --- a/parser-typechecker/src/Unison/Codebase/TypeEdit.hs +++ b/parser-typechecker/src/Unison/Codebase/TypeEdit.hs @@ -1,8 +1,6 @@ module Unison.Codebase.TypeEdit where import Unison.Reference (Reference) -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as H data TypeEdit = Replace Reference | Deprecate deriving (Eq, Ord, Show) @@ -11,10 +9,6 @@ references :: TypeEdit -> [Reference] references (Replace r) = [r] references Deprecate = [] -instance Hashable TypeEdit where - tokens (Replace r) = H.Tag 0 : H.tokens r - tokens Deprecate = [H.Tag 1] - toReference :: TypeEdit -> Maybe Reference toReference (Replace r) = Just r toReference Deprecate = Nothing diff --git a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs new file mode 100644 index 0000000000..83a7766d2a --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs @@ -0,0 +1,37 @@ +{-# LANGUAGE DerivingVia #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE ViewPatterns #-} + +module Unison.Hashing.V2.Branch (Raw (..), MdValues (..)) where + +import Unison.Hash (Hash) +import Unison.Hashable (Hashable) +import qualified Unison.Hashable as H +import Unison.Hashing.V2.Reference (Reference) +import Unison.Hashing.V2.Referent (Referent) +import Unison.NameSegment (NameSegment) +import Unison.Prelude + +type MetadataValue = Reference + +newtype MdValues = MdValues (Set MetadataValue) + deriving (Eq, Ord, Show) + deriving (Hashable) via Set MetadataValue + +data Raw = Raw + { terms :: Map NameSegment (Map Referent MdValues), + types :: Map NameSegment (Map Reference MdValues), + patches :: Map NameSegment Hash, + children :: Map NameSegment Hash + } + +instance Hashable Raw where + tokens b = + [ H.accumulateToken (terms b), + H.accumulateToken (types b), + H.accumulateToken (children b), + H.accumulateToken (patches b) + ] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs new file mode 100644 index 0000000000..a1c11bf41d --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} + +module Unison.Hashing.V2.Causal (Causal (..)) where + +import Data.Set (Set) +import Unison.Hash (Hash) +import Unison.Hashable (Hashable) +import qualified Unison.Hashable as H + +data Causal e = Causal {current :: e, parents :: Set Hash} + +instance Hashable e => Hashable (Causal e) where + tokens c = H.tokens (current c, parents c) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index c2b781edb5..7c05d1e4fd 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -1,8 +1,11 @@ +{-# LANGUAGE PartialTypeSignatures #-} {-# LANGUAGE ViewPatterns #-} module Unison.Hashing.V2.Convert ( ResolutionResult, + tokensBranch0, hashDecls, + hashPatch, hashClosedTerm, hashTermComponents, hashTypeComponents, @@ -13,23 +16,42 @@ where import Control.Lens (over, _3) import qualified Control.Lens as Lens +import Data.Bifunctor (bimap) +import Data.Foldable (toList) import Data.Map (Map) +import qualified Data.Map as Map import Data.Set (Set) import qualified Data.Set as Set import qualified Unison.ABT as ABT +import qualified Unison.Codebase.Branch.Type as Memory.Branch +import qualified Unison.Codebase.Causal as Memory.Causal +import qualified Unison.Codebase.Patch as Memory.Patch +import qualified Unison.Codebase.TermEdit as Memory.TermEdit +import qualified Unison.Codebase.TypeEdit as Memory.TypeEdit import qualified Unison.DataDeclaration as Memory.DD +import Unison.Hash (Hash) +import Unison.Hashable (Accumulate, Token) +import qualified Unison.Hashable as H +import qualified Unison.Hashing.V2.Branch as Hashing.Branch +import qualified Unison.Hashing.V2.Causal as Hashing.Causal import qualified Unison.Hashing.V2.DataDeclaration as Hashing.DD +import qualified Unison.Hashing.V2.Patch as Hashing.Patch import qualified Unison.Hashing.V2.Pattern as Hashing.Pattern import qualified Unison.Hashing.V2.Reference as Hashing.Reference import qualified Unison.Hashing.V2.Referent as Hashing.Referent import qualified Unison.Hashing.V2.Term as Hashing.Term +import qualified Unison.Hashing.V2.TermEdit as Hashing.TermEdit import qualified Unison.Hashing.V2.Type as Hashing.Type +import qualified Unison.Hashing.V2.TypeEdit as Hashing.TypeEdit +import Unison.NameSegment (NameSegment) import Unison.Names.ResolutionResult (ResolutionResult) import qualified Unison.Pattern as Memory.Pattern import qualified Unison.Reference as Memory.Reference import qualified Unison.Referent as Memory.Referent import qualified Unison.Term as Memory.Term import qualified Unison.Type as Memory.Type +import qualified Unison.Util.Relation as Relation +import qualified Unison.Util.Star3 as Memory.Star3 import Unison.Var (Var) typeToReference :: Var v => Memory.Type.Type v a -> Memory.Reference.Reference @@ -234,3 +256,89 @@ h2mReference = \case h2mReferenceId :: Hashing.Reference.Id -> Memory.Reference.Id h2mReferenceId (Hashing.Reference.Id h i) = Memory.Reference.Id h i + +m2hPatch :: Memory.Patch.Patch -> Hashing.Patch.Patch +m2hPatch (Memory.Patch.Patch termEdits typeEdits) = + Hashing.Patch.Patch termEdits' typeEdits' + where + typeEdits' = + Map.fromList + . map (bimap m2hReference (Set.map m2hTypeEdit)) + . Map.toList + $ Relation.toMultimap typeEdits + termEdits' = + Map.fromList + . map (bimap (Hashing.Referent.Ref . m2hReference) (Set.map m2hTermEdit)) + . Map.toList + $ Relation.toMultimap termEdits + m2hTermEdit = \case + Memory.TermEdit.Replace r _ -> Hashing.TermEdit.Replace (Hashing.Referent.Ref $ m2hReference r) + Memory.TermEdit.Deprecate -> Hashing.TermEdit.Deprecate + m2hTypeEdit = \case + Memory.TypeEdit.Replace r -> Hashing.TypeEdit.Replace (m2hReference r) + Memory.TypeEdit.Deprecate -> Hashing.TypeEdit.Deprecate + +hashPatch :: Memory.Patch.Patch -> Hash +hashPatch = H.accumulate' . m2hPatch + +hashBranch :: Memory.Branch.Branch m -> Hash +hashBranch = H.accumulate . tokensBranch + +tokensBranch :: Accumulate h => Memory.Branch.Branch m -> [Token h] +tokensBranch = H.tokens . m2hCausal . Memory.Branch._history + +tokensBranch0 :: Accumulate h => Memory.Branch.Branch0 m -> [Token h] +tokensBranch0 = H.tokens . m2hBranch + +m2hCausal :: Memory.Branch.UnwrappedBranch m -> Hashing.Causal.Causal Hashing.Branch.Raw +m2hCausal = \case + Memory.Causal.One _h e -> + Hashing.Causal.Causal (m2hBranch e) mempty + Memory.Causal.Cons _h e (ht, _) -> + Hashing.Causal.Causal (m2hBranch e) $ Set.singleton (Memory.Causal.unRawHash ht) + Memory.Causal.Merge _h e ts -> + Hashing.Causal.Causal (m2hBranch e) $ Set.map Memory.Causal.unRawHash (Map.keysSet ts) + +m2hBranch :: Memory.Branch.Branch0 m -> Hashing.Branch.Raw +m2hBranch b = + Hashing.Branch.Raw + (doTerms (Memory.Branch._terms b)) + (doTypes (Memory.Branch._types b)) + (doPatches (Memory.Branch._edits b)) + (doChildren (Memory.Branch._children b)) + where + -- is there a more readable way to structure these that's also linear? + doTerms :: Memory.Branch.Star Memory.Referent.Referent NameSegment -> Map NameSegment (Map Hashing.Referent.Referent Hashing.Branch.MdValues) + doTerms s = + Map.fromList + [ (ns, m2) + | ns <- toList . Relation.ran $ Memory.Star3.d1 s, + let m2 = + Map.fromList + [ (m2hReferent r, md) + | r <- toList . Relation.lookupRan ns $ Memory.Star3.d1 s, + let mdrefs1to2 (_typeR1, valR1) = m2hReference valR1 + md = Hashing.Branch.MdValues . Set.map mdrefs1to2 . Relation.lookupDom r $ Memory.Star3.d3 s + ] + ] + + doTypes :: Memory.Branch.Star Memory.Reference.Reference NameSegment -> Map NameSegment (Map Hashing.Reference.Reference Hashing.Branch.MdValues) + doTypes s = + Map.fromList + [ (ns, m2) + | ns <- toList . Relation.ran $ Memory.Star3.d1 s, + let m2 = + Map.fromList + [ (m2hReference r, md) + | r <- toList . Relation.lookupRan ns $ Memory.Star3.d1 s, + let mdrefs1to2 (_typeR1, valR1) = m2hReference valR1 + md :: Hashing.Branch.MdValues + md = Hashing.Branch.MdValues . Set.map mdrefs1to2 . Relation.lookupDom r $ Memory.Star3.d3 s + ] + ] + + doPatches :: Map NameSegment (Memory.Branch.EditHash, m Memory.Patch.Patch) -> Map NameSegment Hash + doPatches = Map.map fst + + doChildren :: Map NameSegment (Memory.Branch.Branch m) -> Map NameSegment Hash + doChildren = Map.map (Memory.Causal.unRawHash . Memory.Branch.headHash) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs new file mode 100644 index 0000000000..9b87ba32f1 --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs @@ -0,0 +1,26 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} + +module Unison.Hashing.V2.Patch (Patch(..)) where + +import Unison.Hashing.V2.Reference (Reference) +import Data.Map (Map) +import Unison.Hashing.V2.Referent (Referent) +import Data.Set (Set) +import Unison.Hashing.V2.TermEdit (TermEdit) +import Unison.Hashing.V2.TypeEdit (TypeEdit) +import Unison.Hashable (Hashable) +import qualified Unison.Hashable as H + +data Patch = Patch + { termEdits :: Map Referent (Set TermEdit), + typeEdits :: Map Reference (Set TypeEdit) + } + +instance Hashable Patch where + tokens p = + [ H.accumulateToken (termEdits p), + H.accumulateToken (typeEdits p) + ] + diff --git a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs new file mode 100644 index 0000000000..b75773c09c --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs @@ -0,0 +1,12 @@ +module Unison.Hashing.V2.TermEdit (TermEdit (..)) where + +import Unison.Hashable (Hashable) +import qualified Unison.Hashable as H +import Unison.Hashing.V2.Referent (Referent) + +data TermEdit = Replace Referent | Deprecate + deriving (Eq, Ord, Show) + +instance Hashable TermEdit where + tokens (Replace r) = [H.Tag 0] ++ H.tokens r + tokens Deprecate = [H.Tag 1] diff --git a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs new file mode 100644 index 0000000000..bc9bddfb54 --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs @@ -0,0 +1,12 @@ +module Unison.Hashing.V2.TypeEdit (TypeEdit(..)) where + +import Unison.Hashable (Hashable) +import qualified Unison.Hashable as H +import Unison.Hashing.V2.Reference (Reference) + +data TypeEdit = Replace Reference | Deprecate + deriving (Eq, Ord, Show) + +instance Hashable TypeEdit where + tokens (Replace r) = H.Tag 0 : H.tokens r + tokens Deprecate = [H.Tag 1] diff --git a/parser-typechecker/tests/Unison/Test/GitSync.hs b/parser-typechecker/tests/Unison/Test/GitSync.hs index 532d4e8fa9..f5ac0804d1 100644 --- a/parser-typechecker/tests/Unison/Test/GitSync.hs +++ b/parser-typechecker/tests/Unison/Test/GitSync.hs @@ -194,12 +194,16 @@ test = scope "gitsync22" . tests $ ```ucm .> pull ${repo} .> history - .> reset-root #t21 + .> reset-root #97u .> history ``` |]) -- Not sure why this hash is here. -- Is it to test `reset-root`? -- Or to notice a change in hashing? + -- Or to test that two distinct points of history were pulled? + -- It would be great to not need the explicit hash here, + -- since it does change periodically. + -- Though, I guess that should also be rare, so maybe this is fine. , pushPullTest "one-term" fmt diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index d49851fb88..7b3c526cc1 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -28,8 +28,11 @@ library Unison.Builtin.Terms Unison.Codebase Unison.Codebase.Branch + Unison.Codebase.Branch.BranchDiff Unison.Codebase.Branch.Merge Unison.Codebase.Branch.Names + Unison.Codebase.Branch.Raw + Unison.Codebase.Branch.Type Unison.Codebase.BranchDiff Unison.Codebase.BranchUtil Unison.Codebase.BuiltinAnnotation @@ -102,14 +105,19 @@ library Unison.Hashing.V1.Term Unison.Hashing.V1.Type Unison.Hashing.V2.ABT + Unison.Hashing.V2.Branch + Unison.Hashing.V2.Causal Unison.Hashing.V2.Convert Unison.Hashing.V2.DataDeclaration + Unison.Hashing.V2.Patch Unison.Hashing.V2.Pattern Unison.Hashing.V2.Reference Unison.Hashing.V2.Reference.Util Unison.Hashing.V2.Referent Unison.Hashing.V2.Term + Unison.Hashing.V2.TermEdit Unison.Hashing.V2.Type + Unison.Hashing.V2.TypeEdit Unison.Lexer Unison.Lexer.Pos Unison.NamePrinter diff --git a/unison-core/src/Unison/Pattern.hs b/unison-core/src/Unison/Pattern.hs index 909288615d..7ef394d1d9 100644 --- a/unison-core/src/Unison/Pattern.hs +++ b/unison-core/src/Unison/Pattern.hs @@ -10,7 +10,6 @@ import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.ConstructorType as CT import Unison.DataDeclaration.ConstructorId (ConstructorId) -import qualified Unison.Hashable as H import Unison.LabeledDependency (LabeledDependency) import qualified Unison.LabeledDependency as LD import Unison.Reference (Reference) @@ -64,11 +63,6 @@ updateDependencies tms p = case p of SequenceOp loc lhs op rhs -> SequenceOp loc (updateDependencies tms lhs) op (updateDependencies tms rhs) -instance H.Hashable SeqOp where - tokens Cons = [H.Tag 0] - tokens Snoc = [H.Tag 1] - tokens Concat = [H.Tag 2] - instance Show (Pattern loc) where show (Unbound _ ) = "Unbound" show (Var _ ) = "Var" @@ -104,24 +98,6 @@ setLoc p loc = case p of SequenceOp _ ph op pt -> SequenceOp loc ph op pt x -> fmap (const loc) x -instance H.Hashable (Pattern p) where - tokens (Unbound _) = [H.Tag 0] - tokens (Var _) = [H.Tag 1] - tokens (Boolean _ b) = H.Tag 2 : [H.Tag $ if b then 1 else 0] - tokens (Int _ n) = H.Tag 3 : [H.Int n] - tokens (Nat _ n) = H.Tag 4 : [H.Nat n] - tokens (Float _ f) = H.Tag 5 : H.tokens f - tokens (Constructor _ r n args) = - [H.Tag 6, H.accumulateToken r, H.Nat $ fromIntegral n, H.accumulateToken args] - tokens (EffectPure _ p) = H.Tag 7 : H.tokens p - tokens (EffectBind _ r n args k) = - [H.Tag 8, H.accumulateToken r, H.Nat $ fromIntegral n, H.accumulateToken args, H.accumulateToken k] - tokens (As _ p) = H.Tag 9 : H.tokens p - tokens (Text _ t) = H.Tag 10 : H.tokens t - tokens (SequenceLiteral _ ps) = H.Tag 11 : concatMap H.tokens ps - tokens (SequenceOp _ l op r) = H.Tag 12 : H.tokens op ++ H.tokens l ++ H.tokens r - tokens (Char _ c) = H.Tag 13 : H.tokens c - instance Eq (Pattern loc) where Unbound _ == Unbound _ = True Var _ == Var _ = True diff --git a/unison-core/src/Unison/Reference.hs b/unison-core/src/Unison/Reference.hs index 84a8c16b41..2699203367 100644 --- a/unison-core/src/Unison/Reference.hs +++ b/unison-core/src/Unison/Reference.hs @@ -35,7 +35,6 @@ import Unison.Prelude import qualified Data.Map as Map import qualified Data.Text as Text import qualified Unison.Hash as H -import Unison.Hashable as Hashable import Unison.ShortHash (ShortHash) import qualified Unison.ShortHash as SH import Data.Char (isDigit) @@ -170,7 +169,3 @@ groupByComponent refs = done $ foldl' insert Map.empty refs instance Show Id where show = SH.toString . SH.take 5 . toShortHash . DerivedId instance Show Reference where show = SH.toString . SH.take 5 . toShortHash - -instance Hashable.Hashable Reference where - tokens (Builtin txt) = [Hashable.Tag 0, Hashable.Text txt] - tokens (DerivedId (Id h i)) = [Hashable.Tag 1, Hashable.Bytes (H.toBytes h), Hashable.Nat i] From 4110fe501f7af049654225fa06d48a7f5dabc94d Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 4 Oct 2021 20:10:36 -0400 Subject: [PATCH 018/297] updated hashes in transcripts --- .../fix-1381-excess-propagate.output.md | 2 +- unison-src/transcripts/link.output.md | 4 +- unison-src/transcripts/mergeloop.output.md | 6 +-- unison-src/transcripts/merges.output.md | 12 ++--- unison-src/transcripts/reflog.output.md | 12 ++--- unison-src/transcripts/squash.output.md | 52 +++++++++---------- 6 files changed, 44 insertions(+), 44 deletions(-) diff --git a/unison-src/transcripts/fix-1381-excess-propagate.output.md b/unison-src/transcripts/fix-1381-excess-propagate.output.md index c1c5ebd306..23e46270f7 100644 --- a/unison-src/transcripts/fix-1381-excess-propagate.output.md +++ b/unison-src/transcripts/fix-1381-excess-propagate.output.md @@ -37,7 +37,7 @@ As of the time of this writing, the history for `X` should be a single node, `#4 - □ #kgp7jlhvdu (start of history) + □ #49c1c5kdrf (start of history) ``` however, as of release/M1i, we saw an extraneous node appear. If your `ucm` is fixed, you won't see it below: diff --git a/unison-src/transcripts/link.output.md b/unison-src/transcripts/link.output.md index ed5dbb9111..eb852121bb 100644 --- a/unison-src/transcripts/link.output.md +++ b/unison-src/transcripts/link.output.md @@ -194,11 +194,11 @@ myLibrary.h x = x + 3 Note: The most recent namespace hash is immediately below this message. - ⊙ #da67sk9qv4 + ⊙ #l3kl2s1g8u - □ #jil43gmsb8 (start of history) + □ #hcaq4np5kg (start of history) .> unlink coolFunction.doc coolFunction diff --git a/unison-src/transcripts/mergeloop.output.md b/unison-src/transcripts/mergeloop.output.md index 2ecc271ed6..b081461c28 100644 --- a/unison-src/transcripts/mergeloop.output.md +++ b/unison-src/transcripts/mergeloop.output.md @@ -135,9 +135,9 @@ b = 2 `history #som3n4m3space` to view history starting from a given namespace hash. - ⊙ #m0kphpti4f + ⊙ #lt6ilkmejb ⑃ - #qdrtol3ouc - #tc4r914dk6 + #d1o3h34d1d + #q5gf6eubo8 ``` diff --git a/unison-src/transcripts/merges.output.md b/unison-src/transcripts/merges.output.md index 5349a33de5..c701e6288b 100644 --- a/unison-src/transcripts/merges.output.md +++ b/unison-src/transcripts/merges.output.md @@ -112,13 +112,13 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Note: The most recent namespace hash is immediately below this message. - ⊙ #ur696qo4bk + ⊙ #k73en4ol5u - Deletes: feature1.y - ⊙ #gt1h21l569 + ⊙ #kppiai21uj + Adds / updates: @@ -129,26 +129,26 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Original name New name(s) feature1.y master.y - ⊙ #fvm746gget + ⊙ #aeq3mapevq + Adds / updates: feature1.y - ⊙ #blkp43cgkh + ⊙ #elt46i2p4t > Moves: Original name New name x master.x - ⊙ #iollrn37ut + ⊙ #3fahnivq8k + Adds / updates: x - □ #2nhqjjv2k7 (start of history) + □ #0t3l3a96j4 (start of history) ``` To resurrect an old version of a namespace, you can learn its hash via the `history` command, then use `fork #namespacehash .newname`. diff --git a/unison-src/transcripts/reflog.output.md b/unison-src/transcripts/reflog.output.md index 3d5961a233..561e725dfc 100644 --- a/unison-src/transcripts/reflog.output.md +++ b/unison-src/transcripts/reflog.output.md @@ -59,17 +59,17 @@ y = 2 most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #02dm9hg2b0 .old` to make an old namespace + `fork #48p1ddrv45 .old` to make an old namespace accessible again, - `reset-root #02dm9hg2b0` to reset the root namespace and + `reset-root #48p1ddrv45` to reset the root namespace and its history to that of the specified namespace. - 1. #gf7d1fct1r : add - 2. #02dm9hg2b0 : add - 3. #2nhqjjv2k7 : builtins.merge - 4. #sjg2v58vn2 : (initial reflogged namespace) + 1. #ja190ddf7a : add + 2. #48p1ddrv45 : add + 3. #0t3l3a96j4 : builtins.merge + 4. #1juguqe7eo : (initial reflogged namespace) ``` If we `reset-root` to its previous value, `y` disappears. diff --git a/unison-src/transcripts/squash.output.md b/unison-src/transcripts/squash.output.md index d2a3b757d7..5f7f081625 100644 --- a/unison-src/transcripts/squash.output.md +++ b/unison-src/transcripts/squash.output.md @@ -13,7 +13,7 @@ Let's look at some examples. We'll start with a namespace with just the builtins - □ #r5tr835861 (start of history) + □ #ljnpbrl0o0 (start of history) .> fork builtin builtin2 @@ -42,21 +42,21 @@ Now suppose we `fork` a copy of builtin, then rename `Nat.+` to `frobnicate`, th Note: The most recent namespace hash is immediately below this message. - ⊙ #bc6u6h4gpr + ⊙ #ku5ibs1nsr > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #lu6del4qbm + ⊙ #p8cl8dadf3 > Moves: Original name New name Nat.+ Nat.frobnicate - □ #r5tr835861 (start of history) + □ #ljnpbrl0o0 (start of history) ``` If we merge that back into `builtin`, we get that same chain of history: @@ -71,21 +71,21 @@ If we merge that back into `builtin`, we get that same chain of history: Note: The most recent namespace hash is immediately below this message. - ⊙ #bc6u6h4gpr + ⊙ #ku5ibs1nsr > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #lu6del4qbm + ⊙ #p8cl8dadf3 > Moves: Original name New name Nat.+ Nat.frobnicate - □ #r5tr835861 (start of history) + □ #ljnpbrl0o0 (start of history) ``` Let's try again, but using a `merge.squash` (or just `squash`) instead. The history will be unchanged: @@ -106,7 +106,7 @@ Let's try again, but using a `merge.squash` (or just `squash`) instead. The hist - □ #r5tr835861 (start of history) + □ #ljnpbrl0o0 (start of history) ``` The churn that happened in `mybuiltin` namespace ended up back in the same spot, so the squash merge of that namespace with our original namespace had no effect. @@ -191,47 +191,47 @@ At this point, Alice and Bob both have some history beyond what's in trunk: - □ #md499voko1 (start of history) + □ #hdqr1brb29 (start of history) .> history alice Note: The most recent namespace hash is immediately below this message. - ⊙ #4suqgqt42i + ⊙ #bin0oov6bc > Moves: Original name New name neatoFun productionReadyId - ⊙ #gvn274aiis + ⊙ #ccsg56jgoh > Moves: Original name New name radNumber superRadNumber - ⊙ #mg2d7kkbck + ⊙ #a7iq2ak3tk + Adds / updates: bodaciousNumero neatoFun radNumber - □ #md499voko1 (start of history) + □ #hdqr1brb29 (start of history) .> history bob Note: The most recent namespace hash is immediately below this message. - ⊙ #gvaau8vv11 + ⊙ #h90udgqpmb + Adds / updates: babyDon'tHurtMe no whatIsLove - □ #md499voko1 (start of history) + □ #hdqr1brb29 (start of history) ``` Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob both made their changes in one single commit. @@ -257,13 +257,13 @@ Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob bot Note: The most recent namespace hash is immediately below this message. - ⊙ #n6ps07cufq + ⊙ #ui8bdim4lp + Adds / updates: bodaciousNumero productionReadyId superRadNumber - □ #md499voko1 (start of history) + □ #hdqr1brb29 (start of history) .> merge.squash bob trunk @@ -285,19 +285,19 @@ Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob bot Note: The most recent namespace hash is immediately below this message. - ⊙ #qksvuct0ji + ⊙ #ibjuc8j93d + Adds / updates: babyDon'tHurtMe no whatIsLove - ⊙ #n6ps07cufq + ⊙ #ui8bdim4lp + Adds / updates: bodaciousNumero productionReadyId superRadNumber - □ #md499voko1 (start of history) + □ #hdqr1brb29 (start of history) ``` Since squash merges don't produce any merge nodes, we can `undo` a couple times to get back to our starting state: @@ -336,7 +336,7 @@ Since squash merges don't produce any merge nodes, we can `undo` a couple times - □ #md499voko1 (start of history) + □ #hdqr1brb29 (start of history) ``` This time, we'll first squash Alice and Bob's changes together before squashing their combined changes into `trunk`. The resulting `trunk` will have just a single entry in it, combining both Alice and Bob's changes: @@ -380,14 +380,14 @@ This time, we'll first squash Alice and Bob's changes together before squashing Note: The most recent namespace hash is immediately below this message. - ⊙ #vlsmrlnl0o + ⊙ #q9stiuk3ke + Adds / updates: babyDon'tHurtMe bodaciousNumero no productionReadyId superRadNumber whatIsLove - □ #md499voko1 (start of history) + □ #hdqr1brb29 (start of history) ``` So, there you have it. With squashing, you can control the granularity of your history. @@ -420,7 +420,7 @@ Another thing we can do is `squash` into an empty namespace. This effectively ma - □ #hqtl9h3ldp (start of history) + □ #3vvamd3psi (start of history) ``` There's nothing really special here, `squash src dest` discards `src` history that comes after the LCA of `src` and `dest`, it's just that in the case of an empty namespace, that LCA is the beginning of time (the empty namespace), so all the history of `src` is discarded. @@ -485,13 +485,13 @@ This checks to see that squashing correctly preserves deletions: Note: The most recent namespace hash is immediately below this message. - ⊙ #o2sbu36ahm + ⊙ #j5p1fc9jq8 - Deletes: Nat.* Nat.+ - □ #r5tr835861 (start of history) + □ #ljnpbrl0o0 (start of history) ``` Notice that `Nat.+` and `Nat.*` are deleted by the squash, and we see them deleted in one atomic step in the history. From 201db24d25ac69fb09a085b01ecdd150c4e9d5f4 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 4 Oct 2021 20:10:50 -0400 Subject: [PATCH 019/297] updated hashes in AuthorInfo.hs --- parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs b/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs index 638d672187..c76b232b2e 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs @@ -65,5 +65,5 @@ createAuthorInfo a t = createAuthorInfo' . unpack <$> liftIO (getRandomBytes 32) (guidType, guidTypeRef) = (Type.ref a guidTypeRef, unsafeParse guidHash) unsafeParse = either error id . Reference.fromText guidHash = "#rc29vdqe019p56kupcgkg07fkib86r3oooatbmsgfbdsgpmjhsh00l307iuts3r973q5etb61vbjkes42b6adb3mkorusvmudiuorno" - copyrightHolderHash = "#aohndsu9bl844vspujp142j5aijv86rifmnrbnjvpv3h3f3aekn45rj5s1uf1ucrrtm5urbc5d1ajtm7lqq1tr8lkgv5fathp6arqug" - authorHash = "#5hi1vvs5t1gmu6vn1kpqmgksou8ie872j31gc294lgqks71di6gm3d4ugnrr4mq8ov0ap1e20lq099d5g6jjf9c6cbp361m9r9n5g50" + copyrightHolderHash = "#jeaknsbobmr6pdj9bga290pj1qckqsemiu1qkg7l9s6p88ot111218jkoe6l19hjpdqctpd0c87capaf3j5qlcim1uh1pq23pu0ebsg" + authorHash = "#i8f8ru3p8ijof9r26lskplmjj45rle8jdh31n62cef2r0tbj6fgjkcu2ljh4m44lo16if0fcdp7eb5fqo1iard47l4cllo7g244kmo0" From f2e0e189355e58756fe879d2154ef0baf466233d Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Tue, 5 Oct 2021 09:27:36 -0400 Subject: [PATCH 020/297] address warnings --- .../src/Unison/Hashing/V2/Convert.hs | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 7c05d1e4fd..c5d8e44f25 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -281,17 +281,21 @@ m2hPatch (Memory.Patch.Patch termEdits typeEdits) = hashPatch :: Memory.Patch.Patch -> Hash hashPatch = H.accumulate' . m2hPatch -hashBranch :: Memory.Branch.Branch m -> Hash -hashBranch = H.accumulate . tokensBranch - -tokensBranch :: Accumulate h => Memory.Branch.Branch m -> [Token h] -tokensBranch = H.tokens . m2hCausal . Memory.Branch._history - tokensBranch0 :: Accumulate h => Memory.Branch.Branch0 m -> [Token h] tokensBranch0 = H.tokens . m2hBranch -m2hCausal :: Memory.Branch.UnwrappedBranch m -> Hashing.Causal.Causal Hashing.Branch.Raw -m2hCausal = \case +-- hashing of branches isn't currently delegated here, because it's enmeshed +-- with every `cons` or `step` function. I think it would be good to do while +-- we're updating the hash function, but I'm also not looking forward to doing it +-- and it's not clearly a problem yet. +_hashBranch :: Memory.Branch.Branch m -> Hash +_hashBranch = H.accumulate . _tokensBranch + +_tokensBranch :: Accumulate h => Memory.Branch.Branch m -> [Token h] +_tokensBranch = H.tokens . _m2hCausal . Memory.Branch._history + +_m2hCausal :: Memory.Branch.UnwrappedBranch m -> Hashing.Causal.Causal Hashing.Branch.Raw +_m2hCausal = \case Memory.Causal.One _h e -> Hashing.Causal.Causal (m2hBranch e) mempty Memory.Causal.Cons _h e (ht, _) -> From 90dd6fb2b14327d867edee700bf21ee19688ac78 Mon Sep 17 00:00:00 2001 From: Arya Irani <538571+aryairani@users.noreply.github.com> Date: Thu, 14 Oct 2021 19:45:16 -0400 Subject: [PATCH 021/297] Update parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs --- parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs | 1 - 1 file changed, 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs index fb1a13bb6b..4b0819bcd9 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs @@ -134,7 +134,6 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour SyncRemoteRootBranch repo branch syncMode -> lift $ Codebase.pushGitRootBranch codebase branch repo syncMode LoadTerm r -> lift $ Codebase.getTerm codebase r - -- LoadTermComponentWithType :: H.Hash -> Command m i v (Maybe [(Term v Ann, Type v Ann)]) LoadTypeOfTerm r -> lift $ Codebase.getTypeOfTerm codebase r LoadTermComponentWithTypes h -> lift $ Codebase.getTermComponentWithTypes codebase h LoadType r -> lift $ Codebase.getTypeDeclaration codebase r From c668ca9d23db30e8079dacc001347d6f7c5b938d Mon Sep 17 00:00:00 2001 From: Arya Irani <538571+aryairani@users.noreply.github.com> Date: Thu, 14 Oct 2021 19:45:23 -0400 Subject: [PATCH 022/297] Update parser-typechecker/src/Unison/Codebase/Branch/Type.hs --- parser-typechecker/src/Unison/Codebase/Branch/Type.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Codebase/Branch/Type.hs b/parser-typechecker/src/Unison/Codebase/Branch/Type.hs index 393d2f9a34..f7442f016e 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch/Type.hs @@ -68,4 +68,4 @@ instance Eq (Branch0 m) where a == b = _terms a == _terms b && _types a == _types b && _children a == _children b - && (fmap fst . _edits) a == (fmap fst . _edits) b \ No newline at end of file + && (fmap fst . _edits) a == (fmap fst . _edits) b From 6d5ac667a5fae8907b59463ea70c1263d5a0f5b6 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Tue, 19 Oct 2021 11:01:26 -0400 Subject: [PATCH 023/297] first group commit --- .../U/Codebase/Sqlite/Branch/Format.hs | 90 +++++++- .../src/Unison/Codebase/SqliteCodebase.hs | 15 +- .../SqliteCodebase/MigrateSchema12.hs | 196 ++++++++++++++++++ .../unison-parser-typechecker.cabal | 1 + 4 files changed, 296 insertions(+), 6 deletions(-) create mode 100644 parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs index 1c9c18a870..f9f0182ade 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs @@ -6,12 +6,18 @@ import U.Codebase.Sqlite.Branch.Full (LocalBranch) import U.Codebase.Sqlite.DbId (CausalHashId, BranchObjectId, ObjectId, PatchObjectId, TextId) import Data.ByteString (ByteString) --- |you can use the exact same `BranchLocalIds` when converting between `Full` and `Diff` +-- | A 'BranchFormat' is a deserialized namespace object (@object.bytes@). +-- +-- you can use the exact same `BranchLocalIds` when converting between `Full` and `Diff` data BranchFormat = Full BranchLocalIds LocalBranch | Diff BranchObjectId BranchLocalIds LocalDiff deriving Show +-- | A 'BranchLocalIds' is a mapping between local ids (local to this object) encoded as offsets, and actual database ids. +-- +-- For example, a @branchTextLookup@ vector of @[50, 74]@ means "local id 0 corresponds to database text id 50, and +-- local id 1 corresponds to database text id 74". data BranchLocalIds = LocalIds { branchTextLookup :: Vector TextId, branchDefnLookup :: Vector ObjectId, @@ -23,3 +29,85 @@ data BranchLocalIds = LocalIds data SyncBranchFormat = SyncFull BranchLocalIds ByteString | SyncDiff BranchObjectId BranchLocalIds ByteString + +{- +projects.arya.message = "hello, world" -> -> #abc +projects.arya.program = printLine message -> printLine #abc -> #def + +projects.arya { + terms = { "message" -> #abc + , "program" -> #def + } +} + +text table = + { 1 -> "hello, world" + , 2 -> "message" + , 3 -> "program" + } + +hash table = + { 10 -> "abc" + , 11 -> "def" + } + +object table = + { ... + } + +projects.arya { + terms = { TextId 2 -> Reference { builtin = null, object = ObjectId 20, position = 0 } + , TextId 3 -> Reference { builtin = null, object = ObjectId 21, position = 0 } + } +} + +stored in original codebase: +projects.arya = BranchFormat.Full { + localIds = { + text = [2, 3] + hash = [10, 11] + object = [20, 21] + } + localBranch = { + terms = { LocalTextId 0 -> Reference { builtin = null, object = LocalObjectId 0, position = 0 } + , LocalTextId 1 -> Reference { builtin = null, object = LocalObjectId 1, position = 0 } + } + ... + } +} + +write to dest codebase: +text table = + { ... + , 901 -> "hello, world" + , 902 -> "message" + , 903 -> "program" + } + +hash table = + { ... + , 500 -> "abc" + , 501 -> "def" + } + +projects.arya { + + -- updated copy of original localIds, with new mapping + localIds = { + text = [902, 903] + hash = [500, 501] + object = [300, 301] + } + + -- copy unmodified from original + localBranch = { + terms = { LocalTextId 0 -> Reference { builtin = null, object = LocalObjectId 0, position = 0 } + , LocalTextId 1 -> Reference { builtin = null, object = LocalObjectId 1, position = 0 } + } + ... + } +} + + + +-} diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 0c605d4279..aa0654ef63 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -276,8 +276,8 @@ sqliteCodebase debugName root = do termCache <- Cache.semispaceCache 8192 -- pure Cache.nullCache -- to disable typeOfTermCache <- Cache.semispaceCache 8192 declCache <- Cache.semispaceCache 1024 - runReaderT Q.schemaVersion conn >>= \case - SchemaVersion 1 -> do + let + startCodebase = do rootBranchCache <- newTVarIO Nothing -- The v1 codebase interface has operations to read and write individual definitions -- whereas the v2 codebase writes them as complete components. These two fields buffer @@ -799,6 +799,9 @@ sqliteCodebase debugName root = do (Just \l r -> runDB conn $ fromJust <$> before l r) in code ) + runReaderT Q.schemaVersion conn >>= \case + SchemaVersion 2 -> startCodebase + SchemaVersion 1 -> _migrate12 conn >> startCodebase v -> shutdownConnection conn $> Left v -- well one or the other. :zany_face: the thinking being that they wouldn't hash-collide @@ -833,14 +836,16 @@ syncInternal :: Branch m -> m () syncInternal progress srcConn destConn b = time "syncInternal" do + -- We start a savepoint on the src connection because it seemed to speed things up. + -- Mitchell says: that doesn't sound right... why would that be the case? + -- TODO: look into this; this connection should be used only for reads. runDB srcConn $ Q.savepoint "sync" runDB destConn $ Q.savepoint "sync" result <- runExceptT do let syncEnv = Sync22.Env srcConn destConn (16 * 1024 * 1024) -- we want to use sync22 wherever possible - -- so for each branch, we'll check if it exists in the destination branch - -- or if it exists in the source branch, then we can sync22 it - -- oh god but we have to figure out the dbid + -- so for each source branch, we'll check if it exists in the destination codebase + -- or if it exists in the source codebase, then we can sync22 it -- if it doesn't exist in the dest or source branch, -- then just use putBranch to the dest let se :: forall m a. Functor m => (ExceptT Sync22.Error m a -> ExceptT SyncEphemeral.Error m a) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs new file mode 100644 index 0000000000..4a94cd8dd8 --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -0,0 +1,196 @@ +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE PatternSynonyms #-} + +module Unison.Codebase.SqliteCodebase.MigrateSchema12 where + +import Control.Monad.Reader (MonadReader) +import Control.Monad.State (MonadState) +import U.Codebase.Sqlite.Connection (Connection) +import U.Codebase.Sqlite.DbId (CausalHashId, HashId, ObjectId) +import U.Codebase.Sqlite.ObjectType (ObjectType) +import qualified U.Codebase.Sqlite.ObjectType as OT +import qualified U.Codebase.Sqlite.Queries as Q +import qualified U.Codebase.Sqlite.Reference as S.Reference +import U.Codebase.Sync (Sync (Sync)) +import qualified U.Codebase.Sync as Sync +import qualified U.Codebase.WatchKind as WK +import Unison.Prelude (ByteString, Map, MonadIO) +import Unison.Reference (Pos) +import Unison.Referent (ConstructorId) + +-- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) +-- lookupCtor (ConstructorMapping cm) oid pos cid = +-- Map.lookup oid cm >>= (Vector.!? fromIntegral pos) >>= (Vector.!? cid) + +-- lookupTermRef :: TermLookup -> S.Reference -> Maybe S.Reference +-- lookupTermRef _tl (ReferenceBuiltin t) = Just (ReferenceBuiltin t) +-- lookupTermRef tl (ReferenceDerived id) = ReferenceDerived <$> lookupTermRefId tl id + +-- lookupTermRefId :: TermLookup -> S.Reference.Id -> Maybe S.Reference.Id +-- lookupTermRefId tl (Id oid pos) = Id oid <$> lookupTermPos tl oid pos + +-- lookupTermPos :: TermLookup -> ObjectId -> Pos -> Maybe Pos +-- lookupTermPos (TermLookup tl) oid pos = Map.lookup oid tl >>= (Vector.!? fromIntegral pos) + +-- newtype ConstructorMapping = ConstructorMapping (Map ObjectId (Vector (Vector (Pos, ConstructorId)))) +-- newtype TermLookup = TermLookup (Map ObjectId (Vector Pos)) + +data MigrationState = MigrationState + { declLookup :: Map ObjectId (Map Pos Pos), + ctorLookup :: Map (ObjectId, Pos) (Map ConstructorId ConstructorId), + termLookup :: Map ObjectId (Map Pos Pos) + } + +{- +* Load entire codebase as a list +* Pick a term from the codebase +* Look up the references inside the term +* If any haven't been processed, add them to the "to process" stack, push the term you were working on back onto that stack +* Rebuild & rehash the term, store that +* For any data constructor terms inside, + * Store a map from old ConstructorId to new, based on the old and new reference hashes +* After rebuilding a cycle, map old Pos to new +-} + +-- Q: can we plan to hold the whole mapping in memory? ✅ +-- Q: a) update database in-place? or b) write to separate database and then overwrite? leaning (b). +-- note: we do need to rebuild namespaces, although we don't need to rehash them. + +-- cycle position index `Pos` +-- constructor index `ConstructorId` + +{- +data Maybe a = (Just Bar | Nothing X) + +-- changes due to missing size from ref(Y) +data X = MkX Y + +-- know old hash and old cycle positions +data Y = MkY Int +-} + +data Entity + = O ObjectId + | C CausalHashId + | W WK.WatchKind S.Reference.IdH + deriving (Eq, Ord, Show) + +data Env = Env {db :: Connection} + +-- -> m (TrySyncResult h) +migrationSync :: + (MonadIO m, MonadState MigrationState m, MonadReader Env m) => + Sync m Entity +migrationSync = Sync \case + -- To sync an object, + -- * If we have already synced it, we are done. + -- * Otherwise, read the object from the database and switch on its object type. + -- * See next steps below v + -- + -- To sync a term component object, + -- * If we have not already synced all dependencies, push syncing them onto the front of the work queue. + -- * Otherwise, ??? + -- + -- To sync a decl component object, + -- * If we have not already synced all dependencies, push syncing them onto the front of the work queue. + -- * Otherwise, ??? + -- + -- To sync a namespace object, + -- * Deserialize it and compute its dependencies (terms, types, patches, children). + -- * If we have not already synced all of its dependencies, push syncing them onto the front of the work queue. + -- * To sync a 'BranchFull', + -- * We need to make a new 'BranchFull' in memory, then insert it into the database under a new object id. + -- * Wait, we need to preserve the ordering of the types/terms, either by not changing them (but the orderings of the + -- reference ids used in keys is definitely not preserved by this migration), or by permuting the local id vectors, + -- but we may be at a level too low or high for us to care? + -- * Its 'LocalBranch' must have all references changed in-place per the (old (object id, pos) => new (object id, pos)) mapping. + -- position of any term/decl within its component has changed. Therefore, we need to adjust each referent's component position + -- * We need to recompute the References contained in the Branch body since the pos within the reference may have changed. + -- We can look up the position changes in our Migration State, they must have been added when computing the objects pointed to by the reference. + -- * The normalized object IDs within the body _likely_ don't need to change. + -- * Its 'BranchLocalIds' must be translated from the old codebase object IDs to the new object IDs, + -- we can use our MigrationState to look these up, since they must have already been migrated. + -- * To sync a 'BranchDiff', + -- * ??? + -- + -- To sync a patch object, ??? + -- + -- To sync a Causal + --- * If we haven't yet synced its parents, push them onto the work queue + --- * If we haven't yet synced the causal's value (namespace), push it onto the work queue. + --- * Rehash the Causal's valueHash AND CausalHash, and add the new causal, its hashes, and hash objects to the codebase under a fresh object ID + O objId -> do + let alreadySynced :: m Bool + alreadySynced = undefined + alreadySynced >>= \case + False -> do + (hId, objType, bytes) <- runSrc $ Q.loadObjectWithHashIdAndTypeById oId + migrateObject objType hId bytes + True -> pure Sync.PreviouslyDone + -- result <- runValidateT @(Set Entity) @m @ObjectId case objType of + -- To sync a causal, + -- 1. ??? + -- 2. Synced + C causalHashID -> _ + -- To sync a watch result, + -- 1. ??? + -- 2. Synced + W watchKind idH -> _ + +-- data ObjectType +-- = TermComponent -- 0 +-- | DeclComponent -- 1 +-- | Namespace -- 2 +-- | Patch -- 3 + +migrateObject :: ObjectType -> HashId -> ByteString -> m _ +migrateObject objType hash bytes = case objType of + OT.TermComponent -> migrateTermComponent hash bytes + OT.DeclComponent -> migrateTermComponent hash bytes + OT.Namespace -> migrateNamespace hash bytes + OT.Patch -> migratePatch hash bytes + +migratePatch :: HashId -> ByteString -> m _ +migratePatch = error "not implemented" + +migrateNamespace :: HashId -> ByteString -> m _ +migrateNamespace = error "not implemented" + +migrateTermComponent :: HashId -> ByteString -> m _ +migrateTermComponent = error "not implemented" + +-- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure +migrateSchema12 :: Applicative m => Connection -> m Bool +migrateSchema12 db = do + -- todo: drop and recreate corrected type/mentions index schema + -- do we want to garbage collect at this time? ✅ + -- or just convert everything without going in dependency order? ✅ + error "todo: go through " + -- todo: double-hash all the types and produce an constructor mapping + -- object ids will stay the same + -- todo: rehash all the terms using the new constructor mapping + -- and adding the type to the term + -- do we want to diff namespaces at this time? ❌ + -- do we want to look at supporting multiple simultaneous representations of objects at this time? + pure "todo: migrate12" + pure True + +-- -- remember that the component order might be different +-- rehashDeclComponent :: [Decl v a] -> (Hash, ConstructorMappings) +-- rehashDeclComponent decls = fmap decls <&> \case +-- +-- -- +-- error "todo: rehashDeclComponent" + +-- rewriteDeclComponent :: DeclFormat.LocallyIndexedComponent -> (Hash, DeclFormat.LocallyIndexedComponent, ConstructorMappings) +-- rewriteDeclComponent = +-- -- +-- error "todo: rehashDeclComponent" + +-- rehashDeclComponent :: [Decl v a] -> (Hash, DeclFormat.LocallyIndexedComponent, ConstructorMappings) + +-- rehashTermComponent :: ConstructorMappings -> TermFormat.LocallyIndexedComponent -> (Hash, TermFormat.LocallyIndexedComponent) +-- rehashTermComponent = error "todo: rehashTermComponent" + +-- -- getConstructor :: ConstructorMappings -> ObjectId -> Pos -> ConstructorId +-- -- getConstructor cm diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 4e1353a16f..e3a17b4725 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -73,6 +73,7 @@ library Unison.Codebase.SqliteCodebase.Branch.Dependencies Unison.Codebase.SqliteCodebase.Conversions Unison.Codebase.SqliteCodebase.GitError + Unison.Codebase.SqliteCodebase.MigrateSchema12 Unison.Codebase.SqliteCodebase.SyncEphemeral Unison.Codebase.SyncMode Unison.Codebase.TermEdit From 0497f7dc36d0946ba2d51c13883830663e1eb651 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 19 Oct 2021 15:28:43 -0600 Subject: [PATCH 024/297] More pairing notes --- .../U/Codebase/Sqlite/Decl/Format.hs | 52 +++++++++++++- .../U/Codebase/Sqlite/LocalIds.hs | 3 + .../U/Codebase/Sqlite/Term/Format.hs | 72 ++++++++++++++++++- codebase2/codebase-sqlite/sql/create.sql | 12 +++- .../SqliteCodebase/MigrateSchema12.hs | 39 +++++++--- 5 files changed, 162 insertions(+), 16 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs index 633f784b74..e2475b731a 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs @@ -16,10 +16,60 @@ data DeclFormat = Decl LocallyIndexedComponent -- | V1: Decls included `Hash`es inline -- V2: Instead of `Hash`, we use a smaller index. -data LocallyIndexedComponent +newtype LocallyIndexedComponent = LocallyIndexedComponent (Vector (LocalIds, Decl Symbol)) deriving Show +-- [OldDecl] ==map==> [NewDecl] ==number==> [(NewDecl, Int)] ==sort==> [(NewDecl, Int)] ==> permutation is map snd of that + + +-- type List a = Nil | Cons (List a) + +-- unique type Thunk = Thunk (Int ->{MakeThunk} Int) +-- ability MakeThunk where go : (Int -> Int) -> Thunk + +-- What mitchell thinks unhashComponent is doing: +-- +-- Take a recursive type like +-- +-- Fix \myself -> Alternatives [Nil, Cons a myself] +-- +-- And write it with variables in place of recursive mentions like +-- +-- (Var 1, Alternatives [Nil, Cons a (Var 1)] + +-- can derive `original` from Hash + [OldDecl] +-- original :: Map Reference.Id (Decl v a) + +-- named, rewritten_dependencies :: Map (Reference.Id {old}) (v, Decl v a {old pos in references}) +-- named = Decl.unhashComponent original + +-- Mapping from the sky: (Reference.Id -> Reference.Id) + +-- rewritten_dependencies = replace_dependency_pos's skymap named + +-- new_references :: Map v (Reference.Id {new}, DataDeclaration v a) +-- new_references = Unison.Hashing.V2.Convert.hashDecls $ Map.toList $ Foldable.toList rewritten_dependencies +hashDecls :: + Var v => + Map v (Memory.DD.DataDeclaration v a) -> + ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] + +-- compute correspondence between `v`s in `fst <$> named` compared to `fst <$> new_references` to get a Reference.Id -> Reference.Id mapping +-- mitchell tapped out before understanding the following line +-- compute correspondence between constructors names & constructor indices in corresponding decls +-- submit/mappend these two correspondences to sky mapping + +-- Swap the Reference positions according to our map of already computed swaps +-- Hydrate into the parser-typechecker version, get the new hash +-- reserialize it into the sqlite format +-- Compare the old and new sqlite versions to add those ConstructorID/Pos mappings to our context. + +-- unrelated Q: +-- do we kinda have circular dependency issues here? +-- parser-typechecker depends on codebase2, but we are talking about doing things at the parser-typechecker level in this migration +-- answer: no + type Decl v = DeclR TypeRef v type Type v = ABT.Term F v () diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalIds.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalIds.hs index d7e7bf85ea..382f59660c 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalIds.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalIds.hs @@ -24,6 +24,9 @@ type WatchLocalIds = LocalIds' TextId HashId newtype LocalTextId = LocalTextId Word64 deriving (Eq, Ord, Show, Num, Real, Enum, Integral, Bits) via Word64 -- | represents an index into a defnLookup +-- +-- In this context, "definition" means an object that is either a term component or a (type) decl component, not a +-- patch, namespace, or any other kind of object. newtype LocalDefnId = LocalDefnId Word64 deriving (Eq, Ord, Show, Num, Real, Enum, Integral, Bits) via Word64 -- | a local index to a hash, used when the corresponding object is allowed to be absent diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs index b3a2f3e152..c6ce3d203c 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs @@ -6,25 +6,91 @@ import Data.ByteString (ByteString) import Data.Vector (Vector) import U.Codebase.Reference (Reference') import U.Codebase.Referent (Referent') -import U.Codebase.Sqlite.LocalIds - ( LocalIds', LocalTextId, LocalDefnId, WatchLocalIds ) -import U.Codebase.Sqlite.Symbol ( Symbol ) +import U.Codebase.Sqlite.LocalIds (LocalDefnId, LocalIds', LocalTextId, WatchLocalIds) +import U.Codebase.Sqlite.Symbol (Symbol) import qualified U.Codebase.Term as Term import qualified U.Core.ABT as ABT import qualified U.Codebase.Type as Type import qualified U.Codebase.Sqlite.Reference as Sqlite import U.Codebase.Sqlite.DbId (ObjectId, TextId) +-- | +-- * Builtin terms are represented as local text ids. +-- * Non-builtin terms are represented as local definition ids, with an added distinguished element (here @Nothing@) +-- which represents a self-reference. type TermRef = Reference' LocalTextId (Maybe LocalDefnId) + +-- | +-- * Builtin types are represented as a local text id. +-- * Non-builtin types are represented by a local definition id. type TypeRef = Reference' LocalTextId LocalDefnId + type TermLink = Referent' TermRef TypeRef type TypeLink = TypeRef +-- | A 'LocallyIndexedComponent' is a vector that has one element per member of the component (invariant: 1+). +-- +-- Each element is a term, which is represented as: +-- +-- * Lookup vectors that map local ids to database ids for texts and objects referenced by the term. +-- * The term itself, with internal references to local ids (offsets into the lookup vectors). +-- * The term's type, also with internal references to local id. type LocallyIndexedComponent = LocallyIndexedComponent' TextId ObjectId newtype LocallyIndexedComponent' t d = LocallyIndexedComponent (Vector (LocalIds' t d, Term, Type)) deriving Show +{- +message = "hello, world" -> ABT { ... { Term.F.Text "hello, world" } } -> hashes to (#abc, 0) +program = printLine message -> ABT { ... { Term.F.App (ReferenceBuiltin ##io.PrintLine) (Reference #abc 0) } } -> hashes to (#def, 0) + +text table = + id text + -- ------------ + 1 hello, world + 2 message + 3 program + 4 Text + 5 IO + 6 Unit + 7 io.PrintLine + +hash table = + id base32 + -- ------ + 10 abc + 11 def + +hash_object table = + hash_id object_id hash_version + ------- --------- ------------ + 10 20 2 + +object table = + { 20 -> + LocallyIndexedComponent [ + (localIds = LocalIds { + text = [1,4] + defs = [] + }, + term = ABT { ... { Term.F.Text (LocalTextId 0) } }, + type = ABT { ... { Term.FT.Ref (Builtin (LocalTextId 1)) }} + ) + ], + + 21 -> + LocallyIndexedComponent [ + (localIds = LocalIds { + text = [7,5,6] + defs = [20] + }, + term = ABT { ... { Term.F.App (ReferenceBuiltin (LocalTextId 7) (ReferenceId (LocalDefnId 0) 0) } }, + type = ABT { ... { Term.FT.App (Term.FT.Ref (Builtin (LocalTextId 0))) (Term.FT.Ref (Builtin (LocalTextId 1))) } } + ) + ], + } +-} + type F = Term.F' LocalTextId TermRef TypeRef TermLink TypeLink Symbol diff --git a/codebase2/codebase-sqlite/sql/create.sql b/codebase2/codebase-sqlite/sql/create.sql index 75ecd63d3d..ddf2fc8361 100644 --- a/codebase2/codebase-sqlite/sql/create.sql +++ b/codebase2/codebase-sqlite/sql/create.sql @@ -120,20 +120,30 @@ CREATE TABLE causal_metadata ( CREATE INDEX causal_metadata_causal_id ON causal_metadata(causal_id); CREATE TABLE watch_result ( + -- See Note [Watch expression identifier] hash_id INTEGER NOT NULL CONSTRAINT watch_result_fk1 REFERENCES hash(id), component_index INTEGER NOT NULL, - result BLOB NOT NULL, + + result BLOB NOT NULL, -- evaluated result of the watch expression PRIMARY KEY (hash_id, component_index) ) WITHOUT ROWID; + CREATE TABLE watch ( + -- See Note [Watch expression identifier] hash_id INTEGER NOT NULL CONSTRAINT watch_fk1 REFERENCES hash(id), component_index INTEGER NOT NULL, + watch_kind_id INTEGER NOT NULL CONSTRAINT watch_fk2 REFERENCES watch_kind_description(id), PRIMARY KEY (hash_id, component_index, watch_kind_id) ) WITHOUT ROWID; CREATE INDEX watch_kind ON watch(watch_kind_id); +-- Note [Watch expression identifier] +-- The hash_id + component_index is an unevaluated term reference. We use hash_id instead of object_id because the +-- unevaluated term may not exist in the codebase: it is not added merely by watching it without a name, e.g `> 2 + 3`. + + CREATE TABLE watch_kind_description ( id INTEGER PRIMARY KEY NOT NULL, description TEXT UNIQUE NOT NULL diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 4a94cd8dd8..531da5e027 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -17,6 +17,7 @@ import qualified U.Codebase.WatchKind as WK import Unison.Prelude (ByteString, Map, MonadIO) import Unison.Reference (Pos) import Unison.Referent (ConstructorId) +import Data.Set (Set) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -35,12 +36,27 @@ import Unison.Referent (ConstructorId) -- newtype ConstructorMapping = ConstructorMapping (Map ObjectId (Vector (Vector (Pos, ConstructorId)))) -- newtype TermLookup = TermLookup (Map ObjectId (Vector Pos)) +type TypeIdentifier = (ObjectId, Pos) +type Old a = a +type New a = a data MigrationState = MigrationState - { declLookup :: Map ObjectId (Map Pos Pos), - ctorLookup :: Map (ObjectId, Pos) (Map ConstructorId ConstructorId), - termLookup :: Map ObjectId (Map Pos Pos) + -- Mapping between old cycle-position -> new cycle-position for a given Decl object. + { declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), + -- Mapping between contructor indexes for the type identified by (ObjectId, Pos) + ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), + -- This provides the info needed for rewriting a term. You'll access it with a function :: Old + termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), + objLookup :: Map (Old ObjectId) (New ObjectId), + + + -- + componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), + constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), + completed :: Set ObjectId } + -- declLookup :: Map ObjectId (Map Pos (Pos, Map ConstructorId ConstructorId)), + {- * Load entire codebase as a list * Pick a term from the codebase @@ -87,11 +103,11 @@ migrationSync = Sync \case -- * Otherwise, read the object from the database and switch on its object type. -- * See next steps below v -- - -- To sync a term component object, + -- To sync a decl component object, -- * If we have not already synced all dependencies, push syncing them onto the front of the work queue. -- * Otherwise, ??? -- - -- To sync a decl component object, + -- To sync a term component object, -- * If we have not already synced all dependencies, push syncing them onto the front of the work queue. -- * Otherwise, ??? -- @@ -104,16 +120,17 @@ migrationSync = Sync \case -- reference ids used in keys is definitely not preserved by this migration), or by permuting the local id vectors, -- but we may be at a level too low or high for us to care? -- * Its 'LocalBranch' must have all references changed in-place per the (old (object id, pos) => new (object id, pos)) mapping. - -- position of any term/decl within its component has changed. Therefore, we need to adjust each referent's component position - -- * We need to recompute the References contained in the Branch body since the pos within the reference may have changed. - -- We can look up the position changes in our Migration State, they must have been added when computing the objects pointed to by the reference. - -- * The normalized object IDs within the body _likely_ don't need to change. + -- * The local IDs within the body _likely_ don't need to change. (why likely?) -- * Its 'BranchLocalIds' must be translated from the old codebase object IDs to the new object IDs, -- we can use our MigrationState to look these up, since they must have already been migrated. -- * To sync a 'BranchDiff', - -- * ??? + -- * These don't exist in schema v1; we can error if we encounter one. + -- + -- To sync a patch object + -- * Rewrite all old hashes in the patch to the new hashes. -- - -- To sync a patch object, ??? + -- To sync a watch expression + -- * ??? -- -- To sync a Causal --- * If we haven't yet synced its parents, push them onto the work queue From 2a1f537461a2a611718613f5e9a0a99e0d2dc315 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 21 Oct 2021 10:51:45 -0600 Subject: [PATCH 025/297] WIP --- .../Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 531da5e027..a6c17090cd 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -163,7 +163,7 @@ migrationSync = Sync \case migrateObject :: ObjectType -> HashId -> ByteString -> m _ migrateObject objType hash bytes = case objType of OT.TermComponent -> migrateTermComponent hash bytes - OT.DeclComponent -> migrateTermComponent hash bytes + OT.DeclComponent -> migrateDeclComponent hash bytes OT.Namespace -> migrateNamespace hash bytes OT.Patch -> migratePatch hash bytes @@ -176,6 +176,14 @@ migrateNamespace = error "not implemented" migrateTermComponent :: HashId -> ByteString -> m _ migrateTermComponent = error "not implemented" +migrateDeclComponent :: HashId -> ByteString -> m _ +migrateDeclComponent hashId declFormatBytes = do + let DeclFormat locallyIndexedComponent = case runGetS S.getDeclFormat declFormatBytes of + Left err -> error "something went wrong" + Right declFormat -> declFormat + let unhashed = Decl.unhashComponent + + -- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure migrateSchema12 :: Applicative m => Connection -> m Bool migrateSchema12 db = do From 6f080b2eb61985fd8b5b61b87bf21a45da02ba88 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 21 Oct 2021 12:34:52 -0600 Subject: [PATCH 026/297] Pseudocode --- .../SqliteCodebase/MigrateSchema12.hs | 135 ++++++++++++++++-- 1 file changed, 127 insertions(+), 8 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index a6c17090cd..3b3a630ca7 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -18,6 +18,7 @@ import Unison.Prelude (ByteString, Map, MonadIO) import Unison.Reference (Pos) import Unison.Referent (ConstructorId) import Data.Set (Set) +import qualified U.Codebase.Sqlite.Operations as Ops -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -41,9 +42,11 @@ type Old a = a type New a = a data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. - { declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), + { -- declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), + declLookup :: Map (Old Reference.Id) (New Reference.Id), -- Mapping between contructor indexes for the type identified by (ObjectId, Pos) ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), + ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), -- This provides the info needed for rewriting a term. You'll access it with a function :: Old termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), objLookup :: Map (Old ObjectId) (New ObjectId), @@ -53,7 +56,7 @@ data MigrationState = MigrationState componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), completed :: Set ObjectId - } + } deriving Generic -- declLookup :: Map ObjectId (Map Pos (Pos, Map ConstructorId ConstructorId)), @@ -176,12 +179,128 @@ migrateNamespace = error "not implemented" migrateTermComponent :: HashId -> ByteString -> m _ migrateTermComponent = error "not implemented" -migrateDeclComponent :: HashId -> ByteString -> m _ -migrateDeclComponent hashId declFormatBytes = do - let DeclFormat locallyIndexedComponent = case runGetS S.getDeclFormat declFormatBytes of - Left err -> error "something went wrong" - Right declFormat -> declFormat - let unhashed = Decl.unhashComponent +migrateDeclComponent :: EDB m => Codebase m v a -> HashId -> m _ +migrateDeclComponent Codebase{..} hashId = do + hash <- Ops.loadHashByHashId hashId + declComponent :: [Decl v a] <- Codebase.getDeclComponent (Cv.hash2to1 hash) >>= \case + Nothing -> error "handle this" -- not non-fatal! + Just dc -> pure dc + + -- type Decl = Either EffectDeclaration DataDeclaration + let componentIDMap :: Map Reference.Id (Decl v a) + componentIDMap = Map.fromList $ Reference.componentFor hash declComponent + + let unhashed :: Map Reference.Id (v, Decl v a) + unhashed = DD.unhashComponent componentIDMap +-- data DataDeclaration v a = DataDeclaration { +-- modifier :: Modifier, +-- annotation :: a, +-- bound :: [v], +-- constructors' :: [(a, v, Type v a)] +-- } deriving (Eq, Show, Functor) + + let allTypes :: [Type v a] + allTypes = (\(_, _, typ) -> typ) . concatMap (constructors' . snd) . Map.values $ unhashed + let allContainedReferences :: [Reference.Id] + allContainedReferences = foldMap (ABT.find findReferenceIds) (constructors) + + + +-- get references: +-- +-- references :: Term f v a -> [Reference.Id] +-- +-- are all those references keys in our skymap? +-- yes => migrate term +-- no => returh those references (as Entity, though) as more work to do + +-- how to turn Reference.Id into Entity? +-- need its ObjectId, + +-- Term f v a -> ValidateT (Seq Reference.Id) m (Term f v a) +-- +recordRefsInType :: MonadState MigrationState m => Type v a -> WriterT [Reference.Id] m (Type v a) +recordRefsInType = _ + +findReferenceIds :: Type v a -> FindAction Reference.Id +findReferenceIds = Term.out >>> \case + Tm (Ref (Reference.DerivedId r)) -> Found r + x -> Continue + + + + +-- data DataDeclaration v a = DataDeclaration { +-- modifier :: Modifier, +-- annotation :: a, +-- bound :: [v], +-- constructors' :: [(a, v, Type v a)] +-- } deriving (Eq, Show, Functor) + + +-- compute correspondence between `v`s in `fst <$> named` compared to `fst <$> new_references` to get a Reference.Id -> Reference.Id mapping +-- mitchell tapped out before understanding the following line +-- compute correspondence between constructors names & constructor indices in corresponding decls +-- submit/mappend these two correspondences to sky mapping + +-- Swap the Reference positions according to our map of already computed swaps +-- Hydrate into the parser-typechecker version, get the new hash +-- reserialize it into the sqlite format +-- Compare the old and new sqlite versions to add those ConstructorID/Pos mappings to our context. + +-- unrelated Q: +-- do we kinda have circular dependency issues here? +-- parser-typechecker depends on codebase2, but we are talking about doing things at the parser-typechecker level in this migration +-- answer: no + + -- unhashComponent + -- :: forall v a. Var v => Map Reference.Id (Decl v a) -> Map Reference.Id (v, Decl v a) + + let dataDecls = fmap (either toDataDecl id) declComponent + -- DD.unhashComponent + + + -- [OldDecl] ==map==> [NewDecl] ==number==> [(NewDecl, Int)] ==sort==> [(NewDecl, Int)] ==> permutation is map snd of that + + +-- type List a = Nil | Cons (List a) + +-- unique type Thunk = Thunk (Int ->{MakeThunk} Int) +-- ability MakeThunk where go : (Int -> Int) -> Thunk + +-- What mitchell thinks unhashComponent is doing: +-- +-- Take a recursive type like +-- +-- Fix \myself -> Alternatives [Nil, Cons a myself] +-- +-- And write it with variables in place of recursive mentions like +-- +-- (Var 1, Alternatives [Nil, Cons a (Var 1)] + +-- can derive `original` from Hash + [OldDecl] +-- original :: Map Reference.Id (Decl v a) + +-- named, rewritten_dependencies :: Map (Reference.Id {old}) (v, Decl v a {old pos in references}) +-- named = Decl.unhashComponent original + +-- Mapping from the sky: (Reference.Id -> Reference.Id) + +-- rewritten_dependencies = replace_dependency_pos's skymap named + +-- new_references :: Map v (Reference.Id {new}, DataDeclaration v a) +-- new_references = Unison.Hashing.V2.Convert.hashDecls $ Map.toList $ Foldable.toList rewritten_dependencies + + + + + + -- let DeclFormat locallyIndexedComponent = case runGetS S.getDeclFormat declFormatBytes of + -- Left err -> error "something went wrong" + -- Right declFormat -> declFormat + + -- Operations.hs converts from S level to C level + -- SqliteCodebase.hs converts from C level to -- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure From 9b898c6188753527f84e85e57b6ad1437359d2f6 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 21 Oct 2021 13:45:04 -0600 Subject: [PATCH 027/297] Updates --- .../U/Codebase/Sqlite/Decl/Format.hs | 8 +- .../SqliteCodebase/MigrateSchema12.hs | 94 +++++++++++++------ 2 files changed, 67 insertions(+), 35 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs index e2475b731a..24c42c6b67 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Decl/Format.hs @@ -50,10 +50,10 @@ newtype LocallyIndexedComponent -- new_references :: Map v (Reference.Id {new}, DataDeclaration v a) -- new_references = Unison.Hashing.V2.Convert.hashDecls $ Map.toList $ Foldable.toList rewritten_dependencies -hashDecls :: - Var v => - Map v (Memory.DD.DataDeclaration v a) -> - ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] +-- hashDecls :: +-- Var v => +-- Map v (Memory.DD.DataDeclaration v a) -> +-- ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] -- compute correspondence between `v`s in `fst <$> named` compared to `fst <$> new_references` to get a Reference.Id -> Reference.Id mapping -- mitchell tapped out before understanding the following line diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 3b3a630ca7..a0fb7b8abd 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -1,8 +1,11 @@ {-# LANGUAGE PartialTypeSignatures #-} {-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE DeriveGeneric #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where +import Unison.Prelude import Control.Monad.Reader (MonadReader) import Control.Monad.State (MonadState) import U.Codebase.Sqlite.Connection (Connection) @@ -19,6 +22,22 @@ import Unison.Reference (Pos) import Unison.Referent (ConstructorId) import Data.Set (Set) import qualified U.Codebase.Sqlite.Operations as Ops +import qualified Unison.Reference as Reference +import qualified Unison.Referent as Referent +import Unison.Codebase (Codebase (Codebase)) +import qualified Unison.DataDeclaration as DD +import qualified Unison.Codebase as Codebase +import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv +import qualified Data.Map as Map +import Unison.Type (Type) +import qualified Unison.ABT as ABT +import Control.Monad.Trans.Writer.CPS (WriterT) +import qualified Unison.Type as Type +import qualified Data.List as List +import Control.Lens +import Control.Monad.State.Strict +import Control.Monad.Except (runExceptT) +import Control.Monad.Trans.Except (throwE) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -51,7 +70,6 @@ data MigrationState = MigrationState termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), objLookup :: Map (Old ObjectId) (New ObjectId), - -- componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), @@ -163,10 +181,10 @@ migrationSync = Sync \case -- | Namespace -- 2 -- | Patch -- 3 -migrateObject :: ObjectType -> HashId -> ByteString -> m _ -migrateObject objType hash bytes = case objType of +migrateObject :: Codebase m v a -> ObjectType -> HashId -> ByteString -> m _ +migrateObject codebase objType hash bytes = case objType of OT.TermComponent -> migrateTermComponent hash bytes - OT.DeclComponent -> migrateDeclComponent hash bytes + OT.DeclComponent -> migrateDeclComponent codebase hash OT.Namespace -> migrateNamespace hash bytes OT.Patch -> migratePatch hash bytes @@ -179,18 +197,19 @@ migrateNamespace = error "not implemented" migrateTermComponent :: HashId -> ByteString -> m _ migrateTermComponent = error "not implemented" -migrateDeclComponent :: EDB m => Codebase m v a -> HashId -> m _ -migrateDeclComponent Codebase{..} hashId = do - hash <- Ops.loadHashByHashId hashId - declComponent :: [Decl v a] <- Codebase.getDeclComponent (Cv.hash2to1 hash) >>= \case - Nothing -> error "handle this" -- not non-fatal! +migrateDeclComponent :: Ops.EDB m => Codebase m v a -> HashId -> m (Sync.TrySyncResult Reference.Id) +migrateDeclComponent Codebase{..} hashId = fmap (either id id) . runExceptT $ do + hash' <- lift $ Ops.loadHashByHashId hashId + let hash = Cv.hash2to1 hash' + declComponent :: [DD.Decl v a] <- lift (getDeclComponent hash) >>= \case + Nothing -> error "handle this" -- not non-fatal! Just dc -> pure dc - + -- type Decl = Either EffectDeclaration DataDeclaration - let componentIDMap :: Map Reference.Id (Decl v a) + let componentIDMap :: Map Reference.Id (DD.Decl v a) componentIDMap = Map.fromList $ Reference.componentFor hash declComponent - let unhashed :: Map Reference.Id (v, Decl v a) + let unhashed :: Map Reference.Id (v, DD.Decl v a) unhashed = DD.unhashComponent componentIDMap -- data DataDeclaration v a = DataDeclaration { -- modifier :: Modifier, @@ -198,12 +217,26 @@ migrateDeclComponent Codebase{..} hashId = do -- bound :: [v], -- constructors' :: [(a, v, Type v a)] -- } deriving (Eq, Show, Functor) - + let allTypes :: [Type v a] - allTypes = (\(_, _, typ) -> typ) . concatMap (constructors' . snd) . Map.values $ unhashed + allTypes = + unhashed + ^.. traversed + . _2 + . beside coerced id + . to DD.constructors' + . traversed + . _3 let allContainedReferences :: [Reference.Id] - allContainedReferences = foldMap (ABT.find findReferenceIds) (constructors) - + allContainedReferences = foldMap (ABT.find findReferenceIds) allTypes + -- unmigratedIds :: [Reference.Id] + declMap <- gets declLookup + let unmigratedIds :: [Reference.Id] + unmigratedIds = filter (\ref -> not (Map.member ref declMap)) allContainedReferences + when (not . null $ unmigratedIds) $ throwE (Sync.Missing unmigratedIds) + -- At this point we know we have all the required mappings from old references to new ones. + _ + -- get references: @@ -212,20 +245,20 @@ migrateDeclComponent Codebase{..} hashId = do -- -- are all those references keys in our skymap? -- yes => migrate term --- no => returh those references (as Entity, though) as more work to do +-- no => returh those references (as Entity, though) as more work to do -- how to turn Reference.Id into Entity? -- need its ObjectId, -- Term f v a -> ValidateT (Seq Reference.Id) m (Term f v a) --- +-- recordRefsInType :: MonadState MigrationState m => Type v a -> WriterT [Reference.Id] m (Type v a) recordRefsInType = _ -findReferenceIds :: Type v a -> FindAction Reference.Id -findReferenceIds = Term.out >>> \case - Tm (Ref (Reference.DerivedId r)) -> Found r - x -> Continue +findReferenceIds :: Type v a -> ABT.FindAction Reference.Id +findReferenceIds = ABT.out >>> \case + ABT.Tm (Type.Ref (Reference.DerivedId r)) -> ABT.Found r + x -> ABT.Continue @@ -252,14 +285,13 @@ findReferenceIds = Term.out >>> \case -- do we kinda have circular dependency issues here? -- parser-typechecker depends on codebase2, but we are talking about doing things at the parser-typechecker level in this migration -- answer: no - + -- unhashComponent -- :: forall v a. Var v => Map Reference.Id (Decl v a) -> Map Reference.Id (v, Decl v a) - let dataDecls = fmap (either toDataDecl id) declComponent -- DD.unhashComponent - - + + -- [OldDecl] ==map==> [NewDecl] ==number==> [(NewDecl, Int)] ==sort==> [(NewDecl, Int)] ==> permutation is map snd of that @@ -276,7 +308,7 @@ findReferenceIds = Term.out >>> \case -- -- And write it with variables in place of recursive mentions like -- --- (Var 1, Alternatives [Nil, Cons a (Var 1)] +-- (Var 1, Alternatives [Nil, Cons a (Var 1)] -- can derive `original` from Hash + [OldDecl] -- original :: Map Reference.Id (Decl v a) @@ -291,8 +323,8 @@ findReferenceIds = Term.out >>> \case -- new_references :: Map v (Reference.Id {new}, DataDeclaration v a) -- new_references = Unison.Hashing.V2.Convert.hashDecls $ Map.toList $ Foldable.toList rewritten_dependencies - - + + -- let DeclFormat locallyIndexedComponent = case runGetS S.getDeclFormat declFormatBytes of @@ -300,8 +332,8 @@ findReferenceIds = Term.out >>> \case -- Right declFormat -> declFormat -- Operations.hs converts from S level to C level - -- SqliteCodebase.hs converts from C level to - + -- SqliteCodebase.hs converts from C level to + -- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure migrateSchema12 :: Applicative m => Connection -> m Bool From 5f495f0147feee0161a27b3280cbc77e7ffba8a7 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 21 Oct 2021 14:10:43 -0600 Subject: [PATCH 028/297] Implement migrateDeclComponent --- parser-typechecker/package.yaml | 1 + .../SqliteCodebase/MigrateSchema12.hs | 39 +++++++++++++++---- .../unison-parser-typechecker.cabal | 1 + unison-core/src/Unison/DataDeclaration.hs | 14 ++++++- 4 files changed, 47 insertions(+), 8 deletions(-) diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index 0f5eb36a95..c2fedc51a9 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -126,6 +126,7 @@ library: - unison-util-relation - open-browser - uri-encode + - generic-lens executables: prettyprintdemo: diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index a0fb7b8abd..be7f49e505 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -2,6 +2,7 @@ {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DataKinds #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where @@ -36,8 +37,11 @@ import qualified Unison.Type as Type import qualified Data.List as List import Control.Lens import Control.Monad.State.Strict -import Control.Monad.Except (runExceptT) +import Control.Monad.Except (runExceptT, ExceptT) import Control.Monad.Trans.Except (throwE) +import Data.Either.Extra (maybeToEither) +import Data.Generics.Product +import Data.Generics.Sum -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -169,11 +173,11 @@ migrationSync = Sync \case -- To sync a causal, -- 1. ??? -- 2. Synced - C causalHashID -> _ + C causalHashID -> undefined -- To sync a watch result, -- 1. ??? -- 2. Synced - W watchKind idH -> _ + W watchKind idH -> undefined -- data ObjectType -- = TermComponent -- 0 @@ -197,7 +201,7 @@ migrateNamespace = error "not implemented" migrateTermComponent :: HashId -> ByteString -> m _ migrateTermComponent = error "not implemented" -migrateDeclComponent :: Ops.EDB m => Codebase m v a -> HashId -> m (Sync.TrySyncResult Reference.Id) +migrateDeclComponent :: forall m v a. Ops.EDB m => Codebase m v a -> HashId -> m (Sync.TrySyncResult Reference.Id) migrateDeclComponent Codebase{..} hashId = fmap (either id id) . runExceptT $ do hash' <- lift $ Ops.loadHashByHashId hashId let hash = Cv.hash2to1 hash' @@ -227,6 +231,7 @@ migrateDeclComponent Codebase{..} hashId = fmap (either id id) . runExceptT $ do . to DD.constructors' . traversed . _3 + let allContainedReferences :: [Reference.Id] allContainedReferences = foldMap (ABT.find findReferenceIds) allTypes -- unmigratedIds :: [Reference.Id] @@ -235,7 +240,27 @@ migrateDeclComponent Codebase{..} hashId = fmap (either id id) . runExceptT $ do unmigratedIds = filter (\ref -> not (Map.member ref declMap)) allContainedReferences when (not . null $ unmigratedIds) $ throwE (Sync.Missing unmigratedIds) -- At this point we know we have all the required mappings from old references to new ones. - _ + let remapTerm :: Type v a -> ExceptT (Sync.TrySyncResult Reference.Id) m (Type v a) + remapTerm typ = either throwE pure $ ABT.visit' (remapReferences declMap) typ + + result :: [DD.Decl v a] <- declComponent + & traversed + . beside DD.asDataDecl_ id + . DD.constructors_ + . traversed + . _3 + %%~ remapTerm + -- putTypeDeclaration + pure Sync.Done + + + +remapReferences :: Map (Old Reference.Id) (New Reference.Id) + -> Type.F (Type v a) + -> Either (Sync.TrySyncResult Reference.Id) (Type.F (Type v a)) +remapReferences declMap = \case + (Type.Ref (Reference.DerivedId refId)) -> (Type.Ref . Reference.DerivedId) <$> maybeToEither (Sync.Missing [refId]) (Map.lookup refId declMap) + x -> pure x @@ -252,8 +277,8 @@ migrateDeclComponent Codebase{..} hashId = fmap (either id id) . runExceptT $ do -- Term f v a -> ValidateT (Seq Reference.Id) m (Term f v a) -- -recordRefsInType :: MonadState MigrationState m => Type v a -> WriterT [Reference.Id] m (Type v a) -recordRefsInType = _ +-- recordRefsInType :: MonadState MigrationState m => Type v a -> WriterT [Reference.Id] m (Type v a) +-- recordRefsInType = _ findReferenceIds :: Type v a -> ABT.FindAction Reference.Id findReferenceIds = ABT.out >>> \case diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 4d04da4b3c..54ec16fce5 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -243,6 +243,7 @@ library , fingertree , fsnotify , fuzzyfind + , generic-lens , generic-monoid , hashable , hashtables diff --git a/unison-core/src/Unison/DataDeclaration.hs b/unison-core/src/Unison/DataDeclaration.hs index 0899fc4587..ff414f02fa 100644 --- a/unison-core/src/Unison/DataDeclaration.hs +++ b/unison-core/src/Unison/DataDeclaration.hs @@ -32,12 +32,15 @@ module Unison.DataDeclaration withEffectDeclM, amap, updateDependencies, + + constructors_, + asDataDecl_, ) where import Unison.Prelude -import Control.Lens (over, _3) +import Control.Lens (over, _3, Iso', iso, Lens', lens) import Control.Monad.State (evalState) import Data.Bifunctor (bimap, first, second) import qualified Data.Map as Map @@ -89,10 +92,19 @@ data DataDeclaration v a = DataDeclaration { constructors' :: [(a, v, Type v a)] } deriving (Eq, Show, Functor) +constructors_ :: Lens' (DataDeclaration v a) [(a, v, Type v a)] +constructors_ = lens getter setter + where + getter = constructors' + setter dd ctors = dd{constructors'=ctors} + newtype EffectDeclaration v a = EffectDeclaration { toDataDecl :: DataDeclaration v a } deriving (Eq,Show,Functor) +asDataDecl_ :: Iso' (EffectDeclaration v a) (DataDeclaration v a) +asDataDecl_ = iso toDataDecl EffectDeclaration + withEffectDeclM :: Functor f => (DataDeclaration v a -> f (DataDeclaration v' a')) -> EffectDeclaration v a From 6c07b36b926c47b0748fb00b4301ae5df54bcb75 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 21 Oct 2021 18:26:32 -0400 Subject: [PATCH 029/297] starting to flesh out DeclComponent case --- .../SqliteCodebase/MigrateSchema12.hs | 103 ++++++++++++++---- 1 file changed, 81 insertions(+), 22 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index be7f49e505..9e0cfe6de6 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -42,6 +42,8 @@ import Control.Monad.Trans.Except (throwE) import Data.Either.Extra (maybeToEither) import Data.Generics.Product import Data.Generics.Sum +import qualified Unison.Hash as Unison +import qualified Unison.Hashing.V2.Convert as Convert -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -110,12 +112,22 @@ data X = MkX Y data Y = MkY Int -} -data Entity - = O ObjectId +data Entity' + = TComponent Unison.Hash + | DComponent Unison.Hash + | Patch ObjectId + | NS ObjectId | C CausalHashId - | W WK.WatchKind S.Reference.IdH + | W WK.WatchKind S.Reference.IdH -- Hash Reference.Id deriving (Eq, Ord, Show) + +-- data Entity +-- = O ObjectId -- Hash +-- | C CausalHashId +-- | W WK.WatchKind S.Reference.IdH -- Hash Reference.Id +-- deriving (Eq, Ord, Show) + data Env = Env {db :: Connection} -- -> m (TrySyncResult h) @@ -201,10 +213,8 @@ migrateNamespace = error "not implemented" migrateTermComponent :: HashId -> ByteString -> m _ migrateTermComponent = error "not implemented" -migrateDeclComponent :: forall m v a. Ops.EDB m => Codebase m v a -> HashId -> m (Sync.TrySyncResult Reference.Id) -migrateDeclComponent Codebase{..} hashId = fmap (either id id) . runExceptT $ do - hash' <- lift $ Ops.loadHashByHashId hashId - let hash = Cv.hash2to1 hash' +migrateDeclComponent :: forall m v a. Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity') +migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do declComponent :: [DD.Decl v a] <- lift (getDeclComponent hash) >>= \case Nothing -> error "handle this" -- not non-fatal! Just dc -> pure dc @@ -227,7 +237,7 @@ migrateDeclComponent Codebase{..} hashId = fmap (either id id) . runExceptT $ do unhashed ^.. traversed . _2 - . beside coerced id + . beside asDataDecl_ id . to DD.constructors' . traversed . _3 @@ -238,28 +248,77 @@ migrateDeclComponent Codebase{..} hashId = fmap (either id id) . runExceptT $ do declMap <- gets declLookup let unmigratedIds :: [Reference.Id] unmigratedIds = filter (\ref -> not (Map.member ref declMap)) allContainedReferences - when (not . null $ unmigratedIds) $ throwE (Sync.Missing unmigratedIds) + when (not . null $ unmigratedIds) do + let unmigratedHashes :: [Unison.Hash] + unmigratedHashes = + nubOrd (map Reference.idToHash unmigratedIds) + throwE (Sync.Missing (map DComponent unmigratedHashes)) + -- At this point we know we have all the required mappings from old references to new ones. - let remapTerm :: Type v a -> ExceptT (Sync.TrySyncResult Reference.Id) m (Type v a) - remapTerm typ = either throwE pure $ ABT.visit' (remapReferences declMap) typ - - result :: [DD.Decl v a] <- declComponent - & traversed - . beside DD.asDataDecl_ id - . DD.constructors_ - . traversed - . _3 - %%~ remapTerm - -- putTypeDeclaration + let remapTerm :: Type v a -> Type v a + remapTerm typ = runIdentity $ ABT.visit' (remapReferences declMap) typ + + let remappedReferences :: Map (Old Reference.Id) (v, DD.Decl v a) + remappedReferences = unhashed + & traversed -- Traverse map of reference IDs + . _2 -- Select the DataDeclaration + . beside DD.asDataDecl_ id -- Unpack effect decls + . DD.constructors_ -- Get the data constructors + . traversed -- traverse the list of them + . _3 -- Select the Type term. + %~ remapTerm + let vToOldReference :: Map v (Old Reference.Id) + vToOldReference = Map.fromList . fmap swap . Map.toList . fmap fst $ remappedReferences + + -- hashDecls :: + -- Var v => + -- Map v (Memory.DD.DataDeclaration v a) -> + -- ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] + + let newComponent :: ([(v, Reference.Id, DD.DataDeclaration v a)]) + newComponent = Convert.hashDecls (Map.fromList $ Map.elems remappedReferences) + for newComponent $ \(v, newReferenceId, dd) -> do + field @"declLookup" %= Map.insert (vToReference Map.! v) newReferenceId + putTypeDeclaration newReference (_ d) pure Sync.Done +structural type Ping x = P1 (Pong x) + P1 : forall x. Pong x -> Ping x + +structural type Pong x = P2 (Ping x) | P3 Nat + P2 : forall x. Ping x -> Pong x + P3 : forall x. Nat -> Pong x + + + + +end up with +decl Ping (Ref.Id #abc pos=0) +decl Pong (Ref.Id #abc pos=1) +ctor P1: #abc pos=0 cid=0 +ctor P2: #abc pos=1 cid=0 +ctor P3: #abc pos=1 cid=1 + +we unhashComponent and get: +{ X -> structural type X x = AAA (Y x) +, Y -> structural type Y x = BBB (X x) | CCC Nat } + + + + + + + remapReferences :: Map (Old Reference.Id) (New Reference.Id) -> Type.F (Type v a) - -> Either (Sync.TrySyncResult Reference.Id) (Type.F (Type v a)) + -> Identity Type.F (Type v a) remapReferences declMap = \case - (Type.Ref (Reference.DerivedId refId)) -> (Type.Ref . Reference.DerivedId) <$> maybeToEither (Sync.Missing [refId]) (Map.lookup refId declMap) + (Type.Ref (Reference.DerivedId refId)) -> + fromMaybe + (error $ "Expected reference to exist in decl mapping, but it wasn't found: " <> show refId) + (Sync.Missing [DComponent Reference.idToHash refId]) (Map.lookup refId declMap) x -> pure x From e013511dd86b0462fd9937f5c394aebae27892c9 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 22 Oct 2021 12:52:10 -0600 Subject: [PATCH 030/297] More transformations --- .../SqliteCodebase/MigrateSchema12.hs | 113 ++++++++++++++---- unison-core/src/Unison/ABT.hs | 51 ++++++-- 2 files changed, 131 insertions(+), 33 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 9e0cfe6de6..c761ded407 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -44,6 +44,7 @@ import Data.Generics.Product import Data.Generics.Sum import qualified Unison.Hash as Unison import qualified Unison.Hashing.V2.Convert as Convert +import Unison.Hashing.V1.Term (unhashComponent) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -112,7 +113,7 @@ data X = MkX Y data Y = MkY Int -} -data Entity' +data Entity = TComponent Unison.Hash | DComponent Unison.Hash | Patch ObjectId @@ -210,10 +211,30 @@ migratePatch = error "not implemented" migrateNamespace :: HashId -> ByteString -> m _ migrateNamespace = error "not implemented" -migrateTermComponent :: HashId -> ByteString -> m _ -migrateTermComponent = error "not implemented" +migrateTermComponent :: forall m v a. Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity) +migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do + -- getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), + component <- getTermComponentWithTypes hash >>= \case + Nothing -> error $ "Hash was missing from codebase: " <> show hash + Just component -> pure component + + let componentIDMap :: Map Reference.Id (Term v a, Type v a) + componentIDMap = Map.fromList $ Reference.componentFor hash component + let unhashed :: Map Reference.Id (v, Term v a) + unhashed = unhashComponent (fst <$> componentIDMap) + let allContainedReferences :: [Reference.Id] + allContainedReferences = foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) + when (not . null $ allContainedReferences) $ throwE $ Sync.Missing allContainedReferences + + -- unhashComponent :: forall v a. Var v + -- => Map Reference.Id (Term v a) + -- -> Map Reference.Id (v, Term v a) + + undefined + + -migrateDeclComponent :: forall m v a. Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity') +migrateDeclComponent :: forall m v a. Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity) migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do declComponent :: [DD.Decl v a] <- lift (getDeclComponent hash) >>= \case Nothing -> error "handle this" -- not non-fatal! @@ -275,34 +296,34 @@ migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do -- Map v (Memory.DD.DataDeclaration v a) -> -- ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] - let newComponent :: ([(v, Reference.Id, DD.DataDeclaration v a)]) + let newComponent :: [(v, Reference.Id, DD.DataDeclaration v a)] newComponent = Convert.hashDecls (Map.fromList $ Map.elems remappedReferences) - for newComponent $ \(v, newReferenceId, dd) -> do - field @"declLookup" %= Map.insert (vToReference Map.! v) newReferenceId + for_ newComponent $ \(v, newReferenceId, dd) -> do + field @"declLookup" %= Map.insert (vToOldReference Map.! v) newReferenceId putTypeDeclaration newReference (_ d) pure Sync.Done -structural type Ping x = P1 (Pong x) - P1 : forall x. Pong x -> Ping x +-- structural type Ping x = P1 (Pong x) +-- P1 : forall x. Pong x -> Ping x -structural type Pong x = P2 (Ping x) | P3 Nat - P2 : forall x. Ping x -> Pong x - P3 : forall x. Nat -> Pong x +-- structural type Pong x = P2 (Ping x) | P3 Nat +-- P2 : forall x. Ping x -> Pong x +-- P3 : forall x. Nat -> Pong x -end up with -decl Ping (Ref.Id #abc pos=0) -decl Pong (Ref.Id #abc pos=1) -ctor P1: #abc pos=0 cid=0 -ctor P2: #abc pos=1 cid=0 -ctor P3: #abc pos=1 cid=1 - -we unhashComponent and get: -{ X -> structural type X x = AAA (Y x) -, Y -> structural type Y x = BBB (X x) | CCC Nat } +-- end up with +-- decl Ping (Ref.Id #abc pos=0) +-- decl Pong (Ref.Id #abc pos=1) +-- ctor P1: #abc pos=0 cid=0 +-- ctor P2: #abc pos=1 cid=0 +-- ctor P3: #abc pos=1 cid=1 +-- +-- we unhashComponent and get: +-- { X -> structural type X x = AAA (Y x) +-- , Y -> structural type Y x = BBB (X x) | CCC Nat } @@ -339,13 +360,55 @@ remapReferences declMap = \case -- recordRefsInType :: MonadState MigrationState m => Type v a -> WriterT [Reference.Id] m (Type v a) -- recordRefsInType = _ -findReferenceIds :: Type v a -> ABT.FindAction Reference.Id -findReferenceIds = ABT.out >>> \case +findTypeReferenceIds :: Type v a -> ABT.FindAction Reference.Id +findTypeReferenceIds = ABT.out >>> \case ABT.Tm (Type.Ref (Reference.DerivedId r)) -> ABT.Found r x -> ABT.Continue +findTermReferenceIds :: Term f v a -> [Reference.Id] +findTermReferenceIds t = flip ABT.find (ABT.out t) \case + ABT.Tm f -> ABT.Found (findReferencesInTermF f) + x -> ABT.Continue - +findReferencesInTermF :: Unison.Term.F typeVar typeAnn patternAnn a -> [Reference.Id] +findReferencesInTermF = ABT.find \case + Ref (Reference.DerivedId refId) -> ABT.Found [refId] + Constructor (Reference.DerivedId refId) _conID -> ABT.Found [refId] + Request (Reference.DerivedId refId) _conID -> ABT.Found [refId] + Ann _ typ -> ABT.Found (ABT.find findTypeReferenceIds typ) + TermLink referent -> case referent of + Ref' (Reference.DerivedId refId) -> ABT.Found [refId] + -- Double check that ConType isn't part of the hash, remove it if it is. + Con' (Reference.DerivedId refId) _conID _conType -> ABT.Found [refId] + TypeLink (Reference.DerivedId refId) -> ABT.Found [refId] + Match _ matchCases -> foldMap (\MatchCase pat _ -> findReferencesInPattern pat) matchCases + _ -> ABT.Continue + +findReferencesInPattern :: Traversal' () -> Pattern loc -> [Reference.Id] +findReferencesInPattern + = \case + Unbound{} -> [] + Var{} -> [] + Boolean{} -> [] + Int{} -> [] + Nat{} -> [] + Float{} -> [] + Text{} -> [] + Char{} -> [] + Constructor _loc ref _constructorId ps -> + let rs = case ref of + DerivedId refId -> [refId] + Builtin{} -> [] + in rs <> foldMap findReferencesInPattern ps + As _loc p -> findReferencesInPattern p + EffectPure _loc p -> findReferencesInPattern p + EffectBind _loc ref _constructorId ps p + -> let rs = case ref of + DerivedId refId -> [refId] + Builtin{} -> [] + in rs <> foldMap findReferencesInPattern (p:ps) + SequenceLiteral _loc ps -> foldMap findReferencesInPattern ps + SequenceOp _loc p _seqOp p' -> foldMap findReferencesInPattern [p, p'] -- data DataDeclaration v a = DataDeclaration { -- modifier :: Modifier, diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index 00ad0613fa..869d793d01 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -1,17 +1,20 @@ -- Based on: http://semantic-domain.blogspot.com/2015/03/abstract-binding-trees.html {-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE UndecidableInstances #-} module Unison.ABT where import Unison.Prelude import Prelude hiding (abs, cycle) -import Control.Lens (Lens', use, (.=)) +import Control.Lens (Lens', use, (.=), Fold, folding, LensLike', Setter') import Control.Monad.State (MonadState, evalState) import qualified Data.Foldable as Foldable import Data.Functor.Identity (runIdentity) @@ -25,12 +28,17 @@ data ABT f v r = Var v | Cycle r | Abs v r - | Tm (f r) deriving (Functor, Foldable, Traversable) + | Tm (f r) + deriving (Functor, Foldable, Traversable) + -- deriving Data + -- | At each level in the tree, we store the set of free variables and -- a value of type `a`. Variables are of type `v`. data Term f v a = Term { freeVars :: Set v, annotation :: a, out :: ABT f v (Term f v a) } +-- deriving instance (Data a, Data v, Typeable f, Data (f (Term f v a)), Ord v) => Data (Term f v a) + -- | A class for variables. -- -- * `Set.notMember (freshIn vs v) vs`: @@ -454,6 +462,9 @@ foreachSubterm f e = case out e of subterms :: (Ord v, Traversable f) => Term f v a -> [Term f v a] subterms t = runIdentity $ foreachSubterm pure t +subterms_ :: (Ord v, Traversable f) => Fold (Term f v a) (Term f v a) +subterms_ = folding subterms + -- | `visit f t` applies an effectful function to each subtree of -- `t` and sequences the results. When `f` returns `Nothing`, `visit` -- descends into the children of the current subtree. When `f` returns @@ -471,8 +482,22 @@ visit f t = flip fromMaybe (f t) $ case out t of Abs x e -> abs' (annotation t) x <$> visit f e Tm body -> tm' (annotation t) <$> traverse (visit f) body +subTermsSetter_ :: (Traversable f, Ord v) => Setter' (Term f v a) (Term f v a) +subTermsSetter_ f tm = visit (Just . f) tm + +visitT :: (Traversable f, Monad m, Ord v) + => (Term f v a) -> m (Term f v a) + -> Term f v a + -> m (Term f v a) +visitT f tm = f tm >>= \(Term ) + Var _ -> pure t + Cycle body -> cycle' (annotation t) <$> visit' f body + Abs x e -> abs' (annotation t) x <$> visit' f e + Tm body -> f body >>= (fmap (tm' (annotation t)) . traverse (visit' f)) + + -- | Apply an effectful function to an ABT tree top down, sequencing the results. -visit' :: (Traversable f, Applicative g, Monad g, Ord v) +visit' :: (Traversable f, Monad g, Ord v) => (f (Term f v a) -> g (f (Term f v a))) -> Term f v a -> g (Term f v a) @@ -491,11 +516,21 @@ rewriteDown :: (Traversable f, Ord v) => (Term f v a -> Term f v a) -> Term f v a -> Term f v a -rewriteDown f t = let t' = f t in case out t' of - Var _ -> t' - Cycle body -> cycle' (annotation t) (rewriteDown f body) - Abs x e -> abs' (annotation t) x (rewriteDown f e) - Tm body -> tm' (annotation t) (rewriteDown f `fmap` body) +rewriteDown f tm = runIdentity $ rewriteDown (Identity . f) tm + + +-- | Setter' (Term f v a) (Term f v a) +rewriteDown_ :: (Traversable f, Monad m, Ord v) + => (Term f v a -> m (Term f v a)) + -> Term f v a + -> m (Term f v a) +rewriteDown_ f t = do + t' <- f t + case out t' of + Var _ -> t' + Cycle body -> cycle' (annotation t') <$> rewriteDown' f body + Abs x e -> abs' (annotation t') x <$> rewriteDown' f e + Tm body -> tm' (annotation t') <$> (rewriteDown' f `fmap` body) data Subst f v a = Subst { freshen :: forall m v' . Monad m => (v -> m v') -> m v' From 0640f62152fed88f1a421ecb5cfcdd1d4d211052 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 22 Oct 2021 14:17:10 -0600 Subject: [PATCH 031/297] Updates from pairing (not compiling) --- .../SqliteCodebase/MigrateSchema12.hs | 43 ++++++++++++++++--- 1 file changed, 36 insertions(+), 7 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index c761ded407..0b0149e704 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -69,7 +69,7 @@ type New a = a data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. { -- declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), - declLookup :: Map (Old Reference.Id) (New Reference.Id), + referenceMapping :: Map (Old Reference.Id) (New Reference.Id), -- Mapping between contructor indexes for the type identified by (ObjectId, Pos) ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), @@ -131,6 +131,7 @@ data Entity data Env = Env {db :: Connection} + -- -> m (TrySyncResult h) migrationSync :: (MonadIO m, MonadState MigrationState m, MonadReader Env m) => @@ -218,18 +219,46 @@ migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do Nothing -> error $ "Hash was missing from codebase: " <> show hash Just component -> pure component - let componentIDMap :: Map Reference.Id (Term v a, Type v a) + let componentIDMap :: Map (Old Reference.Id) (Term v a, Type v a) componentIDMap = Map.fromList $ Reference.componentFor hash component - let unhashed :: Map Reference.Id (v, Term v a) + let unhashed :: Map (Old Reference.Id) (v, Term v a) unhashed = unhashComponent (fst <$> componentIDMap) - let allContainedReferences :: [Reference.Id] + let vToOldReferenceMapping :: Map v (Old Reference.Id) + vToOldReferenceMapping = unhashed + & Map.toList + & fmap (\(refId, (v, _trm)) -> (v, refId)) + & Map.fromList + let allContainedReferences :: [Old Reference.Id] allContainedReferences = foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) + -- performance nit: we should probably only `Missing` the unbuilt ones when (not . null $ allContainedReferences) $ throwE $ Sync.Missing allContainedReferences - -- unhashComponent :: forall v a. Var v - -- => Map Reference.Id (Term v a) - -- -> Map Reference.Id (v, Term v a) + let remappedReferences :: Map (Old Reference.Id) (v, Term v a, Type v a) + = Map.zipWith (\(v, trm) (_, typ) -> (v, trm, typ)) unhashed componentIDMap + & undefined -- do the remapping + let newTermComponents :: Map v (New Reference.Id, Term v a) + newTermComponents = + remappedReferences + & Map.elems + & fmap (\(v, trm, _typ) -> (v, trm)) + & Map.fromList + & Convert.hashTermComponents + ifor newTermComponents $ \v (newReferenceId, trm) -> do + let oldReferenceId = vToOldReference Map.! v + let (_, _, typ) = remappedReferences Map.! oldReferenceId + field @"referenceMap" %= Map.insert oldReferenceId newReferenceId + putTermDeclaration newReferenceId typ trm + -- what's this for? + -- let newTypeComponents :: Map v (Reference.Id, Type v a) + -- newTypeComponents = (Map.fromList $ Map.elems remappedReferences) + +-- on hold: incorporating term's type in term's hash + +-- hashTypeComponents :: Var v => Map v (Memory.Type.Type v a) -> Map v (Memory.Reference.Id, Memory.Type.Type v a) +-- hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a) +-- +-- hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> (Hash, [Memory.Term.Term v a]) undefined From 3946e25e8cf8fdf369647baacc0fdb796f12e959 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 24 Oct 2021 22:17:48 -0600 Subject: [PATCH 032/297] Reference Traversals --- parser-typechecker/package.yaml | 2 + .../SqliteCodebase/MigrateSchema12.hs | 237 ++++++++---------- .../Unison/Test/Codebase/MigrateSchema12.hs | 12 + .../unison-parser-typechecker.cabal | 2 + unison-core/package.yaml | 2 + unison-core/src/Unison/ABT.hs | 59 +++-- unison-core/src/Unison/Blank.hs | 5 +- unison-core/src/Unison/Reference.hs | 12 +- unison-core/src/Unison/Referent'.hs | 6 +- unison-core/src/Unison/Term.hs | 24 ++ unison-core/src/Unison/Type.hs | 6 + unison-core/unison-core1.cabal | 4 +- 12 files changed, 209 insertions(+), 162 deletions(-) create mode 100644 parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index c2fedc51a9..1c08275147 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -59,6 +59,7 @@ library: - fingertree - fsnotify - generic-monoid + - generic-lens - hashable - hashtables - haskeline @@ -88,6 +89,7 @@ library: - regex-tdfa - safe - safe-exceptions + - semialign - mwc-random - NanoID - servant diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 0b0149e704..adee0a658f 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -3,48 +3,43 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DataKinds #-} +{-# LANGUAGE RankNTypes #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where import Unison.Prelude import Control.Monad.Reader (MonadReader) -import Control.Monad.State (MonadState) import U.Codebase.Sqlite.Connection (Connection) -import U.Codebase.Sqlite.DbId (CausalHashId, HashId, ObjectId) +import U.Codebase.Sqlite.DbId (CausalHashId, ObjectId) import U.Codebase.Sqlite.ObjectType (ObjectType) import qualified U.Codebase.Sqlite.ObjectType as OT -import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Reference as S.Reference import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync import qualified U.Codebase.WatchKind as WK -import Unison.Prelude (ByteString, Map, MonadIO) import Unison.Reference (Pos) import Unison.Referent (ConstructorId) -import Data.Set (Set) -import qualified U.Codebase.Sqlite.Operations as Ops import qualified Unison.Reference as Reference import qualified Unison.Referent as Referent import Unison.Codebase (Codebase (Codebase)) import qualified Unison.DataDeclaration as DD import qualified Unison.Codebase as Codebase -import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv import qualified Data.Map as Map import Unison.Type (Type) import qualified Unison.ABT as ABT -import Control.Monad.Trans.Writer.CPS (WriterT) import qualified Unison.Type as Type -import qualified Data.List as List -import Control.Lens import Control.Monad.State.Strict -import Control.Monad.Except (runExceptT, ExceptT) +import Control.Monad.Except (runExceptT) import Control.Monad.Trans.Except (throwE) -import Data.Either.Extra (maybeToEither) import Data.Generics.Product -import Data.Generics.Sum import qualified Unison.Hash as Unison import qualified Unison.Hashing.V2.Convert as Convert -import Unison.Hashing.V1.Term (unhashComponent) +import Control.Lens +import qualified Data.Zip as Zip +import qualified Unison.Term as Term +import Data.List.Extra (nubOrd) +import Unison.Hash (Hash) +import Data.Tuple (swap) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -175,14 +170,16 @@ migrationSync = Sync \case --- * If we haven't yet synced its parents, push them onto the work queue --- * If we haven't yet synced the causal's value (namespace), push it onto the work queue. --- * Rehash the Causal's valueHash AND CausalHash, and add the new causal, its hashes, and hash objects to the codebase under a fresh object ID - O objId -> do - let alreadySynced :: m Bool - alreadySynced = undefined - alreadySynced >>= \case - False -> do - (hId, objType, bytes) <- runSrc $ Q.loadObjectWithHashIdAndTypeById oId - migrateObject objType hId bytes - True -> pure Sync.PreviouslyDone + TComponent _hash -> undefined + DComponent _hash -> undefined + -- O objId -> do + -- let alreadySynced :: m Bool + -- alreadySynced = undefined + -- alreadySynced >>= \case + -- False -> do + -- (hId, objType, bytes) <- runSrc $ Q.loadObjectWithHashIdAndTypeById oId + -- migrateObject objType hId bytes + -- True -> pure Sync.PreviouslyDone -- result <- runValidateT @(Set Entity) @m @ObjectId case objType of -- To sync a causal, -- 1. ??? @@ -193,62 +190,68 @@ migrationSync = Sync \case -- 2. Synced W watchKind idH -> undefined + Patch{} -> undefined + NS{} -> undefined + -- data ObjectType -- = TermComponent -- 0 -- | DeclComponent -- 1 -- | Namespace -- 2 -- | Patch -- 3 -migrateObject :: Codebase m v a -> ObjectType -> HashId -> ByteString -> m _ +migrateObject :: Codebase m v a -> ObjectType -> Hash -> ByteString -> m _ migrateObject codebase objType hash bytes = case objType of - OT.TermComponent -> migrateTermComponent hash bytes + OT.TermComponent -> migrateTermComponent codebase hash OT.DeclComponent -> migrateDeclComponent codebase hash OT.Namespace -> migrateNamespace hash bytes OT.Patch -> migratePatch hash bytes -migratePatch :: HashId -> ByteString -> m _ +migratePatch :: Hash -> ByteString -> m _ migratePatch = error "not implemented" -migrateNamespace :: HashId -> ByteString -> m _ +migrateNamespace :: Hash -> ByteString -> m _ migrateNamespace = error "not implemented" -migrateTermComponent :: forall m v a. Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity) +migrateTermComponent :: forall m v a. Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity) migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do -- getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), - component <- getTermComponentWithTypes hash >>= \case + component <- lift (getTermComponentWithTypes hash) >>= \case Nothing -> error $ "Hash was missing from codebase: " <> show hash Just component -> pure component - let componentIDMap :: Map (Old Reference.Id) (Term v a, Type v a) + let componentIDMap :: Map (Old Reference.Id) (Term.Term v a, Type v a) componentIDMap = Map.fromList $ Reference.componentFor hash component - let unhashed :: Map (Old Reference.Id) (v, Term v a) - unhashed = unhashComponent (fst <$> componentIDMap) + let unhashed :: Map (Old Reference.Id) (v, Term.Term v a) + unhashed = Term.unhashComponent (fst <$> componentIDMap) let vToOldReferenceMapping :: Map v (Old Reference.Id) vToOldReferenceMapping = unhashed & Map.toList & fmap (\(refId, (v, _trm)) -> (v, refId)) & Map.fromList - let allContainedReferences :: [Old Reference.Id] - allContainedReferences = foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) - -- performance nit: we should probably only `Missing` the unbuilt ones - when (not . null $ allContainedReferences) $ throwE $ Sync.Missing allContainedReferences - - let remappedReferences :: Map (Old Reference.Id) (v, Term v a, Type v a) - = Map.zipWith (\(v, trm) (_, typ) -> (v, trm, typ)) unhashed componentIDMap + referencesMap <- gets referenceMapping + let allMissingReferences :: [Old Reference.Id] + allMissingReferences = unhashed ^.. traversed . types @Reference.Id . filtered (\r -> Map.notMember r referencesMap) + -- foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) + when (not . null $ allMissingReferences) + $ throwE $ Sync.Missing . nubOrd $ (TComponent . Reference.idToHash <$> allMissingReferences) + + let remappedReferences :: Map (Old Reference.Id) (v, Term.Term v a, Type v a) + = Zip.zipWith (\(v, trm) (_, typ) -> (v, trm, typ)) unhashed componentIDMap & undefined -- do the remapping - let newTermComponents :: Map v (New Reference.Id, Term v a) - newTermComponents = + let newTermComponents :: Map v (New Reference.Id, Term.Term v a) + newTermComponents = remappedReferences & Map.elems & fmap (\(v, trm, _typ) -> (v, trm)) & Map.fromList & Convert.hashTermComponents - + ifor newTermComponents $ \v (newReferenceId, trm) -> do - let oldReferenceId = vToOldReference Map.! v + let oldReferenceId = vToOldReferenceMapping Map.! v let (_, _, typ) = remappedReferences Map.! oldReferenceId - field @"referenceMap" %= Map.insert oldReferenceId newReferenceId - putTermDeclaration newReferenceId typ trm + field @"referenceMapping" %= Map.insert oldReferenceId newReferenceId + lift $ putTerm newReferenceId trm typ + -- what's this for? -- let newTypeComponents :: Map v (Reference.Id, Type v a) -- newTypeComponents = (Map.fromList $ Map.elems remappedReferences) @@ -263,7 +266,7 @@ migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do -migrateDeclComponent :: forall m v a. Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity) +migrateDeclComponent :: forall m v a. (Generic v, Generic a) => Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity) migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do declComponent :: [DD.Decl v a] <- lift (getDeclComponent hash) >>= \case Nothing -> error "handle this" -- not non-fatal! @@ -287,26 +290,31 @@ migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do unhashed ^.. traversed . _2 - . beside asDataDecl_ id + . beside DD.asDataDecl_ id . to DD.constructors' . traversed . _3 - let allContainedReferences :: [Reference.Id] - allContainedReferences = foldMap (ABT.find findReferenceIds) allTypes - -- unmigratedIds :: [Reference.Id] - declMap <- gets declLookup - let unmigratedIds :: [Reference.Id] - unmigratedIds = filter (\ref -> not (Map.member ref declMap)) allContainedReferences - when (not . null $ unmigratedIds) do - let unmigratedHashes :: [Unison.Hash] - unmigratedHashes = - nubOrd (map Reference.idToHash unmigratedIds) - throwE (Sync.Missing (map DComponent unmigratedHashes)) + migratedReferences <- gets referenceMapping + let unmigratedHashes :: [Hash] + unmigratedHashes = + allTypes + ^.. traversed -- Every type in the list + . ABT.subterms_ -- All subterms in each type + . ABT.baseFunctor_ -- Focus Type.F + . Type._Ref -- Only the Ref constructor has references + . Reference._DerivedId + . filtered (\r -> Map.notMember r migratedReferences) + . to Reference.idToHash + -- foldMap (ABT.find (findMissingTypeReferenceIds migratedReferences)) allTypes + + when (not . null $ unmigratedHashes) do + throwE (Sync.Missing (map DComponent $ nubOrd unmigratedHashes)) -- At this point we know we have all the required mappings from old references to new ones. let remapTerm :: Type v a -> Type v a - remapTerm typ = runIdentity $ ABT.visit' (remapReferences declMap) typ + remapTerm = typeReferences %~ \ref -> Map.findWithDefault (error "unmigrated reference") ref migratedReferences + -- runIdentity $ ABT.visit' (remapReferences declMap) typ let remappedReferences :: Map (Old Reference.Id) (v, DD.Decl v a) remappedReferences = unhashed @@ -326,13 +334,42 @@ migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do -- ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] let newComponent :: [(v, Reference.Id, DD.DataDeclaration v a)] - newComponent = Convert.hashDecls (Map.fromList $ Map.elems remappedReferences) + newComponent = remappedReferences + & Map.elems + & Map.fromList + & fmap DD.asDataDecl + & Convert.hashDecls + & fromRight (error "unexpected resolution error") for_ newComponent $ \(v, newReferenceId, dd) -> do - field @"declLookup" %= Map.insert (vToOldReference Map.! v) newReferenceId - putTypeDeclaration newReference (_ d) + field @"referenceMapping" %= Map.insert (vToOldReference Map.! v) newReferenceId + lift $ putTypeDeclaration newReferenceId (_ dd) -- Need to somehow keep decl type through this transformation? pure Sync.Done +typeReferences :: Traversal' (Type v a) (Reference.Id) +typeReferences = + ABT.rewriteDown_ -- Focus all terms + . ABT.baseFunctor_ -- Focus Type.F + . Type._Ref -- Only the Ref constructor has references + . Reference._DerivedId + +termReferences :: (Monad m, Ord v) => LensLike' m (Term.Term v a) (Reference.Id) +termReferences = + ABT.rewriteDown_ -- Focus all terms + . ABT.baseFunctor_ -- Focus Type.F + . _ + +termFReferences :: Traversal' (Term.F tv ta pa a) (Reference.Id) +termFReferences f t = + t & Term._Ref %%~ f + & Term._Constructor . _1 %%~ f + & Term._Request . _1 %%~ f + & Term._Ann . _2 . typeReferences %%~ f + & Term._Match . _2 . _ %%~ f + & Term._TermLink %%~ f + & Term._TypeLink %%~ f + + -- structural type Ping x = P1 (Pong x) -- P1 : forall x. Pong x -> Ping x @@ -349,7 +386,7 @@ migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do -- ctor P1: #abc pos=0 cid=0 -- ctor P2: #abc pos=1 cid=0 -- ctor P3: #abc pos=1 cid=1 --- +-- -- we unhashComponent and get: -- { X -> structural type X x = AAA (Y x) -- , Y -> structural type Y x = BBB (X x) | CCC Nat } @@ -357,19 +394,15 @@ migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do - - - - remapReferences :: Map (Old Reference.Id) (New Reference.Id) -> Type.F (Type v a) - -> Identity Type.F (Type v a) + -> Type.F (Type v a) remapReferences declMap = \case - (Type.Ref (Reference.DerivedId refId)) -> + (Type.Ref (Reference.DerivedId refId)) -> Type.Ref . Reference.DerivedId $ fromMaybe (error $ "Expected reference to exist in decl mapping, but it wasn't found: " <> show refId) - (Sync.Missing [DComponent Reference.idToHash refId]) (Map.lookup refId declMap) - x -> pure x + (Map.lookup refId declMap) + x -> x @@ -389,63 +422,13 @@ remapReferences declMap = \case -- recordRefsInType :: MonadState MigrationState m => Type v a -> WriterT [Reference.Id] m (Type v a) -- recordRefsInType = _ -findTypeReferenceIds :: Type v a -> ABT.FindAction Reference.Id -findTypeReferenceIds = ABT.out >>> \case - ABT.Tm (Type.Ref (Reference.DerivedId r)) -> ABT.Found r - x -> ABT.Continue - -findTermReferenceIds :: Term f v a -> [Reference.Id] -findTermReferenceIds t = flip ABT.find (ABT.out t) \case - ABT.Tm f -> ABT.Found (findReferencesInTermF f) - x -> ABT.Continue - -findReferencesInTermF :: Unison.Term.F typeVar typeAnn patternAnn a -> [Reference.Id] -findReferencesInTermF = ABT.find \case - Ref (Reference.DerivedId refId) -> ABT.Found [refId] - Constructor (Reference.DerivedId refId) _conID -> ABT.Found [refId] - Request (Reference.DerivedId refId) _conID -> ABT.Found [refId] - Ann _ typ -> ABT.Found (ABT.find findTypeReferenceIds typ) - TermLink referent -> case referent of - Ref' (Reference.DerivedId refId) -> ABT.Found [refId] - -- Double check that ConType isn't part of the hash, remove it if it is. - Con' (Reference.DerivedId refId) _conID _conType -> ABT.Found [refId] - TypeLink (Reference.DerivedId refId) -> ABT.Found [refId] - Match _ matchCases -> foldMap (\MatchCase pat _ -> findReferencesInPattern pat) matchCases - _ -> ABT.Continue - -findReferencesInPattern :: Traversal' () -> Pattern loc -> [Reference.Id] -findReferencesInPattern - = \case - Unbound{} -> [] - Var{} -> [] - Boolean{} -> [] - Int{} -> [] - Nat{} -> [] - Float{} -> [] - Text{} -> [] - Char{} -> [] - Constructor _loc ref _constructorId ps -> - let rs = case ref of - DerivedId refId -> [refId] - Builtin{} -> [] - in rs <> foldMap findReferencesInPattern ps - As _loc p -> findReferencesInPattern p - EffectPure _loc p -> findReferencesInPattern p - EffectBind _loc ref _constructorId ps p - -> let rs = case ref of - DerivedId refId -> [refId] - Builtin{} -> [] - in rs <> foldMap findReferencesInPattern (p:ps) - SequenceLiteral _loc ps -> foldMap findReferencesInPattern ps - SequenceOp _loc p _seqOp p' -> foldMap findReferencesInPattern [p, p'] - --- data DataDeclaration v a = DataDeclaration { --- modifier :: Modifier, --- annotation :: a, --- bound :: [v], --- constructors' :: [(a, v, Type v a)] --- } deriving (Eq, Show, Functor) - +-- findMissingReferencesInTermF :: +-- (Generic typeVar, Generic typeAnn, Generic patternAnn) => +-- Term.F typeVar typeAnn patternAnn () -> +-- [Reference.Id] +-- findMissingReferencesInTermF t = +-- -- TODO: Test that this descends into Match cases and finds everything it needs to. +-- t ^.. types @Reference.Id -- compute correspondence between `v`s in `fst <$> named` compared to `fst <$> new_references` to get a Reference.Id -> Reference.Id mapping -- mitchell tapped out before understanding the following line diff --git a/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs b/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs new file mode 100644 index 0000000000..30fdbdd94e --- /dev/null +++ b/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs @@ -0,0 +1,12 @@ +module Unison.Test.Codebase.Migration12 where + +testType :: Type v a +testType = _ + +test :: Test () +test = + scope "migrate12" + . tests + $ [ scope "threeWayMerge.ex1" + . expect $ Causal.head testThreeWay == Set.fromList [3, 4] + ] diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 54ec16fce5..a77250d0d6 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -275,6 +275,7 @@ library , regex-tdfa , safe , safe-exceptions + , semialign , servant , servant-docs , servant-openapi3 @@ -353,6 +354,7 @@ executable tests Unison.Test.Cache Unison.Test.ClearCache Unison.Test.Codebase.Causal + Unison.Test.Codebase.MigrateSchema12 Unison.Test.Codebase.Path Unison.Test.CodebaseInit Unison.Test.ColorText diff --git a/unison-core/package.yaml b/unison-core/package.yaml index 35483fd7c5..5de9c09d11 100644 --- a/unison-core/package.yaml +++ b/unison-core/package.yaml @@ -14,6 +14,7 @@ library: - either - extra - fuzzyfind + - generic-lens - lens - prelude-extras - memory @@ -32,6 +33,7 @@ default-extensions: - ApplicativeDo - BlockArguments - DeriveFunctor + - DeriveGeneric - DerivingStrategies - DoAndIfThenElse - FlexibleContexts diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index 869d793d01..b27a17f0e5 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -14,10 +14,10 @@ module Unison.ABT where import Unison.Prelude import Prelude hiding (abs, cycle) -import Control.Lens (Lens', use, (.=), Fold, folding, LensLike', Setter') +import Control.Lens (Lens', use, (.=), Fold, folding, Setter', lens, (%%~)) import Control.Monad.State (MonadState, evalState) import qualified Data.Foldable as Foldable -import Data.Functor.Identity (runIdentity) +import Data.Functor.Identity (runIdentity, Identity (Identity)) import Data.List hiding (cycle) import qualified Data.Map as Map import qualified Data.Set as Set @@ -28,14 +28,31 @@ data ABT f v r = Var v | Cycle r | Abs v r - | Tm (f r) - deriving (Functor, Foldable, Traversable) - -- deriving Data - + | Tm (f r) + deriving (Functor, Foldable, Traversable, Generic) + -- | At each level in the tree, we store the set of free variables and -- a value of type `a`. Variables are of type `v`. data Term f v a = Term { freeVars :: Set v, annotation :: a, out :: ABT f v (Term f v a) } + deriving Generic + +abt_ :: Lens' (Term f v a) (ABT f v (Term f v a)) +abt_ = lens out setter + where + setter (Term fv a _) abt = Term fv a abt + +-- a.k.a. baseFunctor_ :: Traversal' (Term f v a) (f _) +baseFunctor_ :: + Applicative m => + (f (Term f v a) -> m (f (Term f v a))) -> + Term f v a -> + m (Term f v a) +baseFunctor_ f t = + t & abt_ %%~ \case + Tm fx -> Tm <$> f (fx) + x -> pure x + -- deriving instance (Data a, Data v, Typeable f, Data (f (Term f v a)), Ord v) => Data (Term f v a) @@ -446,7 +463,7 @@ freeVarOccurrences except t = Tm body -> foldMap go body foreachSubterm - :: (Traversable f, Applicative g, Ord v) + :: (Traversable f, Applicative g) => (Term f v a -> g b) -> Term f v a -> g [b] @@ -459,10 +476,10 @@ foreachSubterm f e = case out e of <$> f e <*> (join . Foldable.toList <$> traverse (foreachSubterm f) body) -subterms :: (Ord v, Traversable f) => Term f v a -> [Term f v a] +subterms :: (Traversable f) => Term f v a -> [Term f v a] subterms t = runIdentity $ foreachSubterm pure t -subterms_ :: (Ord v, Traversable f) => Fold (Term f v a) (Term f v a) +subterms_ :: (Traversable f) => Fold (Term f v a) (Term f v a) subterms_ = folding subterms -- | `visit f t` applies an effectful function to each subtree of @@ -485,17 +502,6 @@ visit f t = flip fromMaybe (f t) $ case out t of subTermsSetter_ :: (Traversable f, Ord v) => Setter' (Term f v a) (Term f v a) subTermsSetter_ f tm = visit (Just . f) tm -visitT :: (Traversable f, Monad m, Ord v) - => (Term f v a) -> m (Term f v a) - -> Term f v a - -> m (Term f v a) -visitT f tm = f tm >>= \(Term ) - Var _ -> pure t - Cycle body -> cycle' (annotation t) <$> visit' f body - Abs x e -> abs' (annotation t) x <$> visit' f e - Tm body -> f body >>= (fmap (tm' (annotation t)) . traverse (visit' f)) - - -- | Apply an effectful function to an ABT tree top down, sequencing the results. visit' :: (Traversable f, Monad g, Ord v) => (f (Term f v a) -> g (f (Term f v a))) @@ -516,7 +522,7 @@ rewriteDown :: (Traversable f, Ord v) => (Term f v a -> Term f v a) -> Term f v a -> Term f v a -rewriteDown f tm = runIdentity $ rewriteDown (Identity . f) tm +rewriteDown f tm = runIdentity $ rewriteDown_ (Identity . f) tm -- | Setter' (Term f v a) (Term f v a) @@ -527,10 +533,10 @@ rewriteDown_ :: (Traversable f, Monad m, Ord v) rewriteDown_ f t = do t' <- f t case out t' of - Var _ -> t' - Cycle body -> cycle' (annotation t') <$> rewriteDown' f body - Abs x e -> abs' (annotation t') x <$> rewriteDown' f e - Tm body -> tm' (annotation t') <$> (rewriteDown' f `fmap` body) + Var v -> pure (annotatedVar (annotation t') v) + Cycle body -> cycle' (annotation t') <$> rewriteDown_ f body + Abs x e -> abs' (annotation t') x <$> rewriteDown_ f e + Tm body -> tm' (annotation t') <$> (traverse (rewriteDown_ f) body) data Subst f v a = Subst { freshen :: forall m v' . Monad m => (v -> m v') -> m v' @@ -593,7 +599,8 @@ reannotateUp g t = case out t of -- Find all subterms that match a predicate. Prune the search for speed. -- (Some patterns of pruning can cut the complexity of the search.) -data FindAction x = Found x | Prune | Continue deriving Show +data FindAction x = Found x | Prune | Continue + deriving (Show, Functor) find :: (Ord v, Foldable f, Functor f) => (Term f v a -> FindAction x) -> Term f v a diff --git a/unison-core/src/Unison/Blank.hs b/unison-core/src/Unison/Blank.hs index d10199db95..e56bc649a6 100644 --- a/unison-core/src/Unison/Blank.hs +++ b/unison-core/src/Unison/Blank.hs @@ -1,4 +1,5 @@ module Unison.Blank where +import Unison.Prelude loc :: Recorded loc -> loc loc (Placeholder loc _) = loc @@ -14,9 +15,9 @@ data Recorded loc = Placeholder loc String -- A name to be resolved with type-directed name resolution. | Resolve loc String - deriving (Show, Eq, Ord, Functor) + deriving (Show, Eq, Ord, Functor, Generic) data Blank loc = Blank | Recorded (Recorded loc) - deriving (Show, Eq, Ord, Functor) + deriving (Show, Eq, Ord, Functor, Generic) diff --git a/unison-core/src/Unison/Reference.hs b/unison-core/src/Unison/Reference.hs index 2699203367..6552ca31e7 100644 --- a/unison-core/src/Unison/Reference.hs +++ b/unison-core/src/Unison/Reference.hs @@ -2,12 +2,14 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE DataKinds #-} module Unison.Reference (Reference, pattern Builtin, pattern Derived, pattern DerivedId, + _DerivedId, Id(..), Pos, CycleSize, Size, @@ -28,7 +30,8 @@ module Unison.Reference unsafeId, toShortHash, idToHash, - idToShortHash) where + idToShortHash, + ) where import Unison.Prelude @@ -38,6 +41,8 @@ import qualified Unison.Hash as H import Unison.ShortHash (ShortHash) import qualified Unison.ShortHash as SH import Data.Char (isDigit) +import Control.Lens (Prism') +import Data.Generics.Sum (_Ctor) -- | Either a builtin or a user defined (hashed) top-level declaration. -- @@ -50,13 +55,16 @@ data Reference -- The `Pos` refers to a particular element of the component -- and the `Size` is the number of elements in the component. -- Using an ugly name so no one tempted to use this - | DerivedId Id deriving (Eq, Ord) + | DerivedId Id deriving (Eq, Ord, Generic) pattern Derived :: H.Hash -> Pos -> Reference pattern Derived h i = DerivedId (Id h i) {-# COMPLETE Builtin, Derived #-} +_DerivedId :: Prism' Reference Id +_DerivedId = _Ctor @"DerivedId" + -- | @Pos@ is a position into a cycle of size @Size@, as cycles are hashed together. data Id = Id H.Hash Pos deriving (Eq, Ord) diff --git a/unison-core/src/Unison/Referent'.hs b/unison-core/src/Unison/Referent'.hs index 0d2689956f..88df22d500 100644 --- a/unison-core/src/Unison/Referent'.hs +++ b/unison-core/src/Unison/Referent'.hs @@ -7,7 +7,7 @@ import Unison.ConstructorType (ConstructorType) import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Hashable (Hashable (tokens)) import qualified Unison.Hashable as H -import Unison.Prelude (Word64) +import Unison.Prelude (Word64, Generic) -- | Specifies a term. -- @@ -20,7 +20,7 @@ import Unison.Prelude (Word64) -- -- When @Con'@ then @r@ is a type declaration. data Referent' r = Ref' r | Con' r ConstructorId ConstructorType - deriving (Show, Ord, Eq, Functor) + deriving (Show, Ord, Eq, Functor, Generic) isConstructor :: Referent' r -> Bool isConstructor Con' {} = True @@ -48,4 +48,4 @@ fold fr fc = \case instance Hashable r => Hashable (Referent' r) where tokens (Ref' r) = [H.Tag 0] ++ H.tokens r - tokens (Con' r i dt) = [H.Tag 2] ++ H.tokens r ++ H.tokens (fromIntegral i :: Word64) ++ H.tokens dt \ No newline at end of file + tokens (Con' r i dt) = [H.Tag 2] ++ H.tokens r ++ H.tokens (fromIntegral i :: Word64) ++ H.tokens dt diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index c1b0ef397f..d928c5a9be 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -7,6 +7,7 @@ {-# LANGUAGE UnicodeSyntax #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DataKinds #-} module Unison.Term where @@ -45,6 +46,8 @@ import qualified Unison.Name as Name import qualified Unison.LabeledDependency as LD import Unison.LabeledDependency (LabeledDependency) import qualified Data.Set.NonEmpty as NES +import Data.Generics.Sum (_Ctor) +import Control.Lens (Prism') data MatchCase loc a = MatchCase (Pattern loc) (Maybe a) a deriving (Show,Eq,Foldable,Functor,Generic,Generic1,Traversable) @@ -93,6 +96,27 @@ data F typeVar typeAnn patternAnn a | TypeLink Reference deriving (Foldable,Functor,Generic,Generic1,Traversable) +_Ref :: Prism' (F tv ta pa a) Reference +_Ref = _Ctor @"Ref" + +_Match :: Prism' (F tv ta pa a) (a, [MatchCase pa a]) +_Match = _Ctor @"Match" + +_Constructor :: Prism' (F tv ta pa a) (Reference, Int) +_Constructor = _Ctor @"Constructor" + +_Request :: Prism' (F tv ta pa a) (Reference, Int) +_Request = _Ctor @"Request" + +_Ann :: Prism' (F tv ta pa a) (a, ABT.Term Type.F tv ta) +_Ann = _Ctor @"Ann" + +_TermLink :: Prism' (F tv ta pa a) Referent +_TermLink = _Ctor @"TermLink" + +_TypeLink :: Prism' (F tv ta pa a) Reference +_TypeLink = _Ctor @"TypeLink" + type IsTop = Bool -- | Like `Term v`, but with an annotation of type `a` at every level in the tree diff --git a/unison-core/src/Unison/Type.hs b/unison-core/src/Unison/Type.hs index 4abc826f3d..85c22b3219 100644 --- a/unison-core/src/Unison/Type.hs +++ b/unison-core/src/Unison/Type.hs @@ -4,6 +4,7 @@ {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE DataKinds #-} module Unison.Type where @@ -26,6 +27,8 @@ import qualified Unison.Settings as Settings import qualified Unison.Names.ResolutionResult as Names import qualified Unison.Name as Name import qualified Unison.Util.List as List +import Control.Lens (Prism') +import Data.Generics.Sum (_Ctor) -- | Base functor for types in the Unison language data F a @@ -45,6 +48,9 @@ instance Eq1 F where (==#) = (==) instance Ord1 F where compare1 = compare instance Show1 F where showsPrec1 = showsPrec +_Ref :: Prism' (F a) Reference +_Ref = _Ctor @"Ref" + -- | Types are represented as ABTs over the base functor F, with variables in `v` type Type v a = ABT.Term F v a diff --git a/unison-core/unison-core1.cabal b/unison-core/unison-core1.cabal index 6b2f756470..5dc31dd834 100644 --- a/unison-core/unison-core1.cabal +++ b/unison-core/unison-core1.cabal @@ -3,8 +3,6 @@ cabal-version: 1.12 -- This file has been generated from package.yaml by hpack version 0.34.4. -- -- see: https://github.com/sol/hpack --- --- hash: 376639c26bffa766c92695ee10dfb8e173ff944dd0483f4aa41b3a6a79887099 name: unison-core1 version: 0.0.0 @@ -70,6 +68,7 @@ library ApplicativeDo BlockArguments DeriveFunctor + DeriveGeneric DerivingStrategies DoAndIfThenElse FlexibleContexts @@ -91,6 +90,7 @@ library , either , extra , fuzzyfind + , generic-lens , lens , memory , mtl From e9e39726899759d124495df59fe9833d884178f6 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 24 Oct 2021 22:49:41 -0600 Subject: [PATCH 033/297] More rewrites --- .../SqliteCodebase/MigrateSchema12.hs | 77 ++++++++++++------- unison-core/src/Unison/Hash.hs | 2 +- unison-core/src/Unison/Term.hs | 15 +++- 3 files changed, 61 insertions(+), 33 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index adee0a658f..103ce1f359 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -40,6 +40,9 @@ import qualified Unison.Term as Term import Data.List.Extra (nubOrd) import Unison.Hash (Hash) import Data.Tuple (swap) +import Unison.Pattern (Pattern) +import Unison.Var (Var) +import Control.Monad.Trans.Writer.CPS (runWriterT, tell, execWriterT, execWriter) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -184,11 +187,11 @@ migrationSync = Sync \case -- To sync a causal, -- 1. ??? -- 2. Synced - C causalHashID -> undefined + C _causalHashID -> undefined -- To sync a watch result, -- 1. ??? -- 2. Synced - W watchKind idH -> undefined + W _watchKind _idH -> undefined Patch{} -> undefined NS{} -> undefined @@ -199,23 +202,23 @@ migrationSync = Sync \case -- | Namespace -- 2 -- | Patch -- 3 -migrateObject :: Codebase m v a -> ObjectType -> Hash -> ByteString -> m _ +migrateObject :: (Var v, Monad m) => Codebase m v a -> ObjectType -> Hash -> ByteString -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateObject codebase objType hash bytes = case objType of OT.TermComponent -> migrateTermComponent codebase hash OT.DeclComponent -> migrateDeclComponent codebase hash OT.Namespace -> migrateNamespace hash bytes OT.Patch -> migratePatch hash bytes -migratePatch :: Hash -> ByteString -> m _ +migratePatch :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) migratePatch = error "not implemented" -migrateNamespace :: Hash -> ByteString -> m _ +migrateNamespace :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) migrateNamespace = error "not implemented" -migrateTermComponent :: forall m v a. Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity) +migrateTermComponent :: forall m v a. (Ord v, Var v, Monad m) => Codebase m v a -> Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do -- getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), - component <- lift (getTermComponentWithTypes hash) >>= \case + component <- (lift . lift $ getTermComponentWithTypes hash) >>= \case Nothing -> error $ "Hash was missing from codebase: " <> show hash Just component -> pure component @@ -230,7 +233,14 @@ migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do & Map.fromList referencesMap <- gets referenceMapping let allMissingReferences :: [Old Reference.Id] - allMissingReferences = unhashed ^.. traversed . types @Reference.Id . filtered (\r -> Map.notMember r referencesMap) + allMissingReferences = + execWriter $ unhashed + & traversed + & traversed + . _2 + . termReferences_ + . filtered (\r -> Map.notMember r referencesMap) %%~ \r -> tell [r] *> pure r + -- unhashed ^.. traversed . types @Reference.Id . filtered (\r -> Map.notMember r referencesMap) -- foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) when (not . null $ allMissingReferences) $ throwE $ Sync.Missing . nubOrd $ (TComponent . Reference.idToHash <$> allMissingReferences) @@ -250,7 +260,7 @@ migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do let oldReferenceId = vToOldReferenceMapping Map.! v let (_, _, typ) = remappedReferences Map.! oldReferenceId field @"referenceMapping" %= Map.insert oldReferenceId newReferenceId - lift $ putTerm newReferenceId trm typ + lift . lift $ putTerm newReferenceId trm typ -- what's this for? -- let newTypeComponents :: Map v (Reference.Id, Type v a) @@ -266,11 +276,17 @@ migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do -migrateDeclComponent :: forall m v a. (Generic v, Generic a) => Codebase m v a -> Unison.Hash -> m (Sync.TrySyncResult Entity) -migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do - declComponent :: [DD.Decl v a] <- lift (getDeclComponent hash) >>= \case - Nothing -> error "handle this" -- not non-fatal! - Just dc -> pure dc +migrateDeclComponent :: + forall m v a. + (Ord v, Var v, Monad m) => + Codebase m v a -> + Unison.Hash -> + StateT MigrationState m (Sync.TrySyncResult Entity) +migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do + declComponent :: [DD.Decl v a] <- + (lift . lift $ getDeclComponent hash) >>= \case + Nothing -> error "handle this" -- not non-fatal! + Just dc -> pure dc -- type Decl = Either EffectDeclaration DataDeclaration let componentIDMap :: Map Reference.Id (DD.Decl v a) @@ -342,32 +358,35 @@ migrateDeclComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do & fromRight (error "unexpected resolution error") for_ newComponent $ \(v, newReferenceId, dd) -> do field @"referenceMapping" %= Map.insert (vToOldReference Map.! v) newReferenceId - lift $ putTypeDeclaration newReferenceId (_ dd) -- Need to somehow keep decl type through this transformation? + lift . lift $ putTypeDeclaration newReferenceId (_ dd) -- Need to somehow keep decl type through this transformation? pure Sync.Done -typeReferences :: Traversal' (Type v a) (Reference.Id) +typeReferences :: (Monad m, Ord v) => LensLike' m (Type v a) (Reference.Id) typeReferences = ABT.rewriteDown_ -- Focus all terms . ABT.baseFunctor_ -- Focus Type.F . Type._Ref -- Only the Ref constructor has references . Reference._DerivedId -termReferences :: (Monad m, Ord v) => LensLike' m (Term.Term v a) (Reference.Id) -termReferences = +termReferences_ :: (Monad m, Ord v) => LensLike' m (Term.Term v a) (Reference.Id) +termReferences_ = ABT.rewriteDown_ -- Focus all terms . ABT.baseFunctor_ -- Focus Type.F - . _ - -termFReferences :: Traversal' (Term.F tv ta pa a) (Reference.Id) -termFReferences f t = - t & Term._Ref %%~ f - & Term._Constructor . _1 %%~ f - & Term._Request . _1 %%~ f - & Term._Ann . _2 . typeReferences %%~ f - & Term._Match . _2 . _ %%~ f - & Term._TermLink %%~ f - & Term._TypeLink %%~ f + . termFReferences_ + +termFReferences_ :: (Ord tv, Monad m) => LensLike' m (Term.F tv ta pa a) (Reference.Id) +termFReferences_ f t = + (t & Term._Ref . Reference._DerivedId %%~ f) + >>= Term._Constructor . _1 . Reference._DerivedId %%~ f + >>= Term._Request . _1 . Reference._DerivedId %%~ f + >>= Term._Ann . _2 . typeReferences %%~ f + >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f + >>= Term._TermLink . types @Reference.Id %%~ f + >>= Term._TypeLink . types @Reference.Id %%~ f + +patternReferences_ :: Traversal' (Pattern loc) Reference.Id +patternReferences_ = _ -- types @Reference.Id -- structural type Ping x = P1 (Pong x) diff --git a/unison-core/src/Unison/Hash.hs b/unison-core/src/Unison/Hash.hs index f9fb8a6078..a6cf39b3dc 100644 --- a/unison-core/src/Unison/Hash.hs +++ b/unison-core/src/Unison/Hash.hs @@ -31,7 +31,7 @@ import qualified Data.Text as Text import qualified Data.Set as Set -- | Hash which uniquely identifies a Unison type or term -newtype Hash = Hash { toBytes :: SBS.ShortByteString } deriving (Eq,Ord,Generic) +newtype Hash = Hash { toBytes :: SBS.ShortByteString } deriving (Eq,Ord,Generic, Data) instance Show Hash where show h = take 999 $ Text.unpack (base32Hex h) diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index d928c5a9be..ec62c37321 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -47,10 +47,19 @@ import qualified Unison.LabeledDependency as LD import Unison.LabeledDependency (LabeledDependency) import qualified Data.Set.NonEmpty as NES import Data.Generics.Sum (_Ctor) -import Control.Lens (Prism') +import Control.Lens (Prism', Lens', lens) -data MatchCase loc a = MatchCase (Pattern loc) (Maybe a) a - deriving (Show,Eq,Foldable,Functor,Generic,Generic1,Traversable) +data MatchCase loc a = MatchCase + { matchPattern :: Pattern loc, + matchGuard :: (Maybe a), + matchBody :: a + } + deriving (Show, Eq, Foldable, Functor, Generic, Generic1, Traversable) + +matchPattern_ :: Lens' (MatchCase loc a) (Pattern loc) +matchPattern_ = lens matchPattern setter + where + setter m p = m {matchPattern = p} -- | Base functor for terms in the Unison language -- We need `typeVar` because the term and type variables may differ. From 2f12a34df8eb733645711a0549a907530a3e1961 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 24 Oct 2021 22:57:07 -0600 Subject: [PATCH 034/297] foldSetter --- .../SqliteCodebase/MigrateSchema12.hs | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 103ce1f359..c908e982fa 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -42,7 +42,7 @@ import Unison.Hash (Hash) import Data.Tuple (swap) import Unison.Pattern (Pattern) import Unison.Var (Var) -import Control.Monad.Trans.Writer.CPS (runWriterT, tell, execWriterT, execWriter) +import Control.Monad.Trans.Writer.CPS (runWriterT, tell, execWriterT, execWriter, Writer) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -234,13 +234,13 @@ migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do referencesMap <- gets referenceMapping let allMissingReferences :: [Old Reference.Id] allMissingReferences = - execWriter $ unhashed - & traversed - & traversed - . _2 - . termReferences_ - . filtered (\r -> Map.notMember r referencesMap) %%~ \r -> tell [r] *> pure r - -- unhashed ^.. traversed . types @Reference.Id . filtered (\r -> Map.notMember r referencesMap) + unhashed + & foldSetter + ( traversed + . _2 + . termReferences_ + . filtered (\r -> Map.notMember r referencesMap) + ) -- foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) when (not . null $ allMissingReferences) $ throwE $ Sync.Missing . nubOrd $ (TComponent . Reference.idToHash <$> allMissingReferences) @@ -312,20 +312,18 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do . _3 migratedReferences <- gets referenceMapping - let unmigratedHashes :: [Hash] - unmigratedHashes = + let unmigratedRefIds :: [Reference.Id] + unmigratedRefIds = allTypes - ^.. traversed -- Every type in the list - . ABT.subterms_ -- All subterms in each type - . ABT.baseFunctor_ -- Focus Type.F - . Type._Ref -- Only the Ref constructor has references - . Reference._DerivedId - . filtered (\r -> Map.notMember r migratedReferences) - . to Reference.idToHash + & foldSetter + ( traversed -- Every type in the list + . typeReferences + . filtered (\r -> Map.notMember r migratedReferences) + ) -- foldMap (ABT.find (findMissingTypeReferenceIds migratedReferences)) allTypes - when (not . null $ unmigratedHashes) do - throwE (Sync.Missing (map DComponent $ nubOrd unmigratedHashes)) + when (not . null $ unmigratedRefIds) do + throwE (Sync.Missing (map DComponent . nubOrd . fmap Reference.idToHash $ unmigratedRefIds)) -- At this point we know we have all the required mappings from old references to new ones. let remapTerm :: Type v a -> Type v a @@ -548,3 +546,6 @@ migrateSchema12 db = do -- -- getConstructor :: ConstructorMappings -> ObjectId -> Pos -> ConstructorId -- -- getConstructor cm + +foldSetter :: LensLike (Writer [a]) s t a a -> s -> [a] +foldSetter t s = execWriter (s & t %%~ \a -> tell [a] *> pure a) From 767b08fc48a1a7026004a2a18cdcdb268fbfd9f1 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 25 Oct 2021 09:17:27 -0600 Subject: [PATCH 035/297] Remove unused Data instance --- unison-core/src/Unison/Hash.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unison-core/src/Unison/Hash.hs b/unison-core/src/Unison/Hash.hs index a6cf39b3dc..f9fb8a6078 100644 --- a/unison-core/src/Unison/Hash.hs +++ b/unison-core/src/Unison/Hash.hs @@ -31,7 +31,7 @@ import qualified Data.Text as Text import qualified Data.Set as Set -- | Hash which uniquely identifies a Unison type or term -newtype Hash = Hash { toBytes :: SBS.ShortByteString } deriving (Eq,Ord,Generic, Data) +newtype Hash = Hash { toBytes :: SBS.ShortByteString } deriving (Eq,Ord,Generic) instance Show Hash where show h = take 999 $ Text.unpack (base32Hex h) From 9dd25091a8348a7a5466113cdb5ff0a105f85c12 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 25 Oct 2021 14:18:07 -0400 Subject: [PATCH 036/297] get migrate12 compiling --- .../SqliteCodebase/MigrateSchema12.hs | 367 ++++++++++-------- unison-core/src/Unison/Term.hs | 4 +- 2 files changed, 209 insertions(+), 162 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index c908e982fa..005d0cd5bb 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -1,14 +1,24 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE PartialTypeSignatures #-} {-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DataKinds #-} {-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where -import Unison.Prelude +import Control.Lens +import Control.Monad.Except (runExceptT) import Control.Monad.Reader (MonadReader) +import Control.Monad.State.Strict +import Control.Monad.Trans.Except (throwE) +import Control.Monad.Trans.Writer.CPS (Writer, execWriter, execWriterT, runWriterT, tell) +import Data.Generics.Product +import Data.Generics.Sum +import Data.List.Extra (nubOrd) +import qualified Data.Map as Map +import Data.Tuple (swap) +import qualified Data.Zip as Zip import U.Codebase.Sqlite.Connection (Connection) import U.Codebase.Sqlite.DbId (CausalHashId, ObjectId) import U.Codebase.Sqlite.ObjectType (ObjectType) @@ -17,32 +27,23 @@ import qualified U.Codebase.Sqlite.Reference as S.Reference import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync import qualified U.Codebase.WatchKind as WK -import Unison.Reference (Pos) -import Unison.Referent (ConstructorId) -import qualified Unison.Reference as Reference -import qualified Unison.Referent as Referent +import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) -import qualified Unison.DataDeclaration as DD import qualified Unison.Codebase as Codebase -import qualified Data.Map as Map -import Unison.Type (Type) -import qualified Unison.ABT as ABT -import qualified Unison.Type as Type -import Control.Monad.State.Strict -import Control.Monad.Except (runExceptT) -import Control.Monad.Trans.Except (throwE) -import Data.Generics.Product +import qualified Unison.DataDeclaration as DD +import Unison.Hash (Hash) import qualified Unison.Hash as Unison import qualified Unison.Hashing.V2.Convert as Convert -import Control.Lens -import qualified Data.Zip as Zip -import qualified Unison.Term as Term -import Data.List.Extra (nubOrd) -import Unison.Hash (Hash) -import Data.Tuple (swap) import Unison.Pattern (Pattern) +import Unison.Prelude +import Unison.Reference (Pos) +import qualified Unison.Reference as Reference +import Unison.Referent (ConstructorId) +import qualified Unison.Referent as Referent +import qualified Unison.Term as Term +import Unison.Type (Type) +import qualified Unison.Type as Type import Unison.Var (Var) -import Control.Monad.Trans.Writer.CPS (runWriterT, tell, execWriterT, execWriter, Writer) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -62,26 +63,29 @@ import Control.Monad.Trans.Writer.CPS (runWriterT, tell, execWriterT, execWriter -- newtype TermLookup = TermLookup (Map ObjectId (Vector Pos)) type TypeIdentifier = (ObjectId, Pos) + type Old a = a + type New a = a + data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. { -- declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), - referenceMapping :: Map (Old Reference.Id) (New Reference.Id), + referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), -- Mapping between contructor indexes for the type identified by (ObjectId, Pos) ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), -- This provides the info needed for rewriting a term. You'll access it with a function :: Old termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), objLookup :: Map (Old ObjectId) (New ObjectId), - -- componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), completed :: Set ObjectId - } deriving Generic + } + deriving (Generic) - -- declLookup :: Map ObjectId (Map Pos (Pos, Map ConstructorId ConstructorId)), +-- declLookup :: Map ObjectId (Map Pos (Pos, Map ConstructorId ConstructorId)), {- * Load entire codebase as a list @@ -120,7 +124,6 @@ data Entity | W WK.WatchKind S.Reference.IdH -- Hash Reference.Id deriving (Eq, Ord, Show) - -- data Entity -- = O ObjectId -- Hash -- | C CausalHashId @@ -129,7 +132,6 @@ data Entity data Env = Env {db :: Connection} - -- -> m (TrySyncResult h) migrationSync :: (MonadIO m, MonadState MigrationState m, MonadReader Env m) => @@ -192,9 +194,8 @@ migrationSync = Sync \case -- 1. ??? -- 2. Synced W _watchKind _idH -> undefined - - Patch{} -> undefined - NS{} -> undefined + Patch {} -> undefined + NS {} -> undefined -- data ObjectType -- = TermComponent -- 0 @@ -216,66 +217,66 @@ migrateNamespace :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) migrateNamespace = error "not implemented" migrateTermComponent :: forall m v a. (Ord v, Var v, Monad m) => Codebase m v a -> Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateTermComponent Codebase{..} hash = fmap (either id id) . runExceptT $ do +migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do -- getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), - component <- (lift . lift $ getTermComponentWithTypes hash) >>= \case - Nothing -> error $ "Hash was missing from codebase: " <> show hash - Just component -> pure component + component <- + (lift . lift $ getTermComponentWithTypes hash) >>= \case + Nothing -> error $ "Hash was missing from codebase: " <> show hash + Just component -> pure component let componentIDMap :: Map (Old Reference.Id) (Term.Term v a, Type v a) componentIDMap = Map.fromList $ Reference.componentFor hash component let unhashed :: Map (Old Reference.Id) (v, Term.Term v a) unhashed = Term.unhashComponent (fst <$> componentIDMap) let vToOldReferenceMapping :: Map v (Old Reference.Id) - vToOldReferenceMapping = unhashed - & Map.toList - & fmap (\(refId, (v, _trm)) -> (v, refId)) - & Map.fromList + vToOldReferenceMapping = + unhashed + & Map.toList + & fmap (\(refId, (v, _trm)) -> (v, refId)) + & Map.fromList referencesMap <- gets referenceMapping - let allMissingReferences :: [Old Reference.Id] + let allMissingReferences :: [Old SomeReferenceId] allMissingReferences = unhashed - & foldSetter - ( traversed - . _2 - . termReferences_ - . filtered (\r -> Map.notMember r referencesMap) - ) - -- foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) - when (not . null $ allMissingReferences) - $ throwE $ Sync.Missing . nubOrd $ (TComponent . Reference.idToHash <$> allMissingReferences) - - let remappedReferences :: Map (Old Reference.Id) (v, Term.Term v a, Type v a) - = Zip.zipWith (\(v, trm) (_, typ) -> (v, trm, typ)) unhashed componentIDMap - & undefined -- do the remapping + & foldSetter + ( traversed + . _2 + . termReferences_ + . filtered (\r -> Map.notMember r referencesMap) + ) + -- foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) + when (not . null $ allMissingReferences) $ + throwE $ Sync.Missing . nubOrd $ (someReferenceIdToEntity <$> allMissingReferences) + + let remappedReferences :: Map (Old Reference.Id) (v, Term.Term v a, Type v a) = + Zip.zipWith (\(v, trm) (_, typ) -> (v, trm, typ)) unhashed componentIDMap + & undefined -- do the remapping let newTermComponents :: Map v (New Reference.Id, Term.Term v a) newTermComponents = remappedReferences - & Map.elems - & fmap (\(v, trm, _typ) -> (v, trm)) - & Map.fromList - & Convert.hashTermComponents + & Map.elems + & fmap (\(v, trm, _typ) -> (v, trm)) + & Map.fromList + & Convert.hashTermComponents ifor newTermComponents $ \v (newReferenceId, trm) -> do let oldReferenceId = vToOldReferenceMapping Map.! v let (_, _, typ) = remappedReferences Map.! oldReferenceId - field @"referenceMapping" %= Map.insert oldReferenceId newReferenceId + field @"referenceMapping" %= Map.insert (TermReference oldReferenceId) (TermReference newReferenceId) lift . lift $ putTerm newReferenceId trm typ -- what's this for? -- let newTypeComponents :: Map v (Reference.Id, Type v a) - -- newTypeComponents = (Map.fromList $ Map.elems remappedReferences) + -- newTypeComponents = (Map.fromList $ Map.elems remappedReferences) --- on hold: incorporating term's type in term's hash + -- on hold: incorporating term's type in term's hash --- hashTypeComponents :: Var v => Map v (Memory.Type.Type v a) -> Map v (Memory.Reference.Id, Memory.Type.Type v a) --- hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a) --- --- hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> (Hash, [Memory.Term.Term v a]) + -- hashTypeComponents :: Var v => Map v (Memory.Type.Type v a) -> Map v (Memory.Reference.Id, Memory.Type.Type v a) + -- hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a) + -- + -- hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> (Hash, [Memory.Term.Term v a]) undefined - - migrateDeclComponent :: forall m v a. (Ord v, Var v, Monad m) => @@ -289,56 +290,70 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do Just dc -> pure dc -- type Decl = Either EffectDeclaration DataDeclaration - let componentIDMap :: Map Reference.Id (DD.Decl v a) + let componentIDMap :: Map (Old Reference.Id) (DD.Decl v a) componentIDMap = Map.fromList $ Reference.componentFor hash declComponent - let unhashed :: Map Reference.Id (v, DD.Decl v a) + let unhashed :: Map (Old Reference.Id) (v, DD.Decl v a) unhashed = DD.unhashComponent componentIDMap --- data DataDeclaration v a = DataDeclaration { --- modifier :: Modifier, --- annotation :: a, --- bound :: [v], --- constructors' :: [(a, v, Type v a)] --- } deriving (Eq, Show, Functor) + -- data DataDeclaration v a = DataDeclaration { + -- modifier :: Modifier, + -- annotation :: a, + -- bound :: [v], + -- constructors' :: [(a, v, Type v a)] + -- } deriving (Eq, Show, Functor) let allTypes :: [Type v a] allTypes = unhashed - ^.. traversed - . _2 - . beside DD.asDataDecl_ id - . to DD.constructors' - . traversed - . _3 + ^.. traversed + . _2 + . beside DD.asDataDecl_ id + . to DD.constructors' + . traversed + . _3 migratedReferences <- gets referenceMapping - let unmigratedRefIds :: [Reference.Id] + let unmigratedRefIds :: [SomeReferenceId] unmigratedRefIds = allTypes - & foldSetter - ( traversed -- Every type in the list - . typeReferences - . filtered (\r -> Map.notMember r migratedReferences) - ) - -- foldMap (ABT.find (findMissingTypeReferenceIds migratedReferences)) allTypes + & foldSetter + ( traversed -- Every type in the list + . typeReferences + . filtered (\r -> Map.notMember r migratedReferences) + ) + -- foldMap (ABT.find (findMissingTypeReferenceIds migratedReferences)) allTypes + + -- ability Foo where + -- bar :: {Foo} Int + + -- what's the data decl that might collide with Foo? + + -- mitchell thinks: + -- + -- type Foo where + -- Bar :: Int -> Foo + -- + -- but that doesn't collide + when (not . null $ unmigratedRefIds) do - throwE (Sync.Missing (map DComponent . nubOrd . fmap Reference.idToHash $ unmigratedRefIds)) + throwE (Sync.Missing (nubOrd . fmap someReferenceIdToEntity $ unmigratedRefIds)) -- At this point we know we have all the required mappings from old references to new ones. let remapTerm :: Type v a -> Type v a remapTerm = typeReferences %~ \ref -> Map.findWithDefault (error "unmigrated reference") ref migratedReferences - -- runIdentity $ ABT.visit' (remapReferences declMap) typ + -- runIdentity $ ABT.visit' (remapReferences declMap) typ let remappedReferences :: Map (Old Reference.Id) (v, DD.Decl v a) - remappedReferences = unhashed - & traversed -- Traverse map of reference IDs - . _2 -- Select the DataDeclaration - . beside DD.asDataDecl_ id -- Unpack effect decls - . DD.constructors_ -- Get the data constructors - . traversed -- traverse the list of them - . _3 -- Select the Type term. - %~ remapTerm + remappedReferences = + unhashed + & traversed -- Traverse map of reference IDs + . _2 -- Select the DataDeclaration + . beside DD.asDataDecl_ id -- Unpack effect decls + . DD.constructors_ -- Get the data constructors + . traversed -- traverse the list of them + . _3 -- Select the Type term. + %~ remapTerm let vToOldReference :: Map v (Old Reference.Id) vToOldReference = Map.fromList . fmap swap . Map.toList . fmap fst $ remappedReferences @@ -347,45 +362,79 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do -- Map v (Memory.DD.DataDeclaration v a) -> -- ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] - let newComponent :: [(v, Reference.Id, DD.DataDeclaration v a)] - newComponent = remappedReferences - & Map.elems - & Map.fromList - & fmap DD.asDataDecl - & Convert.hashDecls - & fromRight (error "unexpected resolution error") + let newComponent :: [(v, Reference.Id, DD.Decl v a)] + newComponent = + remappedReferences + & Map.elems + & Map.fromList + & Convert.hashDecls' + & fromRight (error "unexpected resolution error") for_ newComponent $ \(v, newReferenceId, dd) -> do - field @"referenceMapping" %= Map.insert (vToOldReference Map.! v) newReferenceId - lift . lift $ putTypeDeclaration newReferenceId (_ dd) -- Need to somehow keep decl type through this transformation? + -- do the member of the component itself + let oldReferenceId = vToOldReference Map.! v + field @"referenceMapping" %= Map.insert (TypeReference oldReferenceId) (TypeReference newReferenceId) + + -- do each constructor of the member + -- have: + -- * the old list of constructors + -- -> can get from unhashed, or wherever + -- * the new list of constructors + -- + -- want: + -- mapping from old ConstructorId to new ConstructorId + let oldConstructorIds :: Map constructorName (Old ConstructorId) + oldConstructorIds = undefined + + ifor_ (DD.constructors' (DD.asDataDecl dd)) \newConstructorId (_ann, name, _type) -> do + field @"referenceMapping" + %= Map.insert + (ConstructorReference oldReferenceId (oldConstructorIds Map.! name)) + (ConstructorReference newReferenceId newConstructorId) + + lift . lift $ putTypeDeclaration newReferenceId dd -- Need to somehow keep decl type through this transformation? pure Sync.Done - -typeReferences :: (Monad m, Ord v) => LensLike' m (Type v a) (Reference.Id) +typeReferences :: (Monad m, Ord v) => LensLike' m (Type v a) SomeReferenceId typeReferences = - ABT.rewriteDown_ -- Focus all terms - . ABT.baseFunctor_ -- Focus Type.F - . Type._Ref -- Only the Ref constructor has references - . Reference._DerivedId - -termReferences_ :: (Monad m, Ord v) => LensLike' m (Term.Term v a) (Reference.Id) + ABT.rewriteDown_ -- Focus all terms + . ABT.baseFunctor_ -- Focus Type.F + . Type._Ref -- Only the Ref constructor has references + . Reference._DerivedId + . unsafeInsidePrism (_Ctor @"TypeReference") + +-- | This is only lawful so long as your changes to 's' won't cause the prism to fail to match. +unsafeInsidePrism :: Prism' s a -> Lens' a s +unsafeInsidePrism p f a = do + fromMaybe a . preview p <$> f (review p a) + +termReferences_ :: (Monad m, Ord v) => LensLike' m (Term.Term v a) SomeReferenceId termReferences_ = - ABT.rewriteDown_ -- Focus all terms - . ABT.baseFunctor_ -- Focus Type.F - . termFReferences_ + ABT.rewriteDown_ -- Focus all terms + . ABT.baseFunctor_ -- Focus Term.F + . termFReferences_ -termFReferences_ :: (Ord tv, Monad m) => LensLike' m (Term.F tv ta pa a) (Reference.Id) +termFReferences_ :: (Ord tv, Monad m) => LensLike' m (Term.F tv ta pa a) SomeReferenceId termFReferences_ f t = - (t & Term._Ref . Reference._DerivedId %%~ f) - >>= Term._Constructor . _1 . Reference._DerivedId %%~ f - >>= Term._Request . _1 . Reference._DerivedId %%~ f - >>= Term._Ann . _2 . typeReferences %%~ f - >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f - >>= Term._TermLink . types @Reference.Id %%~ f - >>= Term._TypeLink . types @Reference.Id %%~ f - -patternReferences_ :: Traversal' (Pattern loc) Reference.Id -patternReferences_ = _ -- types @Reference.Id - + (t & Term._Ref . Reference._DerivedId . unsafeInsidePrism (_Ctor @"TermReference") %%~ f) + >>= Term._Constructor . thing . unsafeInsidePrism (_Ctor @"ConstructorReference") %%~ f + >>= Term._Request . thing . unsafeInsidePrism (_Ctor @"ConstructorReference") %%~ f + >>= Term._Ann . _2 . typeReferences %%~ f + >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f + >>= Term._TermLink . referentReferences %%~ f + >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism (_Ctor @"TypeReference") %%~ f + +-- fixme rename +thing :: Traversal' (Reference.Reference, ConstructorId) (Reference.Id, ConstructorId) +thing f s = + case s of + (Reference.Builtin _, _) -> pure s + (Reference.DerivedId n, c) -> (\(n', c') -> (Reference.DerivedId n', c')) <$> f (n, c) + +patternReferences_ :: Traversal' (Pattern loc) SomeReferenceId +patternReferences_ = undefined -- types @Reference.Id + +referentReferences :: Traversal' Referent.Referent SomeReferenceId +referentReferences = undefined -- structural type Ping x = P1 (Pong x) -- P1 : forall x. Pong x -> Ping x @@ -394,9 +443,6 @@ patternReferences_ = _ -- types @Reference.Id -- P2 : forall x. Ping x -> Pong x -- P3 : forall x. Nat -> Pong x - - - -- end up with -- decl Ping (Ref.Id #abc pos=0) -- decl Pong (Ref.Id #abc pos=1) @@ -408,20 +454,28 @@ patternReferences_ = _ -- types @Reference.Id -- { X -> structural type X x = AAA (Y x) -- , Y -> structural type Y x = BBB (X x) | CCC Nat } - - - -remapReferences :: Map (Old Reference.Id) (New Reference.Id) - -> Type.F (Type v a) - -> Type.F (Type v a) +remapReferences :: + Map (Old Reference.Id) (New Reference.Id) -> + Type.F (Type v a) -> + Type.F (Type v a) remapReferences declMap = \case - (Type.Ref (Reference.DerivedId refId)) -> Type.Ref . Reference.DerivedId $ - fromMaybe - (error $ "Expected reference to exist in decl mapping, but it wasn't found: " <> show refId) - (Map.lookup refId declMap) + (Type.Ref (Reference.DerivedId refId)) -> + Type.Ref . Reference.DerivedId $ + fromMaybe + (error $ "Expected reference to exist in decl mapping, but it wasn't found: " <> show refId) + (Map.lookup refId declMap) x -> x +type SomeReferenceId = SomeReference Reference.Id + +data SomeReference ref + = TermReference ref + | TypeReference ref + | ConstructorReference ref ConstructorId + deriving (Eq, Functor, Generic, Ord) +someReferenceIdToEntity :: SomeReferenceId -> Entity +someReferenceIdToEntity = undefined -- get references: -- @@ -462,14 +516,12 @@ remapReferences declMap = \case -- parser-typechecker depends on codebase2, but we are talking about doing things at the parser-typechecker level in this migration -- answer: no - -- unhashComponent - -- :: forall v a. Var v => Map Reference.Id (Decl v a) -> Map Reference.Id (v, Decl v a) - - -- DD.unhashComponent - +-- unhashComponent +-- :: forall v a. Var v => Map Reference.Id (Decl v a) -> Map Reference.Id (v, Decl v a) - -- [OldDecl] ==map==> [NewDecl] ==number==> [(NewDecl, Int)] ==sort==> [(NewDecl, Int)] ==> permutation is map snd of that +-- DD.unhashComponent +-- [OldDecl] ==map==> [NewDecl] ==number==> [(NewDecl, Int)] ==sort==> [(NewDecl, Int)] ==> permutation is map snd of that -- type List a = Nil | Cons (List a) @@ -499,17 +551,12 @@ remapReferences declMap = \case -- new_references :: Map v (Reference.Id {new}, DataDeclaration v a) -- new_references = Unison.Hashing.V2.Convert.hashDecls $ Map.toList $ Foldable.toList rewritten_dependencies +-- let DeclFormat locallyIndexedComponent = case runGetS S.getDeclFormat declFormatBytes of +-- Left err -> error "something went wrong" +-- Right declFormat -> declFormat - - - - -- let DeclFormat locallyIndexedComponent = case runGetS S.getDeclFormat declFormatBytes of - -- Left err -> error "something went wrong" - -- Right declFormat -> declFormat - - -- Operations.hs converts from S level to C level - -- SqliteCodebase.hs converts from C level to - +-- Operations.hs converts from S level to C level +-- SqliteCodebase.hs converts from C level to -- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure migrateSchema12 :: Applicative m => Connection -> m Bool diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index ec62c37321..b727f5b417 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -111,10 +111,10 @@ _Ref = _Ctor @"Ref" _Match :: Prism' (F tv ta pa a) (a, [MatchCase pa a]) _Match = _Ctor @"Match" -_Constructor :: Prism' (F tv ta pa a) (Reference, Int) +_Constructor :: Prism' (F tv ta pa a) (Reference, ConstructorId) _Constructor = _Ctor @"Constructor" -_Request :: Prism' (F tv ta pa a) (Reference, Int) +_Request :: Prism' (F tv ta pa a) (Reference, ConstructorId) _Request = _Ctor @"Request" _Ann :: Prism' (F tv ta pa a) (a, ABT.Term Type.F tv ta) From f30bbafa0cf71a1ec64ead0f362c7ba5c05468c2 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 25 Oct 2021 17:01:57 -0400 Subject: [PATCH 037/297] begin working towards migrating branches --- .../U/Codebase/Sqlite/Operations.hs | 4 + .../SqliteCodebase/MigrateSchema12.hs | 196 ++++++++---------- 2 files changed, 95 insertions(+), 105 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 951240f7bc..941dc72a1e 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -1057,6 +1057,10 @@ loadBranchByCausalHashId id = do (liftQ . Q.loadBranchObjectIdByCausalHashId) id >>= traverse loadBranchByObjectId +-- FIXME this doesn't belong in this module +loadDbBranchByObjectId :: EDB m => Db.BranchObjectId -> m (S.DbBranch m) +loadDbBranchByObjectId = undefined + loadBranchByObjectId :: EDB m => Db.BranchObjectId -> m (C.Branch.Branch m) loadBranchByObjectId id = do deserializeBranchObject id >>= \case diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 005d0cd5bb..fe9da0e382 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -9,7 +9,7 @@ module Unison.Codebase.SqliteCodebase.MigrateSchema12 where import Control.Lens import Control.Monad.Except (runExceptT) -import Control.Monad.Reader (MonadReader) +import Control.Monad.Reader (MonadReader, ReaderT, ask) import Control.Monad.State.Strict import Control.Monad.Trans.Except (throwE) import Control.Monad.Trans.Writer.CPS (Writer, execWriter, execWriterT, runWriterT, tell) @@ -17,6 +17,7 @@ import Data.Generics.Product import Data.Generics.Sum import Data.List.Extra (nubOrd) import qualified Data.Map as Map +import qualified U.Codebase.Sqlite.Operations as Ops import Data.Tuple (swap) import qualified Data.Zip as Zip import U.Codebase.Sqlite.Connection (Connection) @@ -68,6 +69,10 @@ type Old a = a type New a = a +type ConstructorName v = v + +type ComponentName v = v + data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. { -- declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), @@ -118,24 +123,19 @@ data Y = MkY Int data Entity = TComponent Unison.Hash | DComponent Unison.Hash + | B ObjectId + -- haven't proven we need these yet | Patch ObjectId - | NS ObjectId | C CausalHashId | W WK.WatchKind S.Reference.IdH -- Hash Reference.Id deriving (Eq, Ord, Show) --- data Entity --- = O ObjectId -- Hash --- | C CausalHashId --- | W WK.WatchKind S.Reference.IdH -- Hash Reference.Id --- deriving (Eq, Ord, Show) - -data Env = Env {db :: Connection} +data Env m v a = Env {db :: Connection, codebase :: Codebase m v a} -- -> m (TrySyncResult h) migrationSync :: - (MonadIO m, MonadState MigrationState m, MonadReader Env m) => - Sync m Entity + (MonadIO m, Var v) => + Sync (ReaderT (Env m v a) (StateT MigrationState m)) Entity migrationSync = Sync \case -- To sync an object, -- * If we have already synced it, we are done. @@ -175,50 +175,64 @@ migrationSync = Sync \case --- * If we haven't yet synced its parents, push them onto the work queue --- * If we haven't yet synced the causal's value (namespace), push it onto the work queue. --- * Rehash the Causal's valueHash AND CausalHash, and add the new causal, its hashes, and hash objects to the codebase under a fresh object ID - TComponent _hash -> undefined - DComponent _hash -> undefined - -- O objId -> do - -- let alreadySynced :: m Bool - -- alreadySynced = undefined - -- alreadySynced >>= \case - -- False -> do - -- (hId, objType, bytes) <- runSrc $ Q.loadObjectWithHashIdAndTypeById oId - -- migrateObject objType hId bytes - -- True -> pure Sync.PreviouslyDone - -- result <- runValidateT @(Set Entity) @m @ObjectId case objType of - -- To sync a causal, - -- 1. ??? - -- 2. Synced + TComponent hash -> do + Env{codebase} <- ask + lift (migrateTermComponent codebase hash) + DComponent hash -> do + Env{codebase} <- ask + lift (migrateDeclComponent codebase hash) + B objectId -> do + Env{db} <- ask + migrateBranch db objectId C _causalHashID -> undefined -- To sync a watch result, -- 1. ??? -- 2. Synced W _watchKind _idH -> undefined Patch {} -> undefined - NS {} -> undefined - --- data ObjectType --- = TermComponent -- 0 --- | DeclComponent -- 1 --- | Namespace -- 2 --- | Patch -- 3 - -migrateObject :: (Var v, Monad m) => Codebase m v a -> ObjectType -> Hash -> ByteString -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateObject codebase objType hash bytes = case objType of - OT.TermComponent -> migrateTermComponent codebase hash - OT.DeclComponent -> migrateDeclComponent codebase hash - OT.Namespace -> migrateNamespace hash bytes - OT.Patch -> migratePatch hash bytes migratePatch :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) migratePatch = error "not implemented" -migrateNamespace :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) -migrateNamespace = error "not implemented" +migrateBranch :: Connection -> ObjectId -> m (Sync.TrySyncResult Entity) +migrateBranch conn objectId = do + -- note for tomorrow: we want to just load the (Branch m) instead, forget the DbBranch + dbBranch <- Ops.loadDbBranchByObjectId objectId + + let allMissingTypes = undefined + let allMissingTerms = undefined + let allMissingPatches = undefined + let allMissingChildren = undefined + let allMissingPredecessors = undefined + + -- Identify dependencies and bail out if they aren't all built + let allMissingReferences :: [Entity] + allMissingReferences = + allMissingTypes ++ + allMissingTerms ++ + allMissingPatches ++ + allMissingChildren ++ + allMissingPredecessors + + when (not . null $ allMissingReferences) $ + throwE $ Sync.Missing allMissingReferences + + -- Migrate branch + + error "not implemented" + + +-- type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) + +-- data Branch' t h p c = Branch +-- { terms :: Map t (Map (Referent'' t h) (MetadataSetFormat' t h)), +-- types :: Map t (Map (Reference' t h) (MetadataSetFormat' t h)), +-- patches :: Map t p, +-- children :: Map t c +-- } migrateTermComponent :: forall m v a. (Ord v, Var v, Monad m) => Codebase m v a -> Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do - -- getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]), component <- (lift . lift $ getTermComponentWithTypes hash) >>= \case Nothing -> error $ "Hash was missing from codebase: " <> show hash @@ -234,7 +248,12 @@ migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do & Map.toList & fmap (\(refId, (v, _trm)) -> (v, refId)) & Map.fromList + referencesMap <- gets referenceMapping + let getMigratedReference :: Old SomeReferenceId -> New SomeReferenceId + getMigratedReference ref = + Map.findWithDefault (error "unmigrated reference") ref referencesMap + let allMissingReferences :: [Old SomeReferenceId] allMissingReferences = unhashed @@ -244,38 +263,35 @@ migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do . termReferences_ . filtered (\r -> Map.notMember r referencesMap) ) - -- foldMap (ABT.find findReferenceIds) (snd <$> Map.elems) + when (not . null $ allMissingReferences) $ throwE $ Sync.Missing . nubOrd $ (someReferenceIdToEntity <$> allMissingReferences) let remappedReferences :: Map (Old Reference.Id) (v, Term.Term v a, Type v a) = - Zip.zipWith (\(v, trm) (_, typ) -> (v, trm, typ)) unhashed componentIDMap - & undefined -- do the remapping - let newTermComponents :: Map v (New Reference.Id, Term.Term v a) + Zip.zipWith + ( \(v, trm) (_, typ) -> + ( v, + trm & termReferences_ %~ getMigratedReference, + typ & typeReferences_ %~ getMigratedReference + ) + ) + unhashed + componentIDMap + + let newTermComponents :: Map v (New Reference.Id, Term.Term v a, Type v a) newTermComponents = remappedReferences & Map.elems - & fmap (\(v, trm, _typ) -> (v, trm)) + & fmap (\(v, trm, typ) -> (v, (trm, typ))) & Map.fromList - & Convert.hashTermComponents + & Convert.hashTermComponents' - ifor newTermComponents $ \v (newReferenceId, trm) -> do + ifor newTermComponents $ \v (newReferenceId, trm, typ) -> do let oldReferenceId = vToOldReferenceMapping Map.! v - let (_, _, typ) = remappedReferences Map.! oldReferenceId field @"referenceMapping" %= Map.insert (TermReference oldReferenceId) (TermReference newReferenceId) lift . lift $ putTerm newReferenceId trm typ - -- what's this for? - -- let newTypeComponents :: Map v (Reference.Id, Type v a) - -- newTypeComponents = (Map.fromList $ Map.elems remappedReferences) - - -- on hold: incorporating term's type in term's hash - - -- hashTypeComponents :: Var v => Map v (Memory.Type.Type v a) -> Map v (Memory.Reference.Id, Memory.Type.Type v a) - -- hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a) - -- - -- hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> (Hash, [Memory.Term.Term v a]) - undefined + pure Sync.Done migrateDeclComponent :: forall m v a. @@ -289,18 +305,11 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do Nothing -> error "handle this" -- not non-fatal! Just dc -> pure dc - -- type Decl = Either EffectDeclaration DataDeclaration let componentIDMap :: Map (Old Reference.Id) (DD.Decl v a) componentIDMap = Map.fromList $ Reference.componentFor hash declComponent let unhashed :: Map (Old Reference.Id) (v, DD.Decl v a) unhashed = DD.unhashComponent componentIDMap - -- data DataDeclaration v a = DataDeclaration { - -- modifier :: Modifier, - -- annotation :: a, - -- bound :: [v], - -- constructors' :: [(a, v, Type v a)] - -- } deriving (Eq, Show, Functor) let allTypes :: [Type v a] allTypes = @@ -318,31 +327,16 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do allTypes & foldSetter ( traversed -- Every type in the list - . typeReferences + . typeReferences_ . filtered (\r -> Map.notMember r migratedReferences) ) - -- foldMap (ABT.find (findMissingTypeReferenceIds migratedReferences)) allTypes - - -- ability Foo where - -- bar :: {Foo} Int - - -- what's the data decl that might collide with Foo? - - -- mitchell thinks: - -- - -- type Foo where - -- Bar :: Int -> Foo - -- - -- but that doesn't collide - when (not . null $ unmigratedRefIds) do throwE (Sync.Missing (nubOrd . fmap someReferenceIdToEntity $ unmigratedRefIds)) -- At this point we know we have all the required mappings from old references to new ones. let remapTerm :: Type v a -> Type v a - remapTerm = typeReferences %~ \ref -> Map.findWithDefault (error "unmigrated reference") ref migratedReferences - -- runIdentity $ ABT.visit' (remapReferences declMap) typ + remapTerm = typeReferences_ %~ \ref -> Map.findWithDefault (error "unmigrated reference") ref migratedReferences let remappedReferences :: Map (Old Reference.Id) (v, DD.Decl v a) remappedReferences = @@ -357,11 +351,6 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do let vToOldReference :: Map v (Old Reference.Id) vToOldReference = Map.fromList . fmap swap . Map.toList . fmap fst $ remappedReferences - -- hashDecls :: - -- Var v => - -- Map v (Memory.DD.DataDeclaration v a) -> - -- ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] - let newComponent :: [(v, Reference.Id, DD.Decl v a)] newComponent = remappedReferences @@ -370,20 +359,16 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do & Convert.hashDecls' & fromRight (error "unexpected resolution error") for_ newComponent $ \(v, newReferenceId, dd) -> do - -- do the member of the component itself let oldReferenceId = vToOldReference Map.! v field @"referenceMapping" %= Map.insert (TypeReference oldReferenceId) (TypeReference newReferenceId) - -- do each constructor of the member - -- have: - -- * the old list of constructors - -- -> can get from unhashed, or wherever - -- * the new list of constructors - -- - -- want: - -- mapping from old ConstructorId to new ConstructorId - let oldConstructorIds :: Map constructorName (Old ConstructorId) - oldConstructorIds = undefined + let oldConstructorIds :: Map (ConstructorName v) (Old ConstructorId) + oldConstructorIds = + (componentIDMap Map.! oldReferenceId) + & DD.asDataDecl + & DD.constructors' + & imap (\constructorId (_ann, name, _type) -> (name, constructorId)) + & Map.fromList ifor_ (DD.constructors' (DD.asDataDecl dd)) \newConstructorId (_ann, name, _type) -> do field @"referenceMapping" @@ -391,11 +376,11 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do (ConstructorReference oldReferenceId (oldConstructorIds Map.! name)) (ConstructorReference newReferenceId newConstructorId) - lift . lift $ putTypeDeclaration newReferenceId dd -- Need to somehow keep decl type through this transformation? + lift . lift $ putTypeDeclaration newReferenceId dd pure Sync.Done -typeReferences :: (Monad m, Ord v) => LensLike' m (Type v a) SomeReferenceId -typeReferences = +typeReferences_ :: (Monad m, Ord v) => LensLike' m (Type v a) SomeReferenceId +typeReferences_ = ABT.rewriteDown_ -- Focus all terms . ABT.baseFunctor_ -- Focus Type.F . Type._Ref -- Only the Ref constructor has references @@ -418,7 +403,7 @@ termFReferences_ f t = (t & Term._Ref . Reference._DerivedId . unsafeInsidePrism (_Ctor @"TermReference") %%~ f) >>= Term._Constructor . thing . unsafeInsidePrism (_Ctor @"ConstructorReference") %%~ f >>= Term._Request . thing . unsafeInsidePrism (_Ctor @"ConstructorReference") %%~ f - >>= Term._Ann . _2 . typeReferences %%~ f + >>= Term._Ann . _2 . typeReferences_ %%~ f >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f >>= Term._TermLink . referentReferences %%~ f >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism (_Ctor @"TypeReference") %%~ f @@ -467,6 +452,7 @@ remapReferences declMap = \case x -> x type SomeReferenceId = SomeReference Reference.Id +type SomeReferenceObjId = SomeReference ObjectId data SomeReference ref = TermReference ref From 502b89f6d5d80a386e36ac830206eabf4fb4cd3c Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 26 Oct 2021 23:31:57 -0400 Subject: [PATCH 038/297] work on 10/26 --- .../U/Codebase/Sqlite/Branch/Full.hs | 2 + .../U/Codebase/Sqlite/Operations.hs | 47 +++++++++++-- codebase2/codebase/U/Codebase/Branch.hs | 2 +- .../src/Unison/Codebase/SqliteCodebase.hs | 2 +- .../SqliteCodebase/MigrateSchema12.hs | 67 ++++++++++++++++--- .../src/Unison/Hashing/V2/Convert.hs | 14 ++++ .../src/Unison/Runtime/Pattern.hs | 2 +- .../Unison/Test/Codebase/MigrateSchema12.hs | 4 +- unison-core/src/Unison/Term.hs | 2 +- 9 files changed, 123 insertions(+), 19 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 1cbe01e6f0..89c9ebd592 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -27,6 +27,8 @@ data Branch' t h p c = Branch } deriving Show + + type LocalMetadataSet = MetadataSetFormat' LocalTextId LocalDefnId type DbMetadataSet = MetadataSetFormat' TextId ObjectId diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 941dc72a1e..a475933c9f 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -913,6 +913,48 @@ saveRootBranch c = do Q.setNamespaceRoot chId pure (boId, chId) +-- saveBranch is kind of a "deep save causal" + +-- we want a "shallow save causal" that could take a +-- forall m e. Causal m CausalHash BranchHash e +-- +-- data Identity a = Identity +-- e == () +-- +-- data C.Branch m = Branch +-- { terms :: Map NameSegment (Map Referent (m MdValues)), +-- types :: Map NameSegment (Map Reference (m MdValues)), +-- patches :: Map NameSegment (PatchHash, m Patch), +-- children :: Map NameSegment (Causal m) +-- } +-- +-- U.Codebase.Sqlite.Branch.Full.Branch' +-- type ShallowBranch = Branch' NameSegment Hash PatchHash CausalHash +-- data ShallowBranch causalHash patchHash = ShallowBranch +-- { terms :: Map NameSegment (Map Referent MdValues), +-- types :: Map NameSegment (Map Reference MdValues), +-- patches :: Map NameSegment patchHash, +-- children :: Map NameSegment causalHash +-- } +-- +-- data Causal m hc he e = Causal +-- { causalHash :: hc, +-- valueHash :: he, +-- parents :: Map hc (m (Causal m hc he e)), +-- value :: m e +-- } +-- data ShallowCausal causalHash branchHash = ShallowCausal +-- { causalHash :: causalHash, +-- valueHash :: branchHash, +-- parents :: Set causalHash, +-- } +-- +-- References, but also values +-- Shallow - Hash? representation of the database relationships + +saveBranch' :: EDB m => C.Branch.Branch m -> m Db.BranchObjectId +saveBranch' = undefined + saveBranch :: EDB m => C.Branch.Causal m -> m (Db.BranchObjectId, Db.CausalHashId) saveBranch (C.Causal hc he parents me) = do when debug $ traceM $ "\nOperations.saveBranch \n hc = " ++ show hc ++ ",\n he = " ++ show he ++ ",\n parents = " ++ show (Map.keys parents) @@ -1051,16 +1093,11 @@ loadCausalByCausalHashId id = do pure (h, loadCausalByCausalHashId hId) pure $ C.Causal hc hb (Map.fromList loadParents) loadNamespace --- | is this even a thing? loading a branch by causal hash? yes I guess so. loadBranchByCausalHashId :: EDB m => Db.CausalHashId -> m (Maybe (C.Branch.Branch m)) loadBranchByCausalHashId id = do (liftQ . Q.loadBranchObjectIdByCausalHashId) id >>= traverse loadBranchByObjectId --- FIXME this doesn't belong in this module -loadDbBranchByObjectId :: EDB m => Db.BranchObjectId -> m (S.DbBranch m) -loadDbBranchByObjectId = undefined - loadBranchByObjectId :: EDB m => Db.BranchObjectId -> m (C.Branch.Branch m) loadBranchByObjectId id = do deserializeBranchObject id >>= \case diff --git a/codebase2/codebase/U/Codebase/Branch.hs b/codebase2/codebase/U/Codebase/Branch.hs index dcbf73a040..576e18b735 100644 --- a/codebase2/codebase/U/Codebase/Branch.hs +++ b/codebase2/codebase/U/Codebase/Branch.hs @@ -16,7 +16,7 @@ newtype NameSegment = NameSegment Text deriving (Eq, Ord, Show) type MetadataType = Reference type MetadataValue = Reference data MdValues = MdValues (Map MetadataValue MetadataType) deriving (Eq, Ord, Show) - + type Causal m = C.Causal m CausalHash BranchHash (Branch m) -- | V2.Branch is like V1.Branch0; I would rename it, at least temporarily, but too hard. diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index feb466c969..60c4e65587 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -821,7 +821,7 @@ sqliteCodebase debugName root = do ) runReaderT Q.schemaVersion conn >>= \case SchemaVersion 2 -> startCodebase - SchemaVersion 1 -> _migrate12 conn >> startCodebase + SchemaVersion 1 -> undefined -- migrate12 conn >> startCodebase v -> shutdownConnection conn $> Left v -- well one or the other. :zany_face: the thinking being that they wouldn't hash-collide diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index fe9da0e382..f7059e2f5a 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -8,8 +8,8 @@ module Unison.Codebase.SqliteCodebase.MigrateSchema12 where import Control.Lens -import Control.Monad.Except (runExceptT) -import Control.Monad.Reader (MonadReader, ReaderT, ask) +import Control.Monad.Except (runExceptT, ExceptT) +import Control.Monad.Reader (MonadReader, ReaderT (runReaderT), ask) import Control.Monad.State.Strict import Control.Monad.Trans.Except (throwE) import Control.Monad.Trans.Writer.CPS (Writer, execWriter, execWriterT, runWriterT, tell) @@ -45,6 +45,8 @@ import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) +import U.Codebase.HashTags (CausalHash) +import qualified U.Codebase.Causal as C -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -77,6 +79,7 @@ data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. { -- declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), + causalMapping :: Map (Old CausalHash) (New CausalHash), -- Mapping between contructor indexes for the type identified by (ObjectId, Pos) ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), @@ -123,10 +126,10 @@ data Y = MkY Int data Entity = TComponent Unison.Hash | DComponent Unison.Hash - | B ObjectId + | C CausalHash -- haven't proven we need these yet + | B ObjectId | Patch ObjectId - | C CausalHashId | W WK.WatchKind S.Reference.IdH -- Hash Reference.Id deriving (Eq, Ord, Show) @@ -183,8 +186,10 @@ migrationSync = Sync \case lift (migrateDeclComponent codebase hash) B objectId -> do Env{db} <- ask - migrateBranch db objectId - C _causalHashID -> undefined + lift (migrateBranch db objectId) + C causalHash -> do + Env{db} <- ask + lift (migrateCausal db causalHash) -- To sync a watch result, -- 1. ??? -- 2. Synced @@ -194,10 +199,54 @@ migrationSync = Sync \case migratePatch :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) migratePatch = error "not implemented" -migrateBranch :: Connection -> ObjectId -> m (Sync.TrySyncResult Entity) -migrateBranch conn objectId = do +runDB :: MonadIO m => Connection -> ReaderT Connection (ExceptT Ops.Error m) a -> m a +runDB conn = (runExceptT >=> err) . flip runReaderT conn + where + err = \case Left err -> error $ show err; Right a -> pure a + +-- loadCausalBranchByCausalHash :: EDB m => CausalHash -> m (Maybe (C.Branch.Causal m)) +migrateCausal :: MonadIO m => Connection -> CausalHash -> StateT MigrationState m (Sync.TrySyncResult Entity) +migrateCausal conn causalHash = runDB conn $ do + C.Causal{..} <- Ops.loadCausalBranchByCausalHash causalHash >>= \case + Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash + Just c -> pure c + migratedCausals <- gets causalMapping + -- Plan: + -- * Load a C.Causal + -- * Ensure its parent causals and branch (value hash) have been migrated + -- * Rewrite the value-hash and parent causal hashes + -- * Save the new causal (Do we change the self-hash?) + -- + -- let unMigratedParents = + + + + + + undefined + +-- data Causal m hc he e = Causal +-- { causalHash :: hc, +-- valueHash :: he, +-- parents :: Map hc (m (Causal m hc he e)), +-- value :: m e +-- } + + -- data C.Branch m = Branch + -- { terms :: Map NameSegment (Map Referent (m MdValues)), + -- types :: Map NameSegment (Map Reference (m MdValues)), + -- patches :: Map NameSegment (PatchHash, m Patch), + -- children :: Map NameSegment (Causal m) + -- } + +migrateBranch :: Monad m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) +migrateBranch conn objectID = fmap (either id id) . runExceptT $ do -- note for tomorrow: we want to just load the (Branch m) instead, forget the DbBranch - dbBranch <- Ops.loadDbBranchByObjectId objectId + -- dbBranch <- Ops.loadDbBranchByObjectId objectId + + -- Plan: + -- * Load a C.Branch by converting Branch Hash into a branch object ID + -- * let allMissingTypes = undefined let allMissingTerms = undefined diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 8050895e2c..7c2c1e9447 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -5,9 +5,11 @@ module Unison.Hashing.V2.Convert ( ResolutionResult, tokensBranch0, hashDecls, + hashDecls', hashPatch, hashClosedTerm, hashTermComponents, + hashTermComponents', hashTypeComponents, typeToReference, typeToReferenceMentions, @@ -72,6 +74,11 @@ hashTermComponents = fmap h2mTermResult . Hashing.Term.hashComponents . fmap m2h h2mTermResult :: Ord v => (Hashing.Reference.Id, Hashing.Term.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) h2mTermResult (id, tm) = (h2mReferenceId id, h2mTerm tm) +-- TODO: remove non-prime version +-- include type in hash +hashTermComponents' :: Var v => Map v (Memory.Term.Term v a, Memory.Type.Type v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a) +hashTermComponents' = undefined + hashClosedTerm :: Var v => Memory.Term.Term v a -> Memory.Reference.Id hashClosedTerm = h2mReferenceId . Hashing.Term.hashClosedTerm . m2hTerm @@ -201,6 +208,13 @@ hashDecls memDecls = do h2mDeclResult :: Ord v => (v, Hashing.Reference.Id, Hashing.DD.DataDeclaration v a) -> (v, Memory.Reference.Id, Memory.DD.DataDeclaration v a) h2mDeclResult (v, id, dd) = (v, h2mReferenceId id, h2mDecl dd) +-- TODO: rename hashDecls to hashDataDecls, remove tick from this +hashDecls' :: + Var v => + Map v (Memory.DD.Decl v a) -> + ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.Decl v a)] +hashDecls' memDecls = undefined + m2hDecl :: Ord v => Memory.DD.DataDeclaration v a -> Hashing.DD.DataDeclaration v a m2hDecl (Memory.DD.DataDeclaration mod ann bound ctors) = Hashing.DD.DataDeclaration (m2hModifier mod) ann bound $ fmap (Lens.over _3 m2hType) ctors diff --git a/parser-typechecker/src/Unison/Runtime/Pattern.hs b/parser-typechecker/src/Unison/Runtime/Pattern.hs index f3ac9bd960..f5939eee28 100644 --- a/parser-typechecker/src/Unison/Runtime/Pattern.hs +++ b/parser-typechecker/src/Unison/Runtime/Pattern.hs @@ -31,7 +31,7 @@ import qualified Unison.Pattern as P import Unison.Reference (Reference(..)) import Unison.Runtime.ANF (internalBug) import Unison.Symbol (Symbol) -import Unison.Term hiding (Term) +import Unison.Term hiding (Term, matchPattern) import qualified Unison.Term as Tm import Unison.Var (Var, typed, freshIn, freshenId, Type(Pattern)) diff --git a/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs b/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs index 30fdbdd94e..1266cdc219 100644 --- a/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs +++ b/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs @@ -1,5 +1,6 @@ -module Unison.Test.Codebase.Migration12 where +module Unison.Test.Codebase.MigrateSchema12 where +{- testType :: Type v a testType = _ @@ -10,3 +11,4 @@ test = $ [ scope "threeWayMerge.ex1" . expect $ Causal.head testThreeWay == Set.fromList [3, 4] ] +-} \ No newline at end of file diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index b727f5b417..5af7c9d073 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -51,7 +51,7 @@ import Control.Lens (Prism', Lens', lens) data MatchCase loc a = MatchCase { matchPattern :: Pattern loc, - matchGuard :: (Maybe a), + matchGuard :: Maybe a, matchBody :: a } deriving (Show, Eq, Foldable, Functor, Generic, Generic1, Traversable) From d761a5eb2c7486a6a725f58ca02890756685e1b5 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 28 Oct 2021 13:57:27 -0600 Subject: [PATCH 039/297] Pairing checkpoint --- .../U/Codebase/Sqlite/Branch/Full.hs | 63 ++++++++++++++++++- .../U/Codebase/Sqlite/Causal.hs | 25 ++++++++ codebase2/codebase-sqlite/package.yaml | 18 ++++++ .../unison-codebase-sqlite.cabal | 20 +++++- parser-typechecker/package.yaml | 1 + .../SqliteCodebase/MigrateSchema12.hs | 29 +++++++++ .../src/Unison/Hashing/V2/Causal.hs | 17 +++-- .../src/Unison/Hashing/V2/Convert.hs | 19 +++--- .../unison-parser-typechecker.cabal | 4 ++ 9 files changed, 179 insertions(+), 17 deletions(-) create mode 100644 codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 89c9ebd592..c188ae538c 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -1,10 +1,12 @@ {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE RecordWildCards #-} module U.Codebase.Sqlite.Branch.Full where -import Data.Map (Map) -import Data.Set (Set) import U.Codebase.Reference (Reference') import U.Codebase.Referent (Referent') import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObjectId, TextId) @@ -12,6 +14,8 @@ import U.Codebase.Sqlite.LocalIds (LocalBranchChildId, LocalDefnId, LocalPatchOb import qualified U.Util.Map as Map import Data.Bifunctor (Bifunctor(bimap)) import qualified Data.Set as Set +import Control.Lens (Traversal) +import Unison.Prelude type LocalBranch = Branch' LocalTextId LocalDefnId LocalPatchObjectId LocalBranchChildId @@ -25,8 +29,61 @@ data Branch' t h p c = Branch patches :: Map t p, children :: Map t c } - deriving Show + deriving (Show, Generic) + +branchHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' +branchHashes_ _f _ = undefined + -- Branch <$> traverse (\m -> Map.mapKeys) + +branchCausalHashes_ :: Traversal (Branch' t h p c) (Branch' t h p c') c c' +branchCausalHashes_ f Branch{..} = + Branch terms types patches <$> traverse f children + + +-- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch +-- convertBranch dbBranch = _ + +-- DbBranch -- migrate --> DbBranch -- hydrate for the hash --> Hashing.V2.Branch -- put (Hash, toBranchFormat(DbBranch)) --> COOL + +-- function that reads a DbBranch out of codebase + +-- Traversal' DbBranch SomeReferenceObjectId +-- DB m => LensLike' m SomeReferenceObjectId SomeReferenceId +-- MonadState MigrationState m => SomeReferenceId -> m SomeReferenceId + +-- Traversal' DbBranch (BranchId, CausalHashId) +-- MonadState MigrationState m => (BranchId, CausalHashId) -> m (BranchId, CausalHashId) + +-- Traversal' DbBranch PatchId +-- MonadState MigrationState m => PatchObjectId -> m PatchObjectId + +-- totalThing :: (MonadState MigrationState DB m => LensLike' m DbBranch SomeReferenceId +-- totalThing = intoHashes . overSomeRefs + +-- Store (Old ObjectId) -> (New ObjectId) sky map +-- Store (New ObjectId) -> (New Hash) map/cache + +-- function which lifts a function over SomeReference's to run over a DBBranch by inflating Hashes +-- function which remaps references in a Branch + +-- function that takes DbBranch to (LocalIds, LocalBranch) + +-- function that takes a DbBranch to a Hashing.V2.Branch + +-- function that writes (Hash, LocalBranch) to codebase + +-- database has a root CausalHashId +-- from CausalHashId, we can look up ValueHashId and +-- +-- old object id --db--> old hash --mem--> new hash --db--> new object id +-- +-- Branch +-- { terms :: Map TextId (Map (Referent'' TextId ObjectId) (MetadataSetFormat' TextId ObjectId)), +-- types :: Map TextId (Map (Reference' TextId ObjectId) (MetadataSetFormat' TextId ObjectId)), +-- patches :: Map TextId PatchObjectId, +-- children :: Map TextId (BranchObjectId, CausalHashId) +-- } type LocalMetadataSet = MetadataSetFormat' LocalTextId LocalDefnId diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs new file mode 100644 index 0000000000..08421a3bb1 --- /dev/null +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs @@ -0,0 +1,25 @@ +{-# LANGUAGE RecordWildCards #-} +module U.Codebase.Sqlite.Causal where + +import Unison.Prelude + +data GDbCausal causalHash valueHash = + DbCausal { selfHash :: causalHash, valueHash :: valueHash, parents :: Set causalHash } + +-- type DbCausal = GDbCausal CausalHashId BranchHashId + +-- causalHashes_ :: Traversal (GDbCausal ch vh) (GDbCausal ch' vh) ch ch' +-- causalHashes_ f DbCausal {..} = +-- DbCausal <$> f selfHash <*> pure valueHash <*> (fmap Set.fromList . traverse f . Set.toList $ parents) + +-- valueHashes_ :: Lens (GDbCausal ch vh) (GDbCausal ch vh) vh vh' +-- valueHashes_ f DbCausal{..} = +-- (\p vh -> DbCausal selfHash vh p) parents <$> f valueHash + + +-- data Causal m hc he e = Causal +-- { causalHash :: hc, +-- valueHash :: he, +-- parents :: Map hc (m (Causal m hc he e)), +-- value :: m e +-- } diff --git a/codebase2/codebase-sqlite/package.yaml b/codebase2/codebase-sqlite/package.yaml index c423bc55a0..253dfedb38 100644 --- a/codebase2/codebase-sqlite/package.yaml +++ b/codebase2/codebase-sqlite/package.yaml @@ -1,6 +1,22 @@ name: unison-codebase-sqlite github: unisonweb/unison +default-extensions: + - ApplicativeDo + - BlockArguments + - DeriveFunctor + - DerivingStrategies + - DoAndIfThenElse + - FlexibleContexts + - FlexibleInstances + - LambdaCase + - MultiParamTypeClasses + - NamedFieldPuns + - PatternSynonyms + - ScopedTypeVariables + - TupleSections + - TypeApplications + library: source-dirs: . @@ -15,6 +31,7 @@ dependencies: - extra - here - lens + - generic-lens - monad-validate - mtl - safe @@ -26,6 +43,7 @@ dependencies: - unison-codebase - unison-codebase-sync - unison-core + - unison-prelude - unison-util - unison-util-serialization - unison-util-term diff --git a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal index 8043965ae4..f06323daa0 100644 --- a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal +++ b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -- --- hash: 3106bd32bedf162883882818669a81a3e1ca7c60af26ec9cd945fadb39f0d5aa +-- hash: 4227133c6f1df043f279939d16c10379d28386b6b9604af6c8e17bd0a87748ce name: unison-codebase-sqlite version: 0.0.0 @@ -23,6 +23,7 @@ library U.Codebase.Sqlite.Branch.Diff U.Codebase.Sqlite.Branch.Format U.Codebase.Sqlite.Branch.Full + U.Codebase.Sqlite.Causal U.Codebase.Sqlite.Connection U.Codebase.Sqlite.DbId U.Codebase.Sqlite.Decl.Format @@ -46,12 +47,28 @@ library Paths_unison_codebase_sqlite hs-source-dirs: ./ + default-extensions: + ApplicativeDo + BlockArguments + DeriveFunctor + DerivingStrategies + DoAndIfThenElse + FlexibleContexts + FlexibleInstances + LambdaCase + MultiParamTypeClasses + NamedFieldPuns + PatternSynonyms + ScopedTypeVariables + TupleSections + TypeApplications build-depends: base , bytes , bytestring , containers , extra + , generic-lens , here , lens , monad-validate @@ -63,6 +80,7 @@ library , unison-codebase , unison-codebase-sync , unison-core + , unison-prelude , unison-util , unison-util-serialization , unison-util-term diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index 1c08275147..e04d0c7561 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -6,6 +6,7 @@ default-extensions: - ApplicativeDo - BlockArguments - DeriveFunctor + - DeriveGeneric - DerivingStrategies - DoAndIfThenElse - FlexibleContexts diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index f7059e2f5a..d277bc2b36 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -47,6 +47,7 @@ import qualified Unison.Type as Type import Unison.Var (Var) import U.Codebase.HashTags (CausalHash) import qualified U.Codebase.Causal as C +import qualified U.Codebase.Sqlite.Branch.Full as S -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -239,6 +240,13 @@ migrateCausal conn causalHash = runDB conn $ do -- children :: Map NameSegment (Causal m) -- } +-- data Branch' t h p c = Branch +-- { terms :: Map t (Map (Referent'' t h) (MetadataSetFormat' t h)), +-- types :: Map t (Map (Reference' t h) (MetadataSetFormat' t h)), +-- patches :: Map t p, +-- children :: Map t c +-- } + migrateBranch :: Monad m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateBranch conn objectID = fmap (either id id) . runExceptT $ do -- note for tomorrow: we want to just load the (Branch m) instead, forget the DbBranch @@ -268,8 +276,29 @@ migrateBranch conn objectID = fmap (either id id) . runExceptT $ do -- Migrate branch + error "not implemented" +dbBranchObjRefs_ :: Traversal' S.DbBranch (SomeReference ObjectId) +dbBranchObjRefs_ = _ + + -- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch +-- convertBranch dbBranch = _ + +-- DbBranch -- migrate --> DbBranch -- hydrate for the hash --> Hashing.V2.Branch -- put (Hash, toBranchFormat(DbBranch)) --> COOL + +-- function that reads a DbBranch out of codebase + +-- Traversal' DbBranch SomeReferenceObjectId +-- DB m => LensLike' m SomeReferenceObjectId SomeReferenceId +-- MonadState MigrationState m => SomeReferenceId -> m SomeReferenceId + +-- Traversal' DbBranch (BranchId, CausalHashId) +-- MonadState MigrationState m => (BranchId, CausalHashId) -> m (BranchId, CausalHashId) + +-- Traversal' DbBranch PatchId +-- MonadState MigrationState m => PatchObjectId -> m PatchObjectId + -- type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index a1c11bf41d..9d7a5101f6 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -2,14 +2,23 @@ {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} -module Unison.Hashing.V2.Causal (Causal (..)) where +module Unison.Hashing.V2.Causal (Causal (..), + hashCausal) where import Data.Set (Set) import Unison.Hash (Hash) import Unison.Hashable (Hashable) import qualified Unison.Hashable as H +import qualified Data.Set as Set -data Causal e = Causal {current :: e, parents :: Set Hash} +hashCausal :: H.Accumulate h => Causal -> [(H.Token h)] +hashCausal c = H.tokens $ + [ selfHash c + , branchHash c + ] ++ (Set.toList $ parents c) -instance Hashable e => Hashable (Causal e) where - tokens c = H.tokens (current c, parents c) +data Causal = + Causal { selfHash :: Hash, branchHash :: Hash, parents :: Set Hash } + +instance Hashable Causal where + tokens c = hashCausal c diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 7c2c1e9447..a55cc9f7f9 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -213,7 +213,7 @@ hashDecls' :: Var v => Map v (Memory.DD.Decl v a) -> ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.Decl v a)] -hashDecls' memDecls = undefined +hashDecls' = undefined m2hDecl :: Ord v => Memory.DD.DataDeclaration v a -> Hashing.DD.DataDeclaration v a m2hDecl (Memory.DD.DataDeclaration mod ann bound ctors) = @@ -308,14 +308,15 @@ _hashBranch = H.accumulate . _tokensBranch _tokensBranch :: Accumulate h => Memory.Branch.Branch m -> [Token h] _tokensBranch = H.tokens . _m2hCausal . Memory.Branch._history -_m2hCausal :: Memory.Branch.UnwrappedBranch m -> Hashing.Causal.Causal Hashing.Branch.Raw -_m2hCausal = \case - Memory.Causal.One _h e -> - Hashing.Causal.Causal (m2hBranch e) mempty - Memory.Causal.Cons _h e (ht, _) -> - Hashing.Causal.Causal (m2hBranch e) $ Set.singleton (Memory.Causal.unRawHash ht) - Memory.Causal.Merge _h e ts -> - Hashing.Causal.Causal (m2hBranch e) $ Set.map Memory.Causal.unRawHash (Map.keysSet ts) +_m2hCausal :: Memory.Branch.UnwrappedBranch m -> Hashing.Causal.Causal -- Hashing.Branch.Raw +_m2hCausal = undefined -- TODO: re-implement + -- \case + --Memory.Causal.One _h e -> + -- Hashing.Causal.Causal (m2hBranch e) mempty + --Memory.Causal.Cons _h e (ht, _) -> + -- Hashing.Causal.Causal (m2hBranch e) $ Set.singleton (Memory.Causal.unRawHash ht) + --Memory.Causal.Merge _h e ts -> + -- Hashing.Causal.Causal (m2hBranch e) $ Set.map Memory.Causal.unRawHash (Map.keysSet ts) m2hBranch :: Memory.Branch.Branch0 m -> Hashing.Branch.Raw m2hBranch b = diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index a77250d0d6..bcc81c35a8 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -202,6 +202,7 @@ library ApplicativeDo BlockArguments DeriveFunctor + DeriveGeneric DerivingStrategies DoAndIfThenElse FlexibleContexts @@ -324,6 +325,7 @@ executable prettyprintdemo ApplicativeDo BlockArguments DeriveFunctor + DeriveGeneric DerivingStrategies DoAndIfThenElse FlexibleContexts @@ -391,6 +393,7 @@ executable tests ApplicativeDo BlockArguments DeriveFunctor + DeriveGeneric DerivingStrategies DoAndIfThenElse FlexibleContexts @@ -447,6 +450,7 @@ benchmark runtime ApplicativeDo BlockArguments DeriveFunctor + DeriveGeneric DerivingStrategies DoAndIfThenElse FlexibleContexts From 93437c7631c7435e785f177cf87466a5da66ade6 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 28 Oct 2021 15:34:34 -0600 Subject: [PATCH 040/297] Transformations over DBBranch written --- .../U/Codebase/Sqlite/Branch/Full.hs | 18 ++-- codebase2/codebase/U/Codebase/Reference.hs | 9 +- codebase2/codebase/U/Codebase/Referent.hs | 19 +++- codebase2/codebase/package.yaml | 19 ++++ codebase2/codebase/unison-codebase.cabal | 20 +++- .../SqliteCodebase/MigrateSchema12.hs | 101 ++++++++++++++---- 6 files changed, 154 insertions(+), 32 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index c188ae538c..265c7efcf8 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -14,7 +14,7 @@ import U.Codebase.Sqlite.LocalIds (LocalBranchChildId, LocalDefnId, LocalPatchOb import qualified U.Util.Map as Map import Data.Bifunctor (Bifunctor(bimap)) import qualified Data.Set as Set -import Control.Lens (Traversal) +import Control.Lens (Traversal, Traversal') import Unison.Prelude type LocalBranch = Branch' LocalTextId LocalDefnId LocalPatchObjectId LocalBranchChildId @@ -36,9 +36,9 @@ branchHashes_ _f _ = undefined -- Branch <$> traverse (\m -> Map.mapKeys) branchCausalHashes_ :: Traversal (Branch' t h p c) (Branch' t h p c') c c' -branchCausalHashes_ f Branch{..} = +branchCausalHashes_ f Branch{..} = Branch terms types patches <$> traverse f children - + -- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch -- convertBranch dbBranch = _ @@ -73,11 +73,11 @@ branchCausalHashes_ f Branch{..} = -- function that writes (Hash, LocalBranch) to codebase -- database has a root CausalHashId --- from CausalHashId, we can look up ValueHashId and --- +-- from CausalHashId, we can look up ValueHashId and +-- -- old object id --db--> old hash --mem--> new hash --db--> new object id --- +-- -- Branch -- { terms :: Map TextId (Map (Referent'' TextId ObjectId) (MetadataSetFormat' TextId ObjectId)), -- types :: Map TextId (Map (Reference' TextId ObjectId) (MetadataSetFormat' TextId ObjectId)), @@ -93,6 +93,12 @@ type DbMetadataSet = MetadataSetFormat' TextId ObjectId data MetadataSetFormat' t h = Inline (Set (Reference' t h)) deriving Show +metadataSetFormatReferences_ :: + (Ord t, Ord h) => + Traversal' (MetadataSetFormat' t h) (Reference' t h) +metadataSetFormatReferences_ f (Inline refs) = + fmap (Inline . Set.fromList) . traverse f . Set.toList $ refs + quadmap :: forall t h p c t' h' p' c'. (Ord t', Ord h') => (t -> t') -> (h -> h') -> (p -> p') -> (c -> c') -> Branch' t h p c -> Branch' t' h' p' c' quadmap ft fh fp fc (Branch terms types patches children) = Branch diff --git a/codebase2/codebase/U/Codebase/Reference.hs b/codebase2/codebase/U/Codebase/Reference.hs index 41bbcd62af..31107821ab 100644 --- a/codebase2/codebase/U/Codebase/Reference.hs +++ b/codebase2/codebase/U/Codebase/Reference.hs @@ -10,7 +10,7 @@ module U.Codebase.Reference where import Data.Text (Text) import Data.Word (Word64) import U.Util.Hash (Hash) -import Control.Lens (lens, Lens, Bifunctor(..), Traversal) +import Control.Lens (lens, Lens, Bifunctor(..), Traversal, Prism, prism) import Data.Bitraversable (Bitraversable(..)) import Data.Bifoldable (Bifoldable(..)) @@ -23,6 +23,13 @@ data Reference' t h | ReferenceDerived (Id' h) deriving (Eq, Ord, Show) +_ReferenceDerived :: Prism (Reference' t h) (Reference' t h') (Id' h) (Id' h') +_ReferenceDerived = prism embed project + where + embed (Id h pos) = ReferenceDerived (Id h pos) + project (ReferenceDerived id') = Right id' + project (ReferenceBuiltin t) = Left (ReferenceBuiltin t) + pattern Derived :: h -> Pos -> Reference' t h pattern Derived h i = ReferenceDerived (Id h i) diff --git a/codebase2/codebase/U/Codebase/Referent.hs b/codebase2/codebase/U/Codebase/Referent.hs index b11707a079..3b4d227c28 100644 --- a/codebase2/codebase/U/Codebase/Referent.hs +++ b/codebase2/codebase/U/Codebase/Referent.hs @@ -1,10 +1,11 @@ {-# LANGUAGE LambdaCase #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE DataKinds #-} module U.Codebase.Referent where -import Data.Text (Text) import U.Codebase.Reference (Reference, Reference') import qualified U.Codebase.Reference as Reference import U.Util.Hash (Hash) @@ -12,14 +13,22 @@ import Data.Bifunctor (Bifunctor(..)) import Data.Bifoldable (Bifoldable(..)) import Data.Bitraversable (Bitraversable(..)) import U.Codebase.Decl (ConstructorId) +import Control.Lens (Prism) +import Data.Generics.Sum (_Ctor) +import Unison.Prelude type Referent = Referent' Reference Reference type ReferentH = Referent' (Reference' Text (Maybe Hash)) (Reference' Text Hash) -data Referent' rTm rTp - = Ref rTm - | Con rTp ConstructorId - deriving (Eq, Ord, Show) +data Referent' termRef typeRef + = Ref termRef + | Con typeRef ConstructorId + deriving (Eq, Ord, Show, Generic) + +_Ref :: Prism (Referent' tmr tyr) (Referent' tmr' tyr) tmr tmr' +_Ref = _Ctor @"Ref" +_Con :: Prism (Referent' tmr tyr) (Referent' tmr tyr') (tyr, ConstructorId) (tyr', ConstructorId) +_Con = _Ctor @"Con" type Id = Id' Hash Hash data Id' hTm hTp diff --git a/codebase2/codebase/package.yaml b/codebase2/codebase/package.yaml index d995f56cb0..5cd83cb575 100644 --- a/codebase2/codebase/package.yaml +++ b/codebase2/codebase/package.yaml @@ -1,14 +1,33 @@ name: unison-codebase github: unisonweb/unison +default-extensions: + - ApplicativeDo + - BlockArguments + - DeriveFunctor + - DeriveGeneric + - DerivingStrategies + - DoAndIfThenElse + - FlexibleContexts + - FlexibleInstances + - LambdaCase + - MultiParamTypeClasses + - NamedFieldPuns + - PatternSynonyms + - ScopedTypeVariables + - TupleSections + - TypeApplications + library: source-dirs: . dependencies: - base - containers + - generic-lens - lens - mtl - text - unison-core - unison-util + - unison-prelude diff --git a/codebase2/codebase/unison-codebase.cabal b/codebase2/codebase/unison-codebase.cabal index e957211442..d0f15314e2 100644 --- a/codebase2/codebase/unison-codebase.cabal +++ b/codebase2/codebase/unison-codebase.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -- --- hash: 007d5ba1a9afa5423c79bb923619ab440794eb2496188191ac82b66ee645901c +-- hash: b4d3c77715f39c915cacffccf179f1ed62bce29ba013cecc3a25c847f5851233 name: unison-codebase version: 0.0.0 @@ -36,12 +36,30 @@ library Paths_unison_codebase hs-source-dirs: ./ + default-extensions: + ApplicativeDo + BlockArguments + DeriveFunctor + DeriveGeneric + DerivingStrategies + DoAndIfThenElse + FlexibleContexts + FlexibleInstances + LambdaCase + MultiParamTypeClasses + NamedFieldPuns + PatternSynonyms + ScopedTypeVariables + TupleSections + TypeApplications build-depends: base , containers + , generic-lens , lens , mtl , text , unison-core + , unison-prelude , unison-util default-language: Haskell2010 diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index d277bc2b36..dc1638f687 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -48,6 +48,10 @@ import Unison.Var (Var) import U.Codebase.HashTags (CausalHash) import qualified U.Codebase.Causal as C import qualified U.Codebase.Sqlite.Branch.Full as S +import qualified U.Codebase.Reference as UReference +import qualified U.Codebase.Referent as UReferent +import Numeric.Lens (integral) +import Control.Lens.Unsound (adjoin) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -87,6 +91,7 @@ data MigrationState = MigrationState -- This provides the info needed for rewriting a term. You'll access it with a function :: Old termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), objLookup :: Map (Old ObjectId) (New ObjectId), + hashByObj :: Map (Old ObjectId) (Old Hash), -- componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), @@ -218,12 +223,12 @@ migrateCausal conn causalHash = runDB conn $ do -- * Rewrite the value-hash and parent causal hashes -- * Save the new causal (Do we change the self-hash?) -- - -- let unMigratedParents = - + -- let unMigratedParents = + + + + - - - undefined -- data Causal m hc he e = Causal @@ -253,9 +258,8 @@ migrateBranch conn objectID = fmap (either id id) . runExceptT $ do -- dbBranch <- Ops.loadDbBranchByObjectId objectId -- Plan: - -- * Load a C.Branch by converting Branch Hash into a branch object ID - -- * - + -- * Load a C.Branch by converting Branch Hash into a branch object ID + -- * let allMissingTypes = undefined let allMissingTerms = undefined let allMissingPatches = undefined @@ -270,17 +274,51 @@ migrateBranch conn objectID = fmap (either id id) . runExceptT $ do allMissingPatches ++ allMissingChildren ++ allMissingPredecessors - + when (not . null $ allMissingReferences) $ throwE $ Sync.Missing allMissingReferences - + -- Migrate branch - + let oldDBBranch :: S.DbBranch = undefined + + newBranch <- oldDBBranch & dbBranchObjRefs_ %%~ remapObjIdRefs + -- Need to generalize the traversal, and also generalize the Id backing "SomeReference" + -- newBranch <- oldDBBranch & dbBranchObjRefs_ %%~ objIdsToHashed error "not implemented" -dbBranchObjRefs_ :: Traversal' S.DbBranch (SomeReference ObjectId) -dbBranchObjRefs_ = _ + +-- Project an S.Referent'' into its SomeReferenceObjId's +someReferent_ :: Traversal' (S.Referent'' t ObjectId) SomeReferenceObjId +someReferent_ = (UReferent._Ref . someReference_) + `failing` (UReferent._Con + . asPair_ -- Need to unpack the embedded reference AND remap between mismatched Constructor ID types. + . unsafeInsidePrism _ConstructorReference + ) + where + asPair_ f (UReference.ReferenceDerived id', conId) = f (id', fromIntegral conId) + <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) + asPair_ _ (UReference.ReferenceBuiltin x, conId) = pure (UReference.ReferenceBuiltin x, conId) + +someReference_ :: Traversal' (UReference.Reference' t ObjectId) SomeReferenceObjId +someReference_ = UReference._ReferenceDerived . unsafeInsidePrism _TermReference + +someMetadataSetFormat :: Ord t => Traversal' (S.MetadataSetFormat' t ObjectId) SomeReferenceObjId +someMetadataSetFormat = S.metadataSetFormatReferences_ . someReference_ + +mapReferentMetadata :: (Ord k, Ord t) => + Traversal' k SomeReferenceObjId -> + Traversal' (Map k (S.MetadataSetFormat' t ObjectId)) + (SomeReferenceObjId) +mapReferentMetadata keyTraversal f m = Map.toList m + & traversed . beside keyTraversal someMetadataSetFormat %%~ f + <&> Map.fromList + +dbBranchObjRefs_ :: Traversal' S.DbBranch SomeReferenceObjId +dbBranchObjRefs_ f S.Branch{..} = do + let newTypesMap = types & traversed . mapReferentMetadata someReference_ %%~ f + let newTermsMap = terms & traversed . mapReferentMetadata someReferent_ %%~ f + S.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children -- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch -- convertBranch dbBranch = _ @@ -463,7 +501,7 @@ typeReferences_ = . ABT.baseFunctor_ -- Focus Type.F . Type._Ref -- Only the Ref constructor has references . Reference._DerivedId - . unsafeInsidePrism (_Ctor @"TypeReference") + . unsafeInsidePrism _TypeReference -- | This is only lawful so long as your changes to 's' won't cause the prism to fail to match. unsafeInsidePrism :: Prism' s a -> Lens' a s @@ -478,13 +516,13 @@ termReferences_ = termFReferences_ :: (Ord tv, Monad m) => LensLike' m (Term.F tv ta pa a) SomeReferenceId termFReferences_ f t = - (t & Term._Ref . Reference._DerivedId . unsafeInsidePrism (_Ctor @"TermReference") %%~ f) - >>= Term._Constructor . thing . unsafeInsidePrism (_Ctor @"ConstructorReference") %%~ f - >>= Term._Request . thing . unsafeInsidePrism (_Ctor @"ConstructorReference") %%~ f + (t & Term._Ref . Reference._DerivedId . unsafeInsidePrism _TermReference %%~ f) + >>= Term._Constructor . thing . unsafeInsidePrism _ConstructorReference %%~ f + >>= Term._Request . thing . unsafeInsidePrism _ConstructorReference %%~ f >>= Term._Ann . _2 . typeReferences_ %%~ f >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f >>= Term._TermLink . referentReferences %%~ f - >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism (_Ctor @"TypeReference") %%~ f + >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism _TypeReference %%~ f -- fixme rename thing :: Traversal' (Reference.Reference, ConstructorId) (Reference.Id, ConstructorId) @@ -530,7 +568,21 @@ remapReferences declMap = \case x -> x type SomeReferenceId = SomeReference Reference.Id -type SomeReferenceObjId = SomeReference ObjectId +type SomeReferenceObjId = SomeReference (UReference.Id' ObjectId) + +objIdsToHashed :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceId +objIdsToHashed = someRef_ %%~ \(UReference.Id objId pos) -> do + objMapping <- gets hashByObj + case Map.lookup objId objMapping of + Nothing -> error $ "Expected object mapping for ID: " <> show objId + Just hash -> pure (Reference.Id hash pos) + +remapObjIdRefs :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceObjId +remapObjIdRefs = someRef_ %%~ \(UReference.Id objId pos) -> do + objMapping <- gets objLookup + case Map.lookup objId objMapping of + Nothing -> error $ "Expected object mapping for ID: " <> show objId + Just newObjId -> pure (UReference.Id newObjId pos) data SomeReference ref = TermReference ref @@ -538,6 +590,17 @@ data SomeReference ref | ConstructorReference ref ConstructorId deriving (Eq, Functor, Generic, Ord) +someRef_ :: Traversal (SomeReference ref) (SomeReference ref') ref ref' +someRef_ = param @0 + +_TermReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' +_TermReference = _Ctor @"TermReference" +_TypeReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' +_TypeReference = _Ctor @"TypeReference" + +_ConstructorReference :: Prism (SomeReference ref) (SomeReference ref') (ref, ConstructorId) (ref', ConstructorId) +_ConstructorReference = _Ctor @"ConstructorReference" + someReferenceIdToEntity :: SomeReferenceId -> Entity someReferenceIdToEntity = undefined From 66eafa927c7ea0a14034a6b091e7c595f73849ce Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 29 Oct 2021 17:49:57 -0400 Subject: [PATCH 041/297] splitting up saveBranch factored out localToDbBranch :: BranchLocalIds -> LocalBranch -> DbBranch and dbToLocalBranch :: DbBranch -> (BranchLocalIds, LocalBranch) into U.Codebase.Sqlite.Branch.Format.hs and split that functionality out of Ops.loadBranch created MigrateSchema12.DbHelpers module (name tbd) to convert from db format to hashing format threw away junk from Hashing.V2.Referent, including ConstructorType consolidated references to ConstructorId accidentally formatted Pattern.hs, V2/Term.hs --- .../U/Codebase/Sqlite/Branch/Format.hs | 57 +++- .../U/Codebase/Sqlite/Branch/Full.hs | 43 +-- .../U/Codebase/Sqlite/Operations.hs | 49 +-- codebase2/codebase/U/Codebase/Term.hs | 4 +- .../src/Unison/Builtin/Decls.hs | 7 +- parser-typechecker/src/Unison/Codebase.hs | 4 +- .../src/Unison/Codebase/SqliteCodebase.hs | 4 +- .../SqliteCodebase/MigrateSchema12.hs | 13 +- .../MigrateSchema12/DbHelpers.hs | 87 ++++++ .../src/Unison/Hashing/V2/Branch.hs | 2 +- .../src/Unison/Hashing/V2/Causal.hs | 18 +- .../src/Unison/Hashing/V2/Convert.hs | 89 +++--- .../src/Unison/Hashing/V2/Pattern.hs | 129 ++++---- .../src/Unison/Hashing/V2/Referent.hs | 32 +- .../src/Unison/Hashing/V2/Term.hs | 290 +++++++++--------- .../src/Unison/PrettyPrintEnv.hs | 5 +- .../src/Unison/Typechecker/TypeLookup.hs | 9 +- .../unison-parser-typechecker.cabal | 1 + unison-core/src/Unison/DataDeclaration.hs | 5 +- .../Unison/DataDeclaration/ConstructorId.hs | 12 +- unison-core/src/Unison/HashQualified.hs | 3 +- unison-core/src/Unison/LabeledDependency.hs | 3 +- unison-core/src/Unison/Names3.hs | 3 +- unison-core/src/Unison/Referent'.hs | 8 +- unison-core/src/Unison/Referent.hs | 5 +- unison-core/src/Unison/Term.hs | 3 +- 26 files changed, 490 insertions(+), 395 deletions(-) create mode 100644 parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs index f9f0182ade..7cb9496823 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs @@ -1,10 +1,28 @@ -module U.Codebase.Sqlite.Branch.Format where +module U.Codebase.Sqlite.Branch.Format + ( BranchFormat (..), + BranchLocalIds (..), + SyncBranchFormat (..), + localToDbBranch, + dbToLocalBranch, + localToDbDiff, + -- dbToLocalDiff, + ) +where -import Data.Vector (Vector) -import U.Codebase.Sqlite.Branch.Diff (LocalDiff) -import U.Codebase.Sqlite.Branch.Full (LocalBranch) -import U.Codebase.Sqlite.DbId (CausalHashId, BranchObjectId, ObjectId, PatchObjectId, TextId) import Data.ByteString (ByteString) +import Data.Vector (Vector) +import qualified Data.Vector as Vector +import U.Codebase.Sqlite.Branch.Diff (LocalDiff, Diff) +import U.Codebase.Sqlite.Branch.Full (DbBranch, LocalBranch) +import qualified U.Codebase.Sqlite.Branch.Full as Branch.Full +import qualified U.Codebase.Sqlite.Branch.Diff as Branch.Diff +import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObjectId, TextId) +import U.Codebase.Sqlite.LocalIds + ( LocalBranchChildId (..), + LocalDefnId (..), + LocalPatchObjectId (..), + LocalTextId (..), + ) -- | A 'BranchFormat' is a deserialized namespace object (@object.bytes@). -- @@ -12,7 +30,7 @@ import Data.ByteString (ByteString) data BranchFormat = Full BranchLocalIds LocalBranch | Diff BranchObjectId BranchLocalIds LocalDiff - deriving Show + deriving (Show) -- | A 'BranchLocalIds' is a mapping between local ids (local to this object) encoded as offsets, and actual database ids. -- @@ -24,12 +42,35 @@ data BranchLocalIds = LocalIds branchPatchLookup :: Vector PatchObjectId, branchChildLookup :: Vector (BranchObjectId, CausalHashId) } - deriving Show + deriving (Show) data SyncBranchFormat = SyncFull BranchLocalIds ByteString | SyncDiff BranchObjectId BranchLocalIds ByteString +localToDbBranch :: BranchLocalIds -> LocalBranch -> DbBranch +localToDbBranch li = + Branch.Full.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) + +dbToLocalBranch :: DbBranch -> (BranchLocalIds, LocalBranch) +dbToLocalBranch = undefined + +lookupBranchLocalText :: BranchLocalIds -> LocalTextId -> TextId +lookupBranchLocalText li (LocalTextId w) = branchTextLookup li Vector.! fromIntegral w + +lookupBranchLocalDefn :: BranchLocalIds -> LocalDefnId -> ObjectId +lookupBranchLocalDefn li (LocalDefnId w) = branchDefnLookup li Vector.! fromIntegral w + +lookupBranchLocalPatch :: BranchLocalIds -> LocalPatchObjectId -> PatchObjectId +lookupBranchLocalPatch li (LocalPatchObjectId w) = branchPatchLookup li Vector.! fromIntegral w + +lookupBranchLocalChild :: BranchLocalIds -> LocalBranchChildId -> (BranchObjectId, CausalHashId) +lookupBranchLocalChild li (LocalBranchChildId w) = branchChildLookup li Vector.! fromIntegral w + +localToDbDiff :: BranchLocalIds -> LocalDiff -> Diff +localToDbDiff li = Branch.Diff.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) + + {- projects.arya.message = "hello, world" -> -> #abc projects.arya.program = printLine message -> printLine #abc -> #def @@ -108,6 +149,4 @@ projects.arya { } } - - -} diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 265c7efcf8..2976e3c46d 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -43,34 +43,21 @@ branchCausalHashes_ f Branch{..} = -- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch -- convertBranch dbBranch = _ --- DbBranch -- migrate --> DbBranch -- hydrate for the hash --> Hashing.V2.Branch -- put (Hash, toBranchFormat(DbBranch)) --> COOL - --- function that reads a DbBranch out of codebase - --- Traversal' DbBranch SomeReferenceObjectId --- DB m => LensLike' m SomeReferenceObjectId SomeReferenceId --- MonadState MigrationState m => SomeReferenceId -> m SomeReferenceId - --- Traversal' DbBranch (BranchId, CausalHashId) --- MonadState MigrationState m => (BranchId, CausalHashId) -> m (BranchId, CausalHashId) - --- Traversal' DbBranch PatchId --- MonadState MigrationState m => PatchObjectId -> m PatchObjectId - --- totalThing :: (MonadState MigrationState DB m => LensLike' m DbBranch SomeReferenceId --- totalThing = intoHashes . overSomeRefs - --- Store (Old ObjectId) -> (New ObjectId) sky map --- Store (New ObjectId) -> (New Hash) map/cache - --- function which lifts a function over SomeReference's to run over a DBBranch by inflating Hashes --- function which remaps references in a Branch - --- function that takes DbBranch to (LocalIds, LocalBranch) - --- function that takes a DbBranch to a Hashing.V2.Branch - --- function that writes (Hash, LocalBranch) to codebase +-- branch plan +-- =========== +-- 1. function that reads a DbBranch out of codebase +-- ==> loadDbBranchByObjectId +-- 2. function which lifts a function over SomeReference's to run over a DBBranch by inflating Hashes +-- 3. function which remaps references in a Branch +-- ==> Chris's work +-- 4. function that takes DbBranch to (LocalIds, LocalBranch) +-- ==> dbToLocalBranch +-- 5. function that takes a DbBranch to a Hashing.V2.Branch +-- ==> Unison.Codebase.SqliteCodebase.MigrateSchema12.DbHelpers.dbBranchHash +-- 6. saveBranchHash +-- 7. saveBranchObject +-- =============== +-- end branch plan -- database has a root CausalHashId -- from CausalHashId, we can look up ValueHashId and diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index a475933c9f..da5ab610a5 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -428,8 +428,9 @@ getCycleLen h = do >>= decodeComponentLengthOnly >>= pure . fromIntegral -getDeclTypeByReference :: EDB m => C.Reference.Id -> m C.Decl.DeclType -getDeclTypeByReference r@(C.Reference.Id h pos) = +-- | Get the 'C.DeclType.DeclType' of a 'C.Reference.Id'. +getDeclTypeById :: EDB m => C.Reference.Id -> m C.Decl.DeclType +getDeclTypeById r@(C.Reference.Id h pos) = runMaybeT (loadDeclByReference r) >>= maybe (throwError $ LegacyUnknownConstructorType h pos) pure >>= pure . C.Decl.declType @@ -927,7 +928,7 @@ saveRootBranch c = do -- patches :: Map NameSegment (PatchHash, m Patch), -- children :: Map NameSegment (Causal m) -- } --- +-- -- U.Codebase.Sqlite.Branch.Full.Branch' -- type ShallowBranch = Branch' NameSegment Hash PatchHash CausalHash -- data ShallowBranch causalHash patchHash = ShallowBranch @@ -952,9 +953,6 @@ saveRootBranch c = do -- References, but also values -- Shallow - Hash? representation of the database relationships -saveBranch' :: EDB m => C.Branch.Branch m -> m Db.BranchObjectId -saveBranch' = undefined - saveBranch :: EDB m => C.Branch.Causal m -> m (Db.BranchObjectId, Db.CausalHashId) saveBranch (C.Causal hc he parents me) = do when debug $ traceM $ "\nOperations.saveBranch \n hc = " ++ show hc ++ ",\n he = " ++ show he ++ ",\n parents = " ++ show (Map.keys parents) @@ -1098,11 +1096,11 @@ loadBranchByCausalHashId id = do (liftQ . Q.loadBranchObjectIdByCausalHashId) id >>= traverse loadBranchByObjectId -loadBranchByObjectId :: EDB m => Db.BranchObjectId -> m (C.Branch.Branch m) -loadBranchByObjectId id = do +loadDbBranchByObjectId :: EDB m => Db.BranchObjectId -> m S.DbBranch +loadDbBranchByObjectId id = deserializeBranchObject id >>= \case - S.BranchFormat.Full li f -> s2cBranch (l2sFull li f) - S.BranchFormat.Diff r li d -> doDiff r [l2sDiff li d] + S.BranchFormat.Full li f -> pure (S.BranchFormat.localToDbBranch li f) + S.BranchFormat.Diff r li d -> doDiff r [S.BranchFormat.localToDbDiff li d] where deserializeBranchObject :: EDB m => Db.BranchObjectId -> m S.BranchFormat deserializeBranchObject id = do @@ -1110,21 +1108,14 @@ loadBranchByObjectId id = do (liftQ . Q.loadObjectById) (Db.unBranchObjectId id) >>= getFromBytesOr (ErrBranch id) S.getBranchFormat - l2sFull :: S.BranchFormat.BranchLocalIds -> S.LocalBranch -> S.DbBranch - l2sFull li = - S.Branch.Full.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) - - l2sDiff :: S.BranchFormat.BranchLocalIds -> S.Branch.LocalDiff -> S.Branch.Diff - l2sDiff li = S.BranchDiff.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) - - doDiff :: EDB m => Db.BranchObjectId -> [S.Branch.Diff] -> m (C.Branch.Branch m) + doDiff :: EDB m => Db.BranchObjectId -> [S.Branch.Diff] -> m S.DbBranch doDiff ref ds = deserializeBranchObject ref >>= \case - S.BranchFormat.Full li f -> joinFull (l2sFull li f) ds - S.BranchFormat.Diff ref' li' d' -> doDiff ref' (l2sDiff li' d' : ds) + S.BranchFormat.Full li f -> joinFull (S.BranchFormat.localToDbBranch li f) ds + S.BranchFormat.Diff ref' li' d' -> doDiff ref' (S.BranchFormat.localToDbDiff li' d' : ds) where - joinFull :: EDB m => S.DbBranch -> [S.Branch.Diff] -> m (C.Branch.Branch m) - joinFull f [] = s2cBranch f + joinFull :: EDB m => S.DbBranch -> [S.Branch.Diff] -> m S.DbBranch + joinFull f [] = pure f joinFull (S.Branch.Full.Branch tms tps patches children) (S.Branch.Diff tms' tps' patches' children' : ds) = joinFull f' ds @@ -1212,17 +1203,9 @@ loadBranchByObjectId id = do let (Set.fromList -> adds, Set.fromList -> removes) = S.BranchDiff.addsRemoves md' in Just . S.MetadataSet.Inline $ (Set.union adds $ Set.difference md removes) - lookupBranchLocalText :: S.BranchLocalIds -> LocalTextId -> Db.TextId - lookupBranchLocalText li (LocalTextId w) = S.BranchFormat.branchTextLookup li Vector.! fromIntegral w - - lookupBranchLocalDefn :: S.BranchLocalIds -> LocalDefnId -> Db.ObjectId - lookupBranchLocalDefn li (LocalDefnId w) = S.BranchFormat.branchDefnLookup li Vector.! fromIntegral w - - lookupBranchLocalPatch :: BranchLocalIds -> LocalPatchObjectId -> Db.PatchObjectId - lookupBranchLocalPatch li (LocalPatchObjectId w) = S.BranchFormat.branchPatchLookup li Vector.! fromIntegral w - - lookupBranchLocalChild :: BranchLocalIds -> LocalBranchChildId -> (Db.BranchObjectId, Db.CausalHashId) - lookupBranchLocalChild li (LocalBranchChildId w) = S.BranchFormat.branchChildLookup li Vector.! fromIntegral w +loadBranchByObjectId :: EDB m => Db.BranchObjectId -> m (C.Branch.Branch m) +loadBranchByObjectId id = + loadDbBranchByObjectId id >>= s2cBranch -- * Patch transformation diff --git a/codebase2/codebase/U/Codebase/Term.hs b/codebase2/codebase/U/Codebase/Term.hs index a17abc1b51..9b96176825 100644 --- a/codebase2/codebase/U/Codebase/Term.hs +++ b/codebase2/codebase/U/Codebase/Term.hs @@ -110,10 +110,10 @@ data Pattern t r | PFloat !Double | PText !t | PChar !Char - | PConstructor !r !Int [Pattern t r] + | PConstructor !r !ConstructorId [Pattern t r] | PAs (Pattern t r) | PEffectPure (Pattern t r) - | PEffectBind !r !Int [Pattern t r] (Pattern t r) + | PEffectBind !r !ConstructorId [Pattern t r] (Pattern t r) | PSequenceLiteral [Pattern t r] | PSequenceOp (Pattern t r) !SeqOp (Pattern t r) deriving (Generic, Functor, Foldable, Traversable, Show) diff --git a/parser-typechecker/src/Unison/Builtin/Decls.hs b/parser-typechecker/src/Unison/Builtin/Decls.hs index 292b830098..b7eb67559a 100644 --- a/parser-typechecker/src/Unison/Builtin/Decls.hs +++ b/parser-typechecker/src/Unison/Builtin/Decls.hs @@ -19,7 +19,7 @@ import Unison.Hashing.V2.Convert (hashDecls) import qualified Unison.Pattern as Pattern import Unison.Reference (Reference) import qualified Unison.Reference as Reference -import Unison.Referent (ConstructorId, Referent) +import Unison.Referent (Referent) import qualified Unison.Referent as Referent import Unison.Symbol (Symbol) import Unison.Term (Term, Term2) @@ -28,6 +28,7 @@ import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) import qualified Unison.Var as Var +import Unison.DataDeclaration.ConstructorId (ConstructorId) lookupDeclRef :: Text -> Reference lookupDeclRef str @@ -79,10 +80,10 @@ pairCtorRef, unitCtorRef :: Referent pairCtorRef = Referent.Con pairRef 0 CT.Data unitCtorRef = Referent.Con unitRef 0 CT.Data -constructorId :: Reference -> Text -> Maybe Int +constructorId :: Reference -> Text -> Maybe ConstructorId constructorId ref name = do (_,_,dd) <- find (\(_,r,_) -> Reference.DerivedId r == ref) (builtinDataDecls @Symbol) - elemIndex name $ DD.constructorNames dd + fmap fromIntegral . elemIndex name $ DD.constructorNames dd noneId, someId, okConstructorId, failConstructorId, docBlobId, docLinkId, docSignatureId, docSourceId, docEvaluateId, docJoinId, linkTermId, linkTypeId, eitherRightId, eitherLeftId :: ConstructorId isPropagatedConstructorId, isTestConstructorId, bufferModeNoBufferingId, bufferModeLineBufferingId, bufferModeBlockBufferingId, bufferModeSizedBlockBufferingId :: ConstructorId diff --git a/parser-typechecker/src/Unison/Codebase.hs b/parser-typechecker/src/Unison/Codebase.hs index ef0e20ae60..d782b32bef 100644 --- a/parser-typechecker/src/Unison/Codebase.hs +++ b/parser-typechecker/src/Unison/Codebase.hs @@ -50,6 +50,7 @@ import Unison.Codebase.Type (Codebase (..), GetRootBranchError (..), GitError (G import Unison.CodebasePath (CodebasePath, getCodebaseDir) import Unison.DataDeclaration (Decl) import qualified Unison.DataDeclaration as DD +import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Hash (Hash) import qualified Unison.Hashing.V2.Convert as Hashing import qualified Unison.Parser.Ann as Parser @@ -66,7 +67,6 @@ import qualified Unison.UnisonFile as UF import qualified Unison.Util.Relation as Rel import Unison.Var (Var) import qualified Unison.WatchKind as WK - -- Attempt to find the Branch in the current codebase cache and root up to 3 levels deep -- If not found, attempt to find it in the Codebase (sqlite) getBranchForHash :: Monad m => Codebase m v a -> Branch.Hash -> m (Maybe (Branch m)) @@ -129,7 +129,7 @@ addDefsToCodebase c uf = do goType f (ref, decl) = putTypeDeclaration c ref (f decl) getTypeOfConstructor :: - (Monad m, Ord v) => Codebase m v a -> Reference -> Int -> m (Maybe (Type v a)) + (Monad m, Ord v) => Codebase m v a -> Reference -> ConstructorId -> m (Maybe (Type v a)) getTypeOfConstructor codebase (Reference.DerivedId r) cid = do maybeDecl <- getTypeDeclaration codebase r pure $ case maybeDecl of diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 60c4e65587..71e360abc9 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -301,12 +301,12 @@ sqliteCodebase debugName root = do "I don't know about the builtin type ##" ++ show t ++ ", but I've been asked for it's ConstructorType." - in pure . fromMaybe err $ + in pure . fromMaybe err $ Map.lookup (Reference.Builtin t) Builtins.builtinConstructorType C.Reference.ReferenceDerived i -> getDeclTypeById i getDeclTypeById :: EDB m => C.Reference.Id -> m CT.ConstructorType - getDeclTypeById = fmap Cv.decltype2to1 . Ops.getDeclTypeByReference + getDeclTypeById = fmap Cv.decltype2to1 . Ops.getDeclTypeById getTypeOfTermImpl :: MonadIO m => Reference.Id -> m (Maybe (Type Symbol Ann)) getTypeOfTermImpl id | debug && trace ("getTypeOfTermImpl " ++ show id) False = undefined diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index dc1638f687..e32106a9b8 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -39,7 +39,6 @@ import Unison.Pattern (Pattern) import Unison.Prelude import Unison.Reference (Pos) import qualified Unison.Reference as Reference -import Unison.Referent (ConstructorId) import qualified Unison.Referent as Referent import qualified Unison.Term as Term import Unison.Type (Type) @@ -291,12 +290,12 @@ migrateBranch conn objectID = fmap (either id id) . runExceptT $ do -- Project an S.Referent'' into its SomeReferenceObjId's someReferent_ :: Traversal' (S.Referent'' t ObjectId) SomeReferenceObjId someReferent_ = (UReferent._Ref . someReference_) - `failing` (UReferent._Con + `failing` (UReferent._Con . asPair_ -- Need to unpack the embedded reference AND remap between mismatched Constructor ID types. . unsafeInsidePrism _ConstructorReference ) where - asPair_ f (UReference.ReferenceDerived id', conId) = f (id', fromIntegral conId) + asPair_ f (UReference.ReferenceDerived id', conId) = f (id', fromIntegral conId) <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) asPair_ _ (UReference.ReferenceBuiltin x, conId) = pure (UReference.ReferenceBuiltin x, conId) @@ -306,7 +305,7 @@ someReference_ = UReference._ReferenceDerived . unsafeInsidePrism _TermReference someMetadataSetFormat :: Ord t => Traversal' (S.MetadataSetFormat' t ObjectId) SomeReferenceObjId someMetadataSetFormat = S.metadataSetFormatReferences_ . someReference_ -mapReferentMetadata :: (Ord k, Ord t) => +mapReferentMetadata :: (Ord k, Ord t) => Traversal' k SomeReferenceObjId -> Traversal' (Map k (S.MetadataSetFormat' t ObjectId)) (SomeReferenceObjId) @@ -594,12 +593,12 @@ someRef_ :: Traversal (SomeReference ref) (SomeReference ref') ref ref' someRef_ = param @0 _TermReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' -_TermReference = _Ctor @"TermReference" +_TermReference = undefined -- _Ctor @"TermReference" _TypeReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' -_TypeReference = _Ctor @"TypeReference" +_TypeReference = undefined --_Ctor @"TypeReference" _ConstructorReference :: Prism (SomeReference ref) (SomeReference ref') (ref, ConstructorId) (ref', ConstructorId) -_ConstructorReference = _Ctor @"ConstructorReference" +_ConstructorReference = undefined -- _Ctor @"ConstructorReference" someReferenceIdToEntity :: SomeReferenceId -> Entity someReferenceIdToEntity = undefined diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs new file mode 100644 index 0000000000..5a0189833f --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs @@ -0,0 +1,87 @@ +module Unison.Codebase.SqliteCodebase.MigrateSchema12.DbHelpers + ( dbBranchHash, + ) +where + +import qualified U.Codebase.Reference as S hiding (Reference) +import qualified U.Codebase.Reference as S.Reference +import qualified U.Codebase.Referent as S.Referent +import U.Codebase.Sqlite.Branch.Full (DbMetadataSet) +import qualified U.Codebase.Sqlite.Branch.Full as S +import qualified U.Codebase.Sqlite.Branch.Full as S.Branch.Full +import qualified U.Codebase.Sqlite.Branch.Full as S.MetadataSet +import qualified U.Codebase.Sqlite.DbId as Db +import U.Codebase.Sqlite.Queries (EDB) +import qualified U.Codebase.Sqlite.Queries as Q +import qualified U.Codebase.Sqlite.Reference as S +import qualified U.Codebase.Sqlite.Referent as S +import qualified U.Util.Hash +import qualified U.Util.Map as Map +import qualified U.Util.Set as Set +import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv +import Unison.Hash (Hash) +import qualified Unison.Hashable as H +import qualified Unison.Hashing.V2.Branch as Hashing.Branch +import qualified Unison.Hashing.V2.Reference as Hashing (Reference) +import qualified Unison.Hashing.V2.Reference as Hashing.Reference +import qualified Unison.Hashing.V2.Referent as Hashing (Referent) +import qualified Unison.Hashing.V2.Referent as Hashing.Referent +import Unison.NameSegment (NameSegment (..)) +import Unison.Prelude + +dbBranchHash :: EDB m => S.DbBranch -> m Hash +dbBranchHash (S.Branch.Full.Branch tms tps patches children) = + fmap H.accumulate' $ + Hashing.Branch.Raw + <$> doTerms tms + <*> doTypes tps + <*> doPatches patches + <*> doChildren children + where + doTerms :: EDB m => Map Db.TextId (Map S.Referent S.DbMetadataSet) -> m (Map NameSegment (Map Hashing.Referent Hashing.Branch.MdValues)) + doTerms = + Map.bitraverse + s2hNameSegment + (Map.bitraverse s2hReferent s2hMetadataSet) + + doTypes :: EDB m => Map Db.TextId (Map S.Reference S.DbMetadataSet) -> m (Map NameSegment (Map Hashing.Reference Hashing.Branch.MdValues)) + doTypes = + Map.bitraverse + s2hNameSegment + (Map.bitraverse s2hReference s2hMetadataSet) + + doPatches :: EDB m => Map Db.TextId Db.PatchObjectId -> m (Map NameSegment Hash) + doPatches = + Map.bitraverse s2hNameSegment (objectIdToPrimaryHash . Db.unPatchObjectId) + + doChildren :: EDB m => Map Db.TextId (Db.BranchObjectId, Db.CausalHashId) -> m (Map NameSegment Hash) + doChildren = + Map.bitraverse s2hNameSegment \(_boId, chId) -> causalHashIdToHash chId + +s2hMetadataSet :: EDB m => DbMetadataSet -> m Hashing.Branch.MdValues +s2hMetadataSet = \case + S.MetadataSet.Inline rs -> Hashing.Branch.MdValues <$> Set.traverse s2hReference rs + +s2hNameSegment :: EDB m => Db.TextId -> m NameSegment +s2hNameSegment = + fmap NameSegment . Q.loadTextById + +s2hReferent :: EDB m => S.Referent -> m Hashing.Referent +s2hReferent = \case + S.Referent.Ref r -> Hashing.Referent.Ref <$> s2hReference r + S.Referent.Con r cid -> Hashing.Referent.Con <$> s2hReference r <*> pure (fromIntegral cid) + +s2hReference :: EDB m => S.Reference -> m Hashing.Reference +s2hReference = \case + S.ReferenceBuiltin t -> Hashing.Reference.Builtin <$> Q.loadTextById t + S.Reference.Derived h i -> Hashing.Reference.Derived <$> objectIdToPrimaryHash h <*> pure i + +-- Mitchell: Do these variants of Q.* queries belong somewhere else? Or in Q perhaps? + +objectIdToPrimaryHash :: EDB m => Db.ObjectId -> m Hash +objectIdToPrimaryHash = + fmap (Cv.hash2to1 . U.Util.Hash.fromBase32Hex) . Q.loadPrimaryHashByObjectId + +causalHashIdToHash :: EDB m => Db.CausalHashId -> m Hash +causalHashIdToHash = + fmap (Cv.hash2to1 . U.Util.Hash.fromBase32Hex) . Q.loadHashById . Db.unCausalHashId diff --git a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs index 83a7766d2a..0fefd7ffed 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs @@ -25,7 +25,7 @@ data Raw = Raw { terms :: Map NameSegment (Map Referent MdValues), types :: Map NameSegment (Map Reference MdValues), patches :: Map NameSegment Hash, - children :: Map NameSegment Hash + children :: Map NameSegment Hash -- the Causal Hash } instance Hashable Raw where diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index 9d7a5101f6..2e939c8469 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -2,23 +2,23 @@ {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} -module Unison.Hashing.V2.Causal (Causal (..), - hashCausal) where +module Unison.Hashing.V2.Causal + ( Causal (..), + hashCausal, + ) +where import Data.Set (Set) +import qualified Data.Set as Set import Unison.Hash (Hash) import Unison.Hashable (Hashable) import qualified Unison.Hashable as H -import qualified Data.Set as Set hashCausal :: H.Accumulate h => Causal -> [(H.Token h)] -hashCausal c = H.tokens $ - [ selfHash c - , branchHash c - ] ++ (Set.toList $ parents c) +hashCausal c = + H.tokens $ [selfHash c, branchHash c] ++ (Set.toList $ parents c) -data Causal = - Causal { selfHash :: Hash, branchHash :: Hash, parents :: Set Hash } +data Causal = Causal {selfHash :: Hash, branchHash :: Hash, parents :: Set Hash} instance Hashable Causal where tokens c = hashCausal c diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index a55cc9f7f9..6f19ceb274 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -55,6 +55,9 @@ import qualified Unison.Type as Memory.Type import qualified Unison.Util.Relation as Relation import qualified Unison.Util.Star3 as Memory.Star3 import Unison.Var (Var) +import qualified Unison.ConstructorType as Memory.ConstructorType +import Control.Monad.Trans.Writer.CPS (Writer) +import qualified Control.Monad.Trans.Writer.CPS as Writer typeToReference :: Var v => Memory.Type.Type v a -> Memory.Reference.Reference typeToReference = h2mReference . Hashing.Type.toReference . m2hType . Memory.Type.removeAllEffectVars @@ -69,10 +72,12 @@ hashTypeComponents = fmap h2mTypeResult . Hashing.Type.hashComponents . fmap m2h h2mTypeResult (id, tp) = (h2mReferenceId id, h2mType tp) hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a) -hashTermComponents = fmap h2mTermResult . Hashing.Term.hashComponents . fmap m2hTerm +hashTermComponents mTerms = + case Writer.runWriter (traverse m2hTerm mTerms) of + (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponents hTerms where - h2mTermResult :: Ord v => (Hashing.Reference.Id, Hashing.Term.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) - h2mTermResult (id, tm) = (h2mReferenceId id, h2mTerm tm) + h2mTermResult :: Ord v => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> (Hashing.Reference.Id, Hashing.Term.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) + h2mTermResult getCtorType (id, tm) = (h2mReferenceId id, h2mTerm getCtorType tm) -- TODO: remove non-prime version -- include type in hash @@ -80,33 +85,33 @@ hashTermComponents' :: Var v => Map v (Memory.Term.Term v a, Memory.Type.Type v hashTermComponents' = undefined hashClosedTerm :: Var v => Memory.Term.Term v a -> Memory.Reference.Id -hashClosedTerm = h2mReferenceId . Hashing.Term.hashClosedTerm . m2hTerm - -m2hTerm :: Ord v => Memory.Term.Term v a -> Hashing.Term.Term v a -m2hTerm = ABT.transform \case - Memory.Term.Int i -> Hashing.Term.Int i - Memory.Term.Nat n -> Hashing.Term.Nat n - Memory.Term.Float d -> Hashing.Term.Float d - Memory.Term.Boolean b -> Hashing.Term.Boolean b - Memory.Term.Text t -> Hashing.Term.Text t - Memory.Term.Char c -> Hashing.Term.Char c - Memory.Term.Blank b -> Hashing.Term.Blank b - Memory.Term.Ref r -> Hashing.Term.Ref (m2hReference r) - Memory.Term.Constructor r i -> Hashing.Term.Constructor (m2hReference r) i - Memory.Term.Request r i -> Hashing.Term.Request (m2hReference r) i - Memory.Term.Handle x y -> Hashing.Term.Handle x y - Memory.Term.App f x -> Hashing.Term.App f x - Memory.Term.Ann e t -> Hashing.Term.Ann e (m2hType t) - Memory.Term.List as -> Hashing.Term.List as - Memory.Term.And p q -> Hashing.Term.And p q - Memory.Term.If c t f -> Hashing.Term.If c t f - Memory.Term.Or p q -> Hashing.Term.Or p q - Memory.Term.Lam a -> Hashing.Term.Lam a - Memory.Term.LetRec isTop bs body -> Hashing.Term.LetRec isTop bs body - Memory.Term.Let isTop b body -> Hashing.Term.Let isTop b body - Memory.Term.Match scr cases -> Hashing.Term.Match scr (fmap m2hMatchCase cases) - Memory.Term.TermLink r -> Hashing.Term.TermLink (m2hReferent r) - Memory.Term.TypeLink r -> Hashing.Term.TypeLink (m2hReference r) +hashClosedTerm = h2mReferenceId . Hashing.Term.hashClosedTerm . fst . Writer.runWriter . m2hTerm + +m2hTerm :: Ord v => Memory.Term.Term v a -> Writer (Map Memory.Reference.Reference Memory.ConstructorType.ConstructorType) (Hashing.Term.Term v a) +m2hTerm = ABT.transformM \case + Memory.Term.Int i -> pure (Hashing.Term.Int i) + Memory.Term.Nat n -> pure (Hashing.Term.Nat n) + Memory.Term.Float d -> pure (Hashing.Term.Float d) + Memory.Term.Boolean b -> pure (Hashing.Term.Boolean b) + Memory.Term.Text t -> pure (Hashing.Term.Text t) + Memory.Term.Char c -> pure (Hashing.Term.Char c) + Memory.Term.Blank b -> pure (Hashing.Term.Blank b) + Memory.Term.Ref r -> pure (Hashing.Term.Ref (m2hReference r)) + Memory.Term.Constructor r i -> pure (Hashing.Term.Constructor (m2hReference r) i) + Memory.Term.Request r i -> pure (Hashing.Term.Request (m2hReference r) i) + Memory.Term.Handle x y -> pure (Hashing.Term.Handle x y) + Memory.Term.App f x -> pure (Hashing.Term.App f x) + Memory.Term.Ann e t -> pure (Hashing.Term.Ann e (m2hType t)) + Memory.Term.List as -> pure (Hashing.Term.List as) + Memory.Term.And p q -> pure (Hashing.Term.And p q) + Memory.Term.If c t f -> pure (Hashing.Term.If c t f) + Memory.Term.Or p q -> pure (Hashing.Term.Or p q) + Memory.Term.Lam a -> pure (Hashing.Term.Lam a) + Memory.Term.LetRec isTop bs body -> pure (Hashing.Term.LetRec isTop bs body) + Memory.Term.Let isTop b body -> pure (Hashing.Term.Let isTop b body) + Memory.Term.Match scr cases -> pure (Hashing.Term.Match scr (fmap m2hMatchCase cases)) + Memory.Term.TermLink r -> Hashing.Term.TermLink <$> m2hReferent r + Memory.Term.TypeLink r -> pure (Hashing.Term.TypeLink (m2hReference r)) m2hMatchCase :: Memory.Term.MatchCase a a1 -> Hashing.Term.MatchCase a a1 m2hMatchCase (Memory.Term.MatchCase pat m_a1 a1) = Hashing.Term.MatchCase (m2hPattern pat) m_a1 a1 @@ -134,13 +139,15 @@ m2hSequenceOp = \case Memory.Pattern.Snoc -> Hashing.Pattern.Snoc Memory.Pattern.Concat -> Hashing.Pattern.Concat -m2hReferent :: Memory.Referent.Referent -> Hashing.Referent.Referent +m2hReferent :: Memory.Referent.Referent -> Writer (Map Memory.Reference.Reference Memory.ConstructorType.ConstructorType) Hashing.Referent.Referent m2hReferent = \case - Memory.Referent.Ref ref -> Hashing.Referent.Ref (m2hReference ref) - Memory.Referent.Con ref n ct -> Hashing.Referent.Con (m2hReference ref) n ct + Memory.Referent.Ref ref -> pure (Hashing.Referent.Ref (m2hReference ref)) + Memory.Referent.Con ref n ct -> do + Writer.tell (Map.singleton ref ct) + pure (Hashing.Referent.Con (m2hReference ref) n) -h2mTerm :: Ord v => Hashing.Term.Term v a -> Memory.Term.Term v a -h2mTerm = ABT.transform \case +h2mTerm :: Ord v => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> Hashing.Term.Term v a -> Memory.Term.Term v a +h2mTerm getCT = ABT.transform \case Hashing.Term.Int i -> Memory.Term.Int i Hashing.Term.Nat n -> Memory.Term.Nat n Hashing.Term.Float d -> Memory.Term.Float d @@ -162,7 +169,7 @@ h2mTerm = ABT.transform \case Hashing.Term.LetRec isTop bs body -> Memory.Term.LetRec isTop bs body Hashing.Term.Let isTop b body -> Memory.Term.Let isTop b body Hashing.Term.Match scr cases -> Memory.Term.Match scr (h2mMatchCase <$> cases) - Hashing.Term.TermLink r -> Memory.Term.TermLink (h2mReferent r) + Hashing.Term.TermLink r -> Memory.Term.TermLink (h2mReferent getCT r) Hashing.Term.TypeLink r -> Memory.Term.TypeLink (h2mReference r) h2mMatchCase :: Hashing.Term.MatchCase a b -> Memory.Term.MatchCase a b @@ -191,10 +198,12 @@ h2mSequenceOp = \case Hashing.Pattern.Snoc -> Memory.Pattern.Snoc Hashing.Pattern.Concat -> Memory.Pattern.Concat -h2mReferent :: Hashing.Referent.Referent -> Memory.Referent.Referent -h2mReferent = \case +h2mReferent :: (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> Hashing.Referent.Referent -> Memory.Referent.Referent +h2mReferent getCT = \case Hashing.Referent.Ref ref -> Memory.Referent.Ref (h2mReference ref) - Hashing.Referent.Con ref n ct -> Memory.Referent.Con (h2mReference ref) n ct + Hashing.Referent.Con ref n -> + let mRef = h2mReference ref + in Memory.Referent.Con mRef n (getCT mRef) hashDecls :: Var v => @@ -334,7 +343,7 @@ m2hBranch b = | ns <- toList . Relation.ran $ Memory.Star3.d1 s, let m2 = Map.fromList - [ (m2hReferent r, md) + [ (fst (Writer.runWriter (m2hReferent r)), md) | r <- toList . Relation.lookupRan ns $ Memory.Star3.d1 s, let mdrefs1to2 (_typeR1, valR1) = m2hReference valR1 md = Hashing.Branch.MdValues . Set.map mdrefs1to2 . Relation.lookupDom r $ Memory.Star3.d3 s diff --git a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs index 8a766f8b17..843bf646fd 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs @@ -1,17 +1,18 @@ -{-# Language DeriveTraversable, DeriveGeneric, PatternSynonyms, OverloadedStrings #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} module Unison.Hashing.V2.Pattern where -import Unison.Prelude - import Data.Foldable as Foldable hiding (foldMap') import Data.List (intercalate) import qualified Data.Set as Set +import Unison.DataDeclaration.ConstructorId (ConstructorId) +import qualified Unison.Hashable as H import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Type as Type -import qualified Unison.Hashable as H - -type ConstructorId = Int +import Unison.Prelude data Pattern loc = Unbound loc @@ -22,18 +23,19 @@ data Pattern loc | Float loc !Double | Text loc !Text | Char loc !Char - | Constructor loc !Reference !Int [Pattern loc] + | Constructor loc !Reference !ConstructorId [Pattern loc] | As loc (Pattern loc) | EffectPure loc (Pattern loc) - | EffectBind loc !Reference !Int [Pattern loc] (Pattern loc) + | EffectBind loc !Reference !ConstructorId [Pattern loc] (Pattern loc) | SequenceLiteral loc [Pattern loc] | SequenceOp loc (Pattern loc) !SeqOp (Pattern loc) - deriving (Ord,Generic,Functor,Foldable,Traversable) + deriving (Ord, Generic, Functor, Foldable, Traversable) -data SeqOp = Cons - | Snoc - | Concat - deriving (Eq, Show, Ord, Generic) +data SeqOp + = Cons + | Snoc + | Concat + deriving (Eq, Show, Ord, Generic) instance H.Hashable SeqOp where tokens Cons = [H.Tag 0] @@ -41,17 +43,17 @@ instance H.Hashable SeqOp where tokens Concat = [H.Tag 2] instance Show (Pattern loc) where - show (Unbound _ ) = "Unbound" - show (Var _ ) = "Var" + show (Unbound _) = "Unbound" + show (Var _) = "Var" show (Boolean _ x) = "Boolean " <> show x - show (Int _ x) = "Int " <> show x - show (Nat _ x) = "Nat " <> show x - show (Float _ x) = "Float " <> show x - show (Text _ t) = "Text " <> show t - show (Char _ c) = "Char " <> show c + show (Int _ x) = "Int " <> show x + show (Nat _ x) = "Nat " <> show x + show (Float _ x) = "Float " <> show x + show (Text _ t) = "Text " <> show t + show (Char _ c) = "Char " <> show c show (Constructor _ r i ps) = "Constructor " <> unwords [show r, show i, show ps] - show (As _ p) = "As " <> show p + show (As _ p) = "As " <> show p show (EffectPure _ k) = "EffectPure " <> show k show (EffectBind _ r i ps k) = "EffectBind " <> unwords [show r, show i, show ps, show k] @@ -111,46 +113,47 @@ instance Eq (Pattern loc) where foldMap' :: Monoid m => (Pattern loc -> m) -> Pattern loc -> m foldMap' f p = case p of - Unbound _ -> f p - Var _ -> f p - Boolean _ _ -> f p - Int _ _ -> f p - Nat _ _ -> f p - Float _ _ -> f p - Text _ _ -> f p - Char _ _ -> f p - Constructor _ _ _ ps -> f p <> foldMap (foldMap' f) ps - As _ p' -> f p <> foldMap' f p' - EffectPure _ p' -> f p <> foldMap' f p' - EffectBind _ _ _ ps p' -> f p <> foldMap (foldMap' f) ps <> foldMap' f p' - SequenceLiteral _ ps -> f p <> foldMap (foldMap' f) ps - SequenceOp _ p1 _ p2 -> f p <> foldMap' f p1 <> foldMap' f p2 + Unbound _ -> f p + Var _ -> f p + Boolean _ _ -> f p + Int _ _ -> f p + Nat _ _ -> f p + Float _ _ -> f p + Text _ _ -> f p + Char _ _ -> f p + Constructor _ _ _ ps -> f p <> foldMap (foldMap' f) ps + As _ p' -> f p <> foldMap' f p' + EffectPure _ p' -> f p <> foldMap' f p' + EffectBind _ _ _ ps p' -> f p <> foldMap (foldMap' f) ps <> foldMap' f p' + SequenceLiteral _ ps -> f p <> foldMap (foldMap' f) ps + SequenceOp _ p1 _ p2 -> f p <> foldMap' f p1 <> foldMap' f p2 -generalizedDependencies - :: Ord r - => (Reference -> r) - -> (Reference -> ConstructorId -> r) - -> (Reference -> r) - -> (Reference -> ConstructorId -> r) - -> (Reference -> r) - -> Pattern loc - -> Set r -generalizedDependencies literalType dataConstructor dataType effectConstructor effectType - = Set.fromList . foldMap' - (\case - Unbound _ -> mempty - Var _ -> mempty - As _ _ -> mempty - Constructor _ r cid _ -> [dataType r, dataConstructor r cid] - EffectPure _ _ -> [effectType Type.effectRef] - EffectBind _ r cid _ _ -> - [effectType Type.effectRef, effectType r, effectConstructor r cid] - SequenceLiteral _ _ -> [literalType Type.listRef] - SequenceOp {} -> [literalType Type.listRef] - Boolean _ _ -> [literalType Type.booleanRef] - Int _ _ -> [literalType Type.intRef] - Nat _ _ -> [literalType Type.natRef] - Float _ _ -> [literalType Type.floatRef] - Text _ _ -> [literalType Type.textRef] - Char _ _ -> [literalType Type.charRef] - ) +generalizedDependencies :: + Ord r => + (Reference -> r) -> + (Reference -> ConstructorId -> r) -> + (Reference -> r) -> + (Reference -> ConstructorId -> r) -> + (Reference -> r) -> + Pattern loc -> + Set r +generalizedDependencies literalType dataConstructor dataType effectConstructor effectType = + Set.fromList + . foldMap' + ( \case + Unbound _ -> mempty + Var _ -> mempty + As _ _ -> mempty + Constructor _ r cid _ -> [dataType r, dataConstructor r cid] + EffectPure _ _ -> [effectType Type.effectRef] + EffectBind _ r cid _ _ -> + [effectType Type.effectRef, effectType r, effectConstructor r cid] + SequenceLiteral _ _ -> [literalType Type.listRef] + SequenceOp {} -> [literalType Type.listRef] + Boolean _ _ -> [literalType Type.booleanRef] + Int _ _ -> [literalType Type.intRef] + Nat _ _ -> [literalType Type.natRef] + Float _ _ -> [literalType Type.floatRef] + Text _ _ -> [literalType Type.textRef] + Char _ _ -> [literalType Type.charRef] + ) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs index 76aeedd1c5..f19c75b0e0 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs @@ -6,33 +6,17 @@ module Unison.Hashing.V2.Referent pattern Ref, pattern Con, ConstructorId, - toReference, - Unison.Hashing.V2.Referent.fold, ) where -import Unison.Referent' ( Referent'(..), toReference' ) +import Unison.DataDeclaration.ConstructorId (ConstructorId) +import Unison.Hashable (Hashable) +import qualified Unison.Hashable as H import Unison.Hashing.V2.Reference (Reference) -import Unison.ConstructorType (ConstructorType) --- | Specifies a term. --- --- Either a term 'Reference', a data constructor, or an effect constructor. --- --- Slightly odd naming. This is the "referent of term name in the codebase", --- rather than the target of a Reference. -type Referent = Referent' Reference -type ConstructorId = Int -pattern Ref :: Reference -> Referent -pattern Ref r = Ref' r -pattern Con :: Reference -> ConstructorId -> ConstructorType -> Referent -pattern Con r i t = Con' r i t -{-# COMPLETE Ref, Con #-} +data Referent = Ref Reference | Con Reference ConstructorId + deriving (Show, Ord, Eq) -toReference :: Referent -> Reference -toReference = toReference' - -fold :: (r -> a) -> (r -> ConstructorId -> ConstructorType -> a) -> Referent' r -> a -fold fr fc = \case - Ref' r -> fr r - Con' r i ct -> fc r i ct +instance Hashable Referent where + tokens (Ref r) = [H.Tag 0] ++ H.tokens r + tokens (Con r i) = [H.Tag 2] ++ H.tokens r ++ H.tokens i diff --git a/parser-typechecker/src/Unison/Hashing/V2/Term.hs b/parser-typechecker/src/Unison/Hashing/V2/Term.hs index 06fdd8dda7..5aeb69829c 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Term.hs @@ -1,22 +1,22 @@ {-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE InstanceSigs #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE UnicodeSyntax #-} {-# LANGUAGE ViewPatterns #-} -module Unison.Hashing.V2.Term ( - Term, - F(..), - MatchCase(..), - hashClosedTerm, - hashComponents -) where - -import Unison.Prelude -import Prelude hiding (and,or) +module Unison.Hashing.V2.Term + ( Term, + F (..), + MatchCase (..), + hashClosedTerm, + hashComponents, + ) +where import qualified Data.Sequence as Sequence import qualified Data.Text as Text @@ -24,24 +24,23 @@ import Prelude.Extras (Eq1 (..), Show1 (..)) import Text.Show import qualified Unison.ABT as ABT import qualified Unison.Blank as B +import Unison.DataDeclaration.ConstructorId (ConstructorId) import qualified Unison.Hash as Hash import Unison.Hashable (Hashable1, accumulateToken) import qualified Unison.Hashable as Hashable import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Pattern (Pattern) -import qualified Unison.Hashing.V2.Pattern as Pattern import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference import qualified Unison.Hashing.V2.Reference.Util as ReferenceUtil import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.Type (Type) +import Unison.Prelude import Unison.Var (Var) - --- This gets reexported; should maybe live somewhere other than Pattern, though. -type ConstructorId = Pattern.ConstructorId +import Prelude hiding (and, or) data MatchCase loc a = MatchCase (Pattern loc) (Maybe a) a - deriving (Show,Eq,Foldable,Functor,Generic,Generic1,Traversable) + deriving (Show, Eq, Foldable, Functor, Generic, Generic1, Traversable) -- | Base functor for terms in the Unison language -- We need `typeVar` because the term and type variables may differ. @@ -54,9 +53,9 @@ data F typeVar typeAnn patternAnn a | Char Char | Blank (B.Blank typeAnn) | Ref Reference - -- First argument identifies the data type, - -- second argument identifies the constructor - | Constructor Reference ConstructorId + | -- First argument identifies the data type, + -- second argument identifies the constructor + Constructor Reference ConstructorId | Request Reference ConstructorId | Handle a a | App a a @@ -66,26 +65,26 @@ data F typeVar typeAnn patternAnn a | And a a | Or a a | Lam a - -- Note: let rec blocks have an outer ABT.Cycle which introduces as many - -- variables as there are bindings - | LetRec IsTop [a] a - -- Note: first parameter is the binding, second is the expression which may refer - -- to this let bound variable. Constructed as `Let b (abs v e)` - | Let IsTop a a - -- Pattern matching / eliminating data types, example: - -- case x of - -- Just n -> rhs1 - -- Nothing -> rhs2 - -- - -- translates to - -- - -- Match x - -- [ (Constructor 0 [Var], ABT.abs n rhs1) - -- , (Constructor 1 [], rhs2) ] - | Match a [MatchCase patternAnn a] + | -- Note: let rec blocks have an outer ABT.Cycle which introduces as many + -- variables as there are bindings + LetRec IsTop [a] a + | -- Note: first parameter is the binding, second is the expression which may refer + -- to this let bound variable. Constructed as `Let b (abs v e)` + Let IsTop a a + | -- Pattern matching / eliminating data types, example: + -- case x of + -- Just n -> rhs1 + -- Nothing -> rhs2 + -- + -- translates to + -- + -- Match x + -- [ (Constructor 0 [Var], ABT.abs n rhs1) + -- , (Constructor 1 [], rhs2) ] + Match a [MatchCase patternAnn a] | TermLink Referent | TypeLink Reference - deriving (Foldable,Functor,Generic,Generic1,Traversable) + deriving (Foldable, Functor, Generic, Generic1, Traversable) type IsTop = Bool @@ -102,85 +101,94 @@ ref a r = ABT.tm' a (Ref r) refId :: Ord v => a -> Reference.Id -> Term2 vt at ap v a refId a = ref a . Reference.DerivedId -hashComponents - :: Var v => Map v (Term v a) -> Map v (Reference.Id, Term v a) +hashComponents :: + Var v => Map v (Term v a) -> Map v (Reference.Id, Term v a) hashComponents = ReferenceUtil.hashComponents $ refId () hashClosedTerm :: Var v => Term v a -> Reference.Id hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 instance Var v => Hashable1 (F v a p) where - hash1 hashCycle hash e - = let (tag, hashed, varint) = - (Hashable.Tag, Hashable.Hashed, Hashable.Nat . fromIntegral) - in - case e of - -- So long as `Reference.Derived` ctors are created using the same - -- hashing function as is used here, this case ensures that references - -- are 'transparent' wrt hash and hashing is unaffected by whether - -- expressions are linked. So for example `x = 1 + 1` and `y = x` hash - -- the same. + hash1 :: forall h x. (Ord h, Hashable.Accumulate h) => ([x] -> ([h], x -> h)) -> (x -> h) -> (F v a p) x -> h + hash1 hashCycle hash e = + let varint :: Integral i => i -> Hashable.Token h + varint = Hashable.Nat . fromIntegral + tag = Hashable.Tag + hashed = Hashable.Hashed + in case e of + -- So long as `Reference.Derived` ctors are created using the same + -- hashing function as is used here, this case ensures that references + -- are 'transparent' wrt hash and hashing is unaffected by whether + -- expressions are linked. So for example `x = 1 + 1` and `y = x` hash + -- the same. Ref (Reference.Derived h 0) -> Hashable.fromBytes (Hash.toByteString h) - Ref (Reference.Derived h i) -> Hashable.accumulate - [ tag 1 - , hashed $ Hashable.fromBytes (Hash.toByteString h) - , Hashable.Nat i - ] + Ref (Reference.Derived h i) -> + Hashable.accumulate + [ tag 1, + hashed $ Hashable.fromBytes (Hash.toByteString h), + Hashable.Nat i + ] -- Note: start each layer with leading `1` byte, to avoid collisions -- with types, which start each layer with leading `0`. -- See `Hashable1 Type.F` _ -> - Hashable.accumulate - $ tag 1 - : case e of - Nat i -> [tag 64, accumulateToken i] - Int i -> [tag 65, accumulateToken i] - Float n -> [tag 66, Hashable.Double n] - Boolean b -> [tag 67, accumulateToken b] - Text t -> [tag 68, accumulateToken t] - Char c -> [tag 69, accumulateToken c] - Blank b -> tag 1 : case b of + Hashable.accumulate $ + tag 1 : + case e of + Nat i -> [tag 64, accumulateToken i] + Int i -> [tag 65, accumulateToken i] + Float n -> [tag 66, Hashable.Double n] + Boolean b -> [tag 67, accumulateToken b] + Text t -> [tag 68, accumulateToken t] + Char c -> [tag 69, accumulateToken c] + Blank b -> + tag 1 : case b of B.Blank -> [tag 0] B.Recorded (B.Placeholder _ s) -> [tag 1, Hashable.Text (Text.pack s)] B.Recorded (B.Resolve _ s) -> [tag 2, Hashable.Text (Text.pack s)] - Ref (Reference.Builtin name) -> [tag 2, accumulateToken name] - Ref Reference.Derived {} -> - error "handled above, but GHC can't figure this out" - App a a2 -> [tag 3, hashed (hash a), hashed (hash a2)] - Ann a t -> [tag 4, hashed (hash a), hashed (ABT.hash t)] - List as -> tag 5 : varint (Sequence.length as) : map + Ref (Reference.Builtin name) -> [tag 2, accumulateToken name] + Ref Reference.Derived {} -> + error "handled above, but GHC can't figure this out" + App a a2 -> [tag 3, hashed (hash a), hashed (hash a2)] + Ann a t -> [tag 4, hashed (hash a), hashed (ABT.hash t)] + List as -> + tag 5 : + varint (Sequence.length as) : + map (hashed . hash) (toList as) - Lam a -> [tag 6, hashed (hash a)] - -- note: we use `hashCycle` to ensure result is independent of - -- let binding order - LetRec _ as a -> case hashCycle as of - (hs, hash) -> tag 7 : hashed (hash a) : map hashed hs - -- here, order is significant, so don't use hashCycle - Let _ b a -> [tag 8, hashed $ hash b, hashed $ hash a] - If b t f -> - [tag 9, hashed $ hash b, hashed $ hash t, hashed $ hash f] - Request r n -> [tag 10, accumulateToken r, varint n] - Constructor r n -> [tag 12, accumulateToken r, varint n] - Match e branches -> - tag 13 : hashed (hash e) : concatMap h branches - where - h (MatchCase pat guard branch) = concat - [ [accumulateToken pat] - , toList (hashed . hash <$> guard) - , [hashed (hash branch)] - ] - Handle h b -> [tag 15, hashed $ hash h, hashed $ hash b] - And x y -> [tag 16, hashed $ hash x, hashed $ hash y] - Or x y -> [tag 17, hashed $ hash x, hashed $ hash y] - TermLink r -> [tag 18, accumulateToken r] - TypeLink r -> [tag 19, accumulateToken r] + Lam a -> [tag 6, hashed (hash a)] + -- note: we use `hashCycle` to ensure result is independent of + -- let binding order + LetRec _ as a -> case hashCycle as of + (hs, hash) -> tag 7 : hashed (hash a) : map hashed hs + -- here, order is significant, so don't use hashCycle + Let _ b a -> [tag 8, hashed $ hash b, hashed $ hash a] + If b t f -> + [tag 9, hashed $ hash b, hashed $ hash t, hashed $ hash f] + Request r n -> [tag 10, accumulateToken r, varint n] + Constructor r n -> [tag 12, accumulateToken r, varint n] + Match e branches -> + tag 13 : hashed (hash e) : concatMap h branches + where + h (MatchCase pat guard branch) = + concat + [ [accumulateToken pat], + toList (hashed . hash <$> guard), + [hashed (hash branch)] + ] + Handle h b -> [tag 15, hashed $ hash h, hashed $ hash b] + And x y -> [tag 16, hashed $ hash x, hashed $ hash y] + Or x y -> [tag 17, hashed $ hash x, hashed $ hash y] + TermLink r -> [tag 18, accumulateToken r] + TypeLink r -> [tag 19, accumulateToken r] -- mostly boring serialization code below ... instance (Eq a, ABT.Var v) => Eq1 (F v a p) where (==#) = (==) + instance (Show v) => Show1 (F v a p) where showsPrec1 = showsPrec instance (ABT.Var vt, Eq at, Eq a) => Eq (F vt at p a) where @@ -210,52 +218,54 @@ instance (ABT.Var vt, Eq at, Eq a) => Eq (F vt at p a) where Match scrutinee cases == Match s2 cs2 = scrutinee == s2 && cases == cs2 _ == _ = False - instance (Show v, Show a) => Show (F v a0 p a) where showsPrec = go - where - go _ (Int n ) = (if n >= 0 then s "+" else s "") <> shows n - go _ (Nat n ) = shows n - go _ (Float n ) = shows n - go _ (Boolean True ) = s "true" - go _ (Boolean False) = s "false" - go p (Ann t k) = showParen (p > 1) $ shows t <> s ":" <> shows k - go p (App f x) = showParen (p > 9) $ showsPrec 9 f <> s " " <> showsPrec 10 x - go _ (Lam body ) = showParen True (s "λ " <> shows body) - go _ (List vs ) = showListWith shows (toList vs) - go _ (Blank b ) = case b of - B.Blank -> s "_" - B.Recorded (B.Placeholder _ r) -> s ("_" ++ r) - B.Recorded (B.Resolve _ r) -> s r - go _ (Ref r) = s "Ref(" <> shows r <> s ")" - go _ (TermLink r) = s "TermLink(" <> shows r <> s ")" - go _ (TypeLink r) = s "TypeLink(" <> shows r <> s ")" - go _ (Let _ b body) = - showParen True (s "let " <> shows b <> s " in " <> shows body) - go _ (LetRec _ bs body) = showParen - True - (s "let rec" <> shows bs <> s " in " <> shows body) - go _ (Handle b body) = showParen - True - (s "handle " <> shows b <> s " in " <> shows body) - go _ (Constructor r n ) = s "Con" <> shows r <> s "#" <> shows n - go _ (Match scrutinee cases) = showParen - True - (s "case " <> shows scrutinee <> s " of " <> shows cases) - go _ (Text s ) = shows s - go _ (Char c ) = shows c - go _ (Request r n) = s "Req" <> shows r <> s "#" <> shows n - go p (If c t f) = - showParen (p > 0) - $ s "if " - <> shows c - <> s " then " - <> shows t - <> s " else " - <> shows f - go p (And x y) = - showParen (p > 0) $ s "and " <> shows x <> s " " <> shows y - go p (Or x y) = - showParen (p > 0) $ s "or " <> shows x <> s " " <> shows y - (<>) = (.) - s = showString + where + go _ (Int n) = (if n >= 0 then s "+" else s "") <> shows n + go _ (Nat n) = shows n + go _ (Float n) = shows n + go _ (Boolean True) = s "true" + go _ (Boolean False) = s "false" + go p (Ann t k) = showParen (p > 1) $ shows t <> s ":" <> shows k + go p (App f x) = showParen (p > 9) $ showsPrec 9 f <> s " " <> showsPrec 10 x + go _ (Lam body) = showParen True (s "λ " <> shows body) + go _ (List vs) = showListWith shows (toList vs) + go _ (Blank b) = case b of + B.Blank -> s "_" + B.Recorded (B.Placeholder _ r) -> s ("_" ++ r) + B.Recorded (B.Resolve _ r) -> s r + go _ (Ref r) = s "Ref(" <> shows r <> s ")" + go _ (TermLink r) = s "TermLink(" <> shows r <> s ")" + go _ (TypeLink r) = s "TypeLink(" <> shows r <> s ")" + go _ (Let _ b body) = + showParen True (s "let " <> shows b <> s " in " <> shows body) + go _ (LetRec _ bs body) = + showParen + True + (s "let rec" <> shows bs <> s " in " <> shows body) + go _ (Handle b body) = + showParen + True + (s "handle " <> shows b <> s " in " <> shows body) + go _ (Constructor r n) = s "Con" <> shows r <> s "#" <> shows n + go _ (Match scrutinee cases) = + showParen + True + (s "case " <> shows scrutinee <> s " of " <> shows cases) + go _ (Text s) = shows s + go _ (Char c) = shows c + go _ (Request r n) = s "Req" <> shows r <> s "#" <> shows n + go p (If c t f) = + showParen (p > 0) $ + s "if " + <> shows c + <> s " then " + <> shows t + <> s " else " + <> shows f + go p (And x y) = + showParen (p > 0) $ s "and " <> shows x <> s " " <> shows y + go p (Or x y) = + showParen (p > 0) $ s "or " <> shows x <> s " " <> shows y + (<>) = (.) + s = showString diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv.hs b/parser-typechecker/src/Unison/PrettyPrintEnv.hs index 8f622a59b6..7723dbf94e 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv.hs @@ -13,6 +13,7 @@ where import Unison.Prelude +import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.HashQualified ( HashQualified ) import Unison.Name ( Name ) import Unison.Reference ( Reference ) @@ -27,7 +28,7 @@ data PrettyPrintEnv = PrettyPrintEnv { -- names for types types :: Reference -> Maybe (HashQualified Name) } -patterns :: PrettyPrintEnv -> Reference -> Int -> Maybe (HashQualified Name) +patterns :: PrettyPrintEnv -> Reference -> ConstructorId -> Maybe (HashQualified Name) patterns ppe r cid = terms ppe (Referent.Con r cid CT.Data) <|>terms ppe (Referent.Con r cid CT.Effect) @@ -52,7 +53,7 @@ typeName :: PrettyPrintEnv -> Reference -> HashQualified Name typeName env r = fromMaybe (HQ.take todoHashLength $ HQ.fromReference r) (types env r) -patternName :: PrettyPrintEnv -> Reference -> Int -> HashQualified Name +patternName :: PrettyPrintEnv -> Reference -> ConstructorId -> HashQualified Name patternName env r cid = case patterns env r cid of Just name -> name diff --git a/parser-typechecker/src/Unison/Typechecker/TypeLookup.hs b/parser-typechecker/src/Unison/Typechecker/TypeLookup.hs index 161bf50b46..05598d6db7 100644 --- a/parser-typechecker/src/Unison/Typechecker/TypeLookup.hs +++ b/parser-typechecker/src/Unison/Typechecker/TypeLookup.hs @@ -2,14 +2,15 @@ module Unison.Typechecker.TypeLookup where import Unison.Prelude -import Unison.Reference (Reference) -import Unison.Referent (Referent, ConstructorId) -import Unison.Type (Type) import qualified Data.Map as Map import qualified Unison.ConstructorType as CT +import Unison.DataDeclaration (DataDeclaration, EffectDeclaration) import qualified Unison.DataDeclaration as DD -import Unison.DataDeclaration (EffectDeclaration, DataDeclaration) +import Unison.DataDeclaration.ConstructorId (ConstructorId) +import Unison.Reference (Reference) +import Unison.Referent (Referent) import qualified Unison.Referent as Referent +import Unison.Type (Type) -- Used for typechecking. data TypeLookup v a = diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index bcc81c35a8..b8bcbd844b 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -77,6 +77,7 @@ library Unison.Codebase.SqliteCodebase.Conversions Unison.Codebase.SqliteCodebase.GitError Unison.Codebase.SqliteCodebase.MigrateSchema12 + Unison.Codebase.SqliteCodebase.MigrateSchema12.DbHelpers Unison.Codebase.SqliteCodebase.SyncEphemeral Unison.Codebase.SyncMode Unison.Codebase.TermEdit diff --git a/unison-core/src/Unison/DataDeclaration.hs b/unison-core/src/Unison/DataDeclaration.hs index ff414f02fa..14c5d9089a 100644 --- a/unison-core/src/Unison/DataDeclaration.hs +++ b/unison-core/src/Unison/DataDeclaration.hs @@ -180,7 +180,7 @@ declFields = bimap cf cf . first toDataDecl fields _ = 0 typeOfConstructor :: DataDeclaration v a -> ConstructorId -> Maybe (Type v a) -typeOfConstructor dd i = constructorTypes dd `atMay` i +typeOfConstructor dd i = constructorTypes dd `atMay` fromIntegral i constructors :: DataDeclaration v a -> [(v, Type v a)] constructors (DataDeclaration _ _ _ ctors) = [(v,t) | (_,v,t) <- ctors ] @@ -200,7 +200,7 @@ declConstructorReferents rid decl = where ct = constructorType decl constructorIds :: DataDeclaration v a -> [ConstructorId] -constructorIds dd = [0 .. length (constructors dd) - 1] +constructorIds dd = [0 .. fromIntegral $ length (constructors dd) - 1] -- | All variables mentioned in the given data declaration. -- Includes both term and type variables, both free and bound. @@ -287,4 +287,3 @@ unhashComponent m amap :: (a -> a2) -> Decl v a -> Decl v a2 amap f (Left e) = Left (f <$> e) amap f (Right d) = Right (f <$> d) - diff --git a/unison-core/src/Unison/DataDeclaration/ConstructorId.hs b/unison-core/src/Unison/DataDeclaration/ConstructorId.hs index 0de60aed08..011effb5e5 100644 --- a/unison-core/src/Unison/DataDeclaration/ConstructorId.hs +++ b/unison-core/src/Unison/DataDeclaration/ConstructorId.hs @@ -1,11 +1,5 @@ -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# Language DeriveFoldable #-} -{-# Language DeriveTraversable #-} -{-# Language OverloadedStrings #-} -{-# Language PatternSynonyms #-} -{-# Language ViewPatterns #-} - module Unison.DataDeclaration.ConstructorId (ConstructorId) where -type ConstructorId = Int +import Data.Word (Word64) + +type ConstructorId = Word64 diff --git a/unison-core/src/Unison/HashQualified.hs b/unison-core/src/Unison/HashQualified.hs index 73c3a9084f..84d3b1bc79 100644 --- a/unison-core/src/Unison/HashQualified.hs +++ b/unison-core/src/Unison/HashQualified.hs @@ -9,11 +9,12 @@ import Unison.Prelude hiding (fromString) import qualified Data.Text as Text import Prelude hiding ( take ) +import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Name ( Name, Convert, Parse ) import qualified Unison.Name as Name import Unison.Reference ( Reference ) import qualified Unison.Reference as Reference -import Unison.Referent ( Referent, ConstructorId ) +import Unison.Referent ( Referent ) import qualified Unison.Referent as Referent import Unison.ShortHash ( ShortHash ) import qualified Unison.ShortHash as SH diff --git a/unison-core/src/Unison/LabeledDependency.hs b/unison-core/src/Unison/LabeledDependency.hs index 289d283fa8..dbdefa44a3 100644 --- a/unison-core/src/Unison/LabeledDependency.hs +++ b/unison-core/src/Unison/LabeledDependency.hs @@ -18,8 +18,9 @@ module Unison.LabeledDependency import Unison.Prelude hiding (fold) import Unison.ConstructorType (ConstructorType(Data, Effect)) +import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Reference (Reference(DerivedId), Id) -import Unison.Referent (Referent, pattern Ref, pattern Con, ConstructorId) +import Unison.Referent (Referent, pattern Ref, pattern Con) import qualified Data.Set as Set -- dumb constructor name is private diff --git a/unison-core/src/Unison/Names3.hs b/unison-core/src/Unison/Names3.hs index 4b0870254f..bfd2160829 100644 --- a/unison-core/src/Unison/Names3.hs +++ b/unison-core/src/Unison/Names3.hs @@ -6,6 +6,7 @@ import Unison.Prelude import Control.Lens (view, _4) import Data.List.Extra (nubOrd, sort) +import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.HashQualified (HashQualified) import qualified Unison.HashQualified as HQ import qualified Unison.HashQualified' as HQ' @@ -292,7 +293,7 @@ lookupHQPattern :: HQ.HashQualified Name -> CT.ConstructorType -> Names - -> Set (Reference, Int) + -> Set (Reference, ConstructorId) lookupHQPattern hq ctt names = Set.fromList [ (r, cid) | Referent.Con r cid ct <- toList $ lookupHQTerm hq names diff --git a/unison-core/src/Unison/Referent'.hs b/unison-core/src/Unison/Referent'.hs index 88df22d500..fbac1dee83 100644 --- a/unison-core/src/Unison/Referent'.hs +++ b/unison-core/src/Unison/Referent'.hs @@ -5,9 +5,7 @@ module Unison.Referent' where import Unison.ConstructorType (ConstructorType) import Unison.DataDeclaration.ConstructorId (ConstructorId) -import Unison.Hashable (Hashable (tokens)) -import qualified Unison.Hashable as H -import Unison.Prelude (Word64, Generic) +import Unison.Prelude -- | Specifies a term. -- @@ -45,7 +43,3 @@ fold :: (r -> a) -> (r -> ConstructorId -> ConstructorType -> a) -> Referent' r fold fr fc = \case Ref' r -> fr r Con' r i ct -> fc r i ct - -instance Hashable r => Hashable (Referent' r) where - tokens (Ref' r) = [H.Tag 0] ++ H.tokens r - tokens (Con' r i dt) = [H.Tag 2] ++ H.tokens r ++ H.tokens (fromIntegral i :: Word64) ++ H.tokens dt diff --git a/unison-core/src/Unison/Referent.hs b/unison-core/src/Unison/Referent.hs index 639a423635..5f1bb909f0 100644 --- a/unison-core/src/Unison/Referent.hs +++ b/unison-core/src/Unison/Referent.hs @@ -5,7 +5,6 @@ module Unison.Referent ( Referent, pattern Ref, pattern Con, - ConstructorId, Id, pattern RefId, pattern ConId, @@ -71,7 +70,7 @@ toShortHash = \case Con r i _ -> patternShortHash r i -- also used by HashQualified.fromPattern -patternShortHash :: Reference -> Int -> ShortHash +patternShortHash :: Reference -> ConstructorId -> ShortHash patternShortHash r i = (R.toShortHash r) { SH.cid = Just . Text.pack $ show i } toText :: Referent -> Text @@ -119,7 +118,7 @@ fromText t = either (const Nothing) Just $ cidPart' = Text.takeWhileEnd (/= '#') t cidPart = Text.drop 1 cidPart' -fold :: (r -> a) -> (r -> Int -> ConstructorType -> a) -> Referent' r -> a +fold :: (r -> a) -> (r -> ConstructorId -> ConstructorType -> a) -> Referent' r -> a fold fr fc = \case Ref' r -> fr r Con' r i ct -> fc r i ct diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index 5af7c9d073..c0900c86ff 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -25,6 +25,7 @@ import Prelude.Extras (Eq1(..), Show1(..)) import Text.Show import qualified Unison.ABT as ABT import qualified Unison.Blank as B +import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Names3 ( Names0 ) import qualified Unison.Names3 as Names import qualified Unison.Names.ResolutionResult as Names @@ -32,7 +33,7 @@ import Unison.Pattern (Pattern) import qualified Unison.Pattern as Pattern import Unison.Reference (Reference, pattern Builtin) import qualified Unison.Reference as Reference -import Unison.Referent (Referent, ConstructorId) +import Unison.Referent (Referent) import qualified Unison.Referent as Referent import Unison.Type (Type) import qualified Unison.Type as Type From 8c171528bd89ddaf43b54c5eec197b5623014cd9 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 29 Oct 2021 20:55:21 -0400 Subject: [PATCH 042/297] use ConstructorId in place of Int in many places --- .../SqliteCodebase/MigrateSchema12.hs | 30 ++++++++----------- parser-typechecker/src/Unison/DeclPrinter.hs | 3 +- parser-typechecker/src/Unison/Parser.hs | 5 ++-- parser-typechecker/src/Unison/PrintError.hs | 3 +- parser-typechecker/src/Unison/Runtime/ANF.hs | 24 +++++++-------- .../src/Unison/Runtime/Builtin.hs | 24 +++++++-------- .../src/Unison/Runtime/Foreign/Function.hs | 9 +++--- .../src/Unison/Runtime/IOSource.hs | 1 + .../src/Unison/Runtime/Pattern.hs | 10 +++---- parser-typechecker/src/Unison/TermParser.hs | 3 +- .../src/Unison/Typechecker/Context.hs | 16 +++++----- parser-typechecker/tests/Unison/Test/ANF.hs | 4 +-- 12 files changed, 65 insertions(+), 67 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index e32106a9b8..3647d42249 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -1,29 +1,24 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE PartialTypeSignatures #-} -{-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where import Control.Lens import Control.Monad.Except (runExceptT, ExceptT) -import Control.Monad.Reader (MonadReader, ReaderT (runReaderT), ask) +import Control.Monad.Reader (ReaderT (runReaderT), ask) import Control.Monad.State.Strict import Control.Monad.Trans.Except (throwE) -import Control.Monad.Trans.Writer.CPS (Writer, execWriter, execWriterT, runWriterT, tell) +import Control.Monad.Trans.Writer.CPS (Writer, execWriter, tell) import Data.Generics.Product -import Data.Generics.Sum import Data.List.Extra (nubOrd) import qualified Data.Map as Map import qualified U.Codebase.Sqlite.Operations as Ops import Data.Tuple (swap) import qualified Data.Zip as Zip import U.Codebase.Sqlite.Connection (Connection) -import U.Codebase.Sqlite.DbId (CausalHashId, ObjectId) -import U.Codebase.Sqlite.ObjectType (ObjectType) -import qualified U.Codebase.Sqlite.ObjectType as OT +import U.Codebase.Sqlite.DbId (ObjectId) import qualified U.Codebase.Sqlite.Reference as S.Reference import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync @@ -32,6 +27,7 @@ import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) import qualified Unison.Codebase as Codebase import qualified Unison.DataDeclaration as DD +import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Hash (Hash) import qualified Unison.Hash as Unison import qualified Unison.Hashing.V2.Convert as Convert @@ -49,8 +45,6 @@ import qualified U.Codebase.Causal as C import qualified U.Codebase.Sqlite.Branch.Full as S import qualified U.Codebase.Reference as UReference import qualified U.Codebase.Referent as UReferent -import Numeric.Lens (integral) -import Control.Lens.Unsound (adjoin) -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -212,10 +206,10 @@ runDB conn = (runExceptT >=> err) . flip runReaderT conn -- loadCausalBranchByCausalHash :: EDB m => CausalHash -> m (Maybe (C.Branch.Causal m)) migrateCausal :: MonadIO m => Connection -> CausalHash -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateCausal conn causalHash = runDB conn $ do - C.Causal{..} <- Ops.loadCausalBranchByCausalHash causalHash >>= \case + C.Causal{} <- Ops.loadCausalBranchByCausalHash causalHash >>= \case Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash Just c -> pure c - migratedCausals <- gets causalMapping + _migratedCausals <- gets causalMapping -- Plan: -- * Load a C.Causal -- * Ensure its parent causals and branch (value hash) have been migrated @@ -252,7 +246,7 @@ migrateCausal conn causalHash = runDB conn $ do -- } migrateBranch :: Monad m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateBranch conn objectID = fmap (either id id) . runExceptT $ do +migrateBranch _conn _objectID = fmap (either id id) . runExceptT $ do -- note for tomorrow: we want to just load the (Branch m) instead, forget the DbBranch -- dbBranch <- Ops.loadDbBranchByObjectId objectId @@ -280,7 +274,7 @@ migrateBranch conn objectID = fmap (either id id) . runExceptT $ do -- Migrate branch let oldDBBranch :: S.DbBranch = undefined - newBranch <- oldDBBranch & dbBranchObjRefs_ %%~ remapObjIdRefs + _newBranch <- oldDBBranch & dbBranchObjRefs_ %%~ remapObjIdRefs -- Need to generalize the traversal, and also generalize the Id backing "SomeReference" -- newBranch <- oldDBBranch & dbBranchObjRefs_ %%~ objIdsToHashed @@ -482,10 +476,10 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do (componentIDMap Map.! oldReferenceId) & DD.asDataDecl & DD.constructors' - & imap (\constructorId (_ann, name, _type) -> (name, constructorId)) + & imap (\(fromIntegral -> constructorId) (_ann, name, _type) -> (name, constructorId)) & Map.fromList - ifor_ (DD.constructors' (DD.asDataDecl dd)) \newConstructorId (_ann, name, _type) -> do + ifor_ (DD.constructors' (DD.asDataDecl dd)) \(fromIntegral -> newConstructorId) (_ann, name, _type) -> do field @"referenceMapping" %= Map.insert (ConstructorReference oldReferenceId (oldConstructorIds Map.! name)) @@ -686,7 +680,7 @@ someReferenceIdToEntity = undefined -- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure migrateSchema12 :: Applicative m => Connection -> m Bool -migrateSchema12 db = do +migrateSchema12 _db = do -- todo: drop and recreate corrected type/mentions index schema -- do we want to garbage collect at this time? ✅ -- or just convert everything without going in dependency order? ✅ diff --git a/parser-typechecker/src/Unison/DeclPrinter.hs b/parser-typechecker/src/Unison/DeclPrinter.hs index 901f4bf5dd..98efa3076f 100644 --- a/parser-typechecker/src/Unison/DeclPrinter.hs +++ b/parser-typechecker/src/Unison/DeclPrinter.hs @@ -12,6 +12,7 @@ import Unison.DataDeclaration ( DataDeclaration , toDataDecl ) import qualified Unison.DataDeclaration as DD +import Unison.DataDeclaration.ConstructorId (ConstructorId) import qualified Unison.ConstructorType as CT import qualified Unison.Hashing.V2.Convert as Hashing import Unison.HashQualified ( HashQualified ) @@ -77,7 +78,7 @@ prettyPattern -> CT.ConstructorType -> Reference -> HashQualified Name - -> Int + -> ConstructorId -> Pretty SyntaxText prettyPattern env ctorType ref namespace cid = styleHashQualified'' (fmt (S.Referent conRef)) diff --git a/parser-typechecker/src/Unison/Parser.hs b/parser-typechecker/src/Unison/Parser.hs index 0d57784a08..a354e11558 100644 --- a/parser-typechecker/src/Unison/Parser.hs +++ b/parser-typechecker/src/Unison/Parser.hs @@ -36,6 +36,7 @@ import Text.Megaparsec (runParserT) import qualified Text.Megaparsec as P import qualified Text.Megaparsec.Char as P import qualified Unison.ABT as ABT +import Unison.DataDeclaration.ConstructorId (ConstructorId) import qualified Unison.Hash as Hash import qualified Unison.HashQualified as HQ import qualified Unison.Lexer as L @@ -106,8 +107,8 @@ data Error v = SignatureNeedsAccompanyingBody (L.Token v) | DisallowedAbsoluteName (L.Token Name) | EmptyBlock (L.Token String) - | UnknownAbilityConstructor (L.Token (HQ.HashQualified Name)) (Set (Reference, Int)) - | UnknownDataConstructor (L.Token (HQ.HashQualified Name)) (Set (Reference, Int)) + | UnknownAbilityConstructor (L.Token (HQ.HashQualified Name)) (Set (Reference, ConstructorId)) + | UnknownDataConstructor (L.Token (HQ.HashQualified Name)) (Set (Reference, ConstructorId)) | UnknownTerm (L.Token (HQ.HashQualified Name)) (Set Referent) | UnknownType (L.Token (HQ.HashQualified Name)) (Set Reference) | UnknownId (L.Token (HQ.HashQualified Name)) (Set Referent) (Set Reference) diff --git a/parser-typechecker/src/Unison/PrintError.hs b/parser-typechecker/src/Unison/PrintError.hs index 0a53308254..a6e1dbaad2 100644 --- a/parser-typechecker/src/Unison/PrintError.hs +++ b/parser-typechecker/src/Unison/PrintError.hs @@ -20,6 +20,7 @@ import Data.Void (Void) import qualified Text.Megaparsec as P import qualified Unison.ABT as ABT import Unison.Builtin.Decls (pattern TupleType') +import Unison.DataDeclaration.ConstructorId (ConstructorId) import qualified Unison.HashQualified as HQ import Unison.Kind (Kind) import qualified Unison.Kind as Kind @@ -909,7 +910,7 @@ showTypeRef :: IsString s => Env -> R.Reference -> s showTypeRef env r = fromString . HQ.toString $ PPE.typeName env r -- todo: do something different/better if cid not found -showConstructor :: IsString s => Env -> R.Reference -> Int -> s +showConstructor :: IsString s => Env -> R.Reference -> ConstructorId -> s showConstructor env r cid = fromString . HQ.toString $ PPE.patternName env r cid diff --git a/parser-typechecker/src/Unison/Runtime/ANF.hs b/parser-typechecker/src/Unison/Runtime/ANF.hs index 163f6cd14e..70d29c250f 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF.hs @@ -328,10 +328,10 @@ saturate :: (Var v, Monoid a) => Map (Reference,Int) Int -> Term v a -> Term v a saturate dat = ABT.visitPure $ \case - Apps' f@(Constructor' r t) args -> sat r t f args - Apps' f@(Request' r t) args -> sat r t f args - f@(Constructor' r t) -> sat r t f [] - f@(Request' r t) -> sat r t f [] + Apps' f@(Constructor' r t) args -> sat r (fromIntegral t) f args + Apps' f@(Request' r t) args -> sat r (fromIntegral t) f args + f@(Constructor' r t) -> sat r (fromIntegral t) f [] + f@(Request' r t) -> sat r (fromIntegral t) f [] _ -> Nothing where frsh avoid _ = @@ -1022,8 +1022,8 @@ anfBlock (Match' scrut cas) = do , pure . TMatch r $ MatchDataCover Ty.seqViewRef (EC.mapFromList - [ (toEnum Ty.seqViewEmpty, ([], em)) - , (toEnum Ty.seqViewElem, ([BX,BX], bd)) + [ (fromIntegral Ty.seqViewEmpty, ([], em)) + , (fromIntegral Ty.seqViewElem, ([BX,BX], bd)) ] ) ) @@ -1072,9 +1072,9 @@ anfBlock (Apps' f args) = do (actx, cas) <- anfArgs args pure (fctx <> actx, (d, TApp cf cas)) anfBlock (Constructor' r t) - = pure (mempty, pure $ TCon r (toEnum t) []) + = pure (mempty, pure $ TCon r (fromIntegral t) []) anfBlock (Request' r t) - = pure (mempty, (Indirect (), TReq r (toEnum t) [])) + = pure (mempty, (Indirect (), TReq r (fromIntegral t) [])) anfBlock (Boolean' b) = pure (mempty, pure $ TCon Ty.booleanRef (if b then 1 else 0) []) anfBlock (Lit' l@(T _)) = @@ -1136,7 +1136,7 @@ anfInitCase u (MatchCase p guard (ABT.AbsN' vs bd)) | P.Constructor _ r t ps <- p = do (,) <$> expandBindings ps vs <*> anfBody bd <&> \(us,bd) -> AccumData r Nothing - . EC.mapSingleton (toEnum t) + . EC.mapSingleton (fromIntegral t) . (BX<$us,) . ABTN.TAbss us $ bd @@ -1154,7 +1154,7 @@ anfInitCase u (MatchCase p guard (ABT.AbsN' vs bd)) jn = Builtin "jumpCont" in flip AccumRequest Nothing . Map.singleton r - . EC.mapSingleton (toEnum t) + . EC.mapSingleton (fromIntegral t) . (BX<$us,) . ABTN.TAbss us . TShift r kf @@ -1297,8 +1297,8 @@ anfCases u = getCompose . fmap fold . traverse (anfInitCase u) anfFunc :: Var v => Term v a -> ANFM v (Ctx v, Directed () (Func v)) anfFunc (Var' v) = pure (mempty, (Indirect (), FVar v)) anfFunc (Ref' r) = pure (mempty, (Indirect (), FComb r)) -anfFunc (Constructor' r t) = pure (mempty, (Direct, FCon r $ toEnum t)) -anfFunc (Request' r t) = pure (mempty, (Indirect (), FReq r $ toEnum t)) +anfFunc (Constructor' r t) = pure (mempty, (Direct, FCon r $ fromIntegral t)) +anfFunc (Request' r t) = pure (mempty, (Indirect (), FReq r $ fromIntegral t)) anfFunc tm = do (fctx, ctm) <- anfBlock tm (cx, v) <- contextualize ctm diff --git a/parser-typechecker/src/Unison/Runtime/Builtin.hs b/parser-typechecker/src/Unison/Runtime/Builtin.hs index 13700356b1..4ce74e0c47 100644 --- a/parser-typechecker/src/Unison/Runtime/Builtin.hs +++ b/parser-typechecker/src/Unison/Runtime/Builtin.hs @@ -185,15 +185,15 @@ fls = TCon Ty.booleanRef 0 [] tru = TCon Ty.booleanRef 1 [] none :: Var v => ANormal v -none = TCon Ty.optionalRef (toEnum Ty.noneId) [] +none = TCon Ty.optionalRef (fromIntegral Ty.noneId) [] some, left, right :: Var v => v -> ANormal v -some a = TCon Ty.optionalRef (toEnum Ty.someId) [a] -left x = TCon Ty.eitherRef (toEnum Ty.eitherLeftId) [x] -right x = TCon Ty.eitherRef (toEnum Ty.eitherRightId) [x] +some a = TCon Ty.optionalRef (fromIntegral Ty.someId) [a] +left x = TCon Ty.eitherRef (fromIntegral Ty.eitherLeftId) [x] +right x = TCon Ty.eitherRef (fromIntegral Ty.eitherRightId) [x] seqViewEmpty :: Var v => ANormal v -seqViewEmpty = TCon Ty.seqViewRef (toEnum Ty.seqViewEmpty) [] +seqViewEmpty = TCon Ty.seqViewRef (fromIntegral Ty.seqViewEmpty) [] seqViewElem :: Var v => v -> v -> ANormal v -seqViewElem l r = TCon Ty.seqViewRef (toEnum Ty.seqViewElem) [l,r] +seqViewElem l r = TCon Ty.seqViewRef (fromIntegral Ty.seqViewElem) [l,r] boolift :: Var v => v -> ANormal v boolift v @@ -825,10 +825,10 @@ seek'handle instr (arg1, arg2, arg3, seek, nat, stack1, stack2, stack3, unit, fail, result) = fresh11 no'buf, line'buf, block'buf, sblock'buf :: Enum e => e -no'buf = toEnum Ty.bufferModeNoBufferingId -line'buf = toEnum Ty.bufferModeLineBufferingId -block'buf = toEnum Ty.bufferModeBlockBufferingId -sblock'buf = toEnum Ty.bufferModeSizedBlockBufferingId +no'buf = toEnum $ fromIntegral Ty.bufferModeNoBufferingId +line'buf = toEnum $ fromIntegral Ty.bufferModeLineBufferingId +block'buf = toEnum $ fromIntegral Ty.bufferModeBlockBufferingId +sblock'buf = toEnum $ fromIntegral Ty.bufferModeSizedBlockBufferingId infixr 0 --> (-->) :: a -> b -> (a, b) @@ -938,9 +938,9 @@ inMaybeBx arg1 arg2 arg3 mb result cont instr = . TAbss [arg1, arg2] . TMatch arg1 . flip (MatchData Ty.optionalRef) Nothing $ mapFromList - [ (toEnum Ty.noneId, ([], TLetD mb UN (TLit $ I 0) + [ (fromIntegral Ty.noneId, ([], TLetD mb UN (TLit $ I 0) $ TLetD result UN (TFOp instr [mb, arg2]) cont)) - , (toEnum Ty.someId, ([BX], TAbs arg3 . TLetD mb UN (TLit $ I 1) $ TLetD result UN (TFOp instr [mb, arg3, arg2]) cont)) + , (fromIntegral Ty.someId, ([BX], TAbs arg3 . TLetD mb UN (TLit $ I 1) $ TLetD result UN (TFOp instr [mb, arg3, arg2]) cont)) ] -- a -> b -> ... diff --git a/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs b/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs index 826f938357..f89f4c32b2 100644 --- a/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs +++ b/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs @@ -300,10 +300,10 @@ instance ( ForeignConvention a writeForeign ustk bstk a no'buf, line'buf, block'buf, sblock'buf :: Int -no'buf = Ty.bufferModeNoBufferingId -line'buf = Ty.bufferModeLineBufferingId -block'buf = Ty.bufferModeBlockBufferingId -sblock'buf = Ty.bufferModeSizedBlockBufferingId +no'buf = fromIntegral Ty.bufferModeNoBufferingId +line'buf = fromIntegral Ty.bufferModeLineBufferingId +block'buf = fromIntegral Ty.bufferModeBlockBufferingId +sblock'buf = fromIntegral Ty.bufferModeSizedBlockBufferingId instance ForeignConvention BufferMode where readForeign (i:us) bs ustk bstk @@ -382,4 +382,3 @@ instance {-# overlappable #-} BuiltinForeign b => ForeignConvention [b] foreignCCError :: String -> IO a foreignCCError nm = die $ "mismatched foreign calling convention for `" ++ nm ++ "`" - diff --git a/parser-typechecker/src/Unison/Runtime/IOSource.hs b/parser-typechecker/src/Unison/Runtime/IOSource.hs index 9a07526fb7..132e52142d 100644 --- a/parser-typechecker/src/Unison/Runtime/IOSource.hs +++ b/parser-typechecker/src/Unison/Runtime/IOSource.hs @@ -272,6 +272,7 @@ constructorNamed ref name = $ "There's a bug in the Unison runtime. Couldn't find type " <> show ref Just decl -> + fromIntegral . fromMaybe ( error $ "Unison runtime bug. The type " diff --git a/parser-typechecker/src/Unison/Runtime/Pattern.hs b/parser-typechecker/src/Unison/Runtime/Pattern.hs index f5939eee28..e7c8047d0a 100644 --- a/parser-typechecker/src/Unison/Runtime/Pattern.hs +++ b/parser-typechecker/src/Unison/Runtime/Pattern.hs @@ -160,7 +160,7 @@ decomposePattern rf0 t _ (P.Boolean _ b) , t == if b then 1 else 0 = [[]] decomposePattern rf0 t nfields p@(P.Constructor _ rf u ps) - | t == u + | t == fromIntegral u , rf0 == rf = if length ps == nfields then [ps] @@ -169,7 +169,7 @@ decomposePattern rf0 t nfields p@(P.Constructor _ rf u ps) err = "decomposePattern: wrong number of constructor fields: " ++ show (nfields, p) decomposePattern rf0 t nfields p@(P.EffectBind _ rf u ps pk) - | t == u + | t == fromIntegral u , rf0 == rf = if length ps + 1 == nfields then [ps ++ [pk]] @@ -557,8 +557,8 @@ preparePattern p = prepareAs p =<< freshVar buildPattern :: Bool -> Reference -> ConstructorId -> [v] -> Int -> P.Pattern () buildPattern effect r t vs nfields | effect, [] <- vps = internalBug "too few patterns for effect bind" - | effect = P.EffectBind () r t (init vps) (last vps) - | otherwise = P.Constructor () r t vps + | effect = P.EffectBind () r (fromIntegral t) (init vps) (last vps) + | otherwise = P.Constructor () r (fromIntegral t) vps where vps | length vs < nfields = replicate nfields $ P.Unbound () @@ -661,7 +661,7 @@ buildCase buildCase spec r eff cons ctx0 (t, vts, m) = MatchCase pat Nothing . absChain' vs $ compile spec ctx m where - pat = buildPattern eff r t vs $ cons !! t + pat = buildPattern eff r (fromIntegral t) vs $ cons !! t vs = ((),) . fst <$> vts ctx = Map.fromList vts <> ctx0 diff --git a/parser-typechecker/src/Unison/TermParser.hs b/parser-typechecker/src/Unison/TermParser.hs index 06bca71fda..45a7200c42 100644 --- a/parser-typechecker/src/Unison/TermParser.hs +++ b/parser-typechecker/src/Unison/TermParser.hs @@ -34,6 +34,7 @@ import qualified Data.Sequence as Sequence import qualified Text.Megaparsec as P import qualified Unison.ABT as ABT import qualified Unison.Builtin.Decls as DD +import Unison.DataDeclaration.ConstructorId (ConstructorId) import qualified Unison.ConstructorType as CT import qualified Unison.HashQualified as HQ import qualified Unison.Lexer as L @@ -215,7 +216,7 @@ parsePattern = root else pure (Pattern.Var (ann v), [tokenToPair v]) unbound :: P v (Pattern Ann, [(Ann, v)]) unbound = (\tok -> (Pattern.Unbound (ann tok), [])) <$> blank - ctor :: CT.ConstructorType -> _ -> P v (L.Token (Reference, Int)) + ctor :: CT.ConstructorType -> _ -> P v (L.Token (Reference, ConstructorId)) ctor ct err = do -- this might be a var, so we avoid consuming it at first tok <- P.try (P.lookAhead hqPrefixId) diff --git a/parser-typechecker/src/Unison/Typechecker/Context.hs b/parser-typechecker/src/Unison/Typechecker/Context.hs index 15db6377ca..56fdf2e3ab 100644 --- a/parser-typechecker/src/Unison/Typechecker/Context.hs +++ b/parser-typechecker/src/Unison/Typechecker/Context.hs @@ -68,6 +68,7 @@ import Unison.DataDeclaration ( DataDeclaration , EffectDeclaration ) import qualified Unison.DataDeclaration as DD +import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Pattern ( Pattern ) import qualified Unison.Pattern as Pattern import Unison.Reference ( Reference ) @@ -107,7 +108,7 @@ universal' a v = ABT.annotatedVar a (TypeVar.Universal v) -- | Elements of an ordered algorithmic context data Element v loc -- | A variable declaration - = Var (TypeVar v loc) + = Var (TypeVar v loc) -- | `v` is solved to some monotype | Solved (B.Blank loc) v (Monotype v loc) -- | `v` has type `a`, maybe quantified @@ -210,7 +211,7 @@ data Unknown = Data | Effect deriving Show data CompilerBug v loc = UnknownDecl Unknown Reference (Map Reference (DataDeclaration v loc)) - | UnknownConstructor Unknown Reference Int (DataDeclaration v loc) + | UnknownConstructor Unknown Reference ConstructorId (DataDeclaration v loc) | UndeclaredTermVariable v (Context v loc) | RetractFailure (Element v loc) (Context v loc) | EmptyLetRec (Term v loc) -- the body of the empty let rec @@ -247,7 +248,6 @@ data PathElement v loc type ExpectedArgCount = Int type ActualArgCount = Int -type ConstructorId = Int data SuggestionMatch = Exact | WrongType | WrongName deriving (Ord, Eq, Show) @@ -708,10 +708,10 @@ getEffectDeclaration r = do liftResult . typeError $ DataEffectMismatch Data r decl Just decl -> pure decl -getDataConstructorType :: (Var v, Ord loc) => Reference -> Int -> M v loc (Type v loc) +getDataConstructorType :: (Var v, Ord loc) => Reference -> ConstructorId -> M v loc (Type v loc) getDataConstructorType = getConstructorType' Data getDataDeclaration -getEffectConstructorType :: (Var v, Ord loc) => Reference -> Int -> M v loc (Type v loc) +getEffectConstructorType :: (Var v, Ord loc) => Reference -> ConstructorId -> M v loc (Type v loc) getEffectConstructorType = getConstructorType' Effect go where go r = DD.toDataDecl <$> getEffectDeclaration r @@ -721,11 +721,11 @@ getConstructorType' :: Var v => Unknown -> (Reference -> M v loc (DataDeclaration v loc)) -> Reference - -> Int + -> ConstructorId -> M v loc (Type v loc) getConstructorType' kind get r cid = do decl <- get r - case drop cid (DD.constructors decl) of + case drop (fromIntegral cid) (DD.constructors decl) of [] -> compilerCrash $ UnknownConstructor kind r cid decl (_v, typ) : _ -> pure $ ABT.vmap TypeVar.Universal typ @@ -1160,7 +1160,7 @@ checkCases scrutType outType cases@(Term.MatchCase _ _ t : _) coalesceWanteds =<< traverse (checkCase scrutType' outType) cases getEffect - :: Var v => Ord loc => Reference -> Int -> M v loc (Type v loc) + :: Var v => Ord loc => Reference -> ConstructorId -> M v loc (Type v loc) getEffect ref cid = do ect <- getEffectConstructorType ref cid uect <- ungeneralize ect diff --git a/parser-typechecker/tests/Unison/Test/ANF.hs b/parser-typechecker/tests/Unison/Test/ANF.hs index fd997a856b..d2a32abc97 100644 --- a/parser-typechecker/tests/Unison/Test/ANF.hs +++ b/parser-typechecker/tests/Unison/Test/ANF.hs @@ -146,7 +146,7 @@ denormalizeMatch b ipat r _ i | r == Ty.natRef = P.Nat () $ fromIntegral i | otherwise = P.Int () $ fromIntegral i - dpat r n t = P.Constructor () r (fromEnum t) (replicate n $ P.Var ()) + dpat r n t = P.Constructor () r (fromIntegral (fromEnum t)) (replicate n $ P.Var ()) denormalizeBranch :: (Num a, Var v) => Term ANormalF v -> (a, ABT.Term (Term.F v () ()) v ()) @@ -170,7 +170,7 @@ denormalizeHandler cs df = dcs where (_, db) = denormalizeBranch df rf r rcs = foldMapWithKey (cf r) rcs cf r t b = [ Term.MatchCase - (P.EffectBind () r (fromEnum t) + (P.EffectBind () r (fromIntegral (fromEnum t)) (replicate n $ P.Var ()) (P.Var ())) Nothing db From 0f87f184cbac3602e38001c401842c306cec94ac Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 1 Nov 2021 11:39:44 -0600 Subject: [PATCH 043/297] Update migrateCausal --- .../U/Codebase/Sqlite/Causal.hs | 25 ++++- .../SqliteCodebase/MigrateSchema12.hs | 99 +++++++++++++++---- .../src/Unison/Hashing/V2/Causal.hs | 4 +- 3 files changed, 102 insertions(+), 26 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs index 08421a3bb1..c9b6c2fb1f 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs @@ -2,11 +2,29 @@ module U.Codebase.Sqlite.Causal where import Unison.Prelude +import U.Codebase.Sqlite.DbId (CausalHashId, BranchHashId) -data GDbCausal causalHash valueHash = - DbCausal { selfHash :: causalHash, valueHash :: valueHash, parents :: Set causalHash } --- type DbCausal = GDbCausal CausalHashId BranchHashId +data GDbCausal causalHash valueHash = DbCausal + { selfHash :: causalHash, + valueHash :: valueHash, + parents :: Set causalHash + } + +-- Causal Plan +-- * Load a DbCausal (how do we do this) +-- => new function Queries.localCausalByCausalHashId, can model after loadCausalByCausalHash or factor out of +-- * Add valueHashId as a dependency if unmigrated +-- * Add parent causal hash ids as dependencies if unmigrated + -- => Queries.loadCausalParents +-- * Map over Branch hash IDs +-- * Inside saveDBCausal (new / factored out of original) +-- * Save as a new self-hash +-- ==> Queries.saveCausal +-- * Map over parent causal hash IDs +-- ==> Queries.saveCausalParents + +type DbCausal = GDbCausal CausalHashId BranchHashId -- causalHashes_ :: Traversal (GDbCausal ch vh) (GDbCausal ch' vh) ch ch' -- causalHashes_ f DbCausal {..} = @@ -16,7 +34,6 @@ data GDbCausal causalHash valueHash = -- valueHashes_ f DbCausal{..} = -- (\p vh -> DbCausal selfHash vh p) parents <$> f valueHash - -- data Causal m hc he e = Causal -- { causalHash :: hc, -- valueHash :: he, diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 3647d42249..e14c3a906f 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -18,7 +18,8 @@ import qualified U.Codebase.Sqlite.Operations as Ops import Data.Tuple (swap) import qualified Data.Zip as Zip import U.Codebase.Sqlite.Connection (Connection) -import U.Codebase.Sqlite.DbId (ObjectId) +import U.Codebase.Sqlite.DbId (ObjectId, BranchHashId (unBranchHashId, BranchHashId), CausalHashId (CausalHashId, unCausalHashId), HashId (HashId)) +import U.Codebase.Sqlite.Causal (DbCausal, GDbCausal(..)) import qualified U.Codebase.Sqlite.Reference as S.Reference import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync @@ -40,11 +41,18 @@ import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) -import U.Codebase.HashTags (CausalHash) +import U.Codebase.HashTags (CausalHash (CausalHash, unCausalHash)) import qualified U.Codebase.Causal as C import qualified U.Codebase.Sqlite.Branch.Full as S import qualified U.Codebase.Reference as UReference import qualified U.Codebase.Referent as UReferent +import qualified U.Codebase.Sqlite.Causal as SC +import qualified U.Codebase.Sqlite.Queries as Q +import qualified Data.Set as Set +import qualified U.Util.Monoid as Monoid +import qualified Unison.Hashing.V2.Causal as Hashing +import qualified Unison.Hashable as Hashable +import Data.Maybe -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -77,14 +85,13 @@ data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. { -- declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), - causalMapping :: Map (Old CausalHash) (New CausalHash), + causalMapping :: Map (Old CausalHashId) (New (CausalHash, CausalHashId)), -- Mapping between contructor indexes for the type identified by (ObjectId, Pos) ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), -- This provides the info needed for rewriting a term. You'll access it with a function :: Old termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), - objLookup :: Map (Old ObjectId) (New ObjectId), - hashByObj :: Map (Old ObjectId) (Old Hash), + objLookup :: Map (Old ObjectId) (New (ObjectId, HashId, Hash)), -- componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), @@ -125,7 +132,7 @@ data Y = MkY Int data Entity = TComponent Unison.Hash | DComponent Unison.Hash - | C CausalHash + | C CausalHashId -- haven't proven we need these yet | B ObjectId | Patch ObjectId @@ -186,9 +193,9 @@ migrationSync = Sync \case B objectId -> do Env{db} <- ask lift (migrateBranch db objectId) - C causalHash -> do + C causalHashId -> do Env{db} <- ask - lift (migrateCausal db causalHash) + lift (migrateCausal db causalHashId) -- To sync a watch result, -- 1. ??? -- 2. Synced @@ -204,19 +211,71 @@ runDB conn = (runExceptT >=> err) . flip runReaderT conn err = \case Left err -> error $ show err; Right a -> pure a -- loadCausalBranchByCausalHash :: EDB m => CausalHash -> m (Maybe (C.Branch.Causal m)) -migrateCausal :: MonadIO m => Connection -> CausalHash -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateCausal conn causalHash = runDB conn $ do - C.Causal{} <- Ops.loadCausalBranchByCausalHash causalHash >>= \case - Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash - Just c -> pure c - _migratedCausals <- gets causalMapping +-- +-- Causal Plan +-- * Load a DbCausal (how do we do this) +-- => new function Queries.localCausalByCausalHashId, can model after loadCausalByCausalHash or factor out of +-- * Add valueHashId's ObjectId as a dependency if unmigrated +-- * Add parent causal hash ids as dependencies if unmigrated + -- => Queries.loadCausalParents +-- * Map over Branch hash IDs +-- * Inside saveDBCausal (new / factored out of original) +-- * Save as a new self-hash +-- ==> Queries.saveCausal +-- * Map over parent causal hash IDs +-- ==> Queries.saveCausalParents +migrateCausal :: MonadIO m => Connection -> CausalHashId -> StateT MigrationState m (Sync.TrySyncResult Entity) +migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExceptT $ do + -- C.Causal{} <- lift $ Ops.loadCausalBranchByCausalHash causalHash >>= \case + -- Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash + -- Just c -> pure c + let oldCausal :: SC.DbCausal + oldCausal = undefined + + let branchHashId = (SC.valueHash oldCausal) + -- This fails if the object for the branch doesn't exist, CHECK: we currently expect + -- this to always be true? + branchObjId <- Q.maybeObjectIdForPrimaryHashId (unBranchHashId branchHashId) >>= \case + Nothing -> error $ "Expected branch object ID but didn't exist for hashId:" <> show branchHashId + Just objId -> pure objId + + migratedObjIds <- gets objLookup + -- If the branch for this causal hasn't been migrated, migrate it first. + let unmigratedBranch = if (branchObjId `Map.notMember` migratedObjIds) then [B branchObjId] + else [] + + migratedCausals <- gets causalMapping + let unmigratedParents = map C . filter (`Map.member` migratedCausals) . Set.toList . SC.parents $ oldCausal + let unmigratedEntities = unmigratedBranch <> unmigratedParents + when (not . null $ unmigratedParents <> unmigratedBranch) (throwE $ Sync.Missing unmigratedEntities) + + (_newBranchObjId, _newBranchHashId, newBranchHash) <- gets (\MigrationState{..} -> objLookup Map.! branchObjId) + + newParentHashes <- Set.fromList <$> for (Set.toList . SC.parents $ oldCausal) \oldParentHashId -> do + unCausalHash . fst <$> gets (\MigrationState{..} -> causalMapping Map.! oldParentHashId) + + let newCausalHash :: Hash + newCausalHash = Hashable.accumulate $ Hashing.hashCausal (Hashing.Causal {branchHash=newBranchHash, parents=newParentHashes}) + let newCausal = + case oldCausal of + DbCausal {..} -> + DbCausal + { selfHash = undefined, + valueHash = BranchHashId $ view _2 (migratedObjIds Map.! branchObjId), + parents = Set.map (snd . (\old -> migratedCausals Map.! old)) $ parents + } + -- let newCausalHash = ??? + Q.saveCausal (SC.selfHash newCausal) (SC.valueHash newCausal) + Q.saveCausalParents (SC.selfHash newCausal) (Set.toList $ SC.parents newCausal) + undefined + + -- Plan: -- * Load a C.Causal - -- * Ensure its parent causals and branch (value hash) have been migrated + -- * Ensure its parent causals and branch (value hash) have been migrated ✅ -- * Rewrite the value-hash and parent causal hashes -- * Save the new causal (Do we change the self-hash?) - -- - -- let unMigratedParents = + -- * Save Causal Hash mapping to skymap @@ -565,17 +624,17 @@ type SomeReferenceObjId = SomeReference (UReference.Id' ObjectId) objIdsToHashed :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceId objIdsToHashed = someRef_ %%~ \(UReference.Id objId pos) -> do - objMapping <- gets hashByObj + objMapping <- gets objLookup case Map.lookup objId objMapping of Nothing -> error $ "Expected object mapping for ID: " <> show objId - Just hash -> pure (Reference.Id hash pos) + Just (_, _, hash) -> pure (Reference.Id hash pos) remapObjIdRefs :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceObjId remapObjIdRefs = someRef_ %%~ \(UReference.Id objId pos) -> do objMapping <- gets objLookup case Map.lookup objId objMapping of Nothing -> error $ "Expected object mapping for ID: " <> show objId - Just newObjId -> pure (UReference.Id newObjId pos) + Just (newObjId, _, _) -> pure (UReference.Id newObjId pos) data SomeReference ref = TermReference ref diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index 2e939c8469..218eb4536b 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -16,9 +16,9 @@ import qualified Unison.Hashable as H hashCausal :: H.Accumulate h => Causal -> [(H.Token h)] hashCausal c = - H.tokens $ [selfHash c, branchHash c] ++ (Set.toList $ parents c) + H.tokens $ [branchHash c] ++ (Set.toList $ parents c) -data Causal = Causal {selfHash :: Hash, branchHash :: Hash, parents :: Set Hash} +data Causal = Causal {branchHash :: Hash, parents :: Set Hash} instance Hashable Causal where tokens c = hashCausal c From c75175a6a8f581008b60b4f716c666014995344f Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 1 Nov 2021 13:42:10 -0400 Subject: [PATCH 044/297] commit wip --- .../U/Codebase/Sqlite/Branch/Format.hs | 34 +++- .../U/Codebase/Sqlite/Branch/Full.hs | 17 +- .../U/Codebase/Sqlite/Causal.hs | 25 ++- .../U/Codebase/Sqlite/Operations.hs | 13 +- .../SqliteCodebase/MigrateSchema12.hs | 160 +++++++++--------- 5 files changed, 151 insertions(+), 98 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs index 7cb9496823..1ab04f6e4a 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs @@ -12,10 +12,10 @@ where import Data.ByteString (ByteString) import Data.Vector (Vector) import qualified Data.Vector as Vector -import U.Codebase.Sqlite.Branch.Diff (LocalDiff, Diff) -import U.Codebase.Sqlite.Branch.Full (DbBranch, LocalBranch) -import qualified U.Codebase.Sqlite.Branch.Full as Branch.Full +import U.Codebase.Sqlite.Branch.Diff (Diff, LocalDiff) import qualified U.Codebase.Sqlite.Branch.Diff as Branch.Diff +import U.Codebase.Sqlite.Branch.Full (Branch' (types, terms, patches, children), DbBranch, LocalBranch) +import qualified U.Codebase.Sqlite.Branch.Full as Branch.Full import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObjectId, TextId) import U.Codebase.Sqlite.LocalIds ( LocalBranchChildId (..), @@ -53,7 +53,32 @@ localToDbBranch li = Branch.Full.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) dbToLocalBranch :: DbBranch -> (BranchLocalIds, LocalBranch) -dbToLocalBranch = undefined +dbToLocalBranch (Branch.Full.Branch terms types patches children) = + let temp = Branch.Full.Branch + <$> Map.bitraverse localNameSegment (Map.bitraverse localReferent localMetadata) terms + <*> Map.bitraverse localNameSegment (Map.bitraverse localReference localMetadata) types + <*> Map.bitraverse localNameSegment localPatch patches + <*> Map.bitraverse localNameSegment localChild children + undefined + where + localNameSegment :: State (Map TextId LocalTextId) LocalTextId + localNameSegment = undefined + + localReferent :: State (Map TextId LocalTextId, Map + + -- type BranchSavingState = (Map TextId LocalTextId, Map HashId LocalDefnId, Map Db.PatchObjectId LocalPatchObjectId, Map (Db.BranchObjectId, Db.CausalHashId) LocalBranchChildId) + -- State (Map TextId LocalTextId) + + -- + + -- WriterT BranchLocalIds (State ) + + -- done =<< (runWriterT . flip evalStateT startState) do + -- S.Branch + -- <$> Map.bitraverse saveNameSegment (Map.bitraverse saveReferent saveMetadata) terms + -- <*> Map.bitraverse saveNameSegment (Map.bitraverse saveReference saveMetadata) types + -- <*> Map.bitraverse saveNameSegment savePatch' patches + -- <*> Map.bitraverse saveNameSegment saveChild children lookupBranchLocalText :: BranchLocalIds -> LocalTextId -> TextId lookupBranchLocalText li (LocalTextId w) = branchTextLookup li Vector.! fromIntegral w @@ -70,7 +95,6 @@ lookupBranchLocalChild li (LocalBranchChildId w) = branchChildLookup li Vector.! localToDbDiff :: BranchLocalIds -> LocalDiff -> Diff localToDbDiff li = Branch.Diff.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) - {- projects.arya.message = "hello, world" -> -> #abc projects.arya.program = printLine message -> printLine #abc -> #def diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 2976e3c46d..1d67dad716 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -47,18 +47,21 @@ branchCausalHashes_ f Branch{..} = -- =========== -- 1. function that reads a DbBranch out of codebase -- ==> loadDbBranchByObjectId --- 2. function which lifts a function over SomeReference's to run over a DBBranch by inflating Hashes --- 3. function which remaps references in a Branch +-- 2. function which remaps references in a Branch ✅ -- ==> Chris's work --- 4. function that takes DbBranch to (LocalIds, LocalBranch) --- ==> dbToLocalBranch --- 5. function that takes a DbBranch to a Hashing.V2.Branch +-- 3. function that takes DbBranch to (LocalIds, LocalBranch) +-- ==> dbToLocalBranch (todo) +-- 4. function that takes a DbBranch to a Hashing.V2.Branch -- ==> Unison.Codebase.SqliteCodebase.MigrateSchema12.DbHelpers.dbBranchHash --- 6. saveBranchHash --- 7. saveBranchObject +-- 5. saveBranchHash +-- 6. saveBranchObject -- =============== -- end branch plan +-- causal plan' +-- =========== +-- 1. + -- database has a root CausalHashId -- from CausalHashId, we can look up ValueHashId and -- diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs index 08421a3bb1..66d5ce2783 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Causal.hs @@ -1,12 +1,30 @@ {-# LANGUAGE RecordWildCards #-} + module U.Codebase.Sqlite.Causal where +import U.Codebase.Sqlite.DbId (BranchHashId, CausalHashId) import Unison.Prelude -data GDbCausal causalHash valueHash = - DbCausal { selfHash :: causalHash, valueHash :: valueHash, parents :: Set causalHash } +data GDbCausal causalHash valueHash = DbCausal + { selfHash :: causalHash, + valueHash :: valueHash, + parents :: Set causalHash + } + +-- Causal Plan +-- * Load a DbCausal (how do we do this) +-- => new function Queries.localCausalByCausalHashId, can model after loadCausalByCausalHash or factor out of +-- * Add valueHashId as a dependency if unmigrated +-- * Add parent causal hash ids as dependencies if unmigrated + -- => Queries.loadCausalParents +-- * Map over Branch hash IDs +-- * Inside saveDBCausal (new / factored out of original) +-- * Save as a new self-hash +-- ==> Queries.saveCausal +-- * Map over parent causal hash IDs +-- ==> Queries.saveCausalParents --- type DbCausal = GDbCausal CausalHashId BranchHashId +type DbCausal = GDbCausal CausalHashId BranchHashId -- causalHashes_ :: Traversal (GDbCausal ch vh) (GDbCausal ch' vh) ch ch' -- causalHashes_ f DbCausal {..} = @@ -16,7 +34,6 @@ data GDbCausal causalHash valueHash = -- valueHashes_ f DbCausal{..} = -- (\p vh -> DbCausal selfHash vh p) parents <$> f valueHash - -- data Causal m hc he e = Causal -- { causalHash :: hc, -- valueHash :: he, diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index da5ab610a5..3c9708ef06 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -1046,12 +1046,6 @@ saveBranch (C.Causal hc he parents me) = do m LocalBranchChildId lookupChild = lookup_ Lens._4 Lens._4 LocalBranchChildId startState = mempty @BranchSavingState - saveBranchObject :: DB m => Db.BranchHashId -> BranchLocalIds -> S.Branch.Full.LocalBranch -> m Db.BranchObjectId - saveBranchObject id@(Db.unBranchHashId -> hashId) li lBranch = do - when debug $ traceM $ "saveBranchObject\n\tid = " ++ show id ++ "\n\tli = " ++ show li ++ "\n\tlBranch = " ++ show lBranch - let bytes = S.putBytes S.putBranchFormat $ S.BranchFormat.Full li lBranch - oId <- Q.saveObject hashId OT.Namespace bytes - pure $ Db.BranchObjectId oId done :: (EDB m, Show a) => (a, BranchSavingWriter) -> m (BranchLocalIds, a) done (lBranch, written@(textValues, defnHashes, patchObjectIds, branchCausalIds)) = do when debug $ traceM $ "saveBranch.done\n\tlBranch = " ++ show lBranch ++ "\n\twritten = " ++ show written @@ -1065,6 +1059,13 @@ saveBranch (C.Causal hc he parents me) = do (Vector.fromList (Foldable.toList branchCausalIds)) pure (ids, lBranch) +saveBranchObject :: DB m => Db.BranchHashId -> BranchLocalIds -> S.Branch.Full.LocalBranch -> m Db.BranchObjectId +saveBranchObject id@(Db.unBranchHashId -> hashId) li lBranch = do + when debug $ traceM $ "saveBranchObject\n\tid = " ++ show id ++ "\n\tli = " ++ show li ++ "\n\tlBranch = " ++ show lBranch + let bytes = S.putBytes S.putBranchFormat $ S.BranchFormat.Full li lBranch + oId <- Q.saveObject hashId OT.Namespace bytes + pure $ Db.BranchObjectId oId + loadRootCausal :: EDB m => m (C.Branch.Causal m) loadRootCausal = liftQ Q.loadNamespaceRoot >>= loadCausalByCausalHashId diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 3647d42249..e3adbaa268 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -6,7 +6,7 @@ module Unison.Codebase.SqliteCodebase.MigrateSchema12 where import Control.Lens -import Control.Monad.Except (runExceptT, ExceptT) +import Control.Monad.Except (ExceptT, runExceptT) import Control.Monad.Reader (ReaderT (runReaderT), ask) import Control.Monad.State.Strict import Control.Monad.Trans.Except (throwE) @@ -14,11 +14,19 @@ import Control.Monad.Trans.Writer.CPS (Writer, execWriter, tell) import Data.Generics.Product import Data.List.Extra (nubOrd) import qualified Data.Map as Map -import qualified U.Codebase.Sqlite.Operations as Ops import Data.Tuple (swap) import qualified Data.Zip as Zip +import qualified U.Codebase.Causal as C +import U.Codebase.HashTags (BranchHash (BranchHash), CausalHash) +import qualified U.Codebase.Reference as UReference +import qualified U.Codebase.Referent as UReferent +import qualified U.Codebase.Sqlite.Branch.Format as S.Branch +import qualified U.Codebase.Sqlite.Branch.Full as S (DbBranch) +import qualified U.Codebase.Sqlite.Branch.Full as S.Branch.Full import U.Codebase.Sqlite.Connection (Connection) -import U.Codebase.Sqlite.DbId (ObjectId) +import U.Codebase.Sqlite.DbId (BranchObjectId (..), ObjectId) +import qualified U.Codebase.Sqlite.Operations as Ops +import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Reference as S.Reference import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync @@ -26,6 +34,7 @@ import qualified U.Codebase.WatchKind as WK import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) import qualified Unison.Codebase as Codebase +import qualified Unison.Codebase.SqliteCodebase.MigrateSchema12.DbHelpers as Hashing import qualified Unison.DataDeclaration as DD import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Hash (Hash) @@ -40,11 +49,7 @@ import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) -import U.Codebase.HashTags (CausalHash) -import qualified U.Codebase.Causal as C -import qualified U.Codebase.Sqlite.Branch.Full as S -import qualified U.Codebase.Reference as UReference -import qualified U.Codebase.Referent as UReferent +import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -126,8 +131,8 @@ data Entity = TComponent Unison.Hash | DComponent Unison.Hash | C CausalHash - -- haven't proven we need these yet - | B ObjectId + | -- haven't proven we need these yet + B ObjectId | Patch ObjectId | W WK.WatchKind S.Reference.IdH -- Hash Reference.Id deriving (Eq, Ord, Show) @@ -178,16 +183,16 @@ migrationSync = Sync \case --- * If we haven't yet synced the causal's value (namespace), push it onto the work queue. --- * Rehash the Causal's valueHash AND CausalHash, and add the new causal, its hashes, and hash objects to the codebase under a fresh object ID TComponent hash -> do - Env{codebase} <- ask + Env {codebase} <- ask lift (migrateTermComponent codebase hash) DComponent hash -> do - Env{codebase} <- ask + Env {codebase} <- ask lift (migrateDeclComponent codebase hash) B objectId -> do - Env{db} <- ask + Env {db} <- ask lift (migrateBranch db objectId) C causalHash -> do - Env{db} <- ask + Env {db} <- ask lift (migrateCausal db causalHash) -- To sync a watch result, -- 1. ??? @@ -206,9 +211,10 @@ runDB conn = (runExceptT >=> err) . flip runReaderT conn -- loadCausalBranchByCausalHash :: EDB m => CausalHash -> m (Maybe (C.Branch.Causal m)) migrateCausal :: MonadIO m => Connection -> CausalHash -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateCausal conn causalHash = runDB conn $ do - C.Causal{} <- Ops.loadCausalBranchByCausalHash causalHash >>= \case - Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash - Just c -> pure c + C.Causal {} <- + Ops.loadCausalBranchByCausalHash causalHash >>= \case + Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash + Just c -> pure c _migratedCausals <- gets causalMapping -- Plan: -- * Load a C.Causal @@ -218,10 +224,6 @@ migrateCausal conn causalHash = runDB conn $ do -- -- let unMigratedParents = - - - - undefined -- data Causal m hc he e = Causal @@ -231,12 +233,12 @@ migrateCausal conn causalHash = runDB conn $ do -- value :: m e -- } - -- data C.Branch m = Branch - -- { terms :: Map NameSegment (Map Referent (m MdValues)), - -- types :: Map NameSegment (Map Reference (m MdValues)), - -- patches :: Map NameSegment (PatchHash, m Patch), - -- children :: Map NameSegment (Causal m) - -- } +-- data C.Branch m = Branch +-- { terms :: Map NameSegment (Map Referent (m MdValues)), +-- types :: Map NameSegment (Map Reference (m MdValues)), +-- patches :: Map NameSegment (PatchHash, m Patch), +-- children :: Map NameSegment (Causal m) +-- } -- data Branch' t h p c = Branch -- { terms :: Map t (Map (Referent'' t h) (MetadataSetFormat' t h)), @@ -245,14 +247,11 @@ migrateCausal conn causalHash = runDB conn $ do -- children :: Map t c -- } -migrateBranch :: Monad m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateBranch _conn _objectID = fmap (either id id) . runExceptT $ do +migrateBranch :: MonadIO m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) +migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do -- note for tomorrow: we want to just load the (Branch m) instead, forget the DbBranch -- dbBranch <- Ops.loadDbBranchByObjectId objectId - -- Plan: - -- * Load a C.Branch by converting Branch Hash into a branch object ID - -- * let allMissingTypes = undefined let allMissingTerms = undefined let allMissingPatches = undefined @@ -262,58 +261,64 @@ migrateBranch _conn _objectID = fmap (either id id) . runExceptT $ do -- Identify dependencies and bail out if they aren't all built let allMissingReferences :: [Entity] allMissingReferences = - allMissingTypes ++ - allMissingTerms ++ - allMissingPatches ++ - allMissingChildren ++ - allMissingPredecessors + allMissingTypes + ++ allMissingTerms + ++ allMissingPatches + ++ allMissingChildren + ++ allMissingPredecessors when (not . null $ allMissingReferences) $ throwE $ Sync.Missing allMissingReferences - -- Migrate branch - let oldDBBranch :: S.DbBranch = undefined - - _newBranch <- oldDBBranch & dbBranchObjRefs_ %%~ remapObjIdRefs - -- Need to generalize the traversal, and also generalize the Id backing "SomeReference" - -- newBranch <- oldDBBranch & dbBranchObjRefs_ %%~ objIdsToHashed - - error "not implemented" - + -- Read the old branch + oldBranch <- runDB conn (Ops.loadDbBranchByObjectId (BranchObjectId oldObjectId)) + -- Remap object id references + -- TODO: remap sub-namespace causal hashes + newBranch <- oldBranch & dbBranchObjRefs_ %%~ remapObjIdRefs + let (localBranchIds, localBranch) = S.Branch.dbToLocalBranch newBranch + hash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranch)) + newHashId <- runDB conn (Ops.liftQ (Q.saveBranchHash (BranchHash (Cv.hash1to2 hash)))) + newObjectId <- runDB conn (Ops.saveBranchObject newHashId localBranchIds localBranch) + field @"objLookup" %= Map.insert oldObjectId newObjectId -- Project an S.Referent'' into its SomeReferenceObjId's -someReferent_ :: Traversal' (S.Referent'' t ObjectId) SomeReferenceObjId -someReferent_ = (UReferent._Ref . someReference_) - `failing` (UReferent._Con - . asPair_ -- Need to unpack the embedded reference AND remap between mismatched Constructor ID types. - . unsafeInsidePrism _ConstructorReference - ) +someReferent_ :: Traversal' (S.Branch.Full.Referent'' t ObjectId) SomeReferenceObjId +someReferent_ = + (UReferent._Ref . someReference_) + `failing` ( UReferent._Con + . asPair_ -- Need to unpack the embedded reference AND remap between mismatched Constructor ID types. + . unsafeInsidePrism _ConstructorReference + ) where - asPair_ f (UReference.ReferenceDerived id', conId) = f (id', fromIntegral conId) - <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) + asPair_ f (UReference.ReferenceDerived id', conId) = + f (id', fromIntegral conId) + <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) asPair_ _ (UReference.ReferenceBuiltin x, conId) = pure (UReference.ReferenceBuiltin x, conId) someReference_ :: Traversal' (UReference.Reference' t ObjectId) SomeReferenceObjId someReference_ = UReference._ReferenceDerived . unsafeInsidePrism _TermReference -someMetadataSetFormat :: Ord t => Traversal' (S.MetadataSetFormat' t ObjectId) SomeReferenceObjId -someMetadataSetFormat = S.metadataSetFormatReferences_ . someReference_ +someMetadataSetFormat :: Ord t => Traversal' (S.Branch.Full.MetadataSetFormat' t ObjectId) SomeReferenceObjId +someMetadataSetFormat = S.Branch.Full.metadataSetFormatReferences_ . someReference_ -mapReferentMetadata :: (Ord k, Ord t) => +mapReferentMetadata :: + (Ord k, Ord t) => Traversal' k SomeReferenceObjId -> - Traversal' (Map k (S.MetadataSetFormat' t ObjectId)) - (SomeReferenceObjId) -mapReferentMetadata keyTraversal f m = Map.toList m - & traversed . beside keyTraversal someMetadataSetFormat %%~ f - <&> Map.fromList + Traversal' + (Map k (S.Branch.Full.MetadataSetFormat' t ObjectId)) + (SomeReferenceObjId) +mapReferentMetadata keyTraversal f m = + Map.toList m + & traversed . beside keyTraversal someMetadataSetFormat %%~ f + <&> Map.fromList dbBranchObjRefs_ :: Traversal' S.DbBranch SomeReferenceObjId -dbBranchObjRefs_ f S.Branch{..} = do +dbBranchObjRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do let newTypesMap = types & traversed . mapReferentMetadata someReference_ %%~ f let newTermsMap = terms & traversed . mapReferentMetadata someReferent_ %%~ f - S.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children + S.Branch.Full.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children - -- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch +-- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch -- convertBranch dbBranch = _ -- DbBranch -- migrate --> DbBranch -- hydrate for the hash --> Hashing.V2.Branch -- put (Hash, toBranchFormat(DbBranch)) --> COOL @@ -330,7 +335,6 @@ dbBranchObjRefs_ f S.Branch{..} = do -- Traversal' DbBranch PatchId -- MonadState MigrationState m => PatchObjectId -> m PatchObjectId - -- type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) -- data Branch' t h p c = Branch @@ -561,21 +565,24 @@ remapReferences declMap = \case x -> x type SomeReferenceId = SomeReference Reference.Id + type SomeReferenceObjId = SomeReference (UReference.Id' ObjectId) objIdsToHashed :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceId -objIdsToHashed = someRef_ %%~ \(UReference.Id objId pos) -> do - objMapping <- gets hashByObj - case Map.lookup objId objMapping of - Nothing -> error $ "Expected object mapping for ID: " <> show objId - Just hash -> pure (Reference.Id hash pos) +objIdsToHashed = + someRef_ %%~ \(UReference.Id objId pos) -> do + objMapping <- gets hashByObj + case Map.lookup objId objMapping of + Nothing -> error $ "Expected object mapping for ID: " <> show objId + Just hash -> pure (Reference.Id hash pos) remapObjIdRefs :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceObjId -remapObjIdRefs = someRef_ %%~ \(UReference.Id objId pos) -> do - objMapping <- gets objLookup - case Map.lookup objId objMapping of - Nothing -> error $ "Expected object mapping for ID: " <> show objId - Just newObjId -> pure (UReference.Id newObjId pos) +remapObjIdRefs = + someRef_ %%~ \(UReference.Id objId pos) -> do + objMapping <- gets objLookup + case Map.lookup objId objMapping of + Nothing -> error $ "Expected object mapping for ID: " <> show objId + Just newObjId -> pure (UReference.Id newObjId pos) data SomeReference ref = TermReference ref @@ -588,6 +595,7 @@ someRef_ = param @0 _TermReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' _TermReference = undefined -- _Ctor @"TermReference" + _TypeReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' _TypeReference = undefined --_Ctor @"TypeReference" From 87a8057710f56fcd7aea9e7c9eb71f317531a081 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 1 Nov 2021 11:45:59 -0600 Subject: [PATCH 045/297] Ormolu --- .../SqliteCodebase/MigrateSchema12.hs | 166 ++++++++++-------- 1 file changed, 90 insertions(+), 76 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index e14c3a906f..a49a15b0c7 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -6,7 +6,7 @@ module Unison.Codebase.SqliteCodebase.MigrateSchema12 where import Control.Lens -import Control.Monad.Except (runExceptT, ExceptT) +import Control.Monad.Except (ExceptT, runExceptT) import Control.Monad.Reader (ReaderT (runReaderT), ask) import Control.Monad.State.Strict import Control.Monad.Trans.Except (throwE) @@ -14,16 +14,26 @@ import Control.Monad.Trans.Writer.CPS (Writer, execWriter, tell) import Data.Generics.Product import Data.List.Extra (nubOrd) import qualified Data.Map as Map -import qualified U.Codebase.Sqlite.Operations as Ops +import Data.Maybe +import qualified Data.Set as Set import Data.Tuple (swap) import qualified Data.Zip as Zip +import qualified U.Codebase.Causal as C +import U.Codebase.HashTags (CausalHash (CausalHash, unCausalHash)) +import qualified U.Codebase.Reference as UReference +import qualified U.Codebase.Referent as UReferent +import qualified U.Codebase.Sqlite.Branch.Full as S +import U.Codebase.Sqlite.Causal (DbCausal, GDbCausal (..)) +import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Connection (Connection) -import U.Codebase.Sqlite.DbId (ObjectId, BranchHashId (unBranchHashId, BranchHashId), CausalHashId (CausalHashId, unCausalHashId), HashId (HashId)) -import U.Codebase.Sqlite.Causal (DbCausal, GDbCausal(..)) +import U.Codebase.Sqlite.DbId (BranchHashId (BranchHashId, unBranchHashId), CausalHashId (CausalHashId, unCausalHashId), HashId (HashId), ObjectId) +import qualified U.Codebase.Sqlite.Operations as Ops +import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Reference as S.Reference import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync import qualified U.Codebase.WatchKind as WK +import qualified U.Util.Monoid as Monoid import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) import qualified Unison.Codebase as Codebase @@ -31,6 +41,8 @@ import qualified Unison.DataDeclaration as DD import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Hash (Hash) import qualified Unison.Hash as Unison +import qualified Unison.Hashable as Hashable +import qualified Unison.Hashing.V2.Causal as Hashing import qualified Unison.Hashing.V2.Convert as Convert import Unison.Pattern (Pattern) import Unison.Prelude @@ -41,18 +53,6 @@ import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) -import U.Codebase.HashTags (CausalHash (CausalHash, unCausalHash)) -import qualified U.Codebase.Causal as C -import qualified U.Codebase.Sqlite.Branch.Full as S -import qualified U.Codebase.Reference as UReference -import qualified U.Codebase.Referent as UReferent -import qualified U.Codebase.Sqlite.Causal as SC -import qualified U.Codebase.Sqlite.Queries as Q -import qualified Data.Set as Set -import qualified U.Util.Monoid as Monoid -import qualified Unison.Hashing.V2.Causal as Hashing -import qualified Unison.Hashable as Hashable -import Data.Maybe -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -133,8 +133,8 @@ data Entity = TComponent Unison.Hash | DComponent Unison.Hash | C CausalHashId - -- haven't proven we need these yet - | B ObjectId + | -- haven't proven we need these yet + B ObjectId | Patch ObjectId | W WK.WatchKind S.Reference.IdH -- Hash Reference.Id deriving (Eq, Ord, Show) @@ -185,16 +185,16 @@ migrationSync = Sync \case --- * If we haven't yet synced the causal's value (namespace), push it onto the work queue. --- * Rehash the Causal's valueHash AND CausalHash, and add the new causal, its hashes, and hash objects to the codebase under a fresh object ID TComponent hash -> do - Env{codebase} <- ask + Env {codebase} <- ask lift (migrateTermComponent codebase hash) DComponent hash -> do - Env{codebase} <- ask + Env {codebase} <- ask lift (migrateDeclComponent codebase hash) B objectId -> do - Env{db} <- ask + Env {db} <- ask lift (migrateBranch db objectId) C causalHashId -> do - Env{db} <- ask + Env {db} <- ask lift (migrateCausal db causalHashId) -- To sync a watch result, -- 1. ??? @@ -213,19 +213,27 @@ runDB conn = (runExceptT >=> err) . flip runReaderT conn -- loadCausalBranchByCausalHash :: EDB m => CausalHash -> m (Maybe (C.Branch.Causal m)) -- -- Causal Plan + -- * Load a DbCausal (how do we do this) + -- => new function Queries.localCausalByCausalHashId, can model after loadCausalByCausalHash or factor out of + -- * Add valueHashId's ObjectId as a dependency if unmigrated + -- * Add parent causal hash ids as dependencies if unmigrated - -- => Queries.loadCausalParents + +-- => Queries.loadCausalParents + -- * Map over Branch hash IDs + -- * Inside saveDBCausal (new / factored out of original) + -- * Save as a new self-hash -- ==> Queries.saveCausal -- * Map over parent causal hash IDs -- ==> Queries.saveCausalParents migrateCausal :: MonadIO m => Connection -> CausalHashId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExceptT $ do +migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExceptT $ do -- C.Causal{} <- lift $ Ops.loadCausalBranchByCausalHash causalHash >>= \case -- Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash -- Just c -> pure c @@ -235,27 +243,31 @@ migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExcept let branchHashId = (SC.valueHash oldCausal) -- This fails if the object for the branch doesn't exist, CHECK: we currently expect -- this to always be true? - branchObjId <- Q.maybeObjectIdForPrimaryHashId (unBranchHashId branchHashId) >>= \case - Nothing -> error $ "Expected branch object ID but didn't exist for hashId:" <> show branchHashId - Just objId -> pure objId + branchObjId <- + Q.maybeObjectIdForPrimaryHashId (unBranchHashId branchHashId) >>= \case + Nothing -> error $ "Expected branch object ID but didn't exist for hashId:" <> show branchHashId + Just objId -> pure objId migratedObjIds <- gets objLookup -- If the branch for this causal hasn't been migrated, migrate it first. - let unmigratedBranch = if (branchObjId `Map.notMember` migratedObjIds) then [B branchObjId] - else [] + let unmigratedBranch = + if (branchObjId `Map.notMember` migratedObjIds) + then [B branchObjId] + else [] migratedCausals <- gets causalMapping let unmigratedParents = map C . filter (`Map.member` migratedCausals) . Set.toList . SC.parents $ oldCausal let unmigratedEntities = unmigratedBranch <> unmigratedParents when (not . null $ unmigratedParents <> unmigratedBranch) (throwE $ Sync.Missing unmigratedEntities) - (_newBranchObjId, _newBranchHashId, newBranchHash) <- gets (\MigrationState{..} -> objLookup Map.! branchObjId) + (_newBranchObjId, _newBranchHashId, newBranchHash) <- gets (\MigrationState {..} -> objLookup Map.! branchObjId) - newParentHashes <- Set.fromList <$> for (Set.toList . SC.parents $ oldCausal) \oldParentHashId -> do - unCausalHash . fst <$> gets (\MigrationState{..} -> causalMapping Map.! oldParentHashId) + newParentHashes <- + Set.fromList <$> for (Set.toList . SC.parents $ oldCausal) \oldParentHashId -> do + unCausalHash . fst <$> gets (\MigrationState {..} -> causalMapping Map.! oldParentHashId) let newCausalHash :: Hash - newCausalHash = Hashable.accumulate $ Hashing.hashCausal (Hashing.Causal {branchHash=newBranchHash, parents=newParentHashes}) + newCausalHash = Hashable.accumulate $ Hashing.hashCausal (Hashing.Causal {branchHash = newBranchHash, parents = newParentHashes}) let newCausal = case oldCausal of DbCausal {..} -> @@ -269,7 +281,6 @@ migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExcept Q.saveCausalParents (SC.selfHash newCausal) (Set.toList $ SC.parents newCausal) undefined - -- Plan: -- * Load a C.Causal -- * Ensure its parent causals and branch (value hash) have been migrated ✅ @@ -277,10 +288,6 @@ migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExcept -- * Save the new causal (Do we change the self-hash?) -- * Save Causal Hash mapping to skymap - - - - undefined -- data Causal m hc he e = Causal @@ -290,12 +297,12 @@ migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExcept -- value :: m e -- } - -- data C.Branch m = Branch - -- { terms :: Map NameSegment (Map Referent (m MdValues)), - -- types :: Map NameSegment (Map Reference (m MdValues)), - -- patches :: Map NameSegment (PatchHash, m Patch), - -- children :: Map NameSegment (Causal m) - -- } +-- data C.Branch m = Branch +-- { terms :: Map NameSegment (Map Referent (m MdValues)), +-- types :: Map NameSegment (Map Reference (m MdValues)), +-- patches :: Map NameSegment (PatchHash, m Patch), +-- children :: Map NameSegment (Causal m) +-- } -- data Branch' t h p c = Branch -- { terms :: Map t (Map (Referent'' t h) (MetadataSetFormat' t h)), @@ -321,11 +328,11 @@ migrateBranch _conn _objectID = fmap (either id id) . runExceptT $ do -- Identify dependencies and bail out if they aren't all built let allMissingReferences :: [Entity] allMissingReferences = - allMissingTypes ++ - allMissingTerms ++ - allMissingPatches ++ - allMissingChildren ++ - allMissingPredecessors + allMissingTypes + ++ allMissingTerms + ++ allMissingPatches + ++ allMissingChildren + ++ allMissingPredecessors when (not . null $ allMissingReferences) $ throwE $ Sync.Missing allMissingReferences @@ -339,17 +346,18 @@ migrateBranch _conn _objectID = fmap (either id id) . runExceptT $ do error "not implemented" - -- Project an S.Referent'' into its SomeReferenceObjId's someReferent_ :: Traversal' (S.Referent'' t ObjectId) SomeReferenceObjId -someReferent_ = (UReferent._Ref . someReference_) - `failing` (UReferent._Con - . asPair_ -- Need to unpack the embedded reference AND remap between mismatched Constructor ID types. - . unsafeInsidePrism _ConstructorReference - ) +someReferent_ = + (UReferent._Ref . someReference_) + `failing` ( UReferent._Con + . asPair_ -- Need to unpack the embedded reference AND remap between mismatched Constructor ID types. + . unsafeInsidePrism _ConstructorReference + ) where - asPair_ f (UReference.ReferenceDerived id', conId) = f (id', fromIntegral conId) - <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) + asPair_ f (UReference.ReferenceDerived id', conId) = + f (id', fromIntegral conId) + <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) asPair_ _ (UReference.ReferenceBuiltin x, conId) = pure (UReference.ReferenceBuiltin x, conId) someReference_ :: Traversal' (UReference.Reference' t ObjectId) SomeReferenceObjId @@ -358,21 +366,24 @@ someReference_ = UReference._ReferenceDerived . unsafeInsidePrism _TermReference someMetadataSetFormat :: Ord t => Traversal' (S.MetadataSetFormat' t ObjectId) SomeReferenceObjId someMetadataSetFormat = S.metadataSetFormatReferences_ . someReference_ -mapReferentMetadata :: (Ord k, Ord t) => +mapReferentMetadata :: + (Ord k, Ord t) => Traversal' k SomeReferenceObjId -> - Traversal' (Map k (S.MetadataSetFormat' t ObjectId)) - (SomeReferenceObjId) -mapReferentMetadata keyTraversal f m = Map.toList m - & traversed . beside keyTraversal someMetadataSetFormat %%~ f - <&> Map.fromList + Traversal' + (Map k (S.MetadataSetFormat' t ObjectId)) + (SomeReferenceObjId) +mapReferentMetadata keyTraversal f m = + Map.toList m + & traversed . beside keyTraversal someMetadataSetFormat %%~ f + <&> Map.fromList dbBranchObjRefs_ :: Traversal' S.DbBranch SomeReferenceObjId -dbBranchObjRefs_ f S.Branch{..} = do +dbBranchObjRefs_ f S.Branch {..} = do let newTypesMap = types & traversed . mapReferentMetadata someReference_ %%~ f let newTermsMap = terms & traversed . mapReferentMetadata someReferent_ %%~ f S.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children - -- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch +-- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch -- convertBranch dbBranch = _ -- DbBranch -- migrate --> DbBranch -- hydrate for the hash --> Hashing.V2.Branch -- put (Hash, toBranchFormat(DbBranch)) --> COOL @@ -389,7 +400,6 @@ dbBranchObjRefs_ f S.Branch{..} = do -- Traversal' DbBranch PatchId -- MonadState MigrationState m => PatchObjectId -> m PatchObjectId - -- type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) -- data Branch' t h p c = Branch @@ -620,21 +630,24 @@ remapReferences declMap = \case x -> x type SomeReferenceId = SomeReference Reference.Id + type SomeReferenceObjId = SomeReference (UReference.Id' ObjectId) objIdsToHashed :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceId -objIdsToHashed = someRef_ %%~ \(UReference.Id objId pos) -> do - objMapping <- gets objLookup - case Map.lookup objId objMapping of - Nothing -> error $ "Expected object mapping for ID: " <> show objId - Just (_, _, hash) -> pure (Reference.Id hash pos) +objIdsToHashed = + someRef_ %%~ \(UReference.Id objId pos) -> do + objMapping <- gets objLookup + case Map.lookup objId objMapping of + Nothing -> error $ "Expected object mapping for ID: " <> show objId + Just (_, _, hash) -> pure (Reference.Id hash pos) remapObjIdRefs :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceObjId -remapObjIdRefs = someRef_ %%~ \(UReference.Id objId pos) -> do - objMapping <- gets objLookup - case Map.lookup objId objMapping of - Nothing -> error $ "Expected object mapping for ID: " <> show objId - Just (newObjId, _, _) -> pure (UReference.Id newObjId pos) +remapObjIdRefs = + someRef_ %%~ \(UReference.Id objId pos) -> do + objMapping <- gets objLookup + case Map.lookup objId objMapping of + Nothing -> error $ "Expected object mapping for ID: " <> show objId + Just (newObjId, _, _) -> pure (UReference.Id newObjId pos) data SomeReference ref = TermReference ref @@ -647,6 +660,7 @@ someRef_ = param @0 _TermReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' _TermReference = undefined -- _Ctor @"TermReference" + _TypeReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' _TypeReference = undefined --_Ctor @"TypeReference" From 1a6b417544369c3ed82ee66f4109d54a16e41aea Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 1 Nov 2021 13:17:20 -0600 Subject: [PATCH 046/297] Save new causal and insert skymappings --- .../Codebase/SqliteCodebase/MigrateSchema12.hs | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 96736c05cf..b7db83f1c2 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -238,7 +238,7 @@ runDB conn = (runExceptT >=> err) . flip runReaderT conn -- * Map over parent causal hash IDs -- ==> Queries.saveCausalParents migrateCausal :: MonadIO m => Connection -> CausalHashId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExceptT $ do +migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExceptT $ do -- C.Causal{} <- lift $ Ops.loadCausalBranchByCausalHash causalHash >>= \case -- Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash -- Just c -> pure c @@ -271,20 +271,23 @@ migrateCausal conn _causalHashId = runDB conn . fmap (either id id) . runExceptT Set.fromList <$> for (Set.toList . SC.parents $ oldCausal) \oldParentHashId -> do unCausalHash . fst <$> gets (\MigrationState {..} -> causalMapping Map.! oldParentHashId) - let newCausalHash :: Hash - newCausalHash = Hashable.accumulate $ Hashing.hashCausal (Hashing.Causal {branchHash = newBranchHash, parents = newParentHashes}) + let newCausalHash :: CausalHash + newCausalHash = CausalHash . Cv.hash1to2 . Hashable.accumulate $ Hashing.hashCausal (Hashing.Causal {branchHash = newBranchHash, parents = Set.map Cv.hash2to1 newParentHashes}) + newCausalHashId <- Q.saveCausalHash newCausalHash let newCausal = case oldCausal of DbCausal {..} -> DbCausal - { selfHash = undefined, + { selfHash = newCausalHashId, valueHash = BranchHashId $ view _2 (migratedObjIds Map.! branchObjId), parents = Set.map (snd . (\old -> migratedCausals Map.! old)) $ parents } - -- let newCausalHash = ??? Q.saveCausal (SC.selfHash newCausal) (SC.valueHash newCausal) Q.saveCausalParents (SC.selfHash newCausal) (Set.toList $ SC.parents newCausal) - undefined + + field @"causalMapping" %= Map.insert oldCausalHashId (newCausalHash, newCausalHashId) + + -- causalMapping :: Map (Old CausalHashId) (New (CausalHash, CausalHashId)), -- Plan: -- * Load a C.Causal From 20f18c55595529ee0b4ad945d9b7f8c0d0bc7cd1 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 1 Nov 2021 15:36:13 -0400 Subject: [PATCH 047/297] Branch.Format.dbToLocalBranch --- .../U/Codebase/Sqlite/Branch/Format.hs | 103 ++++++++++++------ .../U/Codebase/Sqlite/Branch/Full.hs | 18 +++ codebase2/util/U/Util/Map.hs | 21 +++- codebase2/util/package.yaml | 1 + codebase2/util/unison-util.cabal | 3 +- 5 files changed, 112 insertions(+), 34 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs index b6d82651e1..fefc1db709 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs @@ -9,12 +9,17 @@ module U.Codebase.Sqlite.Branch.Format ) where -import Data.ByteString (ByteString) +import Control.Lens (Lens', zoom, _1, _2, _3, _4) +import Control.Monad.Trans.State.Strict (State) +import qualified Control.Monad.Trans.State.Strict as State +import Data.Bitraversable (bitraverse) +import Data.Coerce (Coercible, coerce) +import qualified Data.Map.Strict as Map import Data.Vector (Vector) import qualified Data.Vector as Vector import U.Codebase.Sqlite.Branch.Diff (Diff, LocalDiff) import qualified U.Codebase.Sqlite.Branch.Diff as Branch.Diff -import U.Codebase.Sqlite.Branch.Full (Branch' (types, terms, patches, children), DbBranch, LocalBranch) +import U.Codebase.Sqlite.Branch.Full (DbBranch, LocalBranch) import qualified U.Codebase.Sqlite.Branch.Full as Branch.Full import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObjectId, TextId) import U.Codebase.Sqlite.LocalIds @@ -23,6 +28,11 @@ import U.Codebase.Sqlite.LocalIds LocalPatchObjectId (..), LocalTextId (..), ) +import U.Codebase.Sqlite.Reference (LocalReference, Reference) +import U.Codebase.Sqlite.Referent (LocalReferent, Referent) +import qualified U.Util.Map as Map +import qualified U.Util.Set as Set +import Unison.Prelude -- | A 'BranchFormat' is a deserialized namespace object (@object.bytes@). -- @@ -52,38 +62,69 @@ localToDbBranch :: BranchLocalIds -> LocalBranch -> DbBranch localToDbBranch li = Branch.Full.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) +type DbToLocalBranchState = + ( Map TextId LocalTextId, + Map ObjectId LocalDefnId, + Map PatchObjectId LocalPatchObjectId, + Map (BranchObjectId, CausalHashId) LocalBranchChildId + ) + dbToLocalBranch :: DbBranch -> (BranchLocalIds, LocalBranch) dbToLocalBranch (Branch.Full.Branch terms types patches children) = - let temp = Branch.Full.Branch - <$> Map.bitraverse localNameSegment (Map.bitraverse localReferent localMetadata) terms - <*> Map.bitraverse localNameSegment (Map.bitraverse localReference localMetadata) types - <*> Map.bitraverse localNameSegment localPatch patches - <*> Map.bitraverse localNameSegment localChild children - undefined + let (localBranch, (localTexts, localDefns, localPatches, localChildren)) = + (`State.runState` (mempty @DbToLocalBranchState)) do + Branch.Full.Branch + <$> Map.bitraverse (zoom _1 . localText) (Map.bitraverse (zoom _1_2 . localReferent) (zoom _1_2 . localMetadata)) terms + <*> Map.bitraverse (zoom _1 . localText) (Map.bitraverse (zoom _1_2 . localReference) (zoom _1_2 . localMetadata)) types + <*> Map.bitraverse (zoom _1 . localText) (zoom _3 . localPatch) patches + <*> Map.bitraverse (zoom _1 . localText) (zoom _4 . localChild) children + branchLocalIds :: BranchLocalIds + branchLocalIds = + LocalIds + { branchTextLookup = Map.valuesVector (Map.swap localTexts), + branchDefnLookup = Map.valuesVector (Map.swap localDefns), + branchPatchLookup = Map.valuesVector (Map.swap localPatches), + branchChildLookup = Map.valuesVector (Map.swap localChildren) + } + in (branchLocalIds, localBranch) where - localNameSegment :: State (Map TextId LocalTextId) LocalTextId - localNameSegment = undefined - - localReferent :: State (Map TextId LocalTextId, Map ObjectId LocalDefnId) S.LocalReferent - localReferent = undefined - - localMetadatata - - -- Lens (A, B, C, D) (B, D) = alongside _2 _4 - - -- type BranchSavingState = (Map TextId LocalTextId, Map ObjectId LocalDefnId, Map Db.PatchObjectId LocalPatchObjectId, Map (Db.BranchObjectId, Db.CausalHashId) LocalBranchChildId) - -- State (Map TextId LocalTextId) - - -- - - -- WriterT BranchLocalIds (State ) - - -- done =<< (runWriterT . flip evalStateT startState) do - -- S.Branch - -- <$> Map.bitraverse saveNameSegment (Map.bitraverse saveReferent saveMetadata) terms - -- <*> Map.bitraverse saveNameSegment (Map.bitraverse saveReference saveMetadata) types - -- <*> Map.bitraverse saveNameSegment savePatch' patches - -- <*> Map.bitraverse saveNameSegment saveChild children + localChild :: + (BranchObjectId, CausalHashId) -> + State (Map (BranchObjectId, CausalHashId) LocalBranchChildId) LocalBranchChildId + localChild = localize + + localDefn :: ObjectId -> State (Map ObjectId LocalDefnId) LocalDefnId + localDefn = localize + + localMetadata :: Branch.Full.DbMetadataSet -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) Branch.Full.LocalMetadataSet + localMetadata (Branch.Full.Inline vals) = + Branch.Full.Inline <$> Set.traverse localReference vals + + localText :: TextId -> State (Map TextId LocalTextId) LocalTextId + localText = localize + + localPatch :: PatchObjectId -> State (Map PatchObjectId LocalPatchObjectId) LocalPatchObjectId + localPatch = localize + + localReference :: Reference -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) LocalReference + localReference = bitraverse (zoom _1 . localText) (zoom _2 . localDefn) + + localReferent :: Referent -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) LocalReferent + localReferent = bitraverse localReference localReference + +localize :: (Coercible local Word64, Ord real) => real -> State (Map real local) local +localize real = do + mapping <- State.get + case Map.lookup real mapping of + Nothing -> do + let nextLocal = coerce @Word64 (fromIntegral (Map.size mapping)) + State.put $! Map.insert real nextLocal mapping + pure nextLocal + Just local -> pure local + +_1_2 :: Lens' (a, b, c, d) (a, b) +_1_2 f (a0, b0, c, d) = + (\(a, b) -> (a, b, c, d)) <$> f (a0, b0) lookupBranchLocalText :: BranchLocalIds -> LocalTextId -> TextId lookupBranchLocalText li (LocalTextId w) = branchTextLookup li Vector.! fromIntegral w diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 1d67dad716..826eac6a02 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -17,8 +17,26 @@ import qualified Data.Set as Set import Control.Lens (Traversal, Traversal') import Unison.Prelude +-- | +-- @ +-- Branch +-- { terms :: Map LocalTextId (Map S.LocalReferent LocalMetadataSet), +-- types :: Map LocalTextId (Map S.LocalReference LocalMetadataSet), +-- patches :: Map LocalTextId LocalPatchObjectId, +-- children :: Map LocalTextId LocalBranchChildId +-- } +-- @ type LocalBranch = Branch' LocalTextId LocalDefnId LocalPatchObjectId LocalBranchChildId +-- | +-- @ +-- Branch +-- { terms :: Map TextId (Map S.Referent DbMetadataSet), +-- types :: Map TextId (Map S.Reference DbMetadataSet), +-- patches :: Map TextId PatchObjectId, +-- children :: Map TextId (BranchObjectId, CausalHashId) +-- } +-- @ type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) type Referent'' t h = Referent' (Reference' t h) (Reference' t h) diff --git a/codebase2/util/U/Util/Map.hs b/codebase2/util/U/Util/Map.hs index 0c24812d95..af41464017 100644 --- a/codebase2/util/U/Util/Map.hs +++ b/codebase2/util/U/Util/Map.hs @@ -1,12 +1,29 @@ -module U.Util.Map where +module U.Util.Map + ( bimap, + bitraverse, + swap, + valuesVector, + ) +where import qualified Data.Bifunctor as B import qualified Data.Bitraversable as B import Data.Map (Map) -import qualified Data.Map as Map +import qualified Data.Map.Strict as Map +import Data.Vector (Vector) +import qualified Data.Vector as Vector bimap :: Ord a' => (a -> a') -> (b -> b') -> Map a b -> Map a' b' bimap fa fb = Map.fromList . map (B.bimap fa fb) . Map.toList bitraverse :: (Applicative f, Ord a') => (a -> f a') -> (b -> f b') -> Map a b -> f (Map a' b') bitraverse fa fb = fmap Map.fromList . traverse (B.bitraverse fa fb) . Map.toList + +-- | 'swap' throws away data if the input contains duplicate values +swap :: Ord b => Map a b -> Map b a +swap = + Map.foldlWithKey' (\z a b -> Map.insert b a z) mempty + +valuesVector :: Map k v -> Vector v +valuesVector = + Vector.fromList . Map.elems diff --git a/codebase2/util/package.yaml b/codebase2/util/package.yaml index c163a5318b..910cb7e98c 100644 --- a/codebase2/util/package.yaml +++ b/codebase2/util/package.yaml @@ -17,3 +17,4 @@ dependencies: - time - unison-util-relation - unliftio + - vector diff --git a/codebase2/util/unison-util.cabal b/codebase2/util/unison-util.cabal index 1ac3ce5db5..85bb77f78d 100644 --- a/codebase2/util/unison-util.cabal +++ b/codebase2/util/unison-util.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -- --- hash: 980485bb761ff6b020b6cc8caf8432753f3fed20b2f135efeded0fff0fbeaeb6 +-- hash: 31a8bc44e9e7254b4567e7d791f3dba17fb3a4a1feb0bdc33fd38413fddc2297 name: unison-util version: 0.0.0 @@ -47,4 +47,5 @@ library , time , unison-util-relation , unliftio + , vector default-language: Haskell2010 From a435674efccd13bda4ddc261fa865b5d4ee72575 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 1 Nov 2021 13:39:06 -0600 Subject: [PATCH 048/297] Finish migrate causal --- .../SqliteCodebase/MigrateSchema12.hs | 80 +++++++++---------- 1 file changed, 38 insertions(+), 42 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index b7db83f1c2..62179ad3b2 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -7,7 +7,7 @@ module Unison.Codebase.SqliteCodebase.MigrateSchema12 where import Control.Lens import Control.Monad.Except (ExceptT, runExceptT) -import Control.Monad.Reader (ReaderT (runReaderT), ask) +import Control.Monad.Reader (ReaderT (runReaderT), ask, mapReaderT) import Control.Monad.State.Strict import Control.Monad.Trans.Except (throwE) import Control.Monad.Trans.Writer.CPS (Writer, execWriter, tell) @@ -210,11 +210,15 @@ migrationSync = Sync \case migratePatch :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) migratePatch = error "not implemented" -runDB :: MonadIO m => Connection -> ReaderT Connection (ExceptT Ops.Error m) a -> m a -runDB conn = (runExceptT >=> err) . flip runReaderT conn +runDB :: MonadIO m => Connection -> ReaderT Connection (ExceptT Ops.Error (ExceptT Q.Integrity m)) a -> m a +runDB conn = (runExceptT >=> err) . (runExceptT >=> err) . flip runReaderT conn where + err :: forall e x m. (Either e x -> m x) err = \case Left err -> error $ show err; Right a -> pure a +liftQ :: ReaderT Connection (ExceptT Q.Integrity m) a -> ReaderT Connection (ExceptT Ops.Error (ExceptT Q.Integrity m)) a +liftQ = mapReaderT lift + -- loadCausalBranchByCausalHash :: EDB m => CausalHash -> m (Maybe (C.Branch.Causal m)) -- -- Causal Plan @@ -239,20 +243,12 @@ runDB conn = (runExceptT >=> err) . flip runReaderT conn -- ==> Queries.saveCausalParents migrateCausal :: MonadIO m => Connection -> CausalHashId -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExceptT $ do - -- C.Causal{} <- lift $ Ops.loadCausalBranchByCausalHash causalHash >>= \case - -- Nothing -> error $ "Expected causal to exist but it didn't" <> show causalHash - -- Just c -> pure c - let oldCausal :: SC.DbCausal - oldCausal = undefined + oldBranchHashId <- lift . liftQ $ Q.loadCausalValueHashId oldCausalHashId + oldCausalParentHashIds <- lift . liftQ $ Q.loadCausalParents oldCausalHashId - let branchHashId = (SC.valueHash oldCausal) -- This fails if the object for the branch doesn't exist, CHECK: we currently expect -- this to always be true? - branchObjId <- - Q.maybeObjectIdForPrimaryHashId (unBranchHashId branchHashId) >>= \case - Nothing -> error $ "Expected branch object ID but didn't exist for hashId:" <> show branchHashId - Just objId -> pure objId - + branchObjId <- lift . liftQ $ Q.expectObjectIdForAnyHashId (unBranchHashId oldBranchHashId) migratedObjIds <- gets objLookup -- If the branch for this causal hasn't been migrated, migrate it first. let unmigratedBranch = @@ -261,49 +257,49 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep else [] migratedCausals <- gets causalMapping - let unmigratedParents = map C . filter (`Map.member` migratedCausals) . Set.toList . SC.parents $ oldCausal + let unmigratedParents = map C . filter (`Map.member` migratedCausals) $ oldCausalParentHashIds let unmigratedEntities = unmigratedBranch <> unmigratedParents when (not . null $ unmigratedParents <> unmigratedBranch) (throwE $ Sync.Missing unmigratedEntities) - (_newBranchObjId, _newBranchHashId, newBranchHash) <- gets (\MigrationState {..} -> objLookup Map.! branchObjId) + (_, _, newBranchHash) <- gets (\MigrationState {..} -> objLookup Map.! branchObjId) - newParentHashes <- - Set.fromList <$> for (Set.toList . SC.parents $ oldCausal) \oldParentHashId -> do - unCausalHash . fst <$> gets (\MigrationState {..} -> causalMapping Map.! oldParentHashId) + let newParentHashes = + oldCausalParentHashIds + & fmap + ( \oldParentHashId -> + let (CausalHash h, _) = migratedCausals Map.! oldParentHashId + in h + ) + & Set.fromList let newCausalHash :: CausalHash - newCausalHash = CausalHash . Cv.hash1to2 . Hashable.accumulate $ Hashing.hashCausal (Hashing.Causal {branchHash = newBranchHash, parents = Set.map Cv.hash2to1 newParentHashes}) + newCausalHash = + CausalHash . Cv.hash1to2 . Hashable.accumulate $ + Hashing.hashCausal + ( Hashing.Causal + { branchHash = newBranchHash, + parents = Set.map Cv.hash2to1 newParentHashes + } + ) newCausalHashId <- Q.saveCausalHash newCausalHash let newCausal = - case oldCausal of - DbCausal {..} -> - DbCausal - { selfHash = newCausalHashId, - valueHash = BranchHashId $ view _2 (migratedObjIds Map.! branchObjId), - parents = Set.map (snd . (\old -> migratedCausals Map.! old)) $ parents - } + DbCausal + { selfHash = newCausalHashId, + valueHash = BranchHashId $ view _2 (migratedObjIds Map.! branchObjId), + parents = Set.fromList . map (snd . (\old -> migratedCausals Map.! old)) $ oldCausalParentHashIds + } Q.saveCausal (SC.selfHash newCausal) (SC.valueHash newCausal) Q.saveCausalParents (SC.selfHash newCausal) (Set.toList $ SC.parents newCausal) field @"causalMapping" %= Map.insert oldCausalHashId (newCausalHash, newCausalHashId) - -- causalMapping :: Map (Old CausalHashId) (New (CausalHash, CausalHashId)), - + pure Sync.Done -- Plan: - -- * Load a C.Causal + -- * Load the pieces of a Db.Causal ✅ -- * Ensure its parent causals and branch (value hash) have been migrated ✅ - -- * Rewrite the value-hash and parent causal hashes - -- * Save the new causal (Do we change the self-hash?) - -- * Save Causal Hash mapping to skymap - - undefined - --- data Causal m hc he e = Causal --- { causalHash :: hc, --- valueHash :: he, --- parents :: Map hc (m (Causal m hc he e)), --- value :: m e --- } + -- * Rewrite the value-hash and parent causal hashes ✅ + -- * Save the new causal ✅ + -- * Save Causal Hash mapping to skymap ✅ -- data C.Branch m = Branch -- { terms :: Map NameSegment (Map Referent (m MdValues)), From ba9271ac2810f647dfe785d7809f2ce83d450240 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 1 Nov 2021 13:42:43 -0600 Subject: [PATCH 049/297] Reformat --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 62179ad3b2..ea748238a4 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -257,11 +257,14 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep else [] migratedCausals <- gets causalMapping - let unmigratedParents = map C . filter (`Map.member` migratedCausals) $ oldCausalParentHashIds + let unmigratedParents = + oldCausalParentHashIds + & filter (`Map.member` migratedCausals) + & fmap C let unmigratedEntities = unmigratedBranch <> unmigratedParents when (not . null $ unmigratedParents <> unmigratedBranch) (throwE $ Sync.Missing unmigratedEntities) - (_, _, newBranchHash) <- gets (\MigrationState {..} -> objLookup Map.! branchObjId) + (_, _, newBranchHash) <- gets (\MigrationState {objLookup} -> objLookup Map.! branchObjId) let newParentHashes = oldCausalParentHashIds From 43a41106f3ee28ed3622bce7098ccf75961c4414 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 1 Nov 2021 17:18:05 -0400 Subject: [PATCH 050/297] Sqlite.patchToLocalPatch --- .../U/Codebase/Sqlite/Patch/Format.hs | 104 +++++++++++++++++- .../U/Codebase/Sqlite/Patch/Full.hs | 14 +++ 2 files changed, 114 insertions(+), 4 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs index cee41cce84..1260d48db0 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs @@ -1,10 +1,32 @@ -module U.Codebase.Sqlite.Patch.Format where +module U.Codebase.Sqlite.Patch.Format + ( PatchFormat (..), + PatchLocalIds (..), + SyncPatchFormat (..), + localPatchToPatch, + patchToLocalPatch, + ) +where +import Control.Lens (Lens', zoom, _1, _2) +import Control.Monad.Trans.State.Strict (State) +import qualified Control.Monad.Trans.State.Strict as State +import Data.Bitraversable (bitraverse) +import Data.Coerce (Coercible, coerce) +import qualified Data.Map.Strict as Map import Data.Vector (Vector) +import qualified Data.Vector as Vector import U.Codebase.Sqlite.DbId (HashId, ObjectId, PatchObjectId, TextId) +import U.Codebase.Sqlite.LocalIds (LocalDefnId (LocalDefnId), LocalHashId (LocalHashId), LocalTextId (LocalTextId)) import U.Codebase.Sqlite.Patch.Diff (LocalPatchDiff) -import U.Codebase.Sqlite.Patch.Full (LocalPatch) -import Data.ByteString (ByteString) +import U.Codebase.Sqlite.Patch.Full (LocalPatch, Patch) +import qualified U.Codebase.Sqlite.Patch.Full as Patch.Full +import U.Codebase.Sqlite.Patch.TermEdit (LocalTermEdit, TermEdit) +import U.Codebase.Sqlite.Patch.TypeEdit (LocalTypeEdit, TypeEdit) +import U.Codebase.Sqlite.Reference (LocalReferenceH, ReferenceH) +import U.Codebase.Sqlite.Referent (LocalReferentH, ReferentH) +import qualified U.Util.Map as Map +import qualified U.Util.Set as Set +import Unison.Prelude data PatchFormat = Full PatchLocalIds LocalPatch @@ -18,4 +40,78 @@ data PatchLocalIds = LocalIds data SyncPatchFormat = SyncFull PatchLocalIds ByteString - | SyncDiff PatchObjectId PatchLocalIds ByteString \ No newline at end of file + | SyncDiff PatchObjectId PatchLocalIds ByteString + +localPatchToPatch :: PatchLocalIds -> LocalPatch -> Patch +localPatchToPatch li = + Patch.Full.trimap (lookupPatchLocalText li) (lookupPatchLocalHash li) (lookupPatchLocalDefn li) + where + lookupPatchLocalText :: PatchLocalIds -> LocalTextId -> TextId + lookupPatchLocalText li (LocalTextId w) = patchTextLookup li Vector.! fromIntegral w + + lookupPatchLocalHash :: PatchLocalIds -> LocalHashId -> HashId + lookupPatchLocalHash li (LocalHashId w) = patchHashLookup li Vector.! fromIntegral w + + lookupPatchLocalDefn :: PatchLocalIds -> LocalDefnId -> ObjectId + lookupPatchLocalDefn li (LocalDefnId w) = patchDefnLookup li Vector.! fromIntegral w + +type PatchToLocalPatchState = + ( Map TextId LocalTextId, + Map HashId LocalHashId, + Map ObjectId LocalDefnId + ) + +patchToLocalPatch :: Patch -> (PatchLocalIds, LocalPatch) +patchToLocalPatch (Patch.Full.Patch termEdits typeEdits) = + let (localPatch, (localTexts, localHashes, localDefns)) = + (`State.runState` (mempty @PatchToLocalPatchState)) do + Patch.Full.Patch + <$> Map.bitraverse (zoom _1_2 . localReferentH) (Set.traverse (zoom _1_3 . localTermEdit)) termEdits + <*> Map.bitraverse (zoom _1_2 . localReferenceH) (Set.traverse (zoom _1_3 . localTypeEdit)) typeEdits + patchLocalIds :: PatchLocalIds + patchLocalIds = + LocalIds + { patchTextLookup = Map.valuesVector (Map.swap localTexts), + patchHashLookup = Map.valuesVector (Map.swap localHashes), + patchDefnLookup = Map.valuesVector (Map.swap localDefns) + } + in (patchLocalIds, localPatch) + where + localDefn :: ObjectId -> State (Map ObjectId LocalDefnId) LocalDefnId + localDefn = localize + + localText :: TextId -> State (Map TextId LocalTextId) LocalTextId + localText = localize + + localHash :: HashId -> State (Map HashId LocalHashId) LocalHashId + localHash = localize + + localReferenceH :: ReferenceH -> State (Map TextId LocalTextId, Map HashId LocalHashId) LocalReferenceH + localReferenceH = bitraverse (zoom _1 . localText) (zoom _2 . localHash) + + localReferentH :: ReferentH -> State (Map TextId LocalTextId, Map HashId LocalHashId) LocalReferentH + localReferentH = bitraverse localReferenceH localReferenceH + + localTermEdit :: TermEdit -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) LocalTermEdit + localTermEdit = bitraverse (zoom _1 . localText) (zoom _2 . localDefn) + + localTypeEdit :: TypeEdit -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) LocalTypeEdit + localTypeEdit = bitraverse (zoom _1 . localText) (zoom _2 . localDefn) + +localize :: (Coercible local Word64, Ord real) => real -> State (Map real local) local +localize real = do + mapping <- State.get + case Map.lookup real mapping of + Nothing -> do + let nextLocal = coerce @Word64 (fromIntegral (Map.size mapping)) + State.put $! Map.insert real nextLocal mapping + pure nextLocal + Just local -> pure local + +_1_2 :: Lens' (a, b, c) (a, b) +_1_2 f (a0, b0, c) = + (\(a, b) -> (a, b, c)) <$> f (a0, b0) + +_1_3 :: Lens' (a, b, c) (a, c) +_1_3 f (a0, b, c0) = + (\(a, c) -> (a, b, c)) <$> f (a0, c0) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs index 06750e7104..81ec4b915c 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs @@ -12,8 +12,22 @@ import U.Codebase.Sqlite.Patch.TermEdit (TermEdit') import U.Codebase.Sqlite.Patch.TypeEdit (TypeEdit') import qualified U.Util.Map as Map +-- | +-- @ +-- LocalPatch +-- { termEdits :: Map Sqlite.ReferentH (Set TermEdit), +-- typeEdits :: Map Sqlite.ReferenceH (Set TypeEdit) +-- } +-- @ type Patch = Patch' Db.TextId Db.HashId Db.ObjectId +-- | +-- @ +-- LocalPatch +-- { termEdits :: Map Sqlite.LocalReferentH (Set LocalTermEdit), +-- typeEdits :: Map Sqlite.LocalReferenceH (Set LocalTypeEdit) +-- } +-- @ type LocalPatch = Patch' LocalTextId LocalHashId LocalDefnId type Referent'' t h = Referent' (Reference' t h) (Reference' t h) From 639b3d44ce60b27d8d929ffccb4f8880b4564c5d Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 1 Nov 2021 17:33:06 -0400 Subject: [PATCH 051/297] comments --- .../U/Codebase/Sqlite/Branch/Format.hs | 80 ------------------ .../U/Codebase/Sqlite/Branch/Full.hs | 64 ++++---------- .../U/Codebase/Sqlite/Patch/Full.hs | 8 +- docs/repoformats/v2.markdown | 84 +++++++++++++++++++ 4 files changed, 102 insertions(+), 134 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs index fefc1db709..c842285d05 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs @@ -140,83 +140,3 @@ lookupBranchLocalChild li (LocalBranchChildId w) = branchChildLookup li Vector.! localToDbDiff :: BranchLocalIds -> LocalDiff -> Diff localToDbDiff li = Branch.Diff.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) - -{- -projects.arya.message = "hello, world" -> -> #abc -projects.arya.program = printLine message -> printLine #abc -> #def - -projects.arya { - terms = { "message" -> #abc - , "program" -> #def - } -} - -text table = - { 1 -> "hello, world" - , 2 -> "message" - , 3 -> "program" - } - -hash table = - { 10 -> "abc" - , 11 -> "def" - } - -object table = - { ... - } - -projects.arya { - terms = { TextId 2 -> Reference { builtin = null, object = ObjectId 20, position = 0 } - , TextId 3 -> Reference { builtin = null, object = ObjectId 21, position = 0 } - } -} - -stored in original codebase: -projects.arya = BranchFormat.Full { - localIds = { - text = [2, 3] - hash = [10, 11] - object = [20, 21] - } - localBranch = { - terms = { LocalTextId 0 -> Reference { builtin = null, object = LocalObjectId 0, position = 0 } - , LocalTextId 1 -> Reference { builtin = null, object = LocalObjectId 1, position = 0 } - } - ... - } -} - -write to dest codebase: -text table = - { ... - , 901 -> "hello, world" - , 902 -> "message" - , 903 -> "program" - } - -hash table = - { ... - , 500 -> "abc" - , 501 -> "def" - } - -projects.arya { - - -- updated copy of original localIds, with new mapping - localIds = { - text = [902, 903] - hash = [500, 501] - object = [300, 301] - } - - -- copy unmodified from original - localBranch = { - terms = { LocalTextId 0 -> Reference { builtin = null, object = LocalObjectId 0, position = 0 } - , LocalTextId 1 -> Reference { builtin = null, object = LocalObjectId 1, position = 0 } - } - ... - } -} - --} diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 826eac6a02..d5f3a0d145 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -1,27 +1,27 @@ -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeApplications #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} module U.Codebase.Sqlite.Branch.Full where +import Control.Lens (Traversal, Traversal') +import Data.Bifunctor (Bifunctor (bimap)) +import qualified Data.Set as Set import U.Codebase.Reference (Reference') import U.Codebase.Referent (Referent') import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObjectId, TextId) import U.Codebase.Sqlite.LocalIds (LocalBranchChildId, LocalDefnId, LocalPatchObjectId, LocalTextId) import qualified U.Util.Map as Map -import Data.Bifunctor (Bifunctor(bimap)) -import qualified Data.Set as Set -import Control.Lens (Traversal, Traversal') import Unison.Prelude -- | -- @ -- Branch --- { terms :: Map LocalTextId (Map S.LocalReferent LocalMetadataSet), --- types :: Map LocalTextId (Map S.LocalReference LocalMetadataSet), +-- { terms :: Map LocalTextId (Map LocalReferent LocalMetadataSet), +-- types :: Map LocalTextId (Map LocalReference LocalMetadataSet), -- patches :: Map LocalTextId LocalPatchObjectId, -- children :: Map LocalTextId LocalBranchChildId -- } @@ -31,8 +31,8 @@ type LocalBranch = Branch' LocalTextId LocalDefnId LocalPatchObjectId LocalBranc -- | -- @ -- Branch --- { terms :: Map TextId (Map S.Referent DbMetadataSet), --- types :: Map TextId (Map S.Reference DbMetadataSet), +-- { terms :: Map TextId (Map Referent DbMetadataSet), +-- types :: Map TextId (Map Reference DbMetadataSet), -- patches :: Map TextId PatchObjectId, -- children :: Map TextId (BranchObjectId, CausalHashId) -- } @@ -51,55 +51,19 @@ data Branch' t h p c = Branch branchHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' branchHashes_ _f _ = undefined - -- Branch <$> traverse (\m -> Map.mapKeys) + +-- Branch <$> traverse (\m -> Map.mapKeys) branchCausalHashes_ :: Traversal (Branch' t h p c) (Branch' t h p c') c c' -branchCausalHashes_ f Branch{..} = +branchCausalHashes_ f Branch {..} = Branch terms types patches <$> traverse f children - --- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch --- convertBranch dbBranch = _ - --- branch plan --- =========== --- 1. function that reads a DbBranch out of codebase --- ==> loadDbBranchByObjectId --- 2. function which remaps references in a Branch ✅ --- ==> Chris's work --- 3. function that takes DbBranch to (LocalIds, LocalBranch) --- ==> dbToLocalBranch (todo) --- 4. function that takes a DbBranch to a Hashing.V2.Branch --- ==> Unison.Codebase.SqliteCodebase.MigrateSchema12.DbHelpers.dbBranchHash --- 5. saveBranchHash --- 6. saveBranchObject --- =============== --- end branch plan - --- causal plan' --- =========== --- 1. - --- database has a root CausalHashId --- from CausalHashId, we can look up ValueHashId and --- --- old object id --db--> old hash --mem--> new hash --db--> new object id - --- --- Branch --- { terms :: Map TextId (Map (Referent'' TextId ObjectId) (MetadataSetFormat' TextId ObjectId)), --- types :: Map TextId (Map (Reference' TextId ObjectId) (MetadataSetFormat' TextId ObjectId)), --- patches :: Map TextId PatchObjectId, --- children :: Map TextId (BranchObjectId, CausalHashId) --- } - - type LocalMetadataSet = MetadataSetFormat' LocalTextId LocalDefnId type DbMetadataSet = MetadataSetFormat' TextId ObjectId data MetadataSetFormat' t h = Inline (Set (Reference' t h)) - deriving Show + deriving (Show) metadataSetFormatReferences_ :: (Ord t, Ord h) => diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs index 81ec4b915c..cb4eacb4e9 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs @@ -15,8 +15,8 @@ import qualified U.Util.Map as Map -- | -- @ -- LocalPatch --- { termEdits :: Map Sqlite.ReferentH (Set TermEdit), --- typeEdits :: Map Sqlite.ReferenceH (Set TypeEdit) +-- { termEdits :: Map ReferentH (Set TermEdit), +-- typeEdits :: Map ReferenceH (Set TypeEdit) -- } -- @ type Patch = Patch' Db.TextId Db.HashId Db.ObjectId @@ -24,8 +24,8 @@ type Patch = Patch' Db.TextId Db.HashId Db.ObjectId -- | -- @ -- LocalPatch --- { termEdits :: Map Sqlite.LocalReferentH (Set LocalTermEdit), --- typeEdits :: Map Sqlite.LocalReferenceH (Set LocalTypeEdit) +-- { termEdits :: Map LocalReferentH (Set LocalTermEdit), +-- typeEdits :: Map LocalReferenceH (Set LocalTypeEdit) -- } -- @ type LocalPatch = Patch' LocalTextId LocalHashId LocalDefnId diff --git a/docs/repoformats/v2.markdown b/docs/repoformats/v2.markdown index b29541b2e2..e5e99f2fb3 100644 --- a/docs/repoformats/v2.markdown +++ b/docs/repoformats/v2.markdown @@ -248,3 +248,87 @@ data BranchLocalIds = LocalIds `PatchObjectIds` reference the object ids of patch objects, as you might imagine. `branchChildLookup` contains two fields: a `CausalHashId` which points to the history of the child, and the `BranchObjectId` which proves that the relevant namespace slice is also present. In general, a codebase may not have the namespace slice corresponding to every causal id, but it ought to have them for the children of another namespace slice it does have (thus, the `BranchObjectId` is used). The causal relationship stored relationally rather than as blobs, and the `CausalHashId` is a useful index into the `causal_parents` table. + + + From 97acf505f8ca7b79eb43e8e05a81a492d6c1e17c Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 1 Nov 2021 21:19:30 -0400 Subject: [PATCH 052/297] Add U.Codebase.Sqlite.LocalizeObject --- .../U/Codebase/Sqlite/Branch/Format.hs | 75 ---------- .../U/Codebase/Sqlite/LocalizeObject.hs | 137 ++++++++++++++++++ .../U/Codebase/Sqlite/Serialization.hs | 5 +- codebase2/codebase-sqlite/package.yaml | 6 + .../unison-codebase-sqlite.cabal | 9 +- 5 files changed, 154 insertions(+), 78 deletions(-) create mode 100644 codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs index c842285d05..8901c2bd16 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs @@ -3,18 +3,11 @@ module U.Codebase.Sqlite.Branch.Format BranchLocalIds (..), SyncBranchFormat (..), localToDbBranch, - dbToLocalBranch, localToDbDiff, -- dbToLocalDiff, ) where -import Control.Lens (Lens', zoom, _1, _2, _3, _4) -import Control.Monad.Trans.State.Strict (State) -import qualified Control.Monad.Trans.State.Strict as State -import Data.Bitraversable (bitraverse) -import Data.Coerce (Coercible, coerce) -import qualified Data.Map.Strict as Map import Data.Vector (Vector) import qualified Data.Vector as Vector import U.Codebase.Sqlite.Branch.Diff (Diff, LocalDiff) @@ -28,10 +21,6 @@ import U.Codebase.Sqlite.LocalIds LocalPatchObjectId (..), LocalTextId (..), ) -import U.Codebase.Sqlite.Reference (LocalReference, Reference) -import U.Codebase.Sqlite.Referent (LocalReferent, Referent) -import qualified U.Util.Map as Map -import qualified U.Util.Set as Set import Unison.Prelude -- | A 'BranchFormat' is a deserialized namespace object (@object.bytes@). @@ -62,70 +51,6 @@ localToDbBranch :: BranchLocalIds -> LocalBranch -> DbBranch localToDbBranch li = Branch.Full.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) -type DbToLocalBranchState = - ( Map TextId LocalTextId, - Map ObjectId LocalDefnId, - Map PatchObjectId LocalPatchObjectId, - Map (BranchObjectId, CausalHashId) LocalBranchChildId - ) - -dbToLocalBranch :: DbBranch -> (BranchLocalIds, LocalBranch) -dbToLocalBranch (Branch.Full.Branch terms types patches children) = - let (localBranch, (localTexts, localDefns, localPatches, localChildren)) = - (`State.runState` (mempty @DbToLocalBranchState)) do - Branch.Full.Branch - <$> Map.bitraverse (zoom _1 . localText) (Map.bitraverse (zoom _1_2 . localReferent) (zoom _1_2 . localMetadata)) terms - <*> Map.bitraverse (zoom _1 . localText) (Map.bitraverse (zoom _1_2 . localReference) (zoom _1_2 . localMetadata)) types - <*> Map.bitraverse (zoom _1 . localText) (zoom _3 . localPatch) patches - <*> Map.bitraverse (zoom _1 . localText) (zoom _4 . localChild) children - branchLocalIds :: BranchLocalIds - branchLocalIds = - LocalIds - { branchTextLookup = Map.valuesVector (Map.swap localTexts), - branchDefnLookup = Map.valuesVector (Map.swap localDefns), - branchPatchLookup = Map.valuesVector (Map.swap localPatches), - branchChildLookup = Map.valuesVector (Map.swap localChildren) - } - in (branchLocalIds, localBranch) - where - localChild :: - (BranchObjectId, CausalHashId) -> - State (Map (BranchObjectId, CausalHashId) LocalBranchChildId) LocalBranchChildId - localChild = localize - - localDefn :: ObjectId -> State (Map ObjectId LocalDefnId) LocalDefnId - localDefn = localize - - localMetadata :: Branch.Full.DbMetadataSet -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) Branch.Full.LocalMetadataSet - localMetadata (Branch.Full.Inline vals) = - Branch.Full.Inline <$> Set.traverse localReference vals - - localText :: TextId -> State (Map TextId LocalTextId) LocalTextId - localText = localize - - localPatch :: PatchObjectId -> State (Map PatchObjectId LocalPatchObjectId) LocalPatchObjectId - localPatch = localize - - localReference :: Reference -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) LocalReference - localReference = bitraverse (zoom _1 . localText) (zoom _2 . localDefn) - - localReferent :: Referent -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) LocalReferent - localReferent = bitraverse localReference localReference - -localize :: (Coercible local Word64, Ord real) => real -> State (Map real local) local -localize real = do - mapping <- State.get - case Map.lookup real mapping of - Nothing -> do - let nextLocal = coerce @Word64 (fromIntegral (Map.size mapping)) - State.put $! Map.insert real nextLocal mapping - pure nextLocal - Just local -> pure local - -_1_2 :: Lens' (a, b, c, d) (a, b) -_1_2 f (a0, b0, c, d) = - (\(a, b) -> (a, b, c, d)) <$> f (a0, b0) - lookupBranchLocalText :: BranchLocalIds -> LocalTextId -> TextId lookupBranchLocalText li (LocalTextId w) = branchTextLookup li Vector.! fromIntegral w diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs new file mode 100644 index 0000000000..4a869b1c19 --- /dev/null +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs @@ -0,0 +1,137 @@ +-- | This module facilitates the creation of "localized" versions of objects, suitable for storage. +module U.Codebase.Sqlite.LocalizeObject + ( -- * High-level localization + localizeBranch, + + -- * General-purpose localization + LocalizeBranchT, + runLocalizeBranchT, + + -- ** Helpers + localizeBranchChild, + localizeBranchMetadata, + localizeDefn, + localizePatch, + localizeReference, + localizeReferent, + localizeText, + + -- * @Contains@ constraints + ContainsBranches, + ContainsDefns, + ContainsPatches, + ContainsText, + ) +where + +import Control.Lens +import Control.Monad.Trans.State.Strict (StateT) +import qualified Control.Monad.Trans.State.Strict as State +import Data.Bitraversable (bitraverse) +import Data.Coerce (Coercible, coerce) +import Data.Generics.Product.Typed (HasType (typed)) +import qualified Data.Map.Strict as Map +import U.Codebase.Sqlite.Branch.Format (BranchLocalIds) +import qualified U.Codebase.Sqlite.Branch.Format as Branch +import U.Codebase.Sqlite.Branch.Full (Branch' (..), DbBranch, LocalBranch) +import qualified U.Codebase.Sqlite.Branch.Full as Branch +import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObjectId, TextId) +import U.Codebase.Sqlite.LocalIds + ( LocalBranchChildId (..), + LocalDefnId (..), + LocalPatchObjectId (..), + LocalTextId (..), + ) +import U.Codebase.Sqlite.Reference (LocalReference, Reference) +import U.Codebase.Sqlite.Referent (LocalReferent, Referent) +import qualified U.Util.Map as Map +import qualified U.Util.Set as Set +import Unison.Prelude + +-------------------------------------------------------------------------------------------------------------------------------------------- +-- High-level localization + +localizeBranch :: DbBranch -> (BranchLocalIds, LocalBranch) +localizeBranch (Branch terms types patches children) = + (runIdentity . runLocalizeBranchT) do + Branch + <$> Map.bitraverse localizeText (Map.bitraverse localizeReferent localizeBranchMetadata) terms + <*> Map.bitraverse localizeText (Map.bitraverse localizeReference localizeBranchMetadata) types + <*> Map.bitraverse localizeText localizePatch patches + <*> Map.bitraverse localizeText localizeBranchChild children + +-------------------------------------------------------------------------------------------------------------------------------------------- +-- General-purpose localization + +type ContainsBranches = + HasType (Map (BranchObjectId, CausalHashId) LocalBranchChildId) + +type ContainsDefns = + HasType (Map ObjectId LocalDefnId) + +type ContainsPatches = + HasType (Map PatchObjectId LocalPatchObjectId) + +type ContainsText = + HasType (Map TextId LocalTextId) + +type LocalizeBranchT m a = + StateT LocalizeBranchState m a + +type LocalizeBranchState = + ( Map TextId LocalTextId, + Map ObjectId LocalDefnId, + Map PatchObjectId LocalPatchObjectId, + Map (BranchObjectId, CausalHashId) LocalBranchChildId + ) + +runLocalizeBranchT :: Monad m => LocalizeBranchT m a -> m (BranchLocalIds, a) +runLocalizeBranchT action = do + (result, (localTexts, localDefns, localPatches, localChildren)) <- State.runStateT action (mempty @LocalizeBranchState) + let branchLocalIds :: BranchLocalIds + branchLocalIds = + Branch.LocalIds + { Branch.branchTextLookup = Map.valuesVector (Map.swap localTexts), + Branch.branchDefnLookup = Map.valuesVector (Map.swap localDefns), + Branch.branchPatchLookup = Map.valuesVector (Map.swap localPatches), + Branch.branchChildLookup = Map.valuesVector (Map.swap localChildren) + } + pure (branchLocalIds, result) + +localizeBranchChild :: (ContainsBranches s, Monad m) => (BranchObjectId, CausalHashId) -> StateT s m LocalBranchChildId +localizeBranchChild = + zoom typed . localize + +localizeBranchMetadata :: (ContainsDefns s, ContainsText s, Monad m) => Branch.DbMetadataSet -> StateT s m Branch.LocalMetadataSet +localizeBranchMetadata (Branch.Inline refs) = + Branch.Inline <$> Set.traverse localizeReference refs + +localizeDefn :: (ContainsDefns s, Monad m) => ObjectId -> StateT s m LocalDefnId +localizeDefn = + zoom typed . localize + +localizePatch :: (ContainsPatches s, Monad m) => PatchObjectId -> StateT s m LocalPatchObjectId +localizePatch = + zoom typed . localize + +localizeReference :: (ContainsDefns s, ContainsText s, Monad m) => Reference -> StateT s m LocalReference +localizeReference = + bitraverse localizeText localizeDefn + +localizeReferent :: (ContainsDefns s, ContainsText s, Monad m) => Referent -> StateT s m LocalReferent +localizeReferent = + bitraverse localizeReference localizeReference + +localizeText :: (ContainsText s, Monad m) => TextId -> StateT s m LocalTextId +localizeText = + zoom typed . localize + +localize :: (Coercible localId Word64, Monad m, Ord realId) => realId -> StateT (Map realId localId) m localId +localize realId = do + mapping <- State.get + case Map.lookup realId mapping of + Nothing -> do + let nextLocalId = coerce @Word64 (fromIntegral (Map.size mapping)) + State.put $! Map.insert realId nextLocalId mapping + pure nextLocalId + Just localId -> pure localId diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Serialization.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Serialization.hs index bb439bdd06..fa65698812 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Serialization.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Serialization.hs @@ -348,9 +348,10 @@ lookupTermElementDiscardingTerm i = getTType :: MonadGet m => m TermFormat.Type getTType = getType getReference -getType :: MonadGet m => m r -> m (Type.TypeR r Symbol) +getType :: forall m r. MonadGet m => m r -> m (Type.TypeR r Symbol) getType getReference = getABT getSymbol getUnit go where + go :: m x -> m (Type.F' r x) go getChild = getWord8 >>= \case 0 -> Type.Ref <$> getReference @@ -807,6 +808,7 @@ putDType :: MonadPut m => DeclFormat.Type Symbol -> m () putDType = putType putRecursiveReference putSymbol putType :: + forall m r v. (MonadPut m, Ord v) => (r -> m ()) -> (v -> m ()) -> @@ -814,6 +816,7 @@ putType :: m () putType putReference putVar = putABT putVar putUnit go where + go :: (x -> m ()) -> Type.F' r x -> m () go putChild t = case t of Type.Ref r -> putWord8 0 *> putReference r Type.Arrow i o -> putWord8 1 *> putChild i *> putChild o diff --git a/codebase2/codebase-sqlite/package.yaml b/codebase2/codebase-sqlite/package.yaml index 253dfedb38..fbb44ec769 100644 --- a/codebase2/codebase-sqlite/package.yaml +++ b/codebase2/codebase-sqlite/package.yaml @@ -4,11 +4,15 @@ github: unisonweb/unison default-extensions: - ApplicativeDo - BlockArguments + - ConstraintKinds - DeriveFunctor - DerivingStrategies - DoAndIfThenElse - FlexibleContexts - FlexibleInstances + - FunctionalDependencies + - GeneralizedNewtypeDeriving + - InstanceSigs - LambdaCase - MultiParamTypeClasses - NamedFieldPuns @@ -16,6 +20,8 @@ default-extensions: - ScopedTypeVariables - TupleSections - TypeApplications + - TypeFamilies + - TypeFamilyDependencies library: source-dirs: . diff --git a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal index f06323daa0..bfed069c77 100644 --- a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal +++ b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal @@ -3,8 +3,6 @@ cabal-version: 1.12 -- This file has been generated from package.yaml by hpack version 0.34.4. -- -- see: https://github.com/sol/hpack --- --- hash: 4227133c6f1df043f279939d16c10379d28386b6b9604af6c8e17bd0a87748ce name: unison-codebase-sqlite version: 0.0.0 @@ -29,6 +27,7 @@ library U.Codebase.Sqlite.Decl.Format U.Codebase.Sqlite.JournalMode U.Codebase.Sqlite.LocalIds + U.Codebase.Sqlite.LocalizeObject U.Codebase.Sqlite.ObjectType U.Codebase.Sqlite.Operations U.Codebase.Sqlite.Patch.Diff @@ -50,11 +49,15 @@ library default-extensions: ApplicativeDo BlockArguments + ConstraintKinds DeriveFunctor DerivingStrategies DoAndIfThenElse FlexibleContexts FlexibleInstances + FunctionalDependencies + GeneralizedNewtypeDeriving + InstanceSigs LambdaCase MultiParamTypeClasses NamedFieldPuns @@ -62,6 +65,8 @@ library ScopedTypeVariables TupleSections TypeApplications + TypeFamilies + TypeFamilyDependencies build-depends: base , bytes From 303ba0faef6b89520afc1147336383373cb94138 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 1 Nov 2021 21:32:49 -0400 Subject: [PATCH 053/297] move patch localization to U.Codebase.Sqlite.LocalizeObject --- .../U/Codebase/Sqlite/LocalizeObject.hs | 89 ++++++++++++++++--- .../U/Codebase/Sqlite/Patch/Format.hs | 74 --------------- 2 files changed, 76 insertions(+), 87 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs index 4a869b1c19..f40512bb6e 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs @@ -2,19 +2,25 @@ module U.Codebase.Sqlite.LocalizeObject ( -- * High-level localization localizeBranch, + localizePatch, -- * General-purpose localization LocalizeBranchT, runLocalizeBranchT, -- ** Helpers - localizeBranchChild, + localizeBranchReference, localizeBranchMetadata, localizeDefn, - localizePatch, + localizeHash, + localizePatchReference, localizeReference, + localizeReferenceH, localizeReferent, + localizeReferentH, + localizeTermEdit, localizeText, + localizeTypeEdit, -- * @Contains@ constraints ContainsBranches, @@ -35,15 +41,21 @@ import U.Codebase.Sqlite.Branch.Format (BranchLocalIds) import qualified U.Codebase.Sqlite.Branch.Format as Branch import U.Codebase.Sqlite.Branch.Full (Branch' (..), DbBranch, LocalBranch) import qualified U.Codebase.Sqlite.Branch.Full as Branch -import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObjectId, TextId) +import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, HashId, ObjectId, PatchObjectId, TextId) import U.Codebase.Sqlite.LocalIds ( LocalBranchChildId (..), LocalDefnId (..), + LocalHashId (..), LocalPatchObjectId (..), LocalTextId (..), ) -import U.Codebase.Sqlite.Reference (LocalReference, Reference) -import U.Codebase.Sqlite.Referent (LocalReferent, Referent) +import U.Codebase.Sqlite.Patch.Format (PatchLocalIds) +import qualified U.Codebase.Sqlite.Patch.Format as Patch +import U.Codebase.Sqlite.Patch.Full (LocalPatch, Patch, Patch' (..)) +import U.Codebase.Sqlite.Patch.TermEdit (LocalTermEdit, TermEdit) +import U.Codebase.Sqlite.Patch.TypeEdit (LocalTypeEdit, TypeEdit) +import U.Codebase.Sqlite.Reference (LocalReference, LocalReferenceH, Reference, ReferenceH) +import U.Codebase.Sqlite.Referent (LocalReferent, LocalReferentH, Referent, ReferentH) import qualified U.Util.Map as Map import qualified U.Util.Set as Set import Unison.Prelude @@ -57,8 +69,15 @@ localizeBranch (Branch terms types patches children) = Branch <$> Map.bitraverse localizeText (Map.bitraverse localizeReferent localizeBranchMetadata) terms <*> Map.bitraverse localizeText (Map.bitraverse localizeReference localizeBranchMetadata) types - <*> Map.bitraverse localizeText localizePatch patches - <*> Map.bitraverse localizeText localizeBranchChild children + <*> Map.bitraverse localizeText localizePatchReference patches + <*> Map.bitraverse localizeText localizeBranchReference children + +localizePatch :: Patch -> (PatchLocalIds, LocalPatch) +localizePatch (Patch termEdits typeEdits) = + (runIdentity . runLocalizePatchT) do + Patch + <$> Map.bitraverse localizeReferentH (Set.traverse localizeTermEdit) termEdits + <*> Map.bitraverse localizeReferenceH (Set.traverse localizeTypeEdit) typeEdits -------------------------------------------------------------------------------------------------------------------------------------------- -- General-purpose localization @@ -69,14 +88,17 @@ type ContainsBranches = type ContainsDefns = HasType (Map ObjectId LocalDefnId) +type ContainsHashes = + HasType (Map HashId LocalHashId) + type ContainsPatches = HasType (Map PatchObjectId LocalPatchObjectId) type ContainsText = HasType (Map TextId LocalTextId) -type LocalizeBranchT m a = - StateT LocalizeBranchState m a +type LocalizeBranchT = + StateT LocalizeBranchState type LocalizeBranchState = ( Map TextId LocalTextId, @@ -98,8 +120,29 @@ runLocalizeBranchT action = do } pure (branchLocalIds, result) -localizeBranchChild :: (ContainsBranches s, Monad m) => (BranchObjectId, CausalHashId) -> StateT s m LocalBranchChildId -localizeBranchChild = +type LocalizePatchState = + ( Map TextId LocalTextId, + Map HashId LocalHashId, + Map ObjectId LocalDefnId + ) + +type LocalizePatchT = + StateT LocalizePatchState + +runLocalizePatchT :: Monad m => LocalizePatchT m a -> m (PatchLocalIds, a) +runLocalizePatchT action = do + (result, (localTexts, localHashes, localDefns)) <- State.runStateT action (mempty @LocalizePatchState) + let patchLocalIds :: PatchLocalIds + patchLocalIds = + Patch.LocalIds + { Patch.patchTextLookup = Map.valuesVector (Map.swap localTexts), + Patch.patchHashLookup = Map.valuesVector (Map.swap localHashes), + Patch.patchDefnLookup = Map.valuesVector (Map.swap localDefns) + } + pure (patchLocalIds, result) + +localizeBranchReference :: (ContainsBranches s, Monad m) => (BranchObjectId, CausalHashId) -> StateT s m LocalBranchChildId +localizeBranchReference = zoom typed . localize localizeBranchMetadata :: (ContainsDefns s, ContainsText s, Monad m) => Branch.DbMetadataSet -> StateT s m Branch.LocalMetadataSet @@ -110,22 +153,42 @@ localizeDefn :: (ContainsDefns s, Monad m) => ObjectId -> StateT s m LocalDefnId localizeDefn = zoom typed . localize -localizePatch :: (ContainsPatches s, Monad m) => PatchObjectId -> StateT s m LocalPatchObjectId -localizePatch = +localizeHash :: (ContainsHashes s, Monad m) => HashId -> StateT s m LocalHashId +localizeHash = + zoom typed . localize + +localizePatchReference :: (ContainsPatches s, Monad m) => PatchObjectId -> StateT s m LocalPatchObjectId +localizePatchReference = zoom typed . localize localizeReference :: (ContainsDefns s, ContainsText s, Monad m) => Reference -> StateT s m LocalReference localizeReference = bitraverse localizeText localizeDefn +localizeReferenceH :: (ContainsHashes s, ContainsText s, Monad m) => ReferenceH -> StateT s m LocalReferenceH +localizeReferenceH = + bitraverse localizeText localizeHash + localizeReferent :: (ContainsDefns s, ContainsText s, Monad m) => Referent -> StateT s m LocalReferent localizeReferent = bitraverse localizeReference localizeReference +localizeReferentH :: (ContainsHashes s, ContainsText s, Monad m) => ReferentH -> StateT s m LocalReferentH +localizeReferentH = + bitraverse localizeReferenceH localizeReferenceH + +localizeTermEdit :: (ContainsText s, ContainsDefns s, Monad m) => TermEdit -> StateT s m LocalTermEdit +localizeTermEdit = + bitraverse localizeText localizeDefn + localizeText :: (ContainsText s, Monad m) => TextId -> StateT s m LocalTextId localizeText = zoom typed . localize +localizeTypeEdit :: (ContainsText s, ContainsDefns s, Monad m) => TypeEdit -> StateT s m LocalTypeEdit +localizeTypeEdit = + bitraverse localizeText localizeDefn + localize :: (Coercible localId Word64, Monad m, Ord realId) => realId -> StateT (Map realId localId) m localId localize realId = do mapping <- State.get diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs index 1260d48db0..5cb2a5ddd5 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs @@ -3,16 +3,9 @@ module U.Codebase.Sqlite.Patch.Format PatchLocalIds (..), SyncPatchFormat (..), localPatchToPatch, - patchToLocalPatch, ) where -import Control.Lens (Lens', zoom, _1, _2) -import Control.Monad.Trans.State.Strict (State) -import qualified Control.Monad.Trans.State.Strict as State -import Data.Bitraversable (bitraverse) -import Data.Coerce (Coercible, coerce) -import qualified Data.Map.Strict as Map import Data.Vector (Vector) import qualified Data.Vector as Vector import U.Codebase.Sqlite.DbId (HashId, ObjectId, PatchObjectId, TextId) @@ -20,12 +13,6 @@ import U.Codebase.Sqlite.LocalIds (LocalDefnId (LocalDefnId), LocalHashId (Local import U.Codebase.Sqlite.Patch.Diff (LocalPatchDiff) import U.Codebase.Sqlite.Patch.Full (LocalPatch, Patch) import qualified U.Codebase.Sqlite.Patch.Full as Patch.Full -import U.Codebase.Sqlite.Patch.TermEdit (LocalTermEdit, TermEdit) -import U.Codebase.Sqlite.Patch.TypeEdit (LocalTypeEdit, TypeEdit) -import U.Codebase.Sqlite.Reference (LocalReferenceH, ReferenceH) -import U.Codebase.Sqlite.Referent (LocalReferentH, ReferentH) -import qualified U.Util.Map as Map -import qualified U.Util.Set as Set import Unison.Prelude data PatchFormat @@ -54,64 +41,3 @@ localPatchToPatch li = lookupPatchLocalDefn :: PatchLocalIds -> LocalDefnId -> ObjectId lookupPatchLocalDefn li (LocalDefnId w) = patchDefnLookup li Vector.! fromIntegral w - -type PatchToLocalPatchState = - ( Map TextId LocalTextId, - Map HashId LocalHashId, - Map ObjectId LocalDefnId - ) - -patchToLocalPatch :: Patch -> (PatchLocalIds, LocalPatch) -patchToLocalPatch (Patch.Full.Patch termEdits typeEdits) = - let (localPatch, (localTexts, localHashes, localDefns)) = - (`State.runState` (mempty @PatchToLocalPatchState)) do - Patch.Full.Patch - <$> Map.bitraverse (zoom _1_2 . localReferentH) (Set.traverse (zoom _1_3 . localTermEdit)) termEdits - <*> Map.bitraverse (zoom _1_2 . localReferenceH) (Set.traverse (zoom _1_3 . localTypeEdit)) typeEdits - patchLocalIds :: PatchLocalIds - patchLocalIds = - LocalIds - { patchTextLookup = Map.valuesVector (Map.swap localTexts), - patchHashLookup = Map.valuesVector (Map.swap localHashes), - patchDefnLookup = Map.valuesVector (Map.swap localDefns) - } - in (patchLocalIds, localPatch) - where - localDefn :: ObjectId -> State (Map ObjectId LocalDefnId) LocalDefnId - localDefn = localize - - localText :: TextId -> State (Map TextId LocalTextId) LocalTextId - localText = localize - - localHash :: HashId -> State (Map HashId LocalHashId) LocalHashId - localHash = localize - - localReferenceH :: ReferenceH -> State (Map TextId LocalTextId, Map HashId LocalHashId) LocalReferenceH - localReferenceH = bitraverse (zoom _1 . localText) (zoom _2 . localHash) - - localReferentH :: ReferentH -> State (Map TextId LocalTextId, Map HashId LocalHashId) LocalReferentH - localReferentH = bitraverse localReferenceH localReferenceH - - localTermEdit :: TermEdit -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) LocalTermEdit - localTermEdit = bitraverse (zoom _1 . localText) (zoom _2 . localDefn) - - localTypeEdit :: TypeEdit -> State (Map TextId LocalTextId, Map ObjectId LocalDefnId) LocalTypeEdit - localTypeEdit = bitraverse (zoom _1 . localText) (zoom _2 . localDefn) - -localize :: (Coercible local Word64, Ord real) => real -> State (Map real local) local -localize real = do - mapping <- State.get - case Map.lookup real mapping of - Nothing -> do - let nextLocal = coerce @Word64 (fromIntegral (Map.size mapping)) - State.put $! Map.insert real nextLocal mapping - pure nextLocal - Just local -> pure local - -_1_2 :: Lens' (a, b, c) (a, b) -_1_2 f (a0, b0, c) = - (\(a, b) -> (a, b, c)) <$> f (a0, b0) - -_1_3 :: Lens' (a, b, c) (a, c) -_1_3 f (a0, b, c0) = - (\(a, c) -> (a, b, c)) <$> f (a0, c0) From 9481a9c444102124d4de663c9593ee3eef672ba2 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 1 Nov 2021 21:33:01 -0400 Subject: [PATCH 054/297] fix up one type error in MigrateSchema12 --- .../SqliteCodebase/MigrateSchema12.hs | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index ea748238a4..411000174e 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -19,17 +19,18 @@ import qualified Data.Set as Set import Data.Tuple (swap) import qualified Data.Zip as Zip import qualified U.Codebase.Causal as C -import U.Codebase.HashTags (CausalHash (CausalHash, unCausalHash), BranchHash (BranchHash), CausalHash) +import U.Codebase.HashTags (BranchHash (BranchHash), CausalHash (CausalHash, unCausalHash)) import qualified U.Codebase.Reference as UReference import qualified U.Codebase.Referent as UReferent import qualified U.Codebase.Sqlite.Branch.Format as S.Branch import qualified U.Codebase.Sqlite.Branch.Full as S import qualified U.Codebase.Sqlite.Branch.Full as S (DbBranch) import qualified U.Codebase.Sqlite.Branch.Full as S.Branch.Full -import U.Codebase.Sqlite.Connection (Connection) -import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Causal (DbCausal, GDbCausal (..)) +import qualified U.Codebase.Sqlite.Causal as SC +import U.Codebase.Sqlite.Connection (Connection) import U.Codebase.Sqlite.DbId (BranchHashId (BranchHashId, unBranchHashId), CausalHashId (CausalHashId, unCausalHashId), HashId (HashId), ObjectId) +import qualified U.Codebase.Sqlite.LocalizeObject as S.LocalizeObject import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Reference as S.Reference @@ -40,6 +41,7 @@ import qualified U.Util.Monoid as Monoid import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) import qualified Unison.Codebase as Codebase +import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv import qualified Unison.Codebase.SqliteCodebase.MigrateSchema12.DbHelpers as Hashing import qualified Unison.DataDeclaration as DD import Unison.DataDeclaration.ConstructorId (ConstructorId) @@ -57,7 +59,6 @@ import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) -import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -297,12 +298,13 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep field @"causalMapping" %= Map.insert oldCausalHashId (newCausalHash, newCausalHashId) pure Sync.Done - -- Plan: - -- * Load the pieces of a Db.Causal ✅ - -- * Ensure its parent causals and branch (value hash) have been migrated ✅ - -- * Rewrite the value-hash and parent causal hashes ✅ - -- * Save the new causal ✅ - -- * Save Causal Hash mapping to skymap ✅ + +-- Plan: +-- * Load the pieces of a Db.Causal ✅ +-- * Ensure its parent causals and branch (value hash) have been migrated ✅ +-- * Rewrite the value-hash and parent causal hashes ✅ +-- * Save the new causal ✅ +-- * Save Causal Hash mapping to skymap ✅ -- data C.Branch m = Branch -- { terms :: Map NameSegment (Map Referent (m MdValues)), @@ -346,7 +348,7 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do -- Remap object id references -- TODO: remap sub-namespace causal hashes newBranch <- oldBranch & dbBranchObjRefs_ %%~ remapObjIdRefs - let (localBranchIds, localBranch) = S.Branch.dbToLocalBranch newBranch + let (localBranchIds, localBranch) = S.LocalizeObject.localizeBranch newBranch hash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranch)) newHashId <- runDB conn (Ops.liftQ (Q.saveBranchHash (BranchHash (Cv.hash1to2 hash)))) newObjectId <- runDB conn (Ops.saveBranchObject newHashId localBranchIds localBranch) From 7bb6d8825ac20867af6480b937cad36fbe7196ab Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 1 Nov 2021 21:47:10 -0400 Subject: [PATCH 055/297] refactoring and haddocks --- .../U/Codebase/Sqlite/LocalizeObject.hs | 85 +++++++++++-------- 1 file changed, 51 insertions(+), 34 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs index f40512bb6e..68bcdbcb4d 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs @@ -5,12 +5,11 @@ module U.Codebase.Sqlite.LocalizeObject localizePatch, -- * General-purpose localization - LocalizeBranchT, - runLocalizeBranchT, + runLocalizeBranch, + runLocalizePatch, - -- ** Helpers + -- ** Localizers localizeBranchReference, - localizeBranchMetadata, localizeDefn, localizeHash, localizePatchReference, @@ -18,9 +17,7 @@ module U.Codebase.Sqlite.LocalizeObject localizeReferenceH, localizeReferent, localizeReferentH, - localizeTermEdit, localizeText, - localizeTypeEdit, -- * @Contains@ constraints ContainsBranches, @@ -63,43 +60,60 @@ import Unison.Prelude -------------------------------------------------------------------------------------------------------------------------------------------- -- High-level localization +-- | Localize a branch object. localizeBranch :: DbBranch -> (BranchLocalIds, LocalBranch) localizeBranch (Branch terms types patches children) = - (runIdentity . runLocalizeBranchT) do + (runIdentity . runLocalizeBranch) do Branch <$> Map.bitraverse localizeText (Map.bitraverse localizeReferent localizeBranchMetadata) terms <*> Map.bitraverse localizeText (Map.bitraverse localizeReference localizeBranchMetadata) types <*> Map.bitraverse localizeText localizePatchReference patches <*> Map.bitraverse localizeText localizeBranchReference children + where + localizeBranchMetadata :: (ContainsDefns s, ContainsText s, Monad m) => Branch.DbMetadataSet -> StateT s m Branch.LocalMetadataSet + localizeBranchMetadata (Branch.Inline refs) = + Branch.Inline <$> Set.traverse localizeReference refs +-- | Localize a patch object. localizePatch :: Patch -> (PatchLocalIds, LocalPatch) localizePatch (Patch termEdits typeEdits) = - (runIdentity . runLocalizePatchT) do + (runIdentity . runLocalizePatch) do Patch <$> Map.bitraverse localizeReferentH (Set.traverse localizeTermEdit) termEdits <*> Map.bitraverse localizeReferenceH (Set.traverse localizeTypeEdit) typeEdits + where + localizeTermEdit :: (ContainsText s, ContainsDefns s, Monad m) => TermEdit -> StateT s m LocalTermEdit + localizeTermEdit = + bitraverse localizeText localizeDefn + + localizeTypeEdit :: (ContainsText s, ContainsDefns s, Monad m) => TypeEdit -> StateT s m LocalTypeEdit + localizeTypeEdit = + bitraverse localizeText localizeDefn -------------------------------------------------------------------------------------------------------------------------------------------- -- General-purpose localization -type ContainsBranches = - HasType (Map (BranchObjectId, CausalHashId) LocalBranchChildId) +-- | Contains references to branch objects. +type ContainsBranches s = + HasType (Map (BranchObjectId, CausalHashId) LocalBranchChildId) s -type ContainsDefns = - HasType (Map ObjectId LocalDefnId) +-- | Contains references to definition objects i.e. term/decl component objects. +type ContainsDefns s = + HasType (Map ObjectId LocalDefnId) s +-- | Contains references to objects by their hash. type ContainsHashes = HasType (Map HashId LocalHashId) +-- | Contains references to patch objects. type ContainsPatches = HasType (Map PatchObjectId LocalPatchObjectId) +-- | Contains text. type ContainsText = HasType (Map TextId LocalTextId) -type LocalizeBranchT = - StateT LocalizeBranchState - +-- | The inner state of the localization of a branch object. type LocalizeBranchState = ( Map TextId LocalTextId, Map ObjectId LocalDefnId, @@ -107,8 +121,9 @@ type LocalizeBranchState = Map (BranchObjectId, CausalHashId) LocalBranchChildId ) -runLocalizeBranchT :: Monad m => LocalizeBranchT m a -> m (BranchLocalIds, a) -runLocalizeBranchT action = do +-- | Run a computation that localizes a branch object, returning the local ids recorded within. +runLocalizeBranch :: Monad m => StateT LocalizeBranchState m a -> m (BranchLocalIds, a) +runLocalizeBranch action = do (result, (localTexts, localDefns, localPatches, localChildren)) <- State.runStateT action (mempty @LocalizeBranchState) let branchLocalIds :: BranchLocalIds branchLocalIds = @@ -120,17 +135,16 @@ runLocalizeBranchT action = do } pure (branchLocalIds, result) +-- | The inner state of 'LocalizePatchT'. type LocalizePatchState = ( Map TextId LocalTextId, Map HashId LocalHashId, Map ObjectId LocalDefnId ) -type LocalizePatchT = - StateT LocalizePatchState - -runLocalizePatchT :: Monad m => LocalizePatchT m a -> m (PatchLocalIds, a) -runLocalizePatchT action = do +-- | Run a computation that localizes a patch object, returning the local ids recorded within. +runLocalizePatch :: Monad m => StateT LocalizePatchState m a -> m (PatchLocalIds, a) +runLocalizePatch action = do (result, (localTexts, localHashes, localDefns)) <- State.runStateT action (mempty @LocalizePatchState) let patchLocalIds :: PatchLocalIds patchLocalIds = @@ -141,54 +155,57 @@ runLocalizePatchT action = do } pure (patchLocalIds, result) +-- | Localize a branch object reference in any monad that encapsulates the stateful localization of an object that contains branch +-- references. localizeBranchReference :: (ContainsBranches s, Monad m) => (BranchObjectId, CausalHashId) -> StateT s m LocalBranchChildId localizeBranchReference = zoom typed . localize -localizeBranchMetadata :: (ContainsDefns s, ContainsText s, Monad m) => Branch.DbMetadataSet -> StateT s m Branch.LocalMetadataSet -localizeBranchMetadata (Branch.Inline refs) = - Branch.Inline <$> Set.traverse localizeReference refs - +-- | Localize a definition object reference in any monad that encapsulates the stateful localization of an object that contains definition +-- references. localizeDefn :: (ContainsDefns s, Monad m) => ObjectId -> StateT s m LocalDefnId localizeDefn = zoom typed . localize +-- | Localize a hash reference in any monad that encapsulates the stateful localization of an object that contains hash references. localizeHash :: (ContainsHashes s, Monad m) => HashId -> StateT s m LocalHashId localizeHash = zoom typed . localize +-- | Localize a patch object reference in any monad that encapsulates the stateful localization of an object that contains patch references. localizePatchReference :: (ContainsPatches s, Monad m) => PatchObjectId -> StateT s m LocalPatchObjectId localizePatchReference = zoom typed . localize +-- | Localize a reference in any monad that encapsulates the stateful localization of an object that contains references. localizeReference :: (ContainsDefns s, ContainsText s, Monad m) => Reference -> StateT s m LocalReference localizeReference = bitraverse localizeText localizeDefn +-- | Localize a possibly-missing reference in any monad that encapsulates the stateful localization of an object that contains +-- possibly-missing references. localizeReferenceH :: (ContainsHashes s, ContainsText s, Monad m) => ReferenceH -> StateT s m LocalReferenceH localizeReferenceH = bitraverse localizeText localizeHash +-- | Localize a referent in any monad that encapsulates the stateful localization of an object that contains referents. localizeReferent :: (ContainsDefns s, ContainsText s, Monad m) => Referent -> StateT s m LocalReferent localizeReferent = bitraverse localizeReference localizeReference +-- | Localize a possibly-missing referent in any monad that encapsulates the stateful localization of an object that contains +-- possibly-missing referents. localizeReferentH :: (ContainsHashes s, ContainsText s, Monad m) => ReferentH -> StateT s m LocalReferentH localizeReferentH = bitraverse localizeReferenceH localizeReferenceH -localizeTermEdit :: (ContainsText s, ContainsDefns s, Monad m) => TermEdit -> StateT s m LocalTermEdit -localizeTermEdit = - bitraverse localizeText localizeDefn - +-- | Localize a text reference in any monad that encapsulates the stateful localization of an object that contains text. localizeText :: (ContainsText s, Monad m) => TextId -> StateT s m LocalTextId localizeText = zoom typed . localize -localizeTypeEdit :: (ContainsText s, ContainsDefns s, Monad m) => TypeEdit -> StateT s m LocalTypeEdit -localizeTypeEdit = - bitraverse localizeText localizeDefn - +-- Resolve a real id to its corresponding local id, either by looking it up in a map, or else using the next available local id, which is +-- recorded for next time. localize :: (Coercible localId Word64, Monad m, Ord realId) => realId -> StateT (Map realId localId) m localId localize realId = do mapping <- State.get From 76fbb320cfbb277c33c9084428d0f53744127bb0 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 1 Nov 2021 21:48:02 -0400 Subject: [PATCH 056/297] haddocks --- codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs index 68bcdbcb4d..c3f6c33404 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs @@ -135,7 +135,7 @@ runLocalizeBranch action = do } pure (branchLocalIds, result) --- | The inner state of 'LocalizePatchT'. +-- | The inner state of the localization of a patch object. type LocalizePatchState = ( Map TextId LocalTextId, Map HashId LocalHashId, From a872d792bf5a737043a769f36b3c714e24669163 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 2 Nov 2021 11:36:29 -0400 Subject: [PATCH 057/297] more haddocks --- .../U/Codebase/Sqlite/LocalizeObject.hs | 86 +++++++++---------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs index c3f6c33404..1364449888 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs @@ -1,29 +1,30 @@ -- | This module facilitates the creation of "localized" versions of objects, suitable for storage. +-- +-- Localization is a stateful process in which the real database identifiers contained within an object, e.g. 'DbBranch', are canonicalized +-- as local identifiers counting up from 0 in the order they are encountered in the object. The association between real and local +-- identifier is captured in a vector, where the @ith@ index maps local identifier @i@ to the real identifier it corresponds to. +-- +-- For example, consider a branch object that refers to terms @#foo@ and @#bar@. In totally made-up syntax, +-- +-- @ +-- branch = { +-- terms = [#foo, #bar] +-- } +-- @ +-- +-- The localized version of this branch would be +-- +-- @ +-- branch = { +-- terms = [0, 1] +-- } +-- terms = [#foo, #bar] +-- @ +-- +-- where all terms, types, etc. within the @branch@ structure refer to offsets in the associated vectors. module U.Codebase.Sqlite.LocalizeObject - ( -- * High-level localization - localizeBranch, + ( localizeBranch, localizePatch, - - -- * General-purpose localization - runLocalizeBranch, - runLocalizePatch, - - -- ** Localizers - localizeBranchReference, - localizeDefn, - localizeHash, - localizePatchReference, - localizeReference, - localizeReferenceH, - localizeReferent, - localizeReferentH, - localizeText, - - -- * @Contains@ constraints - ContainsBranches, - ContainsDefns, - ContainsPatches, - ContainsText, ) where @@ -93,27 +94,27 @@ localizePatch (Patch termEdits typeEdits) = -------------------------------------------------------------------------------------------------------------------------------------------- -- General-purpose localization --- | Contains references to branch objects. +-- Contains references to branch objects. type ContainsBranches s = HasType (Map (BranchObjectId, CausalHashId) LocalBranchChildId) s --- | Contains references to definition objects i.e. term/decl component objects. +-- Contains references to definition objects i.e. term/decl component objects. type ContainsDefns s = HasType (Map ObjectId LocalDefnId) s --- | Contains references to objects by their hash. +-- Contains references to objects by their hash. type ContainsHashes = HasType (Map HashId LocalHashId) --- | Contains references to patch objects. +-- Contains references to patch objects. type ContainsPatches = HasType (Map PatchObjectId LocalPatchObjectId) --- | Contains text. +-- Contains text. type ContainsText = HasType (Map TextId LocalTextId) --- | The inner state of the localization of a branch object. +-- The inner state of the localization of a branch object. type LocalizeBranchState = ( Map TextId LocalTextId, Map ObjectId LocalDefnId, @@ -121,7 +122,7 @@ type LocalizeBranchState = Map (BranchObjectId, CausalHashId) LocalBranchChildId ) --- | Run a computation that localizes a branch object, returning the local ids recorded within. +-- Run a computation that localizes a branch object, returning the local ids recorded within. runLocalizeBranch :: Monad m => StateT LocalizeBranchState m a -> m (BranchLocalIds, a) runLocalizeBranch action = do (result, (localTexts, localDefns, localPatches, localChildren)) <- State.runStateT action (mempty @LocalizeBranchState) @@ -135,14 +136,14 @@ runLocalizeBranch action = do } pure (branchLocalIds, result) --- | The inner state of the localization of a patch object. +-- The inner state of the localization of a patch object. type LocalizePatchState = ( Map TextId LocalTextId, Map HashId LocalHashId, Map ObjectId LocalDefnId ) --- | Run a computation that localizes a patch object, returning the local ids recorded within. +-- Run a computation that localizes a patch object, returning the local ids recorded within. runLocalizePatch :: Monad m => StateT LocalizePatchState m a -> m (PatchLocalIds, a) runLocalizePatch action = do (result, (localTexts, localHashes, localDefns)) <- State.runStateT action (mempty @LocalizePatchState) @@ -155,51 +156,50 @@ runLocalizePatch action = do } pure (patchLocalIds, result) --- | Localize a branch object reference in any monad that encapsulates the stateful localization of an object that contains branch --- references. +-- Localize a branch object reference in any monad that encapsulates the stateful localization of an object that contains branch references. localizeBranchReference :: (ContainsBranches s, Monad m) => (BranchObjectId, CausalHashId) -> StateT s m LocalBranchChildId localizeBranchReference = zoom typed . localize --- | Localize a definition object reference in any monad that encapsulates the stateful localization of an object that contains definition +-- Localize a definition object reference in any monad that encapsulates the stateful localization of an object that contains definition -- references. localizeDefn :: (ContainsDefns s, Monad m) => ObjectId -> StateT s m LocalDefnId localizeDefn = zoom typed . localize --- | Localize a hash reference in any monad that encapsulates the stateful localization of an object that contains hash references. +-- Localize a hash reference in any monad that encapsulates the stateful localization of an object that contains hash references. localizeHash :: (ContainsHashes s, Monad m) => HashId -> StateT s m LocalHashId localizeHash = zoom typed . localize --- | Localize a patch object reference in any monad that encapsulates the stateful localization of an object that contains patch references. +-- Localize a patch object reference in any monad that encapsulates the stateful localization of an object that contains patch references. localizePatchReference :: (ContainsPatches s, Monad m) => PatchObjectId -> StateT s m LocalPatchObjectId localizePatchReference = zoom typed . localize --- | Localize a reference in any monad that encapsulates the stateful localization of an object that contains references. +-- Localize a reference in any monad that encapsulates the stateful localization of an object that contains references. localizeReference :: (ContainsDefns s, ContainsText s, Monad m) => Reference -> StateT s m LocalReference localizeReference = bitraverse localizeText localizeDefn --- | Localize a possibly-missing reference in any monad that encapsulates the stateful localization of an object that contains +-- Localize a possibly-missing reference in any monad that encapsulates the stateful localization of an object that contains -- possibly-missing references. localizeReferenceH :: (ContainsHashes s, ContainsText s, Monad m) => ReferenceH -> StateT s m LocalReferenceH localizeReferenceH = bitraverse localizeText localizeHash --- | Localize a referent in any monad that encapsulates the stateful localization of an object that contains referents. +-- Localize a referent in any monad that encapsulates the stateful localization of an object that contains referents. localizeReferent :: (ContainsDefns s, ContainsText s, Monad m) => Referent -> StateT s m LocalReferent localizeReferent = bitraverse localizeReference localizeReference --- | Localize a possibly-missing referent in any monad that encapsulates the stateful localization of an object that contains --- possibly-missing referents. +-- Localize a possibly-missing referent in any monad that encapsulates the stateful localization of an object that contains possibly-missing +-- referents. localizeReferentH :: (ContainsHashes s, ContainsText s, Monad m) => ReferentH -> StateT s m LocalReferentH localizeReferentH = bitraverse localizeReferenceH localizeReferenceH --- | Localize a text reference in any monad that encapsulates the stateful localization of an object that contains text. +-- Localize a text reference in any monad that encapsulates the stateful localization of an object that contains text. localizeText :: (ContainsText s, Monad m) => TextId -> StateT s m LocalTextId localizeText = zoom typed . localize From 94fcea6eeacf5ba3e25a5b477cad9c68b5b9ee42 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 2 Nov 2021 12:21:49 -0600 Subject: [PATCH 058/297] migrate watch --- .../U/Codebase/Sqlite/Term/Format.hs | 16 ++-- .../SqliteCodebase/MigrateSchema12.hs | 94 +++++++++++-------- .../src/Unison/Hashing/V2/Causal.hs | 7 +- unison-core/src/Unison/ABT.hs | 10 +- 4 files changed, 70 insertions(+), 57 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs index c6ce3d203c..7d3463d287 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Term/Format.hs @@ -14,15 +14,15 @@ import qualified U.Codebase.Type as Type import qualified U.Codebase.Sqlite.Reference as Sqlite import U.Codebase.Sqlite.DbId (ObjectId, TextId) --- | +-- | -- * Builtin terms are represented as local text ids. -- * Non-builtin terms are represented as local definition ids, with an added distinguished element (here @Nothing@) -- which represents a self-reference. type TermRef = Reference' LocalTextId (Maybe LocalDefnId) --- | +-- | -- * Builtin types are represented as a local text id. --- * Non-builtin types are represented by a local definition id. +-- * Non-builtin types are represented by a local definition id. type TypeRef = Reference' LocalTextId LocalDefnId type TermLink = Referent' TermRef TypeRef @@ -61,15 +61,15 @@ hash table = 10 abc 11 def -hash_object table = +hash_object table = hash_id object_id hash_version ------- --------- ------------ 10 20 2 object table = - { 20 -> + { 20 -> LocallyIndexedComponent [ - (localIds = LocalIds { + (localIds = LocalIds { text = [1,4] defs = [] }, @@ -80,7 +80,7 @@ object table = 21 -> LocallyIndexedComponent [ - (localIds = LocalIds { + (localIds = LocalIds { text = [7,5,6] defs = [20] }, @@ -88,7 +88,7 @@ object table = type = ABT { ... { Term.FT.App (Term.FT.Ref (Builtin (LocalTextId 0))) (Term.FT.Ref (Builtin (LocalTextId 1))) } } ) ], - } + } -} type F = diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index c794910925..ef586741ea 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -18,7 +18,7 @@ import Data.Maybe import qualified Data.Set as Set import Data.Tuple (swap) import qualified Data.Zip as Zip -import U.Codebase.HashTags (BranchHash (BranchHash), CausalHash (CausalHash)) +import U.Codebase.HashTags (BranchHash (BranchHash), CausalHash (CausalHash, unCausalHash)) import qualified U.Codebase.Reference as UReference import qualified U.Codebase.Referent as UReferent import qualified U.Codebase.Sqlite.Branch.Format as S.Branch @@ -27,12 +27,13 @@ import qualified U.Codebase.Sqlite.Branch.Full as S.Branch.Full import U.Codebase.Sqlite.Causal (GDbCausal (..)) import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Connection (Connection) -import U.Codebase.Sqlite.DbId (BranchHashId (BranchHashId, unBranchHashId), BranchObjectId (BranchObjectId, unBranchObjectId), ObjectId, CausalHashId, HashId) +import U.Codebase.Sqlite.DbId (BranchHashId (BranchHashId, unBranchHashId), BranchObjectId (BranchObjectId, unBranchObjectId), CausalHashId, HashId, ObjectId) import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Reference as S.Reference import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync +import U.Codebase.WatchKind (WatchKind) import qualified U.Codebase.WatchKind as WK import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) @@ -43,7 +44,6 @@ import qualified Unison.DataDeclaration as DD import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Hash (Hash) import qualified Unison.Hash as Unison -import qualified Unison.Hashable as Hashable import qualified Unison.Hashing.V2.Causal as Hashing import qualified Unison.Hashing.V2.Convert as Convert import Unison.Pattern (Pattern) @@ -89,15 +89,15 @@ data MigrationState = MigrationState referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), causalMapping :: Map (Old CausalHashId) (New (CausalHash, CausalHashId)), -- Mapping between contructor indexes for the type identified by (ObjectId, Pos) - ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), - ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), + -- ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), + -- ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), -- This provides the info needed for rewriting a term. You'll access it with a function :: Old - termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), - objLookup :: Map (Old ObjectId) (New (ObjectId, HashId, Hash)), + -- termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), + objLookup :: Map (Old ObjectId) (New (ObjectId, HashId, Hash)) -- - componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), - constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), - completed :: Set ObjectId + -- componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), + -- constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), + -- completed :: Set ObjectId } deriving (Generic) @@ -201,7 +201,9 @@ migrationSync = Sync \case -- To sync a watch result, -- 1. ??? -- 2. Synced - W _watchKind _idH -> undefined + W _watchKind _idH -> do + Env {codebase} <- ask + lift (migrateWatch codebase watchKind watchId) Patch {} -> undefined migratePatch :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) @@ -263,22 +265,20 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep (_, _, newBranchHash) <- gets (\MigrationState {objLookup} -> objLookup Map.! branchObjId) - let newParentHashes = + let (newParentHashes, newParentHashIds) = oldCausalParentHashIds & fmap - ( \oldParentHashId -> - let (CausalHash h, _) = migratedCausals Map.! oldParentHashId - in h - ) - & Set.fromList + (\oldParentHashId -> migratedCausals Map.! oldParentHashId) + & unzip + & bimap (Set.fromList . map unCausalHash) Set.fromList let newCausalHash :: CausalHash newCausalHash = - CausalHash . Cv.hash1to2 . Hashable.accumulate $ + CausalHash . Cv.hash1to2 $ Hashing.hashCausal ( Hashing.Causal { branchHash = newBranchHash, - parents = Set.map Cv.hash2to1 newParentHashes + parents = Set.mapMonotonic Cv.hash2to1 newParentHashes } ) newCausalHashId <- Q.saveCausalHash newCausalHash @@ -286,7 +286,7 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep DbCausal { selfHash = newCausalHashId, valueHash = BranchHashId $ view _2 (migratedObjIds Map.! branchObjId), - parents = Set.fromList . map (snd . (\old -> migratedCausals Map.! old)) $ oldCausalParentHashIds + parents = newParentHashIds } Q.saveCausal (SC.selfHash newCausal) (SC.valueHash newCausal) Q.saveCausalParents (SC.selfHash newCausal) (Set.toList $ SC.parents newCausal) @@ -294,26 +294,6 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep field @"causalMapping" %= Map.insert oldCausalHashId (newCausalHash, newCausalHashId) pure Sync.Done - -- Plan: - -- * Load the pieces of a Db.Causal ✅ - -- * Ensure its parent causals and branch (value hash) have been migrated ✅ - -- * Rewrite the value-hash and parent causal hashes ✅ - -- * Save the new causal ✅ - -- * Save Causal Hash mapping to skymap ✅ - --- data C.Branch m = Branch --- { terms :: Map NameSegment (Map Referent (m MdValues)), --- types :: Map NameSegment (Map Reference (m MdValues)), --- patches :: Map NameSegment (PatchHash, m Patch), --- children :: Map NameSegment (Causal m) --- } - --- data Branch' t h p c = Branch --- { terms :: Map t (Map (Referent'' t h) (MetadataSetFormat' t h)), --- types :: Map t (Map (Reference' t h) (MetadataSetFormat' t h)), --- patches :: Map t p, --- children :: Map t c --- } migrateBranch :: MonadIO m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do @@ -350,6 +330,40 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, hash) pure Sync.Done +-- | PLAN +-- * +-- NOTE: this implementation assumes that watches will be migrated AFTER everything else is finished. +-- This is because it's difficult for us to know otherwise whether a reference refers to something which doesn't exist, or just +-- something that hasn't been migrated yet. If we do it last, we know that missing references are indeed just missing from the codebase. +migrateWatch :: + forall m v a. + MonadIO m => + Codebase m v a -> + WatchKind -> + Reference.Id -> + StateT MigrationState m (Sync.TrySyncResult Entity) +migrateWatch Codebase {..} watchKind oldWatchId = fmap (either id id) . runExceptT $ do + let watchKindV1 = Cv.watchKind2to1 watchKind + watchResultTerm <- + (lift . lift) (getWatch watchKindV1 oldWatchId) >>= \case + -- The hash which we're watching doesn't exist in the codebase, throw out this watch. + Nothing -> throwE Sync.Done + Just term -> pure term + migratedReferences <- gets referenceMapping + newWatchId <- case Map.lookup (TermReference oldWatchId) migratedReferences of + (Just (TermReference newRef)) -> pure newRef + _ -> throwE Sync.NonFatalError + let maybeRemappedTerm :: Maybe (Term.Term v a) + maybeRemappedTerm = + watchResultTerm + & termReferences_ %%~ \someRef -> Map.lookup someRef migratedReferences + case maybeRemappedTerm of + -- One or more references in the result didn't exist in our codebase. + Nothing -> pure Sync.NonFatalError + Just remappedTerm -> do + lift . lift $ putWatch watchKindV1 newWatchId remappedTerm + pure Sync.Done + -- Project an S.Referent'' into its SomeReferenceObjId's someReferent_ :: Traversal' (S.Branch.Full.Referent'' t ObjectId) SomeReferenceObjId someReferent_ = diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index 218eb4536b..eff2129716 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -14,11 +14,10 @@ import Unison.Hash (Hash) import Unison.Hashable (Hashable) import qualified Unison.Hashable as H -hashCausal :: H.Accumulate h => Causal -> [(H.Token h)] -hashCausal c = - H.tokens $ [branchHash c] ++ (Set.toList $ parents c) +hashCausal :: H.Accumulate h => Causal -> h +hashCausal = H.accumulate' data Causal = Causal {branchHash :: Hash, parents :: Set Hash} instance Hashable Causal where - tokens c = hashCausal c + tokens c = H.tokens $ branchHash c : Set.toList (parents c) diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index 1473e44822..181325f188 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -88,7 +88,7 @@ module Unison.ABT , orderedComponents ) where -import Control.Lens (Fold, Lens', Setter', folding, lens, use, (%%~), (.=)) +import Control.Lens (Lens', lens, use, (%%~), (.=)) import Control.Monad.State (MonadState) import qualified Data.Foldable as Foldable import Data.Functor.Identity (Identity (Identity), runIdentity) @@ -475,8 +475,8 @@ foreachSubterm f e = case out e of subterms :: (Traversable f) => Term f v a -> [Term f v a] subterms t = runIdentity $ foreachSubterm pure t -subterms_ :: (Traversable f) => Fold (Term f v a) (Term f v a) -subterms_ = folding subterms +-- subterms_ :: (Traversable f) => Fold (Term f v a) (Term f v a) +-- subterms_ = folding subterms -- | `visit f t` applies an effectful function to each subtree of -- `t` and sequences the results. When `f` returns `Nothing`, `visit` @@ -495,8 +495,8 @@ visit f t = flip fromMaybe (f t) $ case out t of Abs x e -> abs' (annotation t) x <$> visit f e Tm body -> tm' (annotation t) <$> traverse (visit f) body -subTermsSetter_ :: (Traversable f, Ord v) => Setter' (Term f v a) (Term f v a) -subTermsSetter_ f tm = visit (Just . f) tm +-- subTermsSetter_ :: (Traversable f, Ord v) => Setter' (Term f v a) (Term f v a) +-- subTermsSetter_ f tm = visit (Just . f) tm -- | Apply an effectful function to an ABT tree top down, sequencing the results. visit' :: (Traversable f, Monad g, Ord v) From 9cb6c0c33726639a329146cbe03e70da15e43d41 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 2 Nov 2021 12:24:56 -0600 Subject: [PATCH 059/297] Renaming bad names --- .../Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index ef586741ea..a6214dcc26 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -597,16 +597,16 @@ termReferences_ = termFReferences_ :: (Ord tv, Monad m) => LensLike' m (Term.F tv ta pa a) SomeReferenceId termFReferences_ f t = (t & Term._Ref . Reference._DerivedId . unsafeInsidePrism _TermReference %%~ f) - >>= Term._Constructor . thing . unsafeInsidePrism _ConstructorReference %%~ f - >>= Term._Request . thing . unsafeInsidePrism _ConstructorReference %%~ f + >>= Term._Constructor . refConPain_ . unsafeInsidePrism _ConstructorReference %%~ f + >>= Term._Request . refConPain_ . unsafeInsidePrism _ConstructorReference %%~ f >>= Term._Ann . _2 . typeReferences_ %%~ f >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f >>= Term._TermLink . referentReferences %%~ f >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism _TypeReference %%~ f --- fixme rename -thing :: Traversal' (Reference.Reference, ConstructorId) (Reference.Id, ConstructorId) -thing f s = +-- | Casts the left side of a reference/constructor pair into a Reference.Id +refConPain_ :: Traversal' (Reference.Reference, ConstructorId) (Reference.Id, ConstructorId) +refConPain_ f s = case s of (Reference.Builtin _, _) -> pure s (Reference.DerivedId n, c) -> (\(n', c') -> (Reference.DerivedId n', c')) <$> f (n, c) From 6344a417c882d3a994beaab06ab18b336c5166a0 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 2 Nov 2021 14:34:56 -0400 Subject: [PATCH 060/297] Add type signatures and scoped type variables to get everything loadable into one ghci session --- .../src/Unison/Codebase/Editor/Propagate.hs | 9 +++++ .../Codebase/SqliteCodebase/Conversions.hs | 4 +++ .../src/Unison/CommandLine/OutputMessages.hs | 34 +++++++++++-------- .../src/Unison/Runtime/Interface.hs | 1 + .../src/Unison/Runtime/SparseVector.hs | 9 +++-- unison-core/src/Unison/ABT.hs | 5 ++- unison-core/src/Unison/Names.hs | 2 +- 7 files changed, 46 insertions(+), 18 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs index 5967cc9f67..e3f5c70b5e 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs @@ -56,6 +56,7 @@ import qualified Unison.Typechecker as Typechecker import qualified Unison.Runtime.IOSource as IOSource import qualified Unison.Hashing.V2.Convert as Hashing import Unison.WatchKind (WatchKind) +import Unison.NameSegment (NameSegment) type F m i v = Free (Command m i v) @@ -584,12 +585,20 @@ applyPropagate patch Edits {..} = do isPropagatedReferent (Referent.Con _ _ _) = True isPropagatedReferent (Referent.Ref r) = isPropagated r + terms0 :: Metadata.Star Referent NameSegment terms0 = Star3.replaceFacts replaceConstructor constructorReplacements _terms + terms :: Branch.Star Referent NameSegment terms = updateMetadatas Referent.Ref $ Star3.replaceFacts replaceTerm termEdits terms0 + types :: Branch.Star Reference NameSegment types = updateMetadatas id $ Star3.replaceFacts replaceType typeEdits _types + updateMetadatas :: + Ord r => + (Reference -> r) -> + Star3.Star3 r NameSegment Metadata.Type (Metadata.Type, Metadata.Value) -> + Star3.Star3 r NameSegment Metadata.Type (Metadata.Type, Metadata.Value) updateMetadatas ref s = clearPropagated $ Star3.mapD3 go s where clearPropagated s = foldl' go s allPatchTargets where diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index 0a898c8be1..fa126dd9e4 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -92,6 +92,8 @@ term1to2 h = where termF1to2 :: V1.Term.F V1.Symbol Ann Ann a -> V2.Term.F V2.Symbol a termF1to2 = go + + go :: V1.Term.F V1.Symbol Ann Ann a -> V2.Term.F V2.Symbol a go = \case V1.Term.Int i -> V2.Term.Int i V1.Term.Nat n -> V2.Term.Nat n @@ -116,8 +118,10 @@ term1to2 h = V1.Term.TermLink r -> V2.Term.TermLink (rreferent1to2 h r) V1.Term.TypeLink r -> V2.Term.TypeLink (reference1to2 r) V1.Term.Blank _ -> error "can't serialize term with blanks" + goCase (V1.Term.MatchCase p g b) = V2.Term.MatchCase (goPat p) g b + goPat :: V1.Pattern.Pattern a -> V2.Term.Pattern Text V2.Reference goPat = \case V1.Pattern.Unbound _ -> V2.Term.PUnbound diff --git a/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs b/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs index 189ac65774..af610e1469 100644 --- a/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs +++ b/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs @@ -94,7 +94,6 @@ import Unison.Term (Term) import Unison.Type (Type) import qualified Unison.TermPrinter as TermPrinter import qualified Unison.TypePrinter as TypePrinter -import qualified Unison.Util.ColorText as CT import Unison.Util.Monoid ( intercalateMap , unlessM ) @@ -250,7 +249,7 @@ notifyNumbered o = case o of prettyRemoteNamespace :: (RemoteRepo.ReadRepo, Maybe ShortBranchHash, Path.Path) - -> P.Pretty P.ColorText + -> Pretty prettyRemoteNamespace = P.group . P.text . uncurry3 RemoteRepo.printNamespace @@ -552,12 +551,12 @@ notifyUser dir o = case o of pure $ if null entries then P.lit "nothing to show" else numberedEntries entries where - numberedEntries :: [ShallowListEntry v a] -> P.Pretty P.ColorText + numberedEntries :: [ShallowListEntry v a] -> Pretty numberedEntries entries = (P.column3 . fmap f) ([(1::Integer)..] `zip` fmap formatEntry entries) where f (i, (p1, p2)) = (P.hiBlack . fromString $ show i <> ".", p1, p2) - formatEntry :: ShallowListEntry v a -> (P.Pretty P.ColorText, P.Pretty P.ColorText) + formatEntry :: ShallowListEntry v a -> (Pretty, Pretty) formatEntry = \case ShallowTermEntry (TermEntry _r hq ot _) -> (P.syntaxToColor . prettyHashQualified' . fmap Name.fromSegment $ hq @@ -813,7 +812,7 @@ notifyUser dir o = case o of if null patches then P.lit "nothing to show" else numberedPatches patches where - numberedPatches :: Set Name -> P.Pretty P.ColorText + numberedPatches :: Set Name -> Pretty numberedPatches patches = (P.column2 . fmap format) ([(1::Integer)..] `zip` (toList patches)) where @@ -867,12 +866,12 @@ notifyUser dir o = case o of , P.indentN 2 (P.lines (map qualifyTerm tms ++ map qualifyType tps)) ] where - qualifyTerm :: Referent -> P.Pretty P.ColorText + qualifyTerm :: Referent -> Pretty qualifyTerm = P.syntaxToColor . case hq of HQ.NameOnly n -> prettyNamedReferent hashLen n HQ.HashQualified n _ -> prettyNamedReferent hashLen n HQ.HashOnly _ -> prettyReferent hashLen - qualifyType :: Reference -> P.Pretty P.ColorText + qualifyType :: Reference -> Pretty qualifyType = P.syntaxToColor . case hq of HQ.NameOnly n -> prettyNamedReference hashLen n HQ.HashQualified n _ -> prettyNamedReference hashLen n @@ -893,9 +892,9 @@ notifyUser dir o = case o of where name :: Name name = Path.toName' (HQ'.toName (Path.unsplitHQ' p)) - qualifyTerm :: Referent -> P.Pretty P.ColorText + qualifyTerm :: Referent -> Pretty qualifyTerm = P.syntaxToColor . prettyNamedReferent hashLen name - qualifyType :: Reference -> P.Pretty P.ColorText + qualifyType :: Reference -> Pretty qualifyType = P.syntaxToColor . prettyNamedReference hashLen name TermAmbiguous _ _ -> pure "That term is ambiguous." HashAmbiguous h rs -> pure . P.callout "\129300" . P.lines $ [ @@ -957,7 +956,7 @@ notifyUser dir o = case o of P.numberedList . fmap renderEntry $ entries ] where - renderEntry :: Output.ReflogEntry -> P.Pretty CT.ColorText + renderEntry :: Output.ReflogEntry -> Pretty renderEntry (Output.ReflogEntry hash reason) = P.wrap $ P.blue (prettySBH hash) <> " : " <> P.text reason History _cap history tail -> pure $ @@ -1653,7 +1652,7 @@ showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput{..} = 13. ┌ability Yyz (+1 metadata) 14. └ability copies.Yyz (+2 metadata) -} - prettyAddTypes :: [OBD.AddedTypeDisplay v a] -> Numbered Pretty + prettyAddTypes :: forall a. [OBD.AddedTypeDisplay v a] -> Numbered Pretty prettyAddTypes = fmap P.lines . traverse prettyGroup where prettyGroup :: OBD.AddedTypeDisplay v a -> Numbered Pretty prettyGroup (hqmds, r, odecl) = do @@ -1668,7 +1667,7 @@ showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput{..} = 0 -> mempty c -> " (+" <> P.shown c <> " metadata)" - prettyAddTerms :: [OBD.AddedTermDisplay v a] -> Numbered Pretty + prettyAddTerms :: forall a. [OBD.AddedTermDisplay v a] -> Numbered Pretty prettyAddTerms = fmap (P.column3 . mconcat) . traverse prettyGroup . reorderTerms where reorderTerms = sortOn (not . Referent.isConstructor . view _2) prettyGroup :: OBD.AddedTermDisplay v a -> Numbered [(Pretty, Pretty, Pretty)] @@ -1677,6 +1676,11 @@ showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput{..} = let (nums, names, decls) = unzip3 pairs boxLeft = case hqmds of _:_:_ -> P.boxLeft; _ -> id pure $ zip3 nums (boxLeft names) decls + prettyLine :: + Referent -> + Maybe (Type v a) -> + (HQ'.HashQualified Name, [OBD.MetadataDisplay v a]) -> + Numbered (Pretty, Pretty, Pretty) prettyLine r otype (hq, mds) = do n <- numHQ' newPath hq r pure . (n, phq' hq, ) $ ": " <> prettyType otype <> case length mds of @@ -1711,7 +1715,7 @@ showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput{..} = 12. ability BadType 13. patch defunctThingy -} - prettyRemoveTypes :: [OBD.RemovedTypeDisplay v a] -> Numbered Pretty + prettyRemoveTypes :: forall a. [OBD.RemovedTypeDisplay v a] -> Numbered Pretty prettyRemoveTypes = fmap P.lines . traverse prettyGroup where prettyGroup :: OBD.RemovedTypeDisplay v a -> Numbered Pretty prettyGroup (hqs, r, odecl) = do @@ -1719,11 +1723,12 @@ showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput{..} = let (nums, decls) = unzip lines boxLeft = case hqs of _:_:_ -> P.boxLeft; _ -> id pure . P.column2 $ zip nums (boxLeft decls) + prettyLine :: Reference -> Maybe (DD.DeclOrBuiltin v a) -> HQ'.HashQualified Name -> Numbered (Pretty, Pretty) prettyLine r odecl hq = do n <- numHQ' newPath hq (Referent.Ref r) pure (n, prettyDecl hq odecl) - prettyRemoveTerms :: [OBD.RemovedTermDisplay v a] -> Numbered Pretty + prettyRemoveTerms :: forall a. [OBD.RemovedTermDisplay v a] -> Numbered Pretty prettyRemoveTerms = fmap (P.column3 . mconcat) . traverse prettyGroup . reorderTerms where reorderTerms = sortOn (not . Referent.isConstructor . view _2) prettyGroup :: OBD.RemovedTermDisplay v a -> Numbered [(Pretty, Pretty, Pretty)] @@ -1794,6 +1799,7 @@ showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput{..} = num <- numHQ p hq r pure (x <> num <> " " <> phq hq, ": " <> prettyType otype) + prettyType :: Maybe (Type v a) -> Pretty prettyType = maybe (P.red "type not found") (TypePrinter.pretty ppe) prettyDecl hq = maybe (P.red "type not found") diff --git a/parser-typechecker/src/Unison/Runtime/Interface.hs b/parser-typechecker/src/Unison/Runtime/Interface.hs index 75da32bd78..7f803d4977 100644 --- a/parser-typechecker/src/Unison/Runtime/Interface.hs +++ b/parser-typechecker/src/Unison/Runtime/Interface.hs @@ -528,6 +528,7 @@ buildSCache cs crsrc trsrc ftm fty intsrc rtmsrc rtysrc typeRefs = foldMap Set.singleton trs restrictTmW m = restrictKeys m combKeys + restrictTmR :: Map Reference a -> Map Reference a restrictTmR m = Map.restrictKeys m termRefs restrictTyW m = restrictKeys m typeKeys diff --git a/parser-typechecker/src/Unison/Runtime/SparseVector.hs b/parser-typechecker/src/Unison/Runtime/SparseVector.hs index 3ae57b9102..9cdd34ffb0 100644 --- a/parser-typechecker/src/Unison/Runtime/SparseVector.hs +++ b/parser-typechecker/src/Unison/Runtime/SparseVector.hs @@ -9,6 +9,7 @@ import Data.Bits ((.|.), (.&.)) import qualified Data.Bits as B import qualified GHC.Exts as Exts import qualified Data.Vector.Unboxed as UV +import Control.Monad.ST (ST) -- Denotes a `Nat -> Maybe a`. -- Representation is a `Vector a` along with a bitset @@ -27,7 +28,8 @@ map f v = v { elements = UV.map f (elements v) } -- Denotationally, a mask is a `Nat -> Bool`, so this implementation -- means: `mask ok v n = if ok n then v n else Nothing` -mask :: (UV.Unbox a, B.FiniteBits bits) +mask :: forall a bits. + (UV.Unbox a, B.FiniteBits bits) => bits -> SparseVector bits a -> SparseVector bits a mask bits a = if indices' == bits then a -- check if mask is a superset @@ -37,6 +39,7 @@ mask bits a = where indices' = indices a .&. bits eas = elements a + go :: MUV.STVector s a -> bits -> bits -> Int -> Int -> ST s (MUV.STVector s a) go !out !indAs !indBs !i !k = if indAs == B.zeroBits || indBs == B.zeroBits then pure out else let @@ -95,7 +98,8 @@ choose bits t f merge (mask bits t) (mask (B.complement bits) f) -- Denotationally: `merge a b n = a n <|> b n` -merge :: (B.FiniteBits bits, UV.Unbox a) +merge :: forall a bits. + (B.FiniteBits bits, UV.Unbox a) => SparseVector bits a -> SparseVector bits a -> SparseVector bits a @@ -106,6 +110,7 @@ merge a b = SparseVector indices' tricky vec <- MUV.new (B.popCount indices') go vec (indices a) (indices b) 0 0 0 (!eas, !ebs) = (elements a, elements b) + go :: MUV.STVector s a -> bits -> bits -> Int -> Int -> Int -> ST s (MUV.STVector s a) go !out !indAs !indBs !i !j !k = if indAs == B.zeroBits || indBs == B.zeroBits then pure out else let diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index 1473e44822..f84aee6ff3 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -540,10 +540,13 @@ data Subst f v a = , bindInheritAnnotation :: forall b . Term f v b -> Term f v a , variable :: v } -unabs1 :: (Foldable f, Functor f, Var v) => Term f v a -> Maybe (Subst f v a) +unabs1 :: forall a f v. (Foldable f, Functor f, Var v) => Term f v a -> Maybe (Subst f v a) unabs1 (Term _ _ (Abs v body)) = Just (Subst freshen bind bindInheritAnnotation v) where + freshen :: (v -> t) -> t freshen f = f v + bind :: Term f v a -> Term f v a bind x = subst v x body + bindInheritAnnotation :: Term f v b -> Term f v a bindInheritAnnotation x = substInheritAnnotation v x body unabs1 _ = Nothing diff --git a/unison-core/src/Unison/Names.hs b/unison-core/src/Unison/Names.hs index c0a60bc8a0..8eb966aba8 100644 --- a/unison-core/src/Unison/Names.hs +++ b/unison-core/src/Unison/Names.hs @@ -417,6 +417,7 @@ expandWildcardImport prefix ns = [ (suffix, full) | Just (suffix,full) <- go <$> R.toList (terms ns) ] <> [ (suffix, full) | Just (suffix,full) <- go <$> R.toList (types ns) ] where + go :: (Name, a) -> Maybe (Name, Name) go (full, _) = do -- running example: -- prefix = Int @@ -453,4 +454,3 @@ hashQualifyRelation fromNamedRef rel = R.map go rel if Set.size (R.lookupDom n rel) > 1 then (HQ.take numHashChars $ fromNamedRef n r, r) else (HQ.NameOnly n, r) - From 06a50aed5b716f472e39dfbe33e519ff7e23f670 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 2 Nov 2021 14:35:31 -0400 Subject: [PATCH 061/297] remove merge markers --- .../Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index fec9f9da7a..74daa5dbf8 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -18,37 +18,27 @@ import Data.Maybe import qualified Data.Set as Set import Data.Tuple (swap) import qualified Data.Zip as Zip -<<<<<<< HEAD -import qualified U.Codebase.Causal as C -import U.Codebase.HashTags (BranchHash (BranchHash), CausalHash (CausalHash, unCausalHash)) -||||||| 639b3d44c import qualified U.Codebase.Causal as C import U.Codebase.HashTags (CausalHash (CausalHash, unCausalHash), BranchHash (BranchHash), CausalHash) -======= import U.Codebase.HashTags (BranchHash (BranchHash), CausalHash (CausalHash)) ->>>>>>> 2ed8e687adf2b2b4443add0545c2ba9a89c9e3ad import qualified U.Codebase.Reference as UReference import qualified U.Codebase.Referent as UReferent import qualified U.Codebase.Sqlite.Branch.Format as S.Branch import qualified U.Codebase.Sqlite.Branch.Full as S import qualified U.Codebase.Sqlite.Branch.Full as S.Branch.Full -<<<<<<< HEAD import U.Codebase.Sqlite.Causal (DbCausal, GDbCausal (..)) import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Connection (Connection) import U.Codebase.Sqlite.DbId (BranchHashId (BranchHashId, unBranchHashId), CausalHashId (CausalHashId, unCausalHashId), HashId (HashId), ObjectId) import qualified U.Codebase.Sqlite.LocalizeObject as S.LocalizeObject -||||||| 639b3d44c import U.Codebase.Sqlite.Connection (Connection) import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Causal (DbCausal, GDbCausal (..)) import U.Codebase.Sqlite.DbId (BranchHashId (BranchHashId, unBranchHashId), CausalHashId (CausalHashId, unCausalHashId), HashId (HashId), ObjectId) -======= import U.Codebase.Sqlite.Causal (GDbCausal (..)) import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Connection (Connection) import U.Codebase.Sqlite.DbId (BranchHashId (BranchHashId, unBranchHashId), BranchObjectId (BranchObjectId, unBranchObjectId), ObjectId, CausalHashId, HashId) ->>>>>>> 2ed8e687adf2b2b4443add0545c2ba9a89c9e3ad import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Reference as S.Reference From 275a8825d6ce0749e7490ee3c55ee0c81e7d69c1 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 2 Nov 2021 15:36:00 -0400 Subject: [PATCH 062/297] begin hashing out migratePatch --- .../U/Codebase/Sqlite/Branch/Format.hs | 7 +- .../U/Codebase/Sqlite/Operations.hs | 250 ++++++++---------- .../U/Codebase/Sqlite/Patch/Format.hs | 54 +++- codebase2/codebase-sqlite/package.yaml | 1 + .../unison-codebase-sqlite.cabal | 1 + .../SqliteCodebase/MigrateSchema12.hs | 20 +- .../src/Unison/Util/EnumContainers.hs | 2 +- 7 files changed, 179 insertions(+), 156 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs index 8901c2bd16..9b2a525fdb 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Format.hs @@ -51,6 +51,10 @@ localToDbBranch :: BranchLocalIds -> LocalBranch -> DbBranch localToDbBranch li = Branch.Full.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) +localToDbDiff :: BranchLocalIds -> LocalDiff -> Diff +localToDbDiff li = + Branch.Diff.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) + lookupBranchLocalText :: BranchLocalIds -> LocalTextId -> TextId lookupBranchLocalText li (LocalTextId w) = branchTextLookup li Vector.! fromIntegral w @@ -62,6 +66,3 @@ lookupBranchLocalPatch li (LocalPatchObjectId w) = branchPatchLookup li Vector.! lookupBranchLocalChild :: BranchLocalIds -> LocalBranchChildId -> (BranchObjectId, CausalHashId) lookupBranchLocalChild li (LocalBranchChildId w) = branchChildLookup li Vector.! fromIntegral w - -localToDbDiff :: BranchLocalIds -> LocalDiff -> Diff -localToDbDiff li = Branch.Diff.quadmap (lookupBranchLocalText li) (lookupBranchLocalDefn li) (lookupBranchLocalPatch li) (lookupBranchLocalChild li) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index aec0efca75..3bad5b9ddf 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -10,93 +10,99 @@ {-# LANGUAGE TypeApplications #-} {-# LANGUAGE ViewPatterns #-} -module U.Codebase.Sqlite.Operations ( - -- * data version - dataVersion, - - -- * branches - saveRootBranch, - loadMaybeRootCausalHash, - loadRootCausalHash, - loadRootCausal, - saveBranch, - loadCausalBranchByCausalHash, - - -- * terms - saveTermComponent, - loadTermComponent, - loadTermByReference, - loadTypeOfTermByTermReference, - - -- * decls - saveDeclComponent, - loadDeclComponent, - loadDeclByReference, - getDeclTypeById, - - -- * patches - savePatch, - loadPatchById, - - -- * test for stuff in codebase - objectExistsForHash, - - -- * dubiously exported stuff involving database ids - loadHashByObjectId, - primaryHashToMaybeObjectId, - primaryHashToMaybePatchObjectId, - - -- * watch expression cache - saveWatch, - loadWatch, - listWatches, - clearWatches, - - -- * indexes - -- ** nearest common ancestor - before, - lca, - -- ** prefix index - componentReferencesByPrefix, - termReferentsByPrefix, - declReferentsByPrefix, - causalHashesByPrefix, - -- ** dependents index - dependents, - dependentsOfComponent, - -- ** type index - addTypeToIndexForTerm, - termsHavingType, - -- ** type mentions index - addTypeMentionsToIndexForTerm, - termsMentioningType, - - -- * delete me - getCycleLen, - - -- * low-level stuff - liftQ, - loadDbBranchByObjectId, - saveBranchObject, - - -- * Error types - Error(..), - DecodeError(..), - - -- ** Constraint kinds - EDB, - - -- * somewhat unexpectedly unused definitions - c2sReferenceId, - c2sReferentId, - diffPatch, - decodeTermElementWithType, - loadTermWithTypeByReference, - s2cTermWithType, - declReferencesByPrefix, - branchHashesByPrefix, - derivedDependencies, -) where +module U.Codebase.Sqlite.Operations + ( -- * data version + dataVersion, + + -- * branches + saveRootBranch, + loadMaybeRootCausalHash, + loadRootCausalHash, + loadRootCausal, + saveBranch, + loadCausalBranchByCausalHash, + + -- * terms + saveTermComponent, + loadTermComponent, + loadTermByReference, + loadTypeOfTermByTermReference, + + -- * decls + saveDeclComponent, + loadDeclComponent, + loadDeclByReference, + getDeclTypeById, + + -- * patches + savePatch, + loadPatchById, + + -- * test for stuff in codebase + objectExistsForHash, + + -- * dubiously exported stuff involving database ids + loadHashByObjectId, + primaryHashToMaybeObjectId, + primaryHashToMaybePatchObjectId, + + -- * watch expression cache + saveWatch, + loadWatch, + listWatches, + clearWatches, + + -- * indexes + + -- ** nearest common ancestor + before, + lca, + + -- ** prefix index + componentReferencesByPrefix, + termReferentsByPrefix, + declReferentsByPrefix, + causalHashesByPrefix, + + -- ** dependents index + dependents, + dependentsOfComponent, + + -- ** type index + addTypeToIndexForTerm, + termsHavingType, + + -- ** type mentions index + addTypeMentionsToIndexForTerm, + termsMentioningType, + + -- * delete me + getCycleLen, + + -- * low-level stuff + liftQ, + loadDbBranchByObjectId, + saveBranchObject, + + -- * Error types + Error (..), + DecodeError (..), + + -- ** Constraint kinds + EDB, + + -- * somewhat unexpectedly unused definitions + c2sReferenceId, + c2sReferentId, + diffPatch, + decodeTermElementWithType, + loadTermWithTypeByReference, + s2cTermWithType, + declReferencesByPrefix, + branchHashesByPrefix, + derivedDependencies, + ) +where import Control.Lens (Lens') import qualified Control.Lens as Lens @@ -172,11 +178,9 @@ import U.Codebase.Sqlite.LocalIds import qualified U.Codebase.Sqlite.LocalIds as LocalIds import qualified U.Codebase.Sqlite.ObjectType as OT import qualified U.Codebase.Sqlite.Patch.Diff as S -import qualified U.Codebase.Sqlite.Patch.Diff as S.PatchDiff import qualified U.Codebase.Sqlite.Patch.Format as S -import qualified U.Codebase.Sqlite.Patch.Format as S.PatchFormat -import qualified U.Codebase.Sqlite.Patch.Full as S -import qualified U.Codebase.Sqlite.Patch.Full as S.Patch.Full +import qualified U.Codebase.Sqlite.Patch.Format as S.Patch.Format +import qualified U.Codebase.Sqlite.Patch.Full as S (LocalPatch, Patch, Patch' (..)) import qualified U.Codebase.Sqlite.Patch.TermEdit as S import qualified U.Codebase.Sqlite.Patch.TermEdit as S.TermEdit import qualified U.Codebase.Sqlite.Patch.TypeEdit as S @@ -322,8 +326,9 @@ loadRootCausalHash :: EDB m => m CausalHash loadRootCausalHash = loadCausalHashById =<< liftQ Q.loadNamespaceRoot loadMaybeRootCausalHash :: EDB m => m (Maybe CausalHash) -loadMaybeRootCausalHash = runMaybeT $ - loadCausalHashById =<< MaybeT (liftQ Q.loadMaybeNamespaceRoot) +loadMaybeRootCausalHash = + runMaybeT $ + loadCausalHashById =<< MaybeT (liftQ Q.loadMaybeNamespaceRoot) -- * Reference transformations @@ -398,13 +403,13 @@ c2lPatch (C.Branch.Patch termEdits typeEdits) = done :: EDB m => (a, (Seq Text, Seq H.Hash, Seq H.Hash)) -> - m (S.PatchFormat.PatchLocalIds, a) + m (S.Patch.Format.PatchLocalIds, a) done (lPatch, (textValues, hashValues, defnValues)) = do textIds <- liftQ $ traverse Q.saveText textValues hashIds <- liftQ $ traverse Q.saveHashHash hashValues objectIds <- traverse primaryHashToExistingObjectId defnValues let ids = - S.PatchFormat.LocalIds + S.Patch.Format.LocalIds (Vector.fromList (Foldable.toList textIds)) (Vector.fromList (Foldable.toList hashIds)) (Vector.fromList (Foldable.toList objectIds)) @@ -530,6 +535,7 @@ componentByObjectId id = do -- * Codebase operations -- ** Saving & loading terms + loadTermComponent :: EDB m => H.Hash -> MaybeT m [(C.Term Symbol, C.Term.Type Symbol)] loadTermComponent h = do MaybeT (anyHashToMaybeObjectId h) @@ -1186,7 +1192,7 @@ loadBranchByCausalHashId id = do loadDbBranchByObjectId :: EDB m => Db.BranchObjectId -> m S.DbBranch loadDbBranchByObjectId id = deserializeBranchObject id >>= \case - S.BranchFormat.Full li f -> pure (S.BranchFormat.localToDbBranch li f) + S.BranchFormat.Full li f -> pure (S.BranchFormat.localToDbBranch li f) S.BranchFormat.Diff r li d -> doDiff r [S.BranchFormat.localToDbDiff li d] where deserializeBranchObject :: EDB m => Db.BranchObjectId -> m S.BranchFormat @@ -1298,57 +1304,25 @@ loadBranchByObjectId id = loadPatchById :: EDB m => Db.PatchObjectId -> m C.Branch.Patch loadPatchById patchId = + loadDbPatchById patchId >>= s2cPatch + +loadDbPatchById :: EDB m => Db.PatchObjectId -> m S.Patch +loadDbPatchById patchId = deserializePatchObject patchId >>= \case - S.PatchFormat.Full li p -> s2cPatch (l2sPatchFull li p) - S.PatchFormat.Diff ref li d -> doDiff ref [l2sPatchDiff li d] + S.Patch.Format.Full li p -> pure (S.Patch.Format.localPatchToPatch li p) + S.Patch.Format.Diff ref li d -> doDiff ref [S.Patch.Format.localPatchDiffToPatchDiff li d] where - doDiff :: EDB m => Db.PatchObjectId -> [S.PatchDiff] -> m C.Branch.Patch + doDiff :: EDB m => Db.PatchObjectId -> [S.PatchDiff] -> m S.Patch doDiff ref ds = deserializePatchObject ref >>= \case - S.PatchFormat.Full li f -> joinFull (l2sPatchFull li f) ds - S.PatchFormat.Diff ref' li' d' -> doDiff ref' (l2sPatchDiff li' d' : ds) - joinFull :: EDB m => S.Patch -> [S.PatchDiff] -> m C.Branch.Patch - joinFull f [] = s2cPatch f - joinFull (S.Patch termEdits typeEdits) (S.PatchDiff addedTermEdits addedTypeEdits removedTermEdits removedTypeEdits : ds) = joinFull f' ds - where - f' = S.Patch (addRemove addedTermEdits removedTermEdits termEdits) (addRemove addedTypeEdits removedTypeEdits typeEdits) - addRemove :: (Ord a, Ord b) => Map a (Set b) -> Map a (Set b) -> Map a (Set b) -> Map a (Set b) - addRemove add del src = - (Map.unionWith (<>) add (Map.differenceWith remove src del)) - remove :: Ord b => Set b -> Set b -> Maybe (Set b) - remove src del = - let diff = Set.difference src del - in if diff == mempty then Nothing else Just diff - - -- implementation detail of loadPatchById? - lookupPatchLocalText :: S.PatchLocalIds -> LocalTextId -> Db.TextId - lookupPatchLocalText li (LocalTextId w) = S.PatchFormat.patchTextLookup li Vector.! fromIntegral w - - lookupPatchLocalHash :: S.PatchLocalIds -> LocalHashId -> Db.HashId - lookupPatchLocalHash li (LocalHashId w) = S.PatchFormat.patchHashLookup li Vector.! fromIntegral w - - lookupPatchLocalDefn :: S.PatchLocalIds -> LocalDefnId -> Db.ObjectId - lookupPatchLocalDefn li (LocalDefnId w) = S.PatchFormat.patchDefnLookup li Vector.! fromIntegral w - - l2sPatchFull :: S.PatchFormat.PatchLocalIds -> S.LocalPatch -> S.Patch - l2sPatchFull li = - S.Patch.Full.trimap - (lookupPatchLocalText li) - (lookupPatchLocalHash li) - (lookupPatchLocalDefn li) - - l2sPatchDiff :: S.PatchFormat.PatchLocalIds -> S.LocalPatchDiff -> S.PatchDiff - l2sPatchDiff li = - S.PatchDiff.trimap - (lookupPatchLocalText li) - (lookupPatchLocalHash li) - (lookupPatchLocalDefn li) + S.Patch.Format.Full li f -> pure (S.Patch.Format.applyPatchDiffs (S.Patch.Format.localPatchToPatch li f) ds) + S.Patch.Format.Diff ref' li' d' -> doDiff ref' (S.Patch.Format.localPatchDiffToPatchDiff li' d' : ds) savePatch :: EDB m => PatchHash -> C.Branch.Patch -> m Db.PatchObjectId savePatch h c = do (li, lPatch) <- c2lPatch c hashId <- Q.saveHashHash (unPatchHash h) - let bytes = S.putBytes S.putPatchFormat $ S.PatchFormat.Full li lPatch + let bytes = S.putBytes S.putPatchFormat $ S.Patch.Format.Full li lPatch Db.PatchObjectId <$> Q.saveObject hashId OT.Patch bytes s2cPatch :: EDB m => S.Patch -> m C.Branch.Patch diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs index 5cb2a5ddd5..1a39a3ad8b 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs @@ -2,16 +2,21 @@ module U.Codebase.Sqlite.Patch.Format ( PatchFormat (..), PatchLocalIds (..), SyncPatchFormat (..), + applyPatchDiffs, localPatchToPatch, + localPatchDiffToPatchDiff, ) where +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set import Data.Vector (Vector) import qualified Data.Vector as Vector import U.Codebase.Sqlite.DbId (HashId, ObjectId, PatchObjectId, TextId) import U.Codebase.Sqlite.LocalIds (LocalDefnId (LocalDefnId), LocalHashId (LocalHashId), LocalTextId (LocalTextId)) -import U.Codebase.Sqlite.Patch.Diff (LocalPatchDiff) -import U.Codebase.Sqlite.Patch.Full (LocalPatch, Patch) +import U.Codebase.Sqlite.Patch.Diff (LocalPatchDiff, PatchDiff, PatchDiff' (..)) +import qualified U.Codebase.Sqlite.Patch.Diff as Patch.Diff +import U.Codebase.Sqlite.Patch.Full (LocalPatch, Patch, Patch' (..)) import qualified U.Codebase.Sqlite.Patch.Full as Patch.Full import Unison.Prelude @@ -29,15 +34,46 @@ data SyncPatchFormat = SyncFull PatchLocalIds ByteString | SyncDiff PatchObjectId PatchLocalIds ByteString +-- | Apply a list of patch diffs to a patch, left to right. +applyPatchDiffs :: Patch -> [PatchDiff] -> Patch +applyPatchDiffs = + foldl' apply + where + apply :: Patch -> PatchDiff -> Patch + apply (Patch termEdits typeEdits) (PatchDiff addedTermEdits addedTypeEdits removedTermEdits removedTypeEdits) = + let !termEdits' = addRemove addedTermEdits removedTermEdits termEdits + !typeEdits' = addRemove addedTypeEdits removedTypeEdits typeEdits + in + Patch + { termEdits = termEdits', + typeEdits = typeEdits' + } + + addRemove :: (Ord a, Ord b) => Map a (Set b) -> Map a (Set b) -> Map a (Set b) -> Map a (Set b) + addRemove add del src = + Map.unionWith (<>) add (Map.differenceWith remove src del) + + remove :: Ord b => Set b -> Set b -> Maybe (Set b) + remove src del = + let diff = Set.difference src del + in if Set.null diff then Nothing else Just diff + localPatchToPatch :: PatchLocalIds -> LocalPatch -> Patch localPatchToPatch li = Patch.Full.trimap (lookupPatchLocalText li) (lookupPatchLocalHash li) (lookupPatchLocalDefn li) - where - lookupPatchLocalText :: PatchLocalIds -> LocalTextId -> TextId - lookupPatchLocalText li (LocalTextId w) = patchTextLookup li Vector.! fromIntegral w - lookupPatchLocalHash :: PatchLocalIds -> LocalHashId -> HashId - lookupPatchLocalHash li (LocalHashId w) = patchHashLookup li Vector.! fromIntegral w +localPatchDiffToPatchDiff :: PatchLocalIds -> LocalPatchDiff -> PatchDiff +localPatchDiffToPatchDiff li = + Patch.Diff.trimap + (lookupPatchLocalText li) + (lookupPatchLocalHash li) + (lookupPatchLocalDefn li) + +lookupPatchLocalText :: PatchLocalIds -> LocalTextId -> TextId +lookupPatchLocalText li (LocalTextId w) = patchTextLookup li Vector.! fromIntegral w + +lookupPatchLocalHash :: PatchLocalIds -> LocalHashId -> HashId +lookupPatchLocalHash li (LocalHashId w) = patchHashLookup li Vector.! fromIntegral w - lookupPatchLocalDefn :: PatchLocalIds -> LocalDefnId -> ObjectId - lookupPatchLocalDefn li (LocalDefnId w) = patchDefnLookup li Vector.! fromIntegral w +lookupPatchLocalDefn :: PatchLocalIds -> LocalDefnId -> ObjectId +lookupPatchLocalDefn li (LocalDefnId w) = patchDefnLookup li Vector.! fromIntegral w diff --git a/codebase2/codebase-sqlite/package.yaml b/codebase2/codebase-sqlite/package.yaml index fbb44ec769..6763204c62 100644 --- a/codebase2/codebase-sqlite/package.yaml +++ b/codebase2/codebase-sqlite/package.yaml @@ -3,6 +3,7 @@ github: unisonweb/unison default-extensions: - ApplicativeDo + - BangPatterns - BlockArguments - ConstraintKinds - DeriveFunctor diff --git a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal index bfed069c77..9b6712975c 100644 --- a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal +++ b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal @@ -48,6 +48,7 @@ library ./ default-extensions: ApplicativeDo + BangPatterns BlockArguments ConstraintKinds DeriveFunctor diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 74daa5dbf8..fbcd7ce261 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -34,7 +34,7 @@ import qualified U.Codebase.Sqlite.LocalizeObject as S.LocalizeObject import U.Codebase.Sqlite.Connection (Connection) import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Causal (DbCausal, GDbCausal (..)) -import U.Codebase.Sqlite.DbId (BranchHashId (BranchHashId, unBranchHashId), CausalHashId (CausalHashId, unCausalHashId), HashId (HashId), ObjectId) +import U.Codebase.Sqlite.DbId (PatchObjectId(..), BranchHashId (BranchHashId, unBranchHashId), CausalHashId (CausalHashId, unCausalHashId), HashId (HashId), ObjectId) import U.Codebase.Sqlite.Causal (GDbCausal (..)) import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Connection (Connection) @@ -213,10 +213,9 @@ migrationSync = Sync \case -- 1. ??? -- 2. Synced W _watchKind _idH -> undefined - Patch {} -> undefined - -migratePatch :: Hash -> ByteString -> m (Sync.TrySyncResult Entity) -migratePatch = error "not implemented" + Patch objectId -> do + Env{db} <- ask + lift (migratePatch db (PatchObjectId objectId)) runDB :: MonadIO m => Connection -> ReaderT Connection (ExceptT Ops.Error (ExceptT Q.Integrity m)) a -> m a runDB conn = (runExceptT >=> err) . (runExceptT >=> err) . flip runReaderT conn @@ -362,6 +361,17 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, hash) pure Sync.Done +migratePatch :: MonadIO m => Connection -> Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) +migratePatch conn oldObjectId = do + -- 1. Read old patch out of the codebase. + -- 2. Determine whether all things the patch refers to are built. + -- 3. If not, return those as a `Missing`. + -- 4. Otherwise, update the things the patch references. + -- 5. Hash it. + -- 6. Store it. + -- 7. Update migratation state, recording old->new patch mapping. + undefined + -- Project an S.Referent'' into its SomeReferenceObjId's someReferent_ :: Traversal' (S.Branch.Full.Referent'' t ObjectId) SomeReferenceObjId someReferent_ = diff --git a/parser-typechecker/src/Unison/Util/EnumContainers.hs b/parser-typechecker/src/Unison/Util/EnumContainers.hs index b48f1e567c..ba1b8d246e 100644 --- a/parser-typechecker/src/Unison/Util/EnumContainers.hs +++ b/parser-typechecker/src/Unison/Util/EnumContainers.hs @@ -127,7 +127,7 @@ mapToList :: EnumKey k => EnumMap k a -> [(k, a)] mapToList (EM m) = first intToKey <$> IM.toList m (!) :: EnumKey k => EnumMap k a -> k -> a -EM m ! e = m IM.! keyToInt e +(!) (EM m) e = m IM.! keyToInt e findMin :: EnumKey k => EnumSet k -> k findMin (ES s) = intToKey $ IS.findMin s From 33000db21f05a788022946d7e517ee4a70bee9b3 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 2 Nov 2021 14:18:39 -0600 Subject: [PATCH 063/297] write patternReferences_ traversal --- .../SqliteCodebase/MigrateSchema12.hs | 47 +++++++++++++++---- 1 file changed, 38 insertions(+), 9 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 548f335028..6d76e9e9b2 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -2,6 +2,7 @@ {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE ApplicativeDo #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where @@ -61,6 +62,7 @@ import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) +import qualified Unison.Pattern as Pattern -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -634,25 +636,52 @@ termReferences_ = termFReferences_ :: (Ord tv, Monad m) => LensLike' m (Term.F tv ta pa a) SomeReferenceId termFReferences_ f t = (t & Term._Ref . Reference._DerivedId . unsafeInsidePrism _TermReference %%~ f) - >>= Term._Constructor . refConPain_ . unsafeInsidePrism _ConstructorReference %%~ f - >>= Term._Request . refConPain_ . unsafeInsidePrism _ConstructorReference %%~ f + >>= Term._Constructor . someRefCon_ %%~ f + >>= Term._Request . someRefCon_ %%~ f >>= Term._Ann . _2 . typeReferences_ %%~ f >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f >>= Term._TermLink . referentReferences %%~ f >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism _TypeReference %%~ f -- | Casts the left side of a reference/constructor pair into a Reference.Id -refConPain_ :: Traversal' (Reference.Reference, ConstructorId) (Reference.Id, ConstructorId) -refConPain_ f s = - case s of - (Reference.Builtin _, _) -> pure s - (Reference.DerivedId n, c) -> (\(n', c') -> (Reference.DerivedId n', c')) <$> f (n, c) +someRefCon_ :: Traversal' (Reference.Reference, ConstructorId) SomeReferenceId +someRefCon_ = refConPair_ . unsafeInsidePrism _ConstructorReference + where + refConPair_ :: Traversal' (Reference.Reference, ConstructorId) (Reference.Id, ConstructorId) + refConPair_ f s = + case s of + (Reference.Builtin _, _) -> pure s + (Reference.DerivedId n, c) -> (\(n', c') -> (Reference.DerivedId n', c')) <$> f (n, c) patternReferences_ :: Traversal' (Pattern loc) SomeReferenceId -patternReferences_ = undefined -- types @Reference.Id +patternReferences_ f = \case + p@(Pattern.Unbound {}) -> pure p + p@(Pattern.Var {}) -> pure p + p@(Pattern.Boolean {}) -> pure p + p@(Pattern.Int {}) -> pure p + p@(Pattern.Nat {}) -> pure p + p@(Pattern.Float {}) -> pure p + p@(Pattern.Text {}) -> pure p + p@(Pattern.Char {}) -> pure p + (Pattern.Constructor loc ref conId patterns) -> + (\(newRef, newConId) newPatterns -> Pattern.Constructor loc newRef newConId newPatterns) + <$> ((ref, conId) & someRefCon_ %%~ f) + <*> (patterns & traversed . patternReferences_ %%~ f) + (Pattern.As loc pat) -> Pattern.As loc <$> patternReferences_ f pat + (Pattern.EffectPure loc pat) -> Pattern.EffectPure loc <$> patternReferences_ f pat + (Pattern.EffectBind loc ref conId patterns pat) -> + do + (\(newRef, newConId) newPatterns newPat -> Pattern.EffectBind loc newRef newConId newPatterns newPat) + <$> ((ref, conId) & someRefCon_ %%~ f) + <*> (patterns & traversed . patternReferences_ %%~ f) + <*> (patternReferences_ f pat) + (Pattern.SequenceLiteral loc patterns) -> + Pattern.SequenceLiteral loc <$> (patterns & traversed . patternReferences_ %%~ f) + Pattern.SequenceOp loc pat seqOp pat2 -> do + Pattern.SequenceOp loc <$> patternReferences_ f pat <*> pure seqOp <*> patternReferences_ f pat2 referentReferences :: Traversal' Referent.Referent SomeReferenceId -referentReferences = undefined +referentReferences f = undefined -- structural type Ping x = P1 (Pong x) -- P1 : forall x. Pong x -> Ping x From 7a229fb2808ad59692ff942ada07501e4f74ce53 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 2 Nov 2021 14:28:23 -0600 Subject: [PATCH 064/297] Write referent as some term traversal --- .../Codebase/SqliteCodebase/MigrateSchema12.hs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 6d76e9e9b2..6907086cc9 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -63,6 +63,7 @@ import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) import qualified Unison.Pattern as Pattern +import qualified Unison.Referent' as Referent' -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -640,7 +641,7 @@ termFReferences_ f t = >>= Term._Request . someRefCon_ %%~ f >>= Term._Ann . _2 . typeReferences_ %%~ f >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f - >>= Term._TermLink . referentReferences %%~ f + >>= Term._TermLink . referentAsSomeTerm_ %%~ f >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism _TypeReference %%~ f -- | Casts the left side of a reference/constructor pair into a Reference.Id @@ -680,8 +681,15 @@ patternReferences_ f = \case Pattern.SequenceOp loc pat seqOp pat2 -> do Pattern.SequenceOp loc <$> patternReferences_ f pat <*> pure seqOp <*> patternReferences_ f pat2 -referentReferences :: Traversal' Referent.Referent SomeReferenceId -referentReferences f = undefined +referentAsSomeTerm_ :: Traversal' Referent.Referent SomeReferenceId +referentAsSomeTerm_ f = \case + (Referent'.Ref' (Reference.DerivedId refId)) -> do + newRefId <- refId & unsafeInsidePrism _TermReference %%~ f + pure (Referent'.Ref' (Reference.DerivedId newRefId)) + (Referent'.Con' (Reference.DerivedId refId) conId conType) -> + ((refId, conId) & unsafeInsidePrism _ConstructorReference %%~ f) <&> + (\(newRefId, newConId) -> (Referent'.Con' (Reference.DerivedId newRefId) newConId conType)) + r -> pure r -- structural type Ping x = P1 (Pong x) -- P1 : forall x. Pong x -> Ping x From d4de7fa0a12cb4b61be004bc7362fcc7232033ba Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 2 Nov 2021 14:32:18 -0600 Subject: [PATCH 065/297] Implement more undefined's --- .../SqliteCodebase/MigrateSchema12.hs | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 6907086cc9..54ae499d81 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -13,6 +13,7 @@ import Control.Monad.State.Strict import Control.Monad.Trans.Except (throwE) import Control.Monad.Trans.Writer.CPS (Writer, execWriter, tell) import Data.Generics.Product +import Data.Generics.Sum (_Ctor) import Data.List.Extra (nubOrd) import qualified Data.Map as Map import Data.Maybe @@ -141,8 +142,8 @@ data Y = MkY Int -} data Entity - = TComponent Unison.Hash - | DComponent Unison.Hash + = TermComponent Unison.Hash + | DeclComponent Unison.Hash | C CausalHashId | -- haven't proven we need these yet B ObjectId @@ -195,10 +196,10 @@ migrationSync = Sync \case --- * If we haven't yet synced its parents, push them onto the work queue --- * If we haven't yet synced the causal's value (namespace), push it onto the work queue. --- * Rehash the Causal's valueHash AND CausalHash, and add the new causal, its hashes, and hash objects to the codebase under a fresh object ID - TComponent hash -> do + TermComponent hash -> do Env {codebase} <- ask lift (migrateTermComponent codebase hash) - DComponent hash -> do + DeclComponent hash -> do Env {codebase} <- ask lift (migrateDeclComponent codebase hash) B objectId -> do @@ -750,17 +751,21 @@ data SomeReference ref someRef_ :: Traversal (SomeReference ref) (SomeReference ref') ref ref' someRef_ = param @0 -_TermReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' -_TermReference = undefined -- _Ctor @"TermReference" +_TermReference :: Prism' (SomeReference ref) ref +_TermReference = _Ctor @"TermReference" -_TypeReference :: Prism (SomeReference ref) (SomeReference ref') ref ref' -_TypeReference = undefined --_Ctor @"TypeReference" +_TypeReference :: Prism' (SomeReference ref) ref +_TypeReference = _Ctor @"TypeReference" -_ConstructorReference :: Prism (SomeReference ref) (SomeReference ref') (ref, ConstructorId) (ref', ConstructorId) -_ConstructorReference = undefined -- _Ctor @"ConstructorReference" +_ConstructorReference :: Prism' (SomeReference ref) (ref, ConstructorId) +_ConstructorReference = _Ctor @"ConstructorReference" someReferenceIdToEntity :: SomeReferenceId -> Entity -someReferenceIdToEntity = undefined +someReferenceIdToEntity = \case + (TermReference ref) -> TermComponent (Reference.idToHash ref) + (TypeReference ref) -> DeclComponent (Reference.idToHash ref) + -- Constructors are migrated by their decl component. + (ConstructorReference ref _conId) -> DeclComponent (Reference.idToHash ref) -- get references: -- From 07fb8343ac6ec0339d270c643bd67ccd2edf4f0a Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 2 Nov 2021 16:44:18 -0400 Subject: [PATCH 066/297] more migratePatch work --- .../U/Codebase/Sqlite/Operations.hs | 10 ++- .../U/Codebase/Sqlite/Queries.hs | 1 + .../SqliteCodebase/MigrateSchema12.hs | 28 ++++++--- .../MigrateSchema12/DbHelpers.hs | 61 +++++++++++++++++-- .../src/Unison/Hashing/V2/Patch.hs | 13 ++-- 5 files changed, 90 insertions(+), 23 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 3bad5b9ddf..fedce7bed4 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -82,7 +82,9 @@ module U.Codebase.Sqlite.Operations -- * low-level stuff liftQ, loadDbBranchByObjectId, + loadDbPatchById, saveBranchObject, + saveDbPatch, -- * Error types Error (..), @@ -1321,8 +1323,12 @@ loadDbPatchById patchId = savePatch :: EDB m => PatchHash -> C.Branch.Patch -> m Db.PatchObjectId savePatch h c = do (li, lPatch) <- c2lPatch c - hashId <- Q.saveHashHash (unPatchHash h) - let bytes = S.putBytes S.putPatchFormat $ S.Patch.Format.Full li lPatch + saveDbPatch h (S.Patch.Format.Full li lPatch) + +saveDbPatch :: EDB m => PatchHash -> S.PatchFormat -> m Db.PatchObjectId +saveDbPatch hash patch = do + hashId <- Q.saveHashHash (unPatchHash hash) + let bytes = S.putBytes S.putPatchFormat patch Db.PatchObjectId <$> Q.saveObject hashId OT.Patch bytes s2cPatch :: EDB m => S.Patch -> m C.Branch.Patch diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index ec204376d6..cffecb472a 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -32,6 +32,7 @@ module U.Codebase.Sqlite.Queries ( saveHashHash, loadHashId, loadHashById, + loadHashHashById, loadHashIdByHash, expectHashIdByHash, saveCausalHash, diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 54ae499d81..0ba1b3f787 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -20,7 +20,7 @@ import Data.Maybe import qualified Data.Set as Set import Data.Tuple (swap) import qualified Data.Zip as Zip -import U.Codebase.HashTags (BranchHash (BranchHash), CausalHash (CausalHash, unCausalHash)) +import U.Codebase.HashTags (BranchHash (..), CausalHash (..), PatchHash (..)) import qualified U.Codebase.Reference as UReference import qualified U.Codebase.Referent as UReferent import qualified U.Codebase.Sqlite.Branch.Full as S @@ -30,7 +30,7 @@ import qualified U.Codebase.Sqlite.Causal as SC import U.Codebase.Sqlite.Connection (Connection) import U.Codebase.Sqlite.DbId ( BranchHashId (..), - BranchObjectId (BranchObjectId, unBranchObjectId), + BranchObjectId (..), CausalHashId (..), HashId, ObjectId, @@ -38,6 +38,7 @@ import U.Codebase.Sqlite.DbId ) import qualified U.Codebase.Sqlite.LocalizeObject as S.LocalizeObject import qualified U.Codebase.Sqlite.Operations as Ops +import qualified U.Codebase.Sqlite.Patch.Format as S.Patch.Format import qualified U.Codebase.Sqlite.Queries as Q import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync @@ -361,15 +362,22 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do pure Sync.Done migratePatch :: MonadIO m => Connection -> Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migratePatch _conn _oldObjectId = do - -- 1. Read old patch out of the codebase. +migratePatch conn oldObjectId = do + oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) -- 2. Determine whether all things the patch refers to are built. - -- 3. If not, return those as a `Missing`. - -- 4. Otherwise, update the things the patch references. - -- 5. Hash it. - -- 6. Store it. - -- 7. Update migratation state, recording old->new patch mapping. - undefined + let dependencies :: [Entity] + dependencies = undefined + if null dependencies + then pure (Sync.Missing dependencies) + else do + let migrate = undefined + let newPatch = migrate oldPatch + let (localPatchIds, localPatch) = S.LocalizeObject.localizePatch newPatch + newHash <- runDB conn (liftQ (Hashing.dbPatchHash newPatch)) + newObjectId <- runDB conn (Ops.saveDbPatch (PatchHash (Cv.hash1to2 newHash)) (S.Patch.Format.Full localPatchIds localPatch)) + newHashId <- runDB conn (liftQ (Q.expectHashIdByHash (Cv.hash1to2 newHash))) + field @"objLookup" %= Map.insert (unPatchObjectId oldObjectId) (unPatchObjectId newObjectId, newHashId, newHash) + pure Sync.Done -- | PLAN -- * diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs index 5a0189833f..e59250f267 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs @@ -1,5 +1,6 @@ module Unison.Codebase.SqliteCodebase.MigrateSchema12.DbHelpers ( dbBranchHash, + dbPatchHash, ) where @@ -11,6 +12,11 @@ import qualified U.Codebase.Sqlite.Branch.Full as S import qualified U.Codebase.Sqlite.Branch.Full as S.Branch.Full import qualified U.Codebase.Sqlite.Branch.Full as S.MetadataSet import qualified U.Codebase.Sqlite.DbId as Db +import qualified U.Codebase.Sqlite.Patch.Full as S +import qualified U.Codebase.Sqlite.Patch.TermEdit as S (TermEdit) +import qualified U.Codebase.Sqlite.Patch.TermEdit as S.TermEdit +import qualified U.Codebase.Sqlite.Patch.TypeEdit as S (TypeEdit) +import qualified U.Codebase.Sqlite.Patch.TypeEdit as S.TypeEdit import U.Codebase.Sqlite.Queries (EDB) import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Reference as S @@ -22,10 +28,15 @@ import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv import Unison.Hash (Hash) import qualified Unison.Hashable as H import qualified Unison.Hashing.V2.Branch as Hashing.Branch +import qualified Unison.Hashing.V2.Patch as Hashing (Patch (..)) import qualified Unison.Hashing.V2.Reference as Hashing (Reference) import qualified Unison.Hashing.V2.Reference as Hashing.Reference import qualified Unison.Hashing.V2.Referent as Hashing (Referent) import qualified Unison.Hashing.V2.Referent as Hashing.Referent +import qualified Unison.Hashing.V2.TermEdit as Hashing (TermEdit) +import qualified Unison.Hashing.V2.TermEdit as Hashing.TermEdit +import qualified Unison.Hashing.V2.TypeEdit as Hashing (TypeEdit) +import qualified Unison.Hashing.V2.TypeEdit as Hashing.TypeEdit import Unison.NameSegment (NameSegment (..)) import Unison.Prelude @@ -44,7 +55,10 @@ dbBranchHash (S.Branch.Full.Branch tms tps patches children) = s2hNameSegment (Map.bitraverse s2hReferent s2hMetadataSet) - doTypes :: EDB m => Map Db.TextId (Map S.Reference S.DbMetadataSet) -> m (Map NameSegment (Map Hashing.Reference Hashing.Branch.MdValues)) + doTypes :: + EDB m => + Map Db.TextId (Map S.Reference S.DbMetadataSet) -> + m (Map NameSegment (Map Hashing.Reference Hashing.Branch.MdValues)) doTypes = Map.bitraverse s2hNameSegment @@ -58,6 +72,21 @@ dbBranchHash (S.Branch.Full.Branch tms tps patches children) = doChildren = Map.bitraverse s2hNameSegment \(_boId, chId) -> causalHashIdToHash chId +dbPatchHash :: forall m. EDB m => S.Patch -> m Hash +dbPatchHash S.Patch {S.termEdits, S.typeEdits} = + fmap H.accumulate' $ + Hashing.Patch + <$> doTermEdits termEdits + <*> doTypeEdits typeEdits + where + doTermEdits :: Map S.ReferentH (Set S.TermEdit) -> m (Map Hashing.Referent (Set Hashing.TermEdit)) + doTermEdits = + Map.bitraverse s2hReferentH (Set.traverse s2hTermEdit) + + doTypeEdits :: Map S.ReferenceH (Set S.TypeEdit) -> m (Map Hashing.Reference (Set Hashing.TypeEdit)) + doTypeEdits = + Map.bitraverse s2hReferenceH (Set.traverse s2hTypeEdit) + s2hMetadataSet :: EDB m => DbMetadataSet -> m Hashing.Branch.MdValues s2hMetadataSet = \case S.MetadataSet.Inline rs -> Hashing.Branch.MdValues <$> Set.traverse s2hReference rs @@ -71,17 +100,41 @@ s2hReferent = \case S.Referent.Ref r -> Hashing.Referent.Ref <$> s2hReference r S.Referent.Con r cid -> Hashing.Referent.Con <$> s2hReference r <*> pure (fromIntegral cid) +s2hReferentH :: EDB m => S.ReferentH -> m Hashing.Referent +s2hReferentH = \case + S.Referent.Ref r -> Hashing.Referent.Ref <$> s2hReferenceH r + S.Referent.Con r cid -> Hashing.Referent.Con <$> s2hReferenceH r <*> pure (fromIntegral cid) + s2hReference :: EDB m => S.Reference -> m Hashing.Reference s2hReference = \case S.ReferenceBuiltin t -> Hashing.Reference.Builtin <$> Q.loadTextById t S.Reference.Derived h i -> Hashing.Reference.Derived <$> objectIdToPrimaryHash h <*> pure i +s2hReferenceH :: EDB m => S.ReferenceH -> m Hashing.Reference +s2hReferenceH = \case + S.ReferenceBuiltin t -> Hashing.Reference.Builtin <$> Q.loadTextById t + S.Reference.Derived h i -> Hashing.Reference.Derived <$> loadHashHashById h <*> pure i + +s2hTermEdit :: EDB m => S.TermEdit -> m Hashing.TermEdit +s2hTermEdit = \case + S.TermEdit.Replace r _typing -> Hashing.TermEdit.Replace <$> s2hReferent r + S.TermEdit.Deprecate -> pure Hashing.TermEdit.Deprecate + +s2hTypeEdit :: EDB m => S.TypeEdit -> m Hashing.TypeEdit +s2hTypeEdit = \case + S.TypeEdit.Replace r -> Hashing.TypeEdit.Replace <$> s2hReference r + S.TypeEdit.Deprecate -> pure Hashing.TypeEdit.Deprecate + -- Mitchell: Do these variants of Q.* queries belong somewhere else? Or in Q perhaps? +causalHashIdToHash :: EDB m => Db.CausalHashId -> m Hash +causalHashIdToHash = + fmap Cv.hash2to1 . Q.loadHashHashById . Db.unCausalHashId + objectIdToPrimaryHash :: EDB m => Db.ObjectId -> m Hash objectIdToPrimaryHash = fmap (Cv.hash2to1 . U.Util.Hash.fromBase32Hex) . Q.loadPrimaryHashByObjectId -causalHashIdToHash :: EDB m => Db.CausalHashId -> m Hash -causalHashIdToHash = - fmap (Cv.hash2to1 . U.Util.Hash.fromBase32Hex) . Q.loadHashById . Db.unCausalHashId +loadHashHashById :: EDB m => Db.HashId -> m Hash +loadHashHashById = + fmap Cv.hash2to1 . Q.loadHashHashById diff --git a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs index 9b87ba32f1..a26faebc1d 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs @@ -1,17 +1,17 @@ -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} -module Unison.Hashing.V2.Patch (Patch(..)) where +module Unison.Hashing.V2.Patch (Patch (..)) where -import Unison.Hashing.V2.Reference (Reference) import Data.Map (Map) -import Unison.Hashing.V2.Referent (Referent) import Data.Set (Set) -import Unison.Hashing.V2.TermEdit (TermEdit) -import Unison.Hashing.V2.TypeEdit (TypeEdit) import Unison.Hashable (Hashable) import qualified Unison.Hashable as H +import Unison.Hashing.V2.Reference (Reference) +import Unison.Hashing.V2.Referent (Referent) +import Unison.Hashing.V2.TermEdit (TermEdit) +import Unison.Hashing.V2.TypeEdit (TypeEdit) data Patch = Patch { termEdits :: Map Referent (Set TermEdit), @@ -23,4 +23,3 @@ instance Hashable Patch where [ H.accumulateToken (termEdits p), H.accumulateToken (typeEdits p) ] - From d9f5c7b021120b05d53c8fe1b60213e1f2f31112 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 2 Nov 2021 17:16:47 -0600 Subject: [PATCH 067/297] Implement some more traversals --- .../U/Codebase/Sqlite/Branch/Full.hs | 12 ++ .../SqliteCodebase/MigrateSchema12.hs | 139 ++++++++++++------ 2 files changed, 103 insertions(+), 48 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index d5f3a0d145..6cae0b0b18 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -52,6 +52,18 @@ data Branch' t h p c = Branch branchHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' branchHashes_ _f _ = undefined +termHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' +termHashes_ _f _ = undefined + +typeHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' +typeHashes_ _f _ = undefined + +patchHashes_ :: Traversal (Branch' t h p c) (Branch' t h p' c) p p' +patchHashes_ = undefined + +childrenHashes_ :: Traversal (Branch' t h p c) (Branch' t h p c') c c' +childrenHashes_ = undefined + -- Branch <$> traverse (\m -> Map.mapKeys) branchCausalHashes_ :: Traversal (Branch' t h p c) (Branch' t h p c') c c' diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 0ba1b3f787..4afae652c3 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -1,8 +1,8 @@ +{-# LANGUAGE ApplicativeDo #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE ApplicativeDo #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where @@ -44,6 +44,7 @@ import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync import U.Codebase.WatchKind (WatchKind) import qualified U.Codebase.WatchKind as WK +import qualified U.Util.Hash as U.Util import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) import qualified Unison.Codebase as Codebase @@ -56,16 +57,16 @@ import qualified Unison.Hash as Unison import qualified Unison.Hashing.V2.Causal as Hashing import qualified Unison.Hashing.V2.Convert as Convert import Unison.Pattern (Pattern) +import qualified Unison.Pattern as Pattern import Unison.Prelude import Unison.Reference (Pos) import qualified Unison.Reference as Reference import qualified Unison.Referent as Referent +import qualified Unison.Referent' as Referent' import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) -import qualified Unison.Pattern as Pattern -import qualified Unison.Referent' as Referent' -- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) -- lookupCtor (ConstructorMapping cm) oid pos cid = @@ -104,6 +105,7 @@ data MigrationState = MigrationState -- ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), -- This provides the info needed for rewriting a term. You'll access it with a function :: Old -- termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), + -- TODO: Split up mappings for Branch ObjectIds vs Term & Type Object IDs objLookup :: Map (Old ObjectId) (New (ObjectId, HashId, Hash)) -- -- componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), @@ -147,8 +149,8 @@ data Entity | DeclComponent Unison.Hash | C CausalHashId | -- haven't proven we need these yet - B ObjectId - | Patch ObjectId + BranchE ObjectId + | PatchE ObjectId | W WK.WatchKind Reference.Id deriving (Eq, Ord, Show) @@ -203,7 +205,7 @@ migrationSync = Sync \case DeclComponent hash -> do Env {codebase} <- ask lift (migrateDeclComponent codebase hash) - B objectId -> do + BranchE objectId -> do Env {db} <- ask lift (migrateBranch db objectId) C causalHashId -> do @@ -212,7 +214,7 @@ migrationSync = Sync \case -- To sync a watch result, -- 1. ??? -- 2. Synced - Patch objectId -> do + PatchE objectId -> do Env {db} <- ask lift (migratePatch db (PatchObjectId objectId)) W watchKind watchId -> do @@ -262,7 +264,7 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep -- If the branch for this causal hasn't been migrated, migrate it first. let unmigratedBranch = if (branchObjId `Map.notMember` migratedObjIds) - then [B branchObjId] + then [BranchE branchObjId] else [] migratedCausals <- gets causalMapping @@ -326,41 +328,72 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep -- children :: Map t c -- } +-- Chris: Remaining Plan: +-- Convert all term & type object IDs in DbBranch into Hashes using database, can't use the skymap since they might not be migrated yet. +-- Collect all unmigrated Reference Ids and require them +-- Using the `objLookup` skymap, map over the original dbBranch, and use the objLookup table to convert all object id references. +-- Save this branch. migrateBranch :: MonadIO m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do - -- note for tomorrow: we want to just load the (Branch m) instead, forget the DbBranch - -- dbBranch <- Ops.loadDbBranchByObjectId objectId - - let allMissingTypes = undefined - let allMissingTerms = undefined - let allMissingPatches = undefined - let allMissingChildren = undefined - let allMissingPredecessors = undefined +migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ do + -- Read the old branch + oldBranch <- runDB conn (Ops.loadDbBranchByObjectId (BranchObjectId oldObjectId)) + -- let convertBranch :: S.DbBranch -> m (S.Branch' TextId Hash PatchObjectId (BranchObjectId, CausalHashId)) + -- type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) + oldBranchWithHashes <- traverseOf S.branchHashes_ (lift . Ops.loadHashByObjectId) oldBranch + _migratedRefs <- gets referenceMapping + let allMissingTypesAndTerms :: [Entity] + allMissingTypesAndTerms = + oldBranchWithHashes + ^.. branchSomeRefs_ + . uRefIdAsRefId_ + . undefined -- filtered (`Map.notMember` migratedRefs) + . to someReferenceIdToEntity + let allMissingPatches = + oldBranch + ^.. S.patchHashes_ + . undefined -- filtered (`Map.notMember` migratedObjects) + . to PatchE + let allMissingChildren = + oldBranch + ^.. S.childrenHashes_ + . undefined -- filtered (`Map.notMember` migratedObjects) + . to BranchE -- Identify dependencies and bail out if they aren't all built let allMissingReferences :: [Entity] allMissingReferences = - allMissingTypes - ++ allMissingTerms + allMissingTypesAndTerms ++ allMissingPatches ++ allMissingChildren - ++ allMissingPredecessors when (not . null $ allMissingReferences) $ throwE $ Sync.Missing allMissingReferences - -- Read the old branch - oldBranch <- runDB conn (Ops.loadDbBranchByObjectId (BranchObjectId oldObjectId)) -- Remap object id references -- TODO: remap sub-namespace causal hashes - newBranch <- oldBranch & dbBranchObjRefs_ %%~ remapObjIdRefs - let (localBranchIds, localBranch) = S.LocalizeObject.localizeBranch newBranch - hash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranch)) + -- TODO: Save term & decl SomeObjectID mappings in skymap + _newBranch <- oldBranch & branchSomeRefs_ %%~ remapObjIdRefs + let newBranchWithRemappedCausalsChildrenAndPatches = undefined + -- remember to remap the causals, children, and patches + let (localBranchIds, localBranch) = S.LocalizeObject.localizeBranch newBranchWithRemappedCausalsChildrenAndPatches + hash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranchWithRemappedCausalsChildrenAndPatches)) newHashId <- runDB conn (Ops.liftQ (Q.saveBranchHash (BranchHash (Cv.hash1to2 hash)))) newObjectId <- runDB conn (Ops.saveBranchObject newHashId localBranchIds localBranch) field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, hash) pure Sync.Done +uRefIdAsRefId_ :: Iso' (SomeReference (UReference.Id' U.Util.Hash)) SomeReferenceId +uRefIdAsRefId_ = mapping uRefAsRef_ + +uRefAsRef_ :: Iso' (UReference.Id' U.Util.Hash) Reference.Id +uRefAsRef_ = iso intoRef intoURef + where + intoRef (UReference.Id hash pos) = Reference.Id (Cv.hash2to1 hash) pos + intoURef (Reference.Id hash pos) = UReference.Id (Cv.hash1to2 hash) pos + +-- branchSomeRefs_ :: Traversal' (S.Branch' t h p c) (SomeReference h) +-- branchSomeRefs_ f b = _ + migratePatch :: MonadIO m => Connection -> Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migratePatch conn oldObjectId = do oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) @@ -414,7 +447,7 @@ migrateWatch Codebase {..} watchKind oldWatchId = fmap (either id id) . runExcep pure Sync.Done -- Project an S.Referent'' into its SomeReferenceObjId's -someReferent_ :: Traversal' (S.Branch.Full.Referent'' t ObjectId) SomeReferenceObjId +someReferent_ :: Traversal' (S.Branch.Full.Referent'' t h) (SomeReference (UReference.Id' h)) someReferent_ = (UReferent._Ref . someReference_) `failing` ( UReferent._Con @@ -427,29 +460,31 @@ someReferent_ = <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) asPair_ _ (UReference.ReferenceBuiltin x, conId) = pure (UReference.ReferenceBuiltin x, conId) -someReference_ :: Traversal' (UReference.Reference' t ObjectId) SomeReferenceObjId +someReference_ :: Traversal' (UReference.Reference' t h) (SomeReference (UReference.Id' h)) someReference_ = UReference._ReferenceDerived . unsafeInsidePrism _TermReference -someMetadataSetFormat :: Ord t => Traversal' (S.Branch.Full.MetadataSetFormat' t ObjectId) SomeReferenceObjId +someMetadataSetFormat :: (Ord t, Ord h) => Traversal' (S.Branch.Full.MetadataSetFormat' t h) (SomeReference (UReference.Id' h)) someMetadataSetFormat = S.Branch.Full.metadataSetFormatReferences_ . someReference_ mapReferentMetadata :: - (Ord k, Ord t) => - Traversal' k SomeReferenceObjId -> + (Ord k, Ord t, Ord h) => + Traversal' k (SomeReference (UReference.Id' h)) -> Traversal' - (Map k (S.Branch.Full.MetadataSetFormat' t ObjectId)) - (SomeReferenceObjId) + (Map k (S.Branch.Full.MetadataSetFormat' t h)) + (SomeReference (UReference.Id' h)) mapReferentMetadata keyTraversal f m = Map.toList m & traversed . beside keyTraversal someMetadataSetFormat %%~ f <&> Map.fromList -dbBranchObjRefs_ :: Traversal' S.DbBranch SomeReferenceObjId -dbBranchObjRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do +branchSomeRefs_ :: (Ord t, Ord h) => Traversal' (S.Branch' t h p c) (SomeReference (UReference.Id' h)) +branchSomeRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do let newTypesMap = types & traversed . mapReferentMetadata someReference_ %%~ f let newTermsMap = terms & traversed . mapReferentMetadata someReferent_ %%~ f S.Branch.Full.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children +-- convertUReferenceId :: _ -> _ + -- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch -- convertBranch dbBranch = _ @@ -696,8 +731,8 @@ referentAsSomeTerm_ f = \case newRefId <- refId & unsafeInsidePrism _TermReference %%~ f pure (Referent'.Ref' (Reference.DerivedId newRefId)) (Referent'.Con' (Reference.DerivedId refId) conId conType) -> - ((refId, conId) & unsafeInsidePrism _ConstructorReference %%~ f) <&> - (\(newRefId, newConId) -> (Referent'.Con' (Reference.DerivedId newRefId) newConId conType)) + ((refId, conId) & unsafeInsidePrism _ConstructorReference %%~ f) + <&> (\(newRefId, newConId) -> (Referent'.Con' (Reference.DerivedId newRefId) newConId conType)) r -> pure r -- structural type Ping x = P1 (Pong x) @@ -742,22 +777,30 @@ objIdsToHashed = Nothing -> error $ "Expected object mapping for ID: " <> show objId Just (_, _, hash) -> pure (Reference.Id hash pos) -remapObjIdRefs :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceObjId -remapObjIdRefs = - someRef_ %%~ \(UReference.Id objId pos) -> do - objMapping <- gets objLookup - case Map.lookup objId objMapping of - Nothing -> error $ "Expected object mapping for ID: " <> show objId - Just (newObjId, _, _) -> pure (UReference.Id newObjId pos) +remapObjIdRefs :: (MonadState MigrationState m) => SomeReferenceObjId -> m SomeReferenceObjId +remapObjIdRefs someObjIdRef = do + objMapping <- gets objLookup + refMapping <- gets referenceMapping + let oldObjId = someObjIdRef ^. someRef_ . UReference.idH + let (newObjId, _, newHash) = + case Map.lookup oldObjId objMapping of + Nothing -> error $ "Expected object mapping for ID: " <> show oldObjId + Just found -> found + let someRefId = (someObjIdRef & someRef_ . UReference.idH .~ Cv.hash1to2 newHash) ^. uRefIdAsRefId_ + let newRefId = case Map.lookup someRefId refMapping of + Nothing -> error $ "Expected reference mapping for ID: " <> show someRefId + Just r -> r + let newSomeObjId = (newRefId ^. from uRefIdAsRefId_) & someRef_ . UReference.idH .~ newObjId + pure newSomeObjId data SomeReference ref = TermReference ref | TypeReference ref | ConstructorReference ref ConstructorId - deriving (Eq, Functor, Generic, Ord) + deriving (Eq, Functor, Generic, Ord, Show) -someRef_ :: Traversal (SomeReference ref) (SomeReference ref') ref ref' -someRef_ = param @0 +someRef_ :: Lens (SomeReference ref) (SomeReference ref') ref ref' +someRef_ = undefined _TermReference :: Prism' (SomeReference ref) ref _TermReference = _Ctor @"TermReference" @@ -765,11 +808,11 @@ _TermReference = _Ctor @"TermReference" _TypeReference :: Prism' (SomeReference ref) ref _TypeReference = _Ctor @"TypeReference" -_ConstructorReference :: Prism' (SomeReference ref) (ref, ConstructorId) +_ConstructorReference :: Prism' (SomeReference ref) (ref, ConstructorId) _ConstructorReference = _Ctor @"ConstructorReference" someReferenceIdToEntity :: SomeReferenceId -> Entity -someReferenceIdToEntity = \case +someReferenceIdToEntity = \case (TermReference ref) -> TermComponent (Reference.idToHash ref) (TypeReference ref) -> DeclComponent (Reference.idToHash ref) -- Constructors are migrated by their decl component. From 012e8aa3d9b9cd4eec1e995f24006bea193fe794 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Wed, 3 Nov 2021 15:46:12 -0400 Subject: [PATCH 068/297] cleanup --- .../SqliteCodebase/MigrateSchema12.hs | 332 ++---------------- 1 file changed, 26 insertions(+), 306 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 4afae652c3..e8ec9b9b23 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -68,22 +68,17 @@ import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) --- lookupCtor :: ConstructorMapping -> ObjectId -> Pos -> ConstructorId -> Maybe (Pos, ConstructorId) --- lookupCtor (ConstructorMapping cm) oid pos cid = --- Map.lookup oid cm >>= (Vector.!? fromIntegral pos) >>= (Vector.!? cid) - --- lookupTermRef :: TermLookup -> S.Reference -> Maybe S.Reference --- lookupTermRef _tl (ReferenceBuiltin t) = Just (ReferenceBuiltin t) --- lookupTermRef tl (ReferenceDerived id) = ReferenceDerived <$> lookupTermRefId tl id - --- lookupTermRefId :: TermLookup -> S.Reference.Id -> Maybe S.Reference.Id --- lookupTermRefId tl (Id oid pos) = Id oid <$> lookupTermPos tl oid pos - --- lookupTermPos :: TermLookup -> ObjectId -> Pos -> Maybe Pos --- lookupTermPos (TermLookup tl) oid pos = Map.lookup oid tl >>= (Vector.!? fromIntegral pos) - --- newtype ConstructorMapping = ConstructorMapping (Map ObjectId (Vector (Vector (Pos, ConstructorId)))) --- newtype TermLookup = TermLookup (Map ObjectId (Vector Pos)) +-- todo: +-- * migrateBranch +-- * migratePatch + +-- * write a harness to call & seed algorithm, then do cleanup +-- * may involve writing a `Progress` +-- * raw DB things: +-- * overwrite object_id column in hash_object table to point at new objects +-- * delete references to old objects in index tables (where else?) +-- * delete old objects +-- * refer to github megaticket https://github.com/unisonweb/unison/issues/2471 type TypeIdentifier = (ObjectId, Pos) @@ -95,110 +90,31 @@ type ConstructorName v = v type ComponentName v = v +type DeclName v = v + data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. - { -- declLookup :: Map (Old ObjectId) (Map (Old Pos) (New Pos)), - referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), + { referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), causalMapping :: Map (Old CausalHashId) (New (CausalHash, CausalHashId)), - -- Mapping between contructor indexes for the type identified by (ObjectId, Pos) - -- ctorLookup :: Map (Old TypeIdentifier) (Map (Old ConstructorId) (New ConstructorId)), - -- ctorLookup' :: Map (Old Referent.Id) (New Referent.Id), - -- This provides the info needed for rewriting a term. You'll access it with a function :: Old - -- termLookup :: Map (Old ObjectId) (New ObjectId, Map (Old Pos) (New Pos)), - -- TODO: Split up mappings for Branch ObjectIds vs Term & Type Object IDs objLookup :: Map (Old ObjectId) (New (ObjectId, HashId, Hash)) - -- - -- componentPositionMapping :: Map ObjectId (Map (Old Pos) (New Pos)), - -- constructorIDMapping :: Map ObjectId (Map (Old ConstructorId) (New ConstructorId)), - -- completed :: Set ObjectId } deriving (Generic) --- declLookup :: Map ObjectId (Map Pos (Pos, Map ConstructorId ConstructorId)), - -{- -* Load entire codebase as a list -* Pick a term from the codebase -* Look up the references inside the term -* If any haven't been processed, add them to the "to process" stack, push the term you were working on back onto that stack -* Rebuild & rehash the term, store that -* For any data constructor terms inside, - * Store a map from old ConstructorId to new, based on the old and new reference hashes -* After rebuilding a cycle, map old Pos to new --} - --- Q: can we plan to hold the whole mapping in memory? ✅ --- Q: a) update database in-place? or b) write to separate database and then overwrite? leaning (b). --- note: we do need to rebuild namespaces, although we don't need to rehash them. - --- cycle position index `Pos` --- constructor index `ConstructorId` - -{- -data Maybe a = (Just Bar | Nothing X) - --- changes due to missing size from ref(Y) -data X = MkX Y - --- know old hash and old cycle positions -data Y = MkY Int --} - data Entity = TermComponent Unison.Hash | DeclComponent Unison.Hash | C CausalHashId - | -- haven't proven we need these yet - BranchE ObjectId + | BranchE ObjectId | PatchE ObjectId | W WK.WatchKind Reference.Id deriving (Eq, Ord, Show) data Env m v a = Env {db :: Connection, codebase :: Codebase m v a} --- -> m (TrySyncResult h) migrationSync :: (MonadIO m, Var v) => Sync (ReaderT (Env m v a) (StateT MigrationState m)) Entity migrationSync = Sync \case - -- To sync an object, - -- * If we have already synced it, we are done. - -- * Otherwise, read the object from the database and switch on its object type. - -- * See next steps below v - -- - -- To sync a decl component object, - -- * If we have not already synced all dependencies, push syncing them onto the front of the work queue. - -- * Otherwise, ??? - -- - -- To sync a term component object, - -- * If we have not already synced all dependencies, push syncing them onto the front of the work queue. - -- * Otherwise, ??? - -- - -- To sync a namespace object, - -- * Deserialize it and compute its dependencies (terms, types, patches, children). - -- * If we have not already synced all of its dependencies, push syncing them onto the front of the work queue. - -- * To sync a 'BranchFull', - -- * We need to make a new 'BranchFull' in memory, then insert it into the database under a new object id. - -- * Wait, we need to preserve the ordering of the types/terms, either by not changing them (but the orderings of the - -- reference ids used in keys is definitely not preserved by this migration), or by permuting the local id vectors, - -- but we may be at a level too low or high for us to care? - -- * Its 'LocalBranch' must have all references changed in-place per the (old (object id, pos) => new (object id, pos)) mapping. - -- * The local IDs within the body _likely_ don't need to change. (why likely?) - -- * Its 'BranchLocalIds' must be translated from the old codebase object IDs to the new object IDs, - -- we can use our MigrationState to look these up, since they must have already been migrated. - -- * To sync a 'BranchDiff', - -- * These don't exist in schema v1; we can error if we encounter one. - -- - -- To sync a patch object - -- * Rewrite all old hashes in the patch to the new hashes. - -- - -- To sync a watch expression - -- * ??? - -- - -- To sync a Causal - --- * If we haven't yet synced its parents, push them onto the work queue - --- * If we haven't yet synced the causal's value (namespace), push it onto the work queue. - --- * Rehash the Causal's valueHash AND CausalHash, and add the new causal, its hashes, and hash objects to the codebase under a fresh object ID TermComponent hash -> do Env {codebase} <- ask lift (migrateTermComponent codebase hash) @@ -211,9 +127,6 @@ migrationSync = Sync \case C causalHashId -> do Env {db} <- ask lift (migrateCausal db causalHashId) - -- To sync a watch result, - -- 1. ??? - -- 2. Synced PatchE objectId -> do Env {db} <- ask lift (migratePatch db (PatchObjectId objectId)) @@ -230,35 +143,11 @@ runDB conn = (runExceptT >=> err) . (runExceptT >=> err) . flip runReaderT conn liftQ :: Monad m => ReaderT Connection (ExceptT Q.Integrity m) a -> ReaderT Connection (ExceptT Ops.Error (ExceptT Q.Integrity m)) a liftQ = mapReaderT lift --- loadCausalBranchByCausalHash :: EDB m => CausalHash -> m (Maybe (C.Branch.Causal m)) --- --- Causal Plan - --- * Load a DbCausal (how do we do this) - --- => new function Queries.localCausalByCausalHashId, can model after loadCausalByCausalHash or factor out of - --- * Add valueHashId's ObjectId as a dependency if unmigrated - --- * Add parent causal hash ids as dependencies if unmigrated - --- => Queries.loadCausalParents - --- * Map over Branch hash IDs - --- * Inside saveDBCausal (new / factored out of original) - --- * Save as a new self-hash --- ==> Queries.saveCausal --- * Map over parent causal hash IDs --- ==> Queries.saveCausalParents migrateCausal :: MonadIO m => Connection -> CausalHashId -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExceptT $ do oldBranchHashId <- lift . liftQ $ Q.loadCausalValueHashId oldCausalHashId oldCausalParentHashIds <- lift . liftQ $ Q.loadCausalParents oldCausalHashId - -- This fails if the object for the branch doesn't exist, CHECK: we currently expect - -- this to always be true? branchObjId <- lift . liftQ $ Q.expectObjectIdForAnyHashId (unBranchHashId oldBranchHashId) migratedObjIds <- gets objLookup -- If the branch for this causal hasn't been migrated, migrate it first. @@ -307,27 +196,6 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep pure Sync.Done --- Plan: --- * Load the pieces of a Db.Causal ✅ --- * Ensure its parent causals and branch (value hash) have been migrated ✅ --- * Rewrite the value-hash and parent causal hashes ✅ --- * Save the new causal ✅ --- * Save Causal Hash mapping to skymap ✅ - --- data C.Branch m = Branch --- { terms :: Map NameSegment (Map Referent (m MdValues)), --- types :: Map NameSegment (Map Reference (m MdValues)), --- patches :: Map NameSegment (PatchHash, m Patch), --- children :: Map NameSegment (Causal m) --- } - --- data Branch' t h p c = Branch --- { terms :: Map t (Map (Referent'' t h) (MetadataSetFormat' t h)), --- types :: Map t (Map (Reference' t h) (MetadataSetFormat' t h)), --- patches :: Map t p, --- children :: Map t c --- } - -- Chris: Remaining Plan: -- Convert all term & type object IDs in DbBranch into Hashes using database, can't use the skymap since they might not be migrated yet. -- Collect all unmigrated Reference Ids and require them @@ -391,9 +259,6 @@ uRefAsRef_ = iso intoRef intoURef intoRef (UReference.Id hash pos) = Reference.Id (Cv.hash2to1 hash) pos intoURef (Reference.Id hash pos) = UReference.Id (Cv.hash1to2 hash) pos --- branchSomeRefs_ :: Traversal' (S.Branch' t h p c) (SomeReference h) --- branchSomeRefs_ f b = _ - migratePatch :: MonadIO m => Connection -> Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migratePatch conn oldObjectId = do oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) @@ -483,34 +348,6 @@ branchSomeRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do let newTermsMap = terms & traversed . mapReferentMetadata someReferent_ %%~ f S.Branch.Full.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children --- convertUReferenceId :: _ -> _ - --- convertBranch :: (DB m, MonadState MigrationState m) => DbBranch -> m DbBranch --- convertBranch dbBranch = _ - --- DbBranch -- migrate --> DbBranch -- hydrate for the hash --> Hashing.V2.Branch -- put (Hash, toBranchFormat(DbBranch)) --> COOL - --- function that reads a DbBranch out of codebase - --- Traversal' DbBranch SomeReferenceObjectId --- DB m => LensLike' m SomeReferenceObjectId SomeReferenceId --- MonadState MigrationState m => SomeReferenceId -> m SomeReferenceId - --- Traversal' DbBranch (BranchId, CausalHashId) --- MonadState MigrationState m => (BranchId, CausalHashId) -> m (BranchId, CausalHashId) - --- Traversal' DbBranch PatchId --- MonadState MigrationState m => PatchObjectId -> m PatchObjectId - --- type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) - --- data Branch' t h p c = Branch --- { terms :: Map t (Map (Referent'' t h) (MetadataSetFormat' t h)), --- types :: Map t (Map (Reference' t h) (MetadataSetFormat' t h)), --- patches :: Map t p, --- children :: Map t c --- } - migrateTermComponent :: forall m v a. (Ord v, Var v, Monad m) => Codebase m v a -> Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do component <- @@ -582,7 +419,7 @@ migrateDeclComponent :: migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do declComponent :: [DD.Decl v a] <- (lift . lift $ getDeclComponent hash) >>= \case - Nothing -> error "handle this" -- not non-fatal! + Nothing -> error undefined -- not non-fatal! Just dc -> pure dc let componentIDMap :: Map (Old Reference.Id) (DD.Decl v a) @@ -628,18 +465,20 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do . traversed -- traverse the list of them . _3 -- Select the Type term. %~ remapTerm - let vToOldReference :: Map v (Old Reference.Id) - vToOldReference = Map.fromList . fmap swap . Map.toList . fmap fst $ remappedReferences - let newComponent :: [(v, Reference.Id, DD.Decl v a)] + let declNameToOldReference :: Map (DeclName v) (Old Reference.Id) + declNameToOldReference = Map.fromList . fmap swap . Map.toList . fmap fst $ remappedReferences + + let newComponent :: [(DeclName v, Reference.Id, DD.Decl v a)] newComponent = remappedReferences & Map.elems & Map.fromList & Convert.hashDecls' & fromRight (error "unexpected resolution error") - for_ newComponent $ \(v, newReferenceId, dd) -> do - let oldReferenceId = vToOldReference Map.! v + + for_ newComponent $ \(declName, newReferenceId, dd) -> do + let oldReferenceId = declNameToOldReference Map.! declName field @"referenceMapping" %= Map.insert (TypeReference oldReferenceId) (TypeReference newReferenceId) let oldConstructorIds :: Map (ConstructorName v) (Old ConstructorId) @@ -647,13 +486,13 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do (componentIDMap Map.! oldReferenceId) & DD.asDataDecl & DD.constructors' - & imap (\(fromIntegral -> constructorId) (_ann, name, _type) -> (name, constructorId)) + & imap (\(fromIntegral -> constructorId) (_ann, constructorName, _type) -> (constructorName, constructorId)) & Map.fromList - ifor_ (DD.constructors' (DD.asDataDecl dd)) \(fromIntegral -> newConstructorId) (_ann, name, _type) -> do + ifor_ (DD.constructors' (DD.asDataDecl dd)) \(fromIntegral -> newConstructorId) (_ann, constructorName, _type) -> do field @"referenceMapping" %= Map.insert - (ConstructorReference oldReferenceId (oldConstructorIds Map.! name)) + (ConstructorReference oldReferenceId (oldConstructorIds Map.! constructorName)) (ConstructorReference newReferenceId newConstructorId) lift . lift $ putTypeDeclaration newReferenceId dd @@ -735,24 +574,6 @@ referentAsSomeTerm_ f = \case <&> (\(newRefId, newConId) -> (Referent'.Con' (Reference.DerivedId newRefId) newConId conType)) r -> pure r --- structural type Ping x = P1 (Pong x) --- P1 : forall x. Pong x -> Ping x - --- structural type Pong x = P2 (Ping x) | P3 Nat --- P2 : forall x. Ping x -> Pong x --- P3 : forall x. Nat -> Pong x - --- end up with --- decl Ping (Ref.Id #abc pos=0) --- decl Pong (Ref.Id #abc pos=1) --- ctor P1: #abc pos=0 cid=0 --- ctor P2: #abc pos=1 cid=0 --- ctor P3: #abc pos=1 cid=1 --- --- we unhashComponent and get: --- { X -> structural type X x = AAA (Y x) --- , Y -> structural type Y x = BBB (X x) | CCC Nat } - remapReferences :: Map (Old Reference.Id) (New Reference.Id) -> Type.F (Type v a) -> @@ -818,87 +639,6 @@ someReferenceIdToEntity = \case -- Constructors are migrated by their decl component. (ConstructorReference ref _conId) -> DeclComponent (Reference.idToHash ref) --- get references: --- --- references :: Term f v a -> [Reference.Id] --- --- are all those references keys in our skymap? --- yes => migrate term --- no => returh those references (as Entity, though) as more work to do - --- how to turn Reference.Id into Entity? --- need its ObjectId, - --- Term f v a -> ValidateT (Seq Reference.Id) m (Term f v a) --- --- recordRefsInType :: MonadState MigrationState m => Type v a -> WriterT [Reference.Id] m (Type v a) --- recordRefsInType = _ - --- findMissingReferencesInTermF :: --- (Generic typeVar, Generic typeAnn, Generic patternAnn) => --- Term.F typeVar typeAnn patternAnn () -> --- [Reference.Id] --- findMissingReferencesInTermF t = --- -- TODO: Test that this descends into Match cases and finds everything it needs to. --- t ^.. types @Reference.Id - --- compute correspondence between `v`s in `fst <$> named` compared to `fst <$> new_references` to get a Reference.Id -> Reference.Id mapping --- mitchell tapped out before understanding the following line --- compute correspondence between constructors names & constructor indices in corresponding decls --- submit/mappend these two correspondences to sky mapping - --- Swap the Reference positions according to our map of already computed swaps --- Hydrate into the parser-typechecker version, get the new hash --- reserialize it into the sqlite format --- Compare the old and new sqlite versions to add those ConstructorID/Pos mappings to our context. - --- unrelated Q: --- do we kinda have circular dependency issues here? --- parser-typechecker depends on codebase2, but we are talking about doing things at the parser-typechecker level in this migration --- answer: no - --- unhashComponent --- :: forall v a. Var v => Map Reference.Id (Decl v a) -> Map Reference.Id (v, Decl v a) - --- DD.unhashComponent - --- [OldDecl] ==map==> [NewDecl] ==number==> [(NewDecl, Int)] ==sort==> [(NewDecl, Int)] ==> permutation is map snd of that - --- type List a = Nil | Cons (List a) - --- unique type Thunk = Thunk (Int ->{MakeThunk} Int) --- ability MakeThunk where go : (Int -> Int) -> Thunk - --- What mitchell thinks unhashComponent is doing: --- --- Take a recursive type like --- --- Fix \myself -> Alternatives [Nil, Cons a myself] --- --- And write it with variables in place of recursive mentions like --- --- (Var 1, Alternatives [Nil, Cons a (Var 1)] - --- can derive `original` from Hash + [OldDecl] --- original :: Map Reference.Id (Decl v a) - --- named, rewritten_dependencies :: Map (Reference.Id {old}) (v, Decl v a {old pos in references}) --- named = Decl.unhashComponent original - --- Mapping from the sky: (Reference.Id -> Reference.Id) - --- rewritten_dependencies = replace_dependency_pos's skymap named - --- new_references :: Map v (Reference.Id {new}, DataDeclaration v a) --- new_references = Unison.Hashing.V2.Convert.hashDecls $ Map.toList $ Foldable.toList rewritten_dependencies - --- let DeclFormat locallyIndexedComponent = case runGetS S.getDeclFormat declFormatBytes of --- Left err -> error "something went wrong" --- Right declFormat -> declFormat - --- Operations.hs converts from S level to C level --- SqliteCodebase.hs converts from C level to - -- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure migrateSchema12 :: Applicative m => Connection -> m Bool migrateSchema12 _db = do @@ -915,25 +655,5 @@ migrateSchema12 _db = do pure "todo: migrate12" pure True --- -- remember that the component order might be different --- rehashDeclComponent :: [Decl v a] -> (Hash, ConstructorMappings) --- rehashDeclComponent decls = fmap decls <&> \case --- --- -- --- error "todo: rehashDeclComponent" - --- rewriteDeclComponent :: DeclFormat.LocallyIndexedComponent -> (Hash, DeclFormat.LocallyIndexedComponent, ConstructorMappings) --- rewriteDeclComponent = --- -- --- error "todo: rehashDeclComponent" - --- rehashDeclComponent :: [Decl v a] -> (Hash, DeclFormat.LocallyIndexedComponent, ConstructorMappings) - --- rehashTermComponent :: ConstructorMappings -> TermFormat.LocallyIndexedComponent -> (Hash, TermFormat.LocallyIndexedComponent) --- rehashTermComponent = error "todo: rehashTermComponent" - --- -- getConstructor :: ConstructorMappings -> ObjectId -> Pos -> ConstructorId --- -- getConstructor cm - foldSetter :: LensLike (Writer [a]) s t a a -> s -> [a] foldSetter t s = execWriter (s & t %%~ \a -> tell [a] *> pure a) From d2d619d897d82bfe2d7b454a7950526331fe190b Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 3 Nov 2021 14:22:40 -0600 Subject: [PATCH 069/297] Implement 'undefined' holes and missing steps in migrateBranch --- .../SqliteCodebase/MigrateSchema12.hs | 136 ++++++++++++------ 1 file changed, 91 insertions(+), 45 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index e8ec9b9b23..2be1772301 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -19,6 +19,7 @@ import qualified Data.Map as Map import Data.Maybe import qualified Data.Set as Set import Data.Tuple (swap) +import Data.Tuple.Extra ((***)) import qualified Data.Zip as Zip import U.Codebase.HashTags (BranchHash (..), CausalHash (..), PatchHash (..)) import qualified U.Codebase.Reference as UReference @@ -103,7 +104,7 @@ data MigrationState = MigrationState data Entity = TermComponent Unison.Hash | DeclComponent Unison.Hash - | C CausalHashId + | CausalE CausalHashId | BranchE ObjectId | PatchE ObjectId | W WK.WatchKind Reference.Id @@ -124,7 +125,7 @@ migrationSync = Sync \case BranchE objectId -> do Env {db} <- ask lift (migrateBranch db objectId) - C causalHashId -> do + CausalE causalHashId -> do Env {db} <- ask lift (migrateCausal db causalHashId) PatchE objectId -> do @@ -160,7 +161,7 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep let unmigratedParents = oldCausalParentHashIds & filter (`Map.member` migratedCausals) - & fmap C + & fmap CausalE let unmigratedEntities = unmigratedBranch <> unmigratedParents when (not . null $ unmigratedParents <> unmigratedBranch) (throwE $ Sync.Missing unmigratedEntities) @@ -208,43 +209,69 @@ migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ -- let convertBranch :: S.DbBranch -> m (S.Branch' TextId Hash PatchObjectId (BranchObjectId, CausalHashId)) -- type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) oldBranchWithHashes <- traverseOf S.branchHashes_ (lift . Ops.loadHashByObjectId) oldBranch - _migratedRefs <- gets referenceMapping + migratedRefs <- gets referenceMapping + migratedObjects <- gets objLookup + migratedCausals <- gets causalMapping let allMissingTypesAndTerms :: [Entity] allMissingTypesAndTerms = oldBranchWithHashes ^.. branchSomeRefs_ . uRefIdAsRefId_ - . undefined -- filtered (`Map.notMember` migratedRefs) + . filtered (`Map.notMember` migratedRefs) . to someReferenceIdToEntity - let allMissingPatches = + + let allMissingPatches :: [Entity] = oldBranch ^.. S.patchHashes_ - . undefined -- filtered (`Map.notMember` migratedObjects) + . to unPatchObjectId + . filtered (`Map.notMember` migratedObjects) . to PatchE - let allMissingChildren = + + let allMissingChildBranches :: [Entity] = oldBranch ^.. S.childrenHashes_ - . undefined -- filtered (`Map.notMember` migratedObjects) + . _1 + . to unBranchObjectId + . filtered (`Map.notMember` migratedObjects) . to BranchE + let allMissingChildCausals :: [Entity] = + oldBranch + ^.. S.childrenHashes_ + . _2 + . filtered (`Map.notMember` migratedCausals) + . to CausalE + -- Identify dependencies and bail out if they aren't all built let allMissingReferences :: [Entity] allMissingReferences = allMissingTypesAndTerms ++ allMissingPatches - ++ allMissingChildren + ++ allMissingChildBranches + ++ allMissingChildCausals when (not . null $ allMissingReferences) $ throwE $ Sync.Missing allMissingReferences - -- Remap object id references - -- TODO: remap sub-namespace causal hashes - -- TODO: Save term & decl SomeObjectID mappings in skymap - _newBranch <- oldBranch & branchSomeRefs_ %%~ remapObjIdRefs - let newBranchWithRemappedCausalsChildrenAndPatches = undefined - -- remember to remap the causals, children, and patches - let (localBranchIds, localBranch) = S.LocalizeObject.localizeBranch newBranchWithRemappedCausalsChildrenAndPatches - hash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranchWithRemappedCausalsChildrenAndPatches)) + let remapPatchObjectId patchObjId = case Map.lookup (unPatchObjectId patchObjId) migratedObjects of + Nothing -> error $ "Expected patch: " <> show patchObjId <> " to be migrated" + Just (newPatchObjId, _, _) -> PatchObjectId newPatchObjId + let remapCausalHashId causalHashId = case Map.lookup causalHashId migratedCausals of + Nothing -> error $ "Expected patch: " <> show causalHashId <> " to be migrated" + Just (_, newCausalHashId) -> newCausalHashId + let remapBranchObjectId patchObjId = case Map.lookup (unBranchObjectId patchObjId) migratedObjects of + Nothing -> error $ "Expected patch: " <> show patchObjId <> " to be migrated" + Just (newBranchObjId, _, _) -> BranchObjectId newBranchObjId + + let newBranch :: S.DbBranch + newBranch = + oldBranch + & branchSomeRefs_ %~ remapObjIdRefs migratedObjects migratedRefs + & S.patchHashes_ %~ remapPatchObjectId + & S.childrenHashes_ %~ (remapBranchObjectId *** remapCausalHashId) + + let (localBranchIds, localBranch) = S.LocalizeObject.localizeBranch newBranch + hash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranch)) newHashId <- runDB conn (Ops.liftQ (Q.saveBranchHash (BranchHash (Cv.hash1to2 hash)))) newObjectId <- runDB conn (Ops.saveBranchObject newHashId localBranchIds localBranch) field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, hash) @@ -312,9 +339,9 @@ migrateWatch Codebase {..} watchKind oldWatchId = fmap (either id id) . runExcep pure Sync.Done -- Project an S.Referent'' into its SomeReferenceObjId's -someReferent_ :: Traversal' (S.Branch.Full.Referent'' t h) (SomeReference (UReference.Id' h)) -someReferent_ = - (UReferent._Ref . someReference_) +someReferent_ :: (forall ref. Prism' (SomeReference ref) ref) -> Traversal' (S.Branch.Full.Referent'' t h) (SomeReference (UReference.Id' h)) +someReferent_ typeOrTermPrism = + (UReferent._Ref . someReference_ typeOrTermPrism) `failing` ( UReferent._Con . asPair_ -- Need to unpack the embedded reference AND remap between mismatched Constructor ID types. . unsafeInsidePrism _ConstructorReference @@ -325,27 +352,32 @@ someReferent_ = <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) asPair_ _ (UReference.ReferenceBuiltin x, conId) = pure (UReference.ReferenceBuiltin x, conId) -someReference_ :: Traversal' (UReference.Reference' t h) (SomeReference (UReference.Id' h)) -someReference_ = UReference._ReferenceDerived . unsafeInsidePrism _TermReference +someReference_ :: (forall ref. Prism' (SomeReference ref) ref) -> Traversal' (UReference.Reference' t h) (SomeReference (UReference.Id' h)) +someReference_ typeOrTermPrism = UReference._ReferenceDerived . unsafeInsidePrism typeOrTermPrism -someMetadataSetFormat :: (Ord t, Ord h) => Traversal' (S.Branch.Full.MetadataSetFormat' t h) (SomeReference (UReference.Id' h)) -someMetadataSetFormat = S.Branch.Full.metadataSetFormatReferences_ . someReference_ +someMetadataSetFormat_ :: + (Ord t, Ord h) => + (forall ref. Prism' (SomeReference ref) ref) -> + Traversal' (S.Branch.Full.MetadataSetFormat' t h) (SomeReference (UReference.Id' h)) +someMetadataSetFormat_ typeOrTermPrism = S.Branch.Full.metadataSetFormatReferences_ . someReference_ typeOrTermPrism mapReferentMetadata :: (Ord k, Ord t, Ord h) => + (forall ref. Prism' (SomeReference ref) ref) -> Traversal' k (SomeReference (UReference.Id' h)) -> Traversal' (Map k (S.Branch.Full.MetadataSetFormat' t h)) (SomeReference (UReference.Id' h)) -mapReferentMetadata keyTraversal f m = +mapReferentMetadata typeOrTermPrism keyTraversal f m = Map.toList m - & traversed . beside keyTraversal someMetadataSetFormat %%~ f + & traversed . beside keyTraversal (someMetadataSetFormat_ typeOrTermPrism) %%~ f <&> Map.fromList branchSomeRefs_ :: (Ord t, Ord h) => Traversal' (S.Branch' t h p c) (SomeReference (UReference.Id' h)) branchSomeRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do - let newTypesMap = types & traversed . mapReferentMetadata someReference_ %%~ f - let newTermsMap = terms & traversed . mapReferentMetadata someReferent_ %%~ f + -- Chris: Chat with Arya about which of these undefined's match refs which are types vs terms, metadata is confusing + let newTypesMap = types & traversed . mapReferentMetadata undefined (someReference_ undefined) %%~ f + let newTermsMap = terms & traversed . mapReferentMetadata undefined (someReferent_ undefined) %%~ f S.Branch.Full.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children migrateTermComponent :: forall m v a. (Ord v, Var v, Monad m) => Codebase m v a -> Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) @@ -598,21 +630,27 @@ objIdsToHashed = Nothing -> error $ "Expected object mapping for ID: " <> show objId Just (_, _, hash) -> pure (Reference.Id hash pos) -remapObjIdRefs :: (MonadState MigrationState m) => SomeReferenceObjId -> m SomeReferenceObjId -remapObjIdRefs someObjIdRef = do - objMapping <- gets objLookup - refMapping <- gets referenceMapping - let oldObjId = someObjIdRef ^. someRef_ . UReference.idH - let (newObjId, _, newHash) = - case Map.lookup oldObjId objMapping of - Nothing -> error $ "Expected object mapping for ID: " <> show oldObjId - Just found -> found - let someRefId = (someObjIdRef & someRef_ . UReference.idH .~ Cv.hash1to2 newHash) ^. uRefIdAsRefId_ - let newRefId = case Map.lookup someRefId refMapping of - Nothing -> error $ "Expected reference mapping for ID: " <> show someRefId - Just r -> r - let newSomeObjId = (newRefId ^. from uRefIdAsRefId_) & someRef_ . UReference.idH .~ newObjId - pure newSomeObjId +remapObjIdRefs :: + (Map (Old ObjectId) (New ObjectId, New HashId, New Hash)) -> + (Map SomeReferenceId SomeReferenceId) -> + SomeReferenceObjId -> + SomeReferenceObjId +remapObjIdRefs objMapping refMapping someObjIdRef = newSomeObjId + where + oldObjId :: ObjectId + oldObjId = someObjIdRef ^. someRef_ . UReference.idH + (newObjId, _, newHash) = + case Map.lookup oldObjId objMapping of + Nothing -> error $ "Expected object mapping for ID: " <> show oldObjId + Just found -> found + someRefId :: SomeReferenceId + someRefId = (someObjIdRef & someRef_ . UReference.idH .~ Cv.hash1to2 newHash) ^. uRefIdAsRefId_ + newRefId :: SomeReferenceId + newRefId = case Map.lookup someRefId refMapping of + Nothing -> error $ "Expected reference mapping for ID: " <> show someRefId + Just r -> r + newSomeObjId :: SomeReference (UReference.Id' (New ObjectId)) + newSomeObjId = (newRefId ^. from uRefIdAsRefId_) & someRef_ . UReference.idH .~ newObjId data SomeReference ref = TermReference ref @@ -621,7 +659,15 @@ data SomeReference ref deriving (Eq, Functor, Generic, Ord, Show) someRef_ :: Lens (SomeReference ref) (SomeReference ref') ref ref' -someRef_ = undefined +someRef_ = lens getter setter + where + setter (TermReference _) r = TermReference r + setter (TypeReference _) r = TypeReference r + setter (ConstructorReference _ conId) r = (ConstructorReference r conId) + getter = \case + TermReference r -> r + TypeReference r -> r + ConstructorReference r _ -> r _TermReference :: Prism' (SomeReference ref) ref _TermReference = _Ctor @"TermReference" From 1020290a8e3d828e7ff197df9b28c55b34c6b443 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 3 Nov 2021 14:55:55 -0600 Subject: [PATCH 070/297] Cleanup --- .../U/Codebase/Sqlite/Branch/Full.hs | 17 +----- .../SqliteCodebase/MigrateSchema12.hs | 59 ++++++++++++------- 2 files changed, 40 insertions(+), 36 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 6cae0b0b18..8aa548a8a8 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -49,22 +49,11 @@ data Branch' t h p c = Branch } deriving (Show, Generic) -branchHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' -branchHashes_ _f _ = undefined - -termHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' -termHashes_ _f _ = undefined - -typeHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' -typeHashes_ _f _ = undefined - -patchHashes_ :: Traversal (Branch' t h p c) (Branch' t h p' c) p p' -patchHashes_ = undefined +patches_ :: Traversal (Branch' t h p c) (Branch' t h p' c) p p' +patches_ f Branch {..} = (\newPatches -> Branch terms types newPatches children) <$> traverse f patches childrenHashes_ :: Traversal (Branch' t h p c) (Branch' t h p c') c c' -childrenHashes_ = undefined - --- Branch <$> traverse (\m -> Map.mapKeys) +childrenHashes_ f Branch {..} = Branch terms types patches <$> traverse f children branchCausalHashes_ :: Traversal (Branch' t h p c) (Branch' t h p c') c c' branchCausalHashes_ f Branch {..} = diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 2be1772301..bc550d2aee 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -277,16 +277,11 @@ migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, hash) pure Sync.Done -uRefIdAsRefId_ :: Iso' (SomeReference (UReference.Id' U.Util.Hash)) SomeReferenceId -uRefIdAsRefId_ = mapping uRefAsRef_ - -uRefAsRef_ :: Iso' (UReference.Id' U.Util.Hash) Reference.Id -uRefAsRef_ = iso intoRef intoURef - where - intoRef (UReference.Id hash pos) = Reference.Id (Cv.hash2to1 hash) pos - intoURef (Reference.Id hash pos) = UReference.Id (Cv.hash1to2 hash) pos - -migratePatch :: MonadIO m => Connection -> Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) +migratePatch :: + MonadIO m => + Connection -> + Old PatchObjectId -> + StateT MigrationState m (Sync.TrySyncResult Entity) migratePatch conn oldObjectId = do oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) -- 2. Determine whether all things the patch refers to are built. @@ -338,8 +333,20 @@ migrateWatch Codebase {..} watchKind oldWatchId = fmap (either id id) . runExcep lift . lift $ putWatch watchKindV1 newWatchId remappedTerm pure Sync.Done +uRefIdAsRefId_ :: Iso' (SomeReference (UReference.Id' U.Util.Hash)) SomeReferenceId +uRefIdAsRefId_ = mapping uRefAsRef_ + +uRefAsRef_ :: Iso' (UReference.Id' U.Util.Hash) Reference.Id +uRefAsRef_ = iso intoRef intoURef + where + intoRef (UReference.Id hash pos) = Reference.Id (Cv.hash2to1 hash) pos + intoURef (Reference.Id hash pos) = UReference.Id (Cv.hash1to2 hash) pos + -- Project an S.Referent'' into its SomeReferenceObjId's -someReferent_ :: (forall ref. Prism' (SomeReference ref) ref) -> Traversal' (S.Branch.Full.Referent'' t h) (SomeReference (UReference.Id' h)) +someReferent_ :: + forall t h. + (forall ref. Prism' (SomeReference ref) ref) -> + Traversal' (S.Branch.Full.Referent'' t h) (SomeReference (UReference.Id' h)) someReferent_ typeOrTermPrism = (UReferent._Ref . someReference_ typeOrTermPrism) `failing` ( UReferent._Con @@ -352,23 +359,26 @@ someReferent_ typeOrTermPrism = <&> \(newId, newConId) -> (UReference.ReferenceDerived newId, fromIntegral newConId) asPair_ _ (UReference.ReferenceBuiltin x, conId) = pure (UReference.ReferenceBuiltin x, conId) -someReference_ :: (forall ref. Prism' (SomeReference ref) ref) -> Traversal' (UReference.Reference' t h) (SomeReference (UReference.Id' h)) +someReference_ :: + (forall ref. Prism' (SomeReference ref) ref) -> + Traversal' (UReference.Reference' t h) (SomeReference (UReference.Id' h)) someReference_ typeOrTermPrism = UReference._ReferenceDerived . unsafeInsidePrism typeOrTermPrism someMetadataSetFormat_ :: (Ord t, Ord h) => (forall ref. Prism' (SomeReference ref) ref) -> Traversal' (S.Branch.Full.MetadataSetFormat' t h) (SomeReference (UReference.Id' h)) -someMetadataSetFormat_ typeOrTermPrism = S.Branch.Full.metadataSetFormatReferences_ . someReference_ typeOrTermPrism +someMetadataSetFormat_ typeOrTermPrism = + S.Branch.Full.metadataSetFormatReferences_ . someReference_ typeOrTermPrism -mapReferentMetadata :: +someReferentMetadata_ :: (Ord k, Ord t, Ord h) => (forall ref. Prism' (SomeReference ref) ref) -> Traversal' k (SomeReference (UReference.Id' h)) -> Traversal' (Map k (S.Branch.Full.MetadataSetFormat' t h)) (SomeReference (UReference.Id' h)) -mapReferentMetadata typeOrTermPrism keyTraversal f m = +someReferentMetadata_ typeOrTermPrism keyTraversal f m = Map.toList m & traversed . beside keyTraversal (someMetadataSetFormat_ typeOrTermPrism) %%~ f <&> Map.fromList @@ -376,11 +386,16 @@ mapReferentMetadata typeOrTermPrism keyTraversal f m = branchSomeRefs_ :: (Ord t, Ord h) => Traversal' (S.Branch' t h p c) (SomeReference (UReference.Id' h)) branchSomeRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do -- Chris: Chat with Arya about which of these undefined's match refs which are types vs terms, metadata is confusing - let newTypesMap = types & traversed . mapReferentMetadata undefined (someReference_ undefined) %%~ f - let newTermsMap = terms & traversed . mapReferentMetadata undefined (someReferent_ undefined) %%~ f + let newTypesMap = types & traversed . someReferentMetadata_ undefined (someReference_ undefined) %%~ f + let newTermsMap = terms & traversed . someReferentMetadata_ undefined (someReferent_ undefined) %%~ f S.Branch.Full.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children -migrateTermComponent :: forall m v a. (Ord v, Var v, Monad m) => Codebase m v a -> Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) +migrateTermComponent :: + forall m v a. + (Ord v, Var v, Monad m) => + Codebase m v a -> + Unison.Hash -> + StateT MigrationState m (Sync.TrySyncResult Entity) migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do component <- (lift . lift $ getTermComponentWithTypes hash) >>= \case @@ -410,7 +425,7 @@ migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do ( traversed . _2 . termReferences_ - . filtered (\r -> Map.notMember r referencesMap) + . filtered (`Map.notMember` referencesMap) ) when (not . null $ allMissingReferences) $ @@ -477,7 +492,7 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do & foldSetter ( traversed -- Every type in the list . typeReferences_ - . filtered (\r -> Map.notMember r migratedReferences) + . filtered (`Map.notMember` migratedReferences) ) when (not . null $ unmigratedRefIds) do @@ -518,7 +533,7 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do (componentIDMap Map.! oldReferenceId) & DD.asDataDecl & DD.constructors' - & imap (\(fromIntegral -> constructorId) (_ann, constructorName, _type) -> (constructorName, constructorId)) + & imap (\constructorId (_ann, constructorName, _type) -> (constructorName, fromIntegral constructorId)) & Map.fromList ifor_ (DD.constructors' (DD.asDataDecl dd)) \(fromIntegral -> newConstructorId) (_ann, constructorName, _type) -> do @@ -559,7 +574,7 @@ termFReferences_ f t = >>= Term._TermLink . referentAsSomeTerm_ %%~ f >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism _TypeReference %%~ f --- | Casts the left side of a reference/constructor pair into a Reference.Id +-- | Build a SomeConstructorReference someRefCon_ :: Traversal' (Reference.Reference, ConstructorId) SomeReferenceId someRefCon_ = refConPair_ . unsafeInsidePrism _ConstructorReference where From 35c813864e5c563d0b5886291142d9a5f07769fd Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 3 Nov 2021 15:00:32 -0600 Subject: [PATCH 071/297] Whoops, add missing traversal --- codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs | 3 +++ .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 8aa548a8a8..37f9898c6b 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -49,6 +49,9 @@ data Branch' t h p c = Branch } deriving (Show, Generic) +branchHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' +branchHashes_ _f _ = undefined + patches_ :: Traversal (Branch' t h p c) (Branch' t h p' c) p p' patches_ f Branch {..} = (\newPatches -> Branch terms types newPatches children) <$> traverse f patches diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index bc550d2aee..9a12002c0e 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -222,7 +222,7 @@ migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ let allMissingPatches :: [Entity] = oldBranch - ^.. S.patchHashes_ + ^.. S.patches_ . to unPatchObjectId . filtered (`Map.notMember` migratedObjects) . to PatchE @@ -267,7 +267,7 @@ migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ newBranch = oldBranch & branchSomeRefs_ %~ remapObjIdRefs migratedObjects migratedRefs - & S.patchHashes_ %~ remapPatchObjectId + & S.patches_ %~ remapPatchObjectId & S.childrenHashes_ %~ (remapBranchObjectId *** remapCausalHashId) let (localBranchIds, localBranch) = S.LocalizeObject.localizeBranch newBranch From 9d56b5ee17fa35ebc6629279ad6ddeddb3822aed Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 3 Nov 2021 17:09:56 -0600 Subject: [PATCH 072/297] Updates on migratePatch. Still work to do here. --- codebase2/codebase/U/Codebase/Referent.hs | 7 +- .../SqliteCodebase/MigrateSchema12.hs | 71 +++++++++++++++---- 2 files changed, 62 insertions(+), 16 deletions(-) diff --git a/codebase2/codebase/U/Codebase/Referent.hs b/codebase2/codebase/U/Codebase/Referent.hs index 3b4d227c28..6aac268639 100644 --- a/codebase2/codebase/U/Codebase/Referent.hs +++ b/codebase2/codebase/U/Codebase/Referent.hs @@ -3,6 +3,7 @@ {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveTraversable #-} module U.Codebase.Referent where @@ -23,12 +24,12 @@ type ReferentH = Referent' (Reference' Text (Maybe Hash)) (Reference' Text Hash) data Referent' termRef typeRef = Ref termRef | Con typeRef ConstructorId - deriving (Eq, Ord, Show, Generic) + deriving (Eq, Ord, Show, Generic, Functor, Foldable, Traversable) _Ref :: Prism (Referent' tmr tyr) (Referent' tmr' tyr) tmr tmr' -_Ref = _Ctor @"Ref" +_Ref = _Ctor @"Ref" _Con :: Prism (Referent' tmr tyr) (Referent' tmr tyr') (tyr, ConstructorId) (tyr', ConstructorId) -_Con = _Ctor @"Con" +_Con = _Ctor @"Con" type Id = Id' Hash Hash data Id' hTm hTp diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 9a12002c0e..ba609b7e4b 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -3,6 +3,7 @@ {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE DeriveTraversable #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where @@ -40,12 +41,16 @@ import U.Codebase.Sqlite.DbId import qualified U.Codebase.Sqlite.LocalizeObject as S.LocalizeObject import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Patch.Format as S.Patch.Format +import qualified U.Codebase.Sqlite.Patch.Full as S +import qualified U.Codebase.Sqlite.Patch.TermEdit as TermEdit +import qualified U.Codebase.Sqlite.Patch.TypeEdit as TypeEdit import qualified U.Codebase.Sqlite.Queries as Q import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync import U.Codebase.WatchKind (WatchKind) import qualified U.Codebase.WatchKind as WK import qualified U.Util.Hash as U.Util +import qualified U.Util.Set as Set import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) import qualified Unison.Codebase as Codebase @@ -282,22 +287,40 @@ migratePatch :: Connection -> Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migratePatch conn oldObjectId = do +migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ do oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) -- 2. Determine whether all things the patch refers to are built. let dependencies :: [Entity] dependencies = undefined - if null dependencies - then pure (Sync.Missing dependencies) - else do - let migrate = undefined - let newPatch = migrate oldPatch - let (localPatchIds, localPatch) = S.LocalizeObject.localizePatch newPatch - newHash <- runDB conn (liftQ (Hashing.dbPatchHash newPatch)) - newObjectId <- runDB conn (Ops.saveDbPatch (PatchHash (Cv.hash1to2 newHash)) (S.Patch.Format.Full localPatchIds localPatch)) - newHashId <- runDB conn (liftQ (Q.expectHashIdByHash (Cv.hash1to2 newHash))) - field @"objLookup" %= Map.insert (unPatchObjectId oldObjectId) (unPatchObjectId newObjectId, newHashId, newHash) - pure Sync.Done + when (not . null $ dependencies) (throwE (Sync.Missing dependencies)) + objMapping <- gets objLookup + let hydrate :: Q.DB m => HashId -> m Hash + hydrate = undefined + let dehydrate :: Q.DB m => Hash -> m HashId + dehydrate = undefined + let remapRef :: SomeReferenceId -> SomeReferenceId + remapRef = undefined + let remapObjectIds :: SomeReferenceObjId -> SomeReferenceObjId + remapObjectIds = undefined + newPatch <- + oldPatch + & patchSomeRefsH_ + %%~ ( \someRef -> do + hydratedRef <- (someRef & (traverse . traverse) %%~ hydrate) + hydratedRefId :: SomeReferenceId <- undefined hydratedRef + -- Hrmm, we can't really use Reference.Id here since it's not parameterized + -- over Hash. + x <- (remapRef hydratedRefId & (traverse . _) %%~ dehydrate) + _ x + ) + <&> (patchSomeRefsO_ %~ remapObjectIds) + -- & patchSomeRefsO_ . someRef_ %~ _ + let (localPatchIds, localPatch) = S.LocalizeObject.localizePatch newPatch + newHash <- runDB conn (liftQ (Hashing.dbPatchHash newPatch)) + newObjectId <- runDB conn (Ops.saveDbPatch (PatchHash (Cv.hash1to2 newHash)) (S.Patch.Format.Full localPatchIds localPatch)) + newHashId <- runDB conn (liftQ (Q.expectHashIdByHash (Cv.hash1to2 newHash))) + field @"objLookup" %= Map.insert (unPatchObjectId oldObjectId) (unPatchObjectId newObjectId, newHashId, newHash) + pure Sync.Done -- | PLAN -- * @@ -390,6 +413,28 @@ branchSomeRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do let newTermsMap = terms & traversed . someReferentMetadata_ undefined (someReferent_ undefined) %%~ f S.Branch.Full.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children +patchSomeRefsH_ :: (Ord t, Ord h) => Traversal (S.Patch' t h o) (S.Patch' t h o) (SomeReference (UReference.Id' h)) (SomeReference (UReference.Id' h)) +patchSomeRefsH_ f S.Patch {termEdits, typeEdits} = do + newTermEdits <- Map.fromList <$> (Map.toList termEdits & traversed . _1 . (someReferent_ _TermReference) %%~ f) + newTypeEdits <- Map.fromList <$> (Map.toList typeEdits & traversed . _1 . (someReference_ undefined) %%~ f) + pure S.Patch {termEdits = newTermEdits, typeEdits = newTypeEdits} + +patchSomeRefsO_ :: (Ord t, Ord h, Ord o) => Traversal' (S.Patch' t h o) (SomeReference (UReference.Id' o)) +patchSomeRefsO_ f S.Patch {termEdits, typeEdits} = do + newTermEdits <- (termEdits & traversed . Set.traverse . termEditRefs_ %%~ f) + newTypeEdits <- (typeEdits & traversed . Set.traverse . typeEditRefs_ %%~ f) + pure (S.Patch {termEdits = newTermEdits, typeEdits = newTypeEdits}) + +termEditRefs_ :: Traversal' (TermEdit.TermEdit' t h) (SomeReference (UReference.Id' h)) +termEditRefs_ f (TermEdit.Replace ref typing) = + TermEdit.Replace <$> (ref & someReferent_ _TermReference %%~ f) <*> pure typing +termEditRefs_ _f (TermEdit.Deprecate) = pure TermEdit.Deprecate + +typeEditRefs_ :: Traversal' (TypeEdit.TypeEdit' t h) (SomeReference (UReference.Id' h)) +typeEditRefs_ f (TypeEdit.Replace ref) = + TypeEdit.Replace <$> (ref & someReference_ undefined %%~ f) +typeEditRefs_ _f (TypeEdit.Deprecate) = pure TypeEdit.Deprecate + migrateTermComponent :: forall m v a. (Ord v, Var v, Monad m) => @@ -671,7 +716,7 @@ data SomeReference ref = TermReference ref | TypeReference ref | ConstructorReference ref ConstructorId - deriving (Eq, Functor, Generic, Ord, Show) + deriving (Eq, Functor, Generic, Ord, Show, Foldable, Traversable) someRef_ :: Lens (SomeReference ref) (SomeReference ref') ref ref' someRef_ = lens getter setter From 0a5a01b31eeca4b4dc8c9f0c7bb1c4e30b8f15c7 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Wed, 3 Nov 2021 20:20:54 -0400 Subject: [PATCH 073/297] refactor Operations.hs to utilize LocalizeObject --- .../U/Codebase/Sqlite/Operations.hs | 216 +++++------------- codebase2/codebase/U/Codebase/Branch.hs | 6 +- 2 files changed, 54 insertions(+), 168 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 309761b9e8..be29b4082b 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -112,10 +112,10 @@ import Control.Monad (MonadPlus (mzero), join, unless, when, (<=<)) import Control.Monad.Except (ExceptT, MonadError, MonadIO (liftIO), runExceptT) import qualified Control.Monad.Except as Except import qualified Control.Monad.Extra as Monad -import Control.Monad.State (MonadState, StateT, evalStateT) +import Control.Monad.State (MonadState, evalStateT) import Control.Monad.Trans (MonadTrans (lift)) import Control.Monad.Trans.Maybe (MaybeT (MaybeT), runMaybeT) -import Control.Monad.Writer (MonadWriter, WriterT, runWriterT) +import Control.Monad.Writer (MonadWriter, runWriterT) import qualified Control.Monad.Writer as Writer import Data.Bifoldable (bifoldMap) import Data.Bifunctor (Bifunctor (bimap)) @@ -168,16 +168,14 @@ import U.Codebase.Sqlite.Connection (Connection) import qualified U.Codebase.Sqlite.DbId as Db import qualified U.Codebase.Sqlite.Decl.Format as S.Decl import U.Codebase.Sqlite.LocalIds - ( LocalBranchChildId (..), - LocalDefnId (..), - LocalHashId (..), + ( LocalDefnId (..), LocalIds, LocalIds' (..), - LocalPatchObjectId (..), LocalTextId (..), WatchLocalIds, ) import qualified U.Codebase.Sqlite.LocalIds as LocalIds +import qualified U.Codebase.Sqlite.LocalizeObject as LocalizeObject import qualified U.Codebase.Sqlite.ObjectType as OT import qualified U.Codebase.Sqlite.Patch.Diff as S import qualified U.Codebase.Sqlite.Patch.Format as S @@ -363,12 +361,23 @@ s2cReferent = bitraverse s2cReference s2cReference s2cReferentId :: EDB m => S.Referent.Id -> m C.Referent.Id s2cReferentId = bitraverse loadHashByObjectId loadHashByObjectId +c2sReferent :: EDB m => C.Referent -> m S.Referent +c2sReferent = bitraverse c2sReference c2sReference + c2sReferentId :: EDB m => C.Referent.Id -> m S.Referent.Id c2sReferentId = bitraverse primaryHashToExistingObjectId primaryHashToExistingObjectId h2cReferent :: EDB m => S.ReferentH -> m C.Referent h2cReferent = bitraverse h2cReference h2cReference +-- ** convert and save references +-- | Save the text and hash parts of a Reference to the database and substitute their ids. +saveReferenceH :: DB m => C.Reference -> m S.ReferenceH +saveReferenceH = bitraverse Q.saveText Q.saveHashHash + +saveReferentH :: DB m => C.Referent -> m S.ReferentH +saveReferentH = bitraverse saveReferenceH saveReferenceH + -- ** Edits transformations s2cTermEdit :: EDB m => S.TermEdit -> m C.TermEdit @@ -393,76 +402,21 @@ s2cTypeEdit = \case S.TypeEdit.Replace r -> C.TypeEdit.Replace <$> s2cReference r S.TypeEdit.Deprecate -> pure C.TypeEdit.Deprecate --- | assumes that all relevant values are already in the DB -c2lPatch :: EDB m => C.Branch.Patch -> m (S.PatchLocalIds, S.LocalPatch) -c2lPatch (C.Branch.Patch termEdits typeEdits) = - done =<< (runWriterT . flip evalStateT startState) do - S.Patch - <$> Map.bitraverse saveReferentH (Set.traverse saveTermEdit) termEdits - <*> Map.bitraverse saveReferenceH (Set.traverse saveTypeEdit) typeEdits +-- | assumes that all relevant defns are already in the DB +c2sPatch :: EDB m => C.Branch.Patch -> m S.Patch +c2sPatch (C.Branch.Patch termEdits typeEdits) = + S.Patch + <$> Map.bitraverse saveReferentH (Set.traverse c2sTermEdit) termEdits + <*> Map.bitraverse saveReferenceH (Set.traverse c2sTypeEdit) typeEdits where - startState = mempty @(Map Text LocalTextId, Map H.Hash LocalHashId, Map H.Hash LocalDefnId) - done :: - EDB m => - (a, (Seq Text, Seq H.Hash, Seq H.Hash)) -> - m (S.Patch.Format.PatchLocalIds, a) - done (lPatch, (textValues, hashValues, defnValues)) = do - textIds <- liftQ $ traverse Q.saveText textValues - hashIds <- liftQ $ traverse Q.saveHashHash hashValues - objectIds <- traverse primaryHashToExistingObjectId defnValues - let ids = - S.Patch.Format.LocalIds - (Vector.fromList (Foldable.toList textIds)) - (Vector.fromList (Foldable.toList hashIds)) - (Vector.fromList (Foldable.toList objectIds)) - pure (ids, lPatch) - - lookupText :: - ( MonadState s m, - MonadWriter w m, - Lens.Field1' s (Map t LocalTextId), - Lens.Field1' w (Seq t), - Ord t - ) => - t -> - m LocalTextId - lookupText = lookup_ Lens._1 Lens._1 LocalTextId - - lookupHash :: - ( MonadState s m, - MonadWriter w m, - Lens.Field2' s (Map d LocalHashId), - Lens.Field2' w (Seq d), - Ord d - ) => - d -> - m LocalHashId - lookupHash = lookup_ Lens._2 Lens._2 LocalHashId - - lookupDefn :: - ( MonadState s m, - MonadWriter w m, - Lens.Field3' s (Map d LocalDefnId), - Lens.Field3' w (Seq d), - Ord d - ) => - d -> - m LocalDefnId - lookupDefn = lookup_ Lens._3 Lens._3 LocalDefnId - - saveTermEdit = \case - C.TermEdit.Replace r t -> S.TermEdit.Replace <$> saveReferent r <*> pure (c2sTyping t) + c2sTermEdit = \case + C.TermEdit.Replace r t -> S.TermEdit.Replace <$> c2sReferent r <*> pure (c2sTyping t) C.TermEdit.Deprecate -> pure S.TermEdit.Deprecate - saveTypeEdit = \case - C.TypeEdit.Replace r -> S.TypeEdit.Replace <$> saveReference r + c2sTypeEdit = \case + C.TypeEdit.Replace r -> S.TypeEdit.Replace <$> c2sReference r C.TypeEdit.Deprecate -> pure S.TypeEdit.Deprecate - saveReference = bitraverse lookupText lookupDefn - saveReferenceH = bitraverse lookupText lookupHash - saveReferent = bitraverse saveReference saveReference - saveReferentH = bitraverse saveReferenceH saveReferenceH - -- | produces a diff -- diff = full - ref; full = diff + ref diffPatch :: S.LocalPatch -> S.LocalPatch -> S.LocalPatchDiff @@ -583,10 +537,6 @@ saveTermComponent h terms = do pure oId --- | Save the text and hash parts of a Reference to the database and substitute their ids. -saveReferenceH :: DB m => C.Reference' Text H.Hash -> m (C.Reference' Db.TextId Db.HashId) -saveReferenceH = bitraverse Q.saveText Q.saveHashHash - -- | implementation detail of c2{s,w}Term -- The Type is optional, because we don't store them for watch expression results. c2xTerm :: forall m t d. Monad m => (Text -> m t) -> (H.Hash -> m d) -> C.Term Symbol -> Maybe (C.Term.Type Symbol) -> m (LocalIds' t d, S.Term.Term, Maybe (S.Term.Type)) @@ -990,15 +940,6 @@ s2cBranch (S.Branch.Full.Branch tms tps patches children) = Nothing -> throwError (ExpectedBranch' chId) Just boId -> loadBranchByObjectId boId --- this maps from the key used by C.Branch to a local id -type BranchSavingState = (Map Text LocalTextId, Map H.Hash LocalDefnId, Map Db.PatchObjectId LocalPatchObjectId, Map (Db.BranchObjectId, Db.CausalHashId) LocalBranchChildId) - -type BranchSavingWriter = (Seq Text, Seq H.Hash, Seq Db.PatchObjectId, Seq (Db.BranchObjectId, Db.CausalHashId)) - -type BranchSavingConstraint m = (MonadState BranchSavingState m, MonadWriter BranchSavingWriter m) - -type BranchSavingMonad m = StateT BranchSavingState (WriterT BranchSavingWriter m) - saveRootBranch :: EDB m => C.Branch.Causal m -> m (Db.BranchObjectId, Db.CausalHashId) saveRootBranch c = do when debug $ traceM $ "Operations.saveRootBranch " ++ show (C.causalHash c) @@ -1067,89 +1008,34 @@ saveBranch (C.Causal hc he parents me) = do liftQ (Q.saveCausalParents chId parentCausalHashIds) pure (chId, bhId) boId <- flip Monad.fromMaybeM (liftQ $ Q.loadBranchObjectIdByCausalHashId chId) do - (li, lBranch) <- c2lBranch =<< me + branch <- c2sBranch =<< me + let (li, lBranch) = LocalizeObject.localizeBranch branch saveBranchObject bhId li lBranch pure (boId, chId) where - c2lBranch :: EDB m => C.Branch.Branch m -> m (BranchLocalIds, S.Branch.Full.LocalBranch) - c2lBranch (C.Branch.Branch terms types patches children) = - done =<< (runWriterT . flip evalStateT startState) do - S.Branch - <$> Map.bitraverse saveNameSegment (Map.bitraverse saveReferent saveMetadata) terms - <*> Map.bitraverse saveNameSegment (Map.bitraverse saveReference saveMetadata) types - <*> Map.bitraverse saveNameSegment savePatch' patches - <*> Map.bitraverse saveNameSegment saveChild children - saveNameSegment (C.Branch.NameSegment t) = lookupText t - saveReference :: BranchSavingConstraint m => C.Reference.Reference -> m S.Reference.LocalReference - saveReference = bitraverse lookupText lookupDefn - saveReferent :: BranchSavingConstraint m => C.Referent.Referent -> m S.Referent.LocalReferent - saveReferent = bitraverse saveReference saveReference - saveMetadata :: Monad m => m C.Branch.MdValues -> BranchSavingMonad m S.Branch.Full.LocalMetadataSet - saveMetadata mm = do - C.Branch.MdValues m <- (lift . lift) mm - S.Branch.Full.Inline <$> Set.traverse saveReference (Map.keysSet m) - savePatch' :: EDB m => (PatchHash, m C.Branch.Patch) -> BranchSavingMonad m LocalPatchObjectId - savePatch' (h, mp) = do - patchOID <- - primaryHashToMaybePatchObjectId h >>= \case - Just patchOID -> pure patchOID - Nothing -> savePatch h =<< (lift . lift) mp - lookupPatch patchOID - saveChild :: EDB m => C.Branch.Causal m -> BranchSavingMonad m LocalBranchChildId - saveChild c = (lift . lift) (saveBranch c) >>= lookupChild - lookupText :: - ( MonadState s m, - MonadWriter w m, - Lens.Field1' s (Map t LocalTextId), - Lens.Field1' w (Seq t), - Ord t - ) => - t -> - m LocalTextId - lookupText = lookup_ Lens._1 Lens._1 LocalTextId - lookupDefn :: - ( MonadState s m, - MonadWriter w m, - Lens.Field2' s (Map d LocalDefnId), - Lens.Field2' w (Seq d), - Ord d - ) => - d -> - m LocalDefnId - lookupDefn = lookup_ Lens._2 Lens._2 LocalDefnId - lookupPatch :: - ( MonadState s m, - MonadWriter w m, - Lens.Field3' s (Map p LocalPatchObjectId), - Lens.Field3' w (Seq p), - Ord p - ) => - p -> - m LocalPatchObjectId - lookupPatch = lookup_ Lens._3 Lens._3 LocalPatchObjectId - lookupChild :: - ( MonadState s m, - MonadWriter w m, - Lens.Field4' s (Map c LocalBranchChildId), - Lens.Field4' w (Seq c), - Ord c - ) => - c -> - m LocalBranchChildId - lookupChild = lookup_ Lens._4 Lens._4 LocalBranchChildId - startState = mempty @BranchSavingState - done :: (EDB m, Show a) => (a, BranchSavingWriter) -> m (BranchLocalIds, a) - done (lBranch, written@(textValues, defnHashes, patchObjectIds, branchCausalIds)) = do - when debug $ traceM $ "saveBranch.done\n\tlBranch = " ++ show lBranch ++ "\n\twritten = " ++ show written - textIds <- liftQ $ traverse Q.saveText textValues - defnObjectIds <- traverse primaryHashToExistingObjectId defnHashes - let ids = - S.BranchFormat.LocalIds - (Vector.fromList (Foldable.toList textIds)) - (Vector.fromList (Foldable.toList defnObjectIds)) - (Vector.fromList (Foldable.toList patchObjectIds)) - (Vector.fromList (Foldable.toList branchCausalIds)) - pure (ids, lBranch) + c2sBranch :: EDB m => C.Branch.Branch m -> m S.DbBranch + c2sBranch (C.Branch.Branch terms types patches children) = + S.Branch + <$> Map.bitraverse saveNameSegment (Map.bitraverse c2sReferent c2sMetadata) terms + <*> Map.bitraverse saveNameSegment (Map.bitraverse c2sReference c2sMetadata) types + <*> Map.bitraverse saveNameSegment savePatchObjectId patches + <*> Map.bitraverse saveNameSegment saveBranch children + + saveNameSegment :: EDB m => C.Branch.NameSegment -> m Db.TextId + saveNameSegment = liftQ . Q.saveText . C.Branch.unNameSegment + + c2sMetadata :: EDB m => m C.Branch.MdValues -> m S.Branch.Full.DbMetadataSet + c2sMetadata mm = do + C.Branch.MdValues m <- mm + S.Branch.Full.Inline <$> Set.traverse c2sReference (Map.keysSet m) + + savePatchObjectId :: EDB m => (PatchHash, m C.Branch.Patch) -> m Db.PatchObjectId + savePatchObjectId (h, mp) = do + primaryHashToMaybePatchObjectId h >>= \case + Just patchOID -> pure patchOID + Nothing -> do + patch <- mp + savePatch h patch saveBranchObject :: DB m => Db.BranchHashId -> BranchLocalIds -> S.Branch.Full.LocalBranch -> m Db.BranchObjectId saveBranchObject id@(Db.unBranchHashId -> hashId) li lBranch = do @@ -1320,7 +1206,7 @@ loadDbPatchById patchId = savePatch :: EDB m => PatchHash -> C.Branch.Patch -> m Db.PatchObjectId savePatch h c = do - (li, lPatch) <- c2lPatch c + (li, lPatch) <- LocalizeObject.localizePatch <$> c2sPatch c saveDbPatch h (S.Patch.Format.Full li lPatch) saveDbPatch :: EDB m => PatchHash -> S.PatchFormat -> m Db.PatchObjectId diff --git a/codebase2/codebase/U/Codebase/Branch.hs b/codebase2/codebase/U/Codebase/Branch.hs index 576e18b735..7bcec9cac4 100644 --- a/codebase2/codebase/U/Codebase/Branch.hs +++ b/codebase2/codebase/U/Codebase/Branch.hs @@ -11,12 +11,12 @@ import U.Codebase.TermEdit (TermEdit) import U.Codebase.TypeEdit (TypeEdit) import qualified Data.Map as Map -newtype NameSegment = NameSegment Text deriving (Eq, Ord, Show) +newtype NameSegment = NameSegment { unNameSegment :: Text } deriving (Eq, Ord, Show) type MetadataType = Reference type MetadataValue = Reference data MdValues = MdValues (Map MetadataValue MetadataType) deriving (Eq, Ord, Show) - + type Causal m = C.Causal m CausalHash BranchHash (Branch m) -- | V2.Branch is like V1.Branch0; I would rename it, at least temporarily, but too hard. @@ -36,4 +36,4 @@ instance Show (Branch m) where show b = "Branch { terms = " ++ show (fmap Map.keys (terms b)) ++ ", types = " ++ show (fmap Map.keys (types b)) ++ ", patches = " ++ show (fmap fst (patches b)) ++ - ", children = " ++ show (Map.keys (children b)) \ No newline at end of file + ", children = " ++ show (Map.keys (children b)) From 7c42120d0c8f9f946b8bc27a56d32362d088bd07 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Wed, 3 Nov 2021 20:21:18 -0400 Subject: [PATCH 074/297] use DeclName in MigrateSchema12 --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index e8ec9b9b23..200506409c 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -16,7 +16,6 @@ import Data.Generics.Product import Data.Generics.Sum (_Ctor) import Data.List.Extra (nubOrd) import qualified Data.Map as Map -import Data.Maybe import qualified Data.Set as Set import Data.Tuple (swap) import qualified Data.Zip as Zip @@ -425,7 +424,7 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do let componentIDMap :: Map (Old Reference.Id) (DD.Decl v a) componentIDMap = Map.fromList $ Reference.componentFor hash declComponent - let unhashed :: Map (Old Reference.Id) (v, DD.Decl v a) + let unhashed :: Map (Old Reference.Id) (DeclName v, DD.Decl v a) unhashed = DD.unhashComponent componentIDMap let allTypes :: [Type v a] @@ -455,7 +454,7 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do let remapTerm :: Type v a -> Type v a remapTerm = typeReferences_ %~ \ref -> Map.findWithDefault (error "unmigrated reference") ref migratedReferences - let remappedReferences :: Map (Old Reference.Id) (v, DD.Decl v a) + let remappedReferences :: Map (Old Reference.Id) (DeclName v, DD.Decl v a) remappedReferences = unhashed & traversed -- Traverse map of reference IDs @@ -496,6 +495,7 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do (ConstructorReference newReferenceId newConstructorId) lift . lift $ putTypeDeclaration newReferenceId dd + pure Sync.Done typeReferences_ :: (Monad m, Ord v) => LensLike' m (Type v a) SomeReferenceId From a30b932cc6ed14e164036840e8cdfd6d2cfff708 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 5 Nov 2021 10:30:43 -0600 Subject: [PATCH 075/297] Add a bunch of helper traversals --- .../U/Codebase/Sqlite/Patch/Full.hs | 20 +++++++++- .../U/Codebase/Sqlite/Patch/TermEdit.hs | 16 +++++++- .../U/Codebase/Sqlite/Patch/TypeEdit.hs | 13 ++++++- codebase2/codebase/U/Codebase/Reference.hs | 8 ++-- codebase2/codebase/U/Codebase/Referent.hs | 11 +++++- codebase2/util/src/U/Util/Map.hs | 11 ++++++ .../SqliteCodebase/MigrateSchema12.hs | 37 +++++++++++-------- .../Unison/Test/Codebase/MigrateSchema12.hs | 14 ------- .../unison-parser-typechecker.cabal | 1 - 9 files changed, 93 insertions(+), 38 deletions(-) delete mode 100644 parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs index cb4eacb4e9..86e6eab2ce 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs @@ -1,16 +1,21 @@ module U.Codebase.Sqlite.Patch.Full where -import Data.Bifunctor (Bifunctor (bimap)) +import Control.Lens import Data.Map (Map) import Data.Set (Set) import qualified Data.Set as Set import U.Codebase.Reference (Reference') +import qualified U.Codebase.Reference as Reference import U.Codebase.Referent (Referent') +import qualified U.Codebase.Referent as Referent import qualified U.Codebase.Sqlite.DbId as Db import U.Codebase.Sqlite.LocalIds (LocalDefnId, LocalHashId, LocalTextId) import U.Codebase.Sqlite.Patch.TermEdit (TermEdit') import U.Codebase.Sqlite.Patch.TypeEdit (TypeEdit') import qualified U.Util.Map as Map +import qualified U.Util.Set as Set +import qualified U.Codebase.Sqlite.Patch.TermEdit as TermEdit +import qualified U.Codebase.Sqlite.Patch.TypeEdit as TypeEdit -- | -- @ @@ -37,6 +42,19 @@ data Patch' t h o = Patch typeEdits :: Map (Reference' t h) (Set (TypeEdit' t o)) } +patchH_ :: (Ord t, Ord h') => Traversal (Patch' t h o) (Patch' t h' o) h h' +patchH_ f Patch {termEdits, typeEdits} = do + newTermEdits <- termEdits & Map.traverseKeys . Referent.refs_ . Reference.h_ %%~ f + newTypeEdits <- typeEdits & Map.traverseKeys . Reference.h_ %%~ f + pure Patch {termEdits = newTermEdits, typeEdits = newTypeEdits} + +patchO_ :: (Ord t, Ord o') => Traversal (Patch' t h o) (Patch' t h o') o o' +patchO_ f Patch {termEdits, typeEdits} = do + newTermEdits <- termEdits & traversed . Set.traverse . TermEdit.h_ %%~ f + newTypeEdits <- typeEdits & traversed . Set.traverse . TypeEdit.h_ %%~ f + pure Patch {termEdits = newTermEdits, typeEdits = newTypeEdits} + + trimap :: (Ord t', Ord h', Ord o') => (t -> t') -> diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TermEdit.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TermEdit.hs index 82a4029382..53ec605673 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TermEdit.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TermEdit.hs @@ -1,12 +1,13 @@ module U.Codebase.Sqlite.Patch.TermEdit where import Data.Bifoldable (Bifoldable (bifoldMap)) -import Data.Bifunctor (Bifunctor (bimap)) import Data.Bitraversable (Bitraversable (bitraverse)) import U.Codebase.Reference (Reference') import qualified U.Codebase.Referent as Referent import qualified U.Codebase.Sqlite.DbId as Db import U.Codebase.Sqlite.LocalIds (LocalDefnId, LocalTextId) +import Control.Lens +import qualified U.Codebase.Reference as Reference type TermEdit = TermEdit' Db.TextId Db.ObjectId @@ -17,6 +18,19 @@ type Referent' t h = Referent.Referent' (Reference' t h) (Reference' t h) data TermEdit' t h = Replace (Referent' t h) Typing | Deprecate deriving (Eq, Ord, Show) +_Replace :: Prism (TermEdit' t h) (TermEdit' t' h') (Referent' t h, Typing) (Referent' t' h', Typing) +_Replace = prism embed project + where + project :: TermEdit' t h -> Either (TermEdit' t' h') (Referent' t h, Typing) + project (Replace ref typ) = Right (ref, typ) + project Deprecate = Left Deprecate + + embed :: (Referent' t' h', Typing) -> TermEdit' t' h' + embed (ref, typ) = Replace ref typ + +h_ :: Traversal (TermEdit' t h) (TermEdit' t h') h h' +h_ f = _Replace . _1 . Referent.refs_ . Reference.h_ %%~ f + -- Replacements with the Same type can be automatically propagated. -- Replacements with a Subtype can be automatically propagated but may result in dependents getting more general types, so requires re-inference. -- Replacements of a Different type need to be manually propagated by the programmer. diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TypeEdit.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TypeEdit.hs index c2c0233a8f..79f3c9fba9 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TypeEdit.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TypeEdit.hs @@ -1,11 +1,12 @@ module U.Codebase.Sqlite.Patch.TypeEdit where import Data.Bifoldable (Bifoldable (bifoldMap)) -import Data.Bifunctor (Bifunctor (bimap)) import Data.Bitraversable (Bitraversable (bitraverse)) import U.Codebase.Reference (Reference') import qualified U.Codebase.Sqlite.DbId as Db import U.Codebase.Sqlite.LocalIds (LocalDefnId, LocalTextId) +import Control.Lens +import qualified U.Codebase.Reference as Reference type LocalTypeEdit = TypeEdit' LocalTextId LocalDefnId @@ -14,6 +15,16 @@ type TypeEdit = TypeEdit' Db.TextId Db.ObjectId data TypeEdit' t h = Replace (Reference' t h) | Deprecate deriving (Eq, Ord, Show) +_Replace :: Prism (TypeEdit' t h) (TypeEdit' t' h') (Reference' t h) (Reference' t' h') +_Replace = prism Replace project + where + project :: TypeEdit' t h -> Either (TypeEdit' t' h') (Reference' t h) + project (Replace ref) = Right ref + project Deprecate = Left Deprecate + +h_ :: Traversal (TypeEdit' t h) (TypeEdit' t h') h h' +h_ = _Replace . Reference.h_ + instance Bifunctor TypeEdit' where bimap f g (Replace r) = Replace (bimap f g r) bimap _ _ Deprecate = Deprecate diff --git a/codebase2/codebase/U/Codebase/Reference.hs b/codebase2/codebase/U/Codebase/Reference.hs index 31107821ab..ee3ae15d19 100644 --- a/codebase2/codebase/U/Codebase/Reference.hs +++ b/codebase2/codebase/U/Codebase/Reference.hs @@ -39,13 +39,13 @@ type Pos = Word64 data Id' h = Id h Pos deriving (Eq, Ord, Show, Functor, Foldable, Traversable) -t :: Traversal (Reference' t h) (Reference' t' h) t t' -t f = \case +t_ :: Traversal (Reference' t h) (Reference' t' h) t t' +t_ f = \case ReferenceBuiltin t -> ReferenceBuiltin <$> f t ReferenceDerived id -> pure (ReferenceDerived id) -h :: Traversal (Reference' t h) (Reference' t h') h h' -h f = \case +h_ :: Traversal (Reference' t h) (Reference' t h') h h' +h_ f = \case ReferenceBuiltin t -> pure (ReferenceBuiltin t) Derived h i -> Derived <$> f h <*> pure i diff --git a/codebase2/codebase/U/Codebase/Referent.hs b/codebase2/codebase/U/Codebase/Referent.hs index 6aac268639..3bbba4df1f 100644 --- a/codebase2/codebase/U/Codebase/Referent.hs +++ b/codebase2/codebase/U/Codebase/Referent.hs @@ -14,7 +14,7 @@ import Data.Bifunctor (Bifunctor(..)) import Data.Bifoldable (Bifoldable(..)) import Data.Bitraversable (Bitraversable(..)) import U.Codebase.Decl (ConstructorId) -import Control.Lens (Prism) +import Control.Lens (Prism, Traversal) import Data.Generics.Sum (_Ctor) import Unison.Prelude @@ -26,6 +26,15 @@ data Referent' termRef typeRef | Con typeRef ConstructorId deriving (Eq, Ord, Show, Generic, Functor, Foldable, Traversable) +refs_ :: Traversal (Referent' ref ref) (Referent' ref' ref') ref ref' +refs_ f r = bitraverse f f r + +typeRef_ :: Traversal (Referent' typeRef termRef) (Referent' typeRef' termRef) typeRef typeRef' +typeRef_ f = bitraverse f pure + +termRef_ :: Traversal (Referent' typeRef termRef) (Referent' typeRef termRef') termRef termRef' +termRef_ f = bitraverse pure f + _Ref :: Prism (Referent' tmr tyr) (Referent' tmr' tyr) tmr tmr' _Ref = _Ctor @"Ref" _Con :: Prism (Referent' tmr tyr) (Referent' tmr tyr') (tyr, ConstructorId) (tyr', ConstructorId) diff --git a/codebase2/util/src/U/Util/Map.hs b/codebase2/util/src/U/Util/Map.hs index af41464017..e1ea68176d 100644 --- a/codebase2/util/src/U/Util/Map.hs +++ b/codebase2/util/src/U/Util/Map.hs @@ -3,9 +3,12 @@ module U.Util.Map bitraverse, swap, valuesVector, + traverseKeys, + traverseKeysWith, ) where +import Control.Lens (traversed, (%%~), (&), _1) import qualified Data.Bifunctor as B import qualified Data.Bitraversable as B import Data.Map (Map) @@ -27,3 +30,11 @@ swap = valuesVector :: Map k v -> Vector v valuesVector = Vector.fromList . Map.elems + +traverseKeys :: (Applicative f, Ord k') => (k -> f k') -> Map k v -> f (Map k' v) +traverseKeys f m = + Map.fromList <$> (Map.toList m & traversed . _1 %%~ f) + +traverseKeysWith :: (Applicative f, Ord k') => (v -> v -> v) -> (k -> f k') -> Map k v -> f (Map k' v) +traverseKeysWith combine f m = + Map.fromListWith combine <$> (Map.toList m & traversed . _1 %%~ f) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 8990007fba..2f300dd0a5 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -35,7 +35,7 @@ import U.Codebase.Sqlite.DbId CausalHashId (..), HashId, ObjectId, - PatchObjectId (..), + PatchObjectId (..), TextId (TextId) ) import qualified U.Codebase.Sqlite.LocalizeObject as S.LocalizeObject import qualified U.Codebase.Sqlite.Operations as Ops @@ -201,17 +201,9 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep pure Sync.Done --- Chris: Remaining Plan: --- Convert all term & type object IDs in DbBranch into Hashes using database, can't use the skymap since they might not be migrated yet. --- Collect all unmigrated Reference Ids and require them --- Using the `objLookup` skymap, map over the original dbBranch, and use the objLookup table to convert all object id references. --- Save this branch. migrateBranch :: MonadIO m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ do - -- Read the old branch oldBranch <- runDB conn (Ops.loadDbBranchByObjectId (BranchObjectId oldObjectId)) - -- let convertBranch :: S.DbBranch -> m (S.Branch' TextId Hash PatchObjectId (BranchObjectId, CausalHashId)) - -- type DbBranch = Branch' TextId ObjectId PatchObjectId (BranchObjectId, CausalHashId) oldBranchWithHashes <- traverseOf S.branchHashes_ (lift . Ops.loadHashByObjectId) oldBranch migratedRefs <- gets referenceMapping migratedObjects <- gets objLookup @@ -282,25 +274,41 @@ migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ pure Sync.Done migratePatch :: + forall m. MonadIO m => Connection -> Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ do oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) + let hydrateHashes :: forall m. Q.DB m => HashId -> m Hash + hydrateHashes hashId = do + Cv.hash2to1 <$> Q.loadHashHashById hashId + let hydrateObjectIds :: Q.DB m => ObjectId -> m Hash + hydrateObjectIds objId = do + Cv.hash2to1 <$> Ops.loadHashByObjectId objId + + oldPatchWithHashes :: S.Patch' TextId Hash ObjectId + <- lift . lift $ do + (oldPatch & S.patchH_ %%~ hydrateHashes) + >>= S.patchO_ %%~ hydrateObjectIds + -- 2. Determine whether all things the patch refers to are built. let dependencies :: [Entity] - dependencies = undefined - when (not . null $ dependencies) (throwE (Sync.Missing dependencies)) - --objMapping <- gets objLookup - let hydrate :: Q.DB m => HashId -> m Hash - hydrate = undefined + dependencies = + oldPatch + ^.. patchSomeRefsH_ . to someReferenceIdToEntity + <> oldPatch + ^.. patchSomeRefsO_ . to _ + let dehydrate :: Q.DB m => Hash -> m HashId dehydrate = undefined let remapRef :: SomeReferenceId -> SomeReferenceId remapRef = undefined let remapObjectIds :: SomeReferenceObjId -> SomeReferenceObjId remapObjectIds = undefined + + when (not . null $ dependencies) (throwE (Sync.Missing dependencies)) newPatch <- oldPatch & patchSomeRefsH_ @@ -313,7 +321,6 @@ migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ undefined x ) <&> (patchSomeRefsO_ %~ remapObjectIds) - -- & patchSomeRefsO_ . someRef_ %~ _ let (localPatchIds, localPatch) = S.LocalizeObject.localizePatch newPatch newHash <- runDB conn (liftQ (Hashing.dbPatchHash newPatch)) newObjectId <- runDB conn (Ops.saveDbPatch (PatchHash (Cv.hash1to2 newHash)) (S.Patch.Format.Full localPatchIds localPatch)) diff --git a/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs b/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs deleted file mode 100644 index 1266cdc219..0000000000 --- a/parser-typechecker/tests/Unison/Test/Codebase/MigrateSchema12.hs +++ /dev/null @@ -1,14 +0,0 @@ -module Unison.Test.Codebase.MigrateSchema12 where - -{- -testType :: Type v a -testType = _ - -test :: Test () -test = - scope "migrate12" - . tests - $ [ scope "threeWayMerge.ex1" - . expect $ Causal.head testThreeWay == Set.fromList [3, 4] - ] --} \ No newline at end of file diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 4887ff1bb2..6a714a63a2 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -372,7 +372,6 @@ executable tests Unison.Test.ClearCache Unison.Test.Codebase.Branch Unison.Test.Codebase.Causal - Unison.Test.Codebase.MigrateSchema12 Unison.Test.Codebase.Path Unison.Test.CodebaseInit Unison.Test.ColorText From 931e4f606f1f491b341550af08f7c273e0c2a049 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 5 Nov 2021 10:46:20 -0600 Subject: [PATCH 076/297] Finish up most of migratePatch, just have a few lifts to write --- .../U/Codebase/Sqlite/Patch/Full.hs | 1 - .../SqliteCodebase/MigrateSchema12.hs | 71 +++++++++---------- 2 files changed, 35 insertions(+), 37 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs index 86e6eab2ce..4007569381 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Full.hs @@ -54,7 +54,6 @@ patchO_ f Patch {termEdits, typeEdits} = do newTypeEdits <- typeEdits & traversed . Set.traverse . TypeEdit.h_ %%~ f pure Patch {termEdits = newTermEdits, typeEdits = newTypeEdits} - trimap :: (Ord t', Ord h', Ord o') => (t -> t') -> diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 2f300dd0a5..d29f30c32e 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -1,9 +1,9 @@ {-# LANGUAGE ApplicativeDo #-} {-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE DeriveTraversable #-} module Unison.Codebase.SqliteCodebase.MigrateSchema12 where @@ -204,7 +204,7 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep migrateBranch :: MonadIO m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ do oldBranch <- runDB conn (Ops.loadDbBranchByObjectId (BranchObjectId oldObjectId)) - oldBranchWithHashes <- traverseOf S.branchHashes_ (lift . Ops.loadHashByObjectId) oldBranch + oldBranchWithHashes <- traverseOf S.branchHashes_ (lift . fmap Cv.hash2to1 . Ops.loadHashByObjectId) oldBranch migratedRefs <- gets referenceMapping migratedObjects <- gets objLookup migratedCausals <- gets causalMapping @@ -281,48 +281,44 @@ migratePatch :: StateT MigrationState m (Sync.TrySyncResult Entity) migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ do oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) - let hydrateHashes :: forall m. Q.DB m => HashId -> m Hash + let hydrateHashes :: forall m. Q.EDB m => HashId -> m Hash hydrateHashes hashId = do Cv.hash2to1 <$> Q.loadHashHashById hashId - let hydrateObjectIds :: Q.DB m => ObjectId -> m Hash + let hydrateObjectIds :: forall m. Ops.EDB m => ObjectId -> m Hash hydrateObjectIds objId = do Cv.hash2to1 <$> Ops.loadHashByObjectId objId - oldPatchWithHashes :: S.Patch' TextId Hash ObjectId - <- lift . lift $ do + oldPatchWithHashes :: S.Patch' TextId Hash Hash <- + lift . lift $ do (oldPatch & S.patchH_ %%~ hydrateHashes) - >>= S.patchO_ %%~ hydrateObjectIds + >>= (S.patchO_ %%~ hydrateObjectIds) -- 2. Determine whether all things the patch refers to are built. let dependencies :: [Entity] dependencies = - oldPatch - ^.. patchSomeRefsH_ . to someReferenceIdToEntity - <> oldPatch - ^.. patchSomeRefsO_ . to _ + oldPatchWithHashes ^.. patchSomeRefsH_ . uRefIdAsRefId_ . to someReferenceIdToEntity + <> oldPatchWithHashes ^.. patchSomeRefsO_ . uRefIdAsRefId_ . to someReferenceIdToEntity + when (not . null $ dependencies) (throwE (Sync.Missing dependencies)) - let dehydrate :: Q.DB m => Hash -> m HashId - dehydrate = undefined + let dehydrateHashesToHashId :: forall m. Q.DB m => Hash -> m HashId + dehydrateHashesToHashId = undefined + let dehydrateHashesToObjectId :: forall m. Q.DB m => Hash -> m ObjectId + dehydrateHashesToObjectId = undefined + migratedReferences <- gets referenceMapping let remapRef :: SomeReferenceId -> SomeReferenceId - remapRef = undefined - let remapObjectIds :: SomeReferenceObjId -> SomeReferenceObjId - remapObjectIds = undefined + remapRef ref = Map.findWithDefault ref ref migratedReferences - when (not . null $ dependencies) (throwE (Sync.Missing dependencies)) - newPatch <- - oldPatch - & patchSomeRefsH_ - %%~ ( \someRef -> do - hydratedRef <- (someRef & (traverse . traverse) %%~ hydrate) - hydratedRefId :: SomeReferenceId <- undefined hydratedRef - -- Hrmm, we can't really use Reference.Id here since it's not parameterized - -- over Hash. - x <- (remapRef hydratedRefId & (traverse . undefined) %%~ dehydrate) - undefined x - ) - <&> (patchSomeRefsO_ %~ remapObjectIds) - let (localPatchIds, localPatch) = S.LocalizeObject.localizePatch newPatch - newHash <- runDB conn (liftQ (Hashing.dbPatchHash newPatch)) + let newPatch = + oldPatchWithHashes & patchSomeRefsH_ . uRefIdAsRefId_ %~ remapRef + & patchSomeRefsO_ . uRefIdAsRefId_ %~ remapRef + + newPatchWithIds :: S.Patch <- + lift . lift $ do + (newPatch & S.patchH_ %%~ dehydrateHashesToHashId) + >>= (S.patchO_ %%~ dehydrateHashesToObjectId) + + let (localPatchIds, localPatch) = S.LocalizeObject.localizePatch newPatchWithIds + newHash <- runDB conn (liftQ (Hashing.dbPatchHash newPatchWithIds)) newObjectId <- runDB conn (Ops.saveDbPatch (PatchHash (Cv.hash1to2 newHash)) (S.Patch.Format.Full localPatchIds localPatch)) newHashId <- runDB conn (liftQ (Q.expectHashIdByHash (Cv.hash1to2 newHash))) field @"objLookup" %= Map.insert (unPatchObjectId oldObjectId) (unPatchObjectId newObjectId, newHashId, newHash) @@ -362,14 +358,17 @@ migrateWatch Codebase {..} watchKind oldWatchId = fmap (either id id) . runExcep lift . lift $ putWatch watchKindV1 newWatchId remappedTerm pure Sync.Done -uRefIdAsRefId_ :: Iso' (SomeReference (UReference.Id' U.Util.Hash)) SomeReferenceId +uHash_ :: Iso' U.Util.Hash Hash +uHash_ = iso Cv.hash2to1 Cv.hash1to2 + +uRefIdAsRefId_ :: Iso' (SomeReference (UReference.Id' Hash)) SomeReferenceId uRefIdAsRefId_ = mapping uRefAsRef_ -uRefAsRef_ :: Iso' (UReference.Id' U.Util.Hash) Reference.Id +uRefAsRef_ :: Iso' (UReference.Id' Hash) Reference.Id uRefAsRef_ = iso intoRef intoURef where - intoRef (UReference.Id hash pos) = Reference.Id (Cv.hash2to1 hash) pos - intoURef (Reference.Id hash pos) = UReference.Id (Cv.hash1to2 hash) pos + intoRef (UReference.Id hash pos) = Reference.Id hash pos + intoURef (Reference.Id hash pos) = UReference.Id hash pos -- Project an S.Referent'' into its SomeReferenceObjId's someReferent_ :: @@ -711,7 +710,7 @@ remapObjIdRefs objMapping refMapping someObjIdRef = newSomeObjId Nothing -> error $ "Expected object mapping for ID: " <> show oldObjId Just found -> found someRefId :: SomeReferenceId - someRefId = (someObjIdRef & someRef_ . UReference.idH .~ Cv.hash1to2 newHash) ^. uRefIdAsRefId_ + someRefId = (someObjIdRef & someRef_ . UReference.idH .~ newHash) ^. uRefIdAsRefId_ newRefId :: SomeReferenceId newRefId = case Map.lookup someRefId refMapping of Nothing -> error $ "Expected reference mapping for ID: " <> show someRefId From 7a990632c19ff9623470337472680e992534de0a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 5 Nov 2021 10:51:31 -0600 Subject: [PATCH 077/297] Fix compilation errors --- .../Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index d29f30c32e..731f6a02de 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -35,7 +35,8 @@ import U.Codebase.Sqlite.DbId CausalHashId (..), HashId, ObjectId, - PatchObjectId (..), TextId (TextId) + PatchObjectId (..), + TextId, ) import qualified U.Codebase.Sqlite.LocalizeObject as S.LocalizeObject import qualified U.Codebase.Sqlite.Operations as Ops @@ -289,8 +290,8 @@ migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ Cv.hash2to1 <$> Ops.loadHashByObjectId objId oldPatchWithHashes :: S.Patch' TextId Hash Hash <- - lift . lift $ do - (oldPatch & S.patchH_ %%~ hydrateHashes) + lift $ do + (oldPatch & S.patchH_ %%~ liftQ . hydrateHashes) >>= (S.patchO_ %%~ hydrateObjectIds) -- 2. Determine whether all things the patch refers to are built. @@ -304,6 +305,7 @@ migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ dehydrateHashesToHashId = undefined let dehydrateHashesToObjectId :: forall m. Q.DB m => Hash -> m ObjectId dehydrateHashesToObjectId = undefined + migratedReferences <- gets referenceMapping let remapRef :: SomeReferenceId -> SomeReferenceId remapRef ref = Map.findWithDefault ref ref migratedReferences @@ -313,7 +315,7 @@ migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ & patchSomeRefsO_ . uRefIdAsRefId_ %~ remapRef newPatchWithIds :: S.Patch <- - lift . lift $ do + lift $ do (newPatch & S.patchH_ %%~ dehydrateHashesToHashId) >>= (S.patchO_ %%~ dehydrateHashesToObjectId) From d354cf99f7f1298284c9a9ad05640ded7b80ba21 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Fri, 5 Nov 2021 13:23:33 -0400 Subject: [PATCH 078/297] begin implementing migrateSchema12 --- .../src/Unison/Codebase/SqliteCodebase.hs | 40 +++++++----- .../SqliteCodebase/MigrateSchema12.hs | 61 +++++++++++++------ 2 files changed, 70 insertions(+), 31 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 76ccd4668b..184b5858c5 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -86,6 +86,7 @@ import Unison.Codebase.ShortBranchHash (ShortBranchHash) import qualified Unison.Codebase.SqliteCodebase.Branch.Dependencies as BD import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv import qualified Unison.Codebase.SqliteCodebase.GitError as GitError +import Unison.Codebase.SqliteCodebase.MigrateSchema12 (migrateSchema12) import qualified Unison.Codebase.SqliteCodebase.SyncEphemeral as SyncEphemeral import Unison.Codebase.SyncMode (SyncMode) import qualified Unison.Codebase.Type as C @@ -271,7 +272,12 @@ shutdownConnection conn = do Monad.when debug $ traceM $ "shutdown connection " ++ show conn liftIO $ Sqlite.close (Connection.underlying conn) -sqliteCodebase :: (MonadIO m, MonadCatch m) => Codebase.DebugName -> CodebasePath -> m (Either SchemaVersion (m (), Codebase m Symbol Ann)) +sqliteCodebase :: + forall m. + (MonadIO m, MonadCatch m) => + Codebase.DebugName -> + CodebasePath -> + m (Either SchemaVersion (m (), Codebase m Symbol Ann)) sqliteCodebase debugName root = do Monad.when debug $ traceM $ "sqliteCodebase " ++ debugName ++ " " ++ root conn <- unsafeGetConnection debugName root @@ -279,6 +285,7 @@ sqliteCodebase debugName root = do typeOfTermCache <- Cache.semispaceCache 8192 declCache <- Cache.semispaceCache 1024 let + startCodebase :: m (m (), Codebase m Symbol Ann) startCodebase = do rootBranchCache <- newTVarIO Nothing -- The v1 codebase interface has operations to read and write individual definitions @@ -293,7 +300,7 @@ sqliteCodebase debugName root = do term2 <- Ops.loadTermByReference (C.Reference.Id h2 i) Cv.term2to1 h1 getDeclType term2 - getDeclType :: EDB m => C.Reference.Reference -> m CT.ConstructorType + getDeclType :: forall m. EDB m => C.Reference.Reference -> m CT.ConstructorType getDeclType = Cache.apply declTypeCache \case C.Reference.ReferenceBuiltin t -> let err = @@ -305,7 +312,7 @@ sqliteCodebase debugName root = do Map.lookup (Reference.Builtin t) Builtins.builtinConstructorType C.Reference.ReferenceDerived i -> getDeclTypeById i - getDeclTypeById :: EDB m => C.Reference.Id -> m CT.ConstructorType + getDeclTypeById :: forall m. EDB m => C.Reference.Id -> m CT.ConstructorType getDeclTypeById = fmap Cv.decltype2to1 . Ops.getDeclTypeById getTypeOfTermImpl :: MonadIO m => Reference.Id -> m (Maybe (Type Symbol Ann)) @@ -373,12 +380,12 @@ sqliteCodebase debugName root = do tryFlushTermBuffer h ) - putBuffer :: (MonadIO m, Show a) => TVar (Map Hash (BufferEntry a)) -> Hash -> BufferEntry a -> m () + putBuffer :: forall a m. (MonadIO m, Show a) => TVar (Map Hash (BufferEntry a)) -> Hash -> BufferEntry a -> m () putBuffer tv h e = do Monad.when debug $ traceM $ "putBuffer " ++ prettyBufferEntry h e atomically $ modifyTVar tv (Map.insert h e) - withBuffer :: (MonadIO m, Show a) => TVar (Map Hash (BufferEntry a)) -> Hash -> (BufferEntry a -> m b) -> m b + withBuffer :: forall a b m. (MonadIO m, Show a) => TVar (Map Hash (BufferEntry a)) -> Hash -> (BufferEntry a -> m b) -> m b withBuffer tv h f = do Monad.when debug $ readTVarIO tv >>= \tv -> traceM $ "tv = " ++ show tv Map.lookup h <$> readTVarIO tv >>= \case @@ -389,17 +396,18 @@ sqliteCodebase debugName root = do Monad.when debug $ traceM $ "SqliteCodebase.with(new)Buffer " ++ show h f (BufferEntry Nothing Map.empty Set.empty Set.empty) - removeBuffer :: (MonadIO m, Show a) => TVar (Map Hash (BufferEntry a)) -> Hash -> m () + removeBuffer :: forall a m. (MonadIO m, Show a) => TVar (Map Hash (BufferEntry a)) -> Hash -> m () removeBuffer _tv h | debug && trace ("removeBuffer " ++ show h) False = undefined removeBuffer tv h = do Monad.when debug $ readTVarIO tv >>= \tv -> traceM $ "before delete: " ++ show tv atomically $ modifyTVar tv (Map.delete h) Monad.when debug $ readTVarIO tv >>= \tv -> traceM $ "after delete: " ++ show tv - addBufferDependent :: (MonadIO m, Show a) => Hash -> TVar (Map Hash (BufferEntry a)) -> Hash -> m () + addBufferDependent :: forall a m. (MonadIO m, Show a) => Hash -> TVar (Map Hash (BufferEntry a)) -> Hash -> m () addBufferDependent dependent tv dependency = withBuffer tv dependency \be -> do putBuffer tv dependency be {beWaitingDependents = Set.insert dependent $ beWaitingDependents be} tryFlushBuffer :: + forall a m. (EDB m, Show a) => TVar (Map Hash (BufferEntry a)) -> (H2.Hash -> [a] -> m ()) -> @@ -435,7 +443,7 @@ sqliteCodebase debugName root = do -- it's never even been added, so there's nothing to do. pure () - addTermComponentTypeIndex :: EDB m => ObjectId -> [Type Symbol Ann] -> m () + addTermComponentTypeIndex :: forall m. EDB m => ObjectId -> [Type Symbol Ann] -> m () addTermComponentTypeIndex oId types = for_ (types `zip` [0..]) \(tp, i) -> do let self = C.Referent.RefId (C.Reference.Id oId i) typeForIndexing = Hashing.typeToReference tp @@ -443,7 +451,7 @@ sqliteCodebase debugName root = do Ops.addTypeToIndexForTerm self (Cv.reference1to2 typeForIndexing) Ops.addTypeMentionsToIndexForTerm self (Set.map Cv.reference1to2 typeMentionsForIndexing) - addDeclComponentTypeIndex :: EDB m => ObjectId -> [[Type Symbol Ann]] -> m () + addDeclComponentTypeIndex :: forall m. EDB m => ObjectId -> [[Type Symbol Ann]] -> m () addDeclComponentTypeIndex oId ctorss = for_ (ctorss `zip` [0..]) \(ctors, i) -> for_ (ctors `zip` [0..]) \(tp, j) -> do @@ -453,7 +461,7 @@ sqliteCodebase debugName root = do Ops.addTypeToIndexForTerm self (Cv.reference1to2 typeForIndexing) Ops.addTypeMentionsToIndexForTerm self (Set.map Cv.reference1to2 typeMentionsForIndexing) - tryFlushTermBuffer :: EDB m => Hash -> m () + tryFlushTermBuffer :: forall m. EDB m => Hash -> m () tryFlushTermBuffer h | debug && trace ("tryFlushTermBuffer " ++ show h) False = undefined tryFlushTermBuffer h = tryFlushBuffer @@ -466,7 +474,7 @@ sqliteCodebase debugName root = do tryFlushTermBuffer h - tryFlushDeclBuffer :: EDB m => Hash -> m () + tryFlushDeclBuffer :: forall m. EDB m => Hash -> m () tryFlushDeclBuffer h | debug && trace ("tryFlushDeclBuffer " ++ show h) False = undefined tryFlushDeclBuffer h = tryFlushBuffer @@ -776,7 +784,7 @@ sqliteCodebase debugName root = do printBuffer "Decls:" decls printBuffer "Terms:" terms - pure . Right $ + pure $ ( finalizer, let code = Codebase1.Codebase @@ -823,8 +831,12 @@ sqliteCodebase debugName root = do in code ) runReaderT Q.schemaVersion conn >>= \case - SchemaVersion 2 -> startCodebase - SchemaVersion 1 -> undefined -- migrate12 conn >> startCodebase + SchemaVersion 2 -> Right <$> startCodebase + SchemaVersion 1 -> do + (cleanup, codebase) <- startCodebase + migrateSchema12 conn codebase + -- it's ok to pass codebase along; whatever it cached during the migration won't break anything + pure (Right (cleanup, codebase)) v -> shutdownConnection conn $> Left v -- well one or the other. :zany_face: the thinking being that they wouldn't hash-collide diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 731f6a02de..7ace2d56d7 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -5,7 +5,10 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} -module Unison.Codebase.SqliteCodebase.MigrateSchema12 where +module Unison.Codebase.SqliteCodebase.MigrateSchema12 + ( migrateSchema12, + ) +where import Control.Lens import Control.Monad.Except (ExceptT, runExceptT) @@ -50,6 +53,7 @@ import qualified U.Codebase.Sync as Sync import U.Codebase.WatchKind (WatchKind) import qualified U.Codebase.WatchKind as WK import qualified U.Util.Hash as U.Util +import U.Util.Monoid (foldMapM) import qualified U.Util.Set as Set import qualified Unison.ABT as ABT import Unison.Codebase (Codebase (Codebase)) @@ -86,6 +90,29 @@ import Unison.Var (Var) -- * delete old objects -- * refer to github megaticket https://github.com/unisonweb/unison/issues/2471 +migrateSchema12 :: forall a m v. (MonadIO m, Var v) => Connection -> Codebase m v a -> m () +migrateSchema12 conn codebase = do + rootCausalHashId <- runDB conn (liftQ Q.loadNamespaceRoot) + watches <- + foldMapM + (\watchKind -> map (W watchKind) <$> Codebase.watches codebase (Cv.watchKind2to1 watchKind)) + [WK.RegularWatch, WK.TestWatch] + (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) + `runReaderT` Env {db = conn, codebase} + `evalStateT` MigrationState Map.empty Map.empty Map.empty + where + progress :: Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity + progress = + let need :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () + need = undefined + done :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () + done = undefined + error :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () + error = undefined + allDone :: ReaderT (Env m v a) (StateT MigrationState m) () + allDone = undefined + in Sync.Progress {need, done, error, allDone} + type TypeIdentifier = (ObjectId, Pos) type Old a = a @@ -338,7 +365,7 @@ migrateWatch :: WatchKind -> Reference.Id -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateWatch Codebase {..} watchKind oldWatchId = fmap (either id id) . runExceptT $ do +migrateWatch Codebase {getWatch, putWatch} watchKind oldWatchId = fmap (either id id) . runExceptT $ do let watchKindV1 = Cv.watchKind2to1 watchKind watchResultTerm <- (lift . lift) (getWatch watchKindV1 oldWatchId) >>= \case @@ -753,21 +780,21 @@ someReferenceIdToEntity = \case -- Constructors are migrated by their decl component. (ConstructorReference ref _conId) -> DeclComponent (Reference.idToHash ref) --- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure -migrateSchema12 :: Applicative m => Connection -> m Bool -migrateSchema12 _db = do - -- todo: drop and recreate corrected type/mentions index schema - -- do we want to garbage collect at this time? ✅ - -- or just convert everything without going in dependency order? ✅ - error "todo: go through " - -- todo: double-hash all the types and produce an constructor mapping - -- object ids will stay the same - -- todo: rehash all the terms using the new constructor mapping - -- and adding the type to the term - -- do we want to diff namespaces at this time? ❌ - -- do we want to look at supporting multiple simultaneous representations of objects at this time? - pure "todo: migrate12" - pure True +-- -- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure +-- migrateSchema12 :: Applicative m => Connection -> m Bool +-- migrateSchema12 _db = do +-- -- todo: drop and recreate corrected type/mentions index schema +-- -- do we want to garbage collect at this time? ✅ +-- -- or just convert everything without going in dependency order? ✅ +-- error "todo: go through " +-- -- todo: double-hash all the types and produce an constructor mapping +-- -- object ids will stay the same +-- -- todo: rehash all the terms using the new constructor mapping +-- -- and adding the type to the term +-- -- do we want to diff namespaces at this time? ❌ +-- -- do we want to look at supporting multiple simultaneous representations of objects at this time? +-- pure "todo: migrate12" +-- pure True foldSetter :: LensLike (Writer [a]) s t a a -> s -> [a] foldSetter t s = execWriter (s & t %%~ \a -> tell [a] *> pure a) From 6ac566f625701466cfe50ff495b69ba32197ecdf Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 5 Nov 2021 12:29:52 -0600 Subject: [PATCH 079/297] Fill in more missing bits of migratePatch --- .../Codebase/SqliteCodebase/MigrateSchema12.hs | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 731f6a02de..6ee9f5a643 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -301,10 +301,11 @@ migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ <> oldPatchWithHashes ^.. patchSomeRefsO_ . uRefIdAsRefId_ . to someReferenceIdToEntity when (not . null $ dependencies) (throwE (Sync.Missing dependencies)) - let dehydrateHashesToHashId :: forall m. Q.DB m => Hash -> m HashId - dehydrateHashesToHashId = undefined - let dehydrateHashesToObjectId :: forall m. Q.DB m => Hash -> m ObjectId - dehydrateHashesToObjectId = undefined + let hashToHashId :: forall m. Q.EDB m => Hash -> m HashId + hashToHashId h = + fromMaybe (error $ "expected hashId for hash: " <> show h) <$> (Q.loadHashIdByHash (Cv.hash1to2 h)) + let hashToObjectId :: forall m. Q.EDB m => Hash -> m ObjectId + hashToObjectId = hashToHashId >=> Q.expectObjectIdForPrimaryHashId migratedReferences <- gets referenceMapping let remapRef :: SomeReferenceId -> SomeReferenceId @@ -315,9 +316,9 @@ migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ & patchSomeRefsO_ . uRefIdAsRefId_ %~ remapRef newPatchWithIds :: S.Patch <- - lift $ do - (newPatch & S.patchH_ %%~ dehydrateHashesToHashId) - >>= (S.patchO_ %%~ dehydrateHashesToObjectId) + lift . liftQ $ do + (newPatch & S.patchH_ %%~ hashToHashId) + >>= (S.patchO_ %%~ hashToObjectId) let (localPatchIds, localPatch) = S.LocalizeObject.localizePatch newPatchWithIds newHash <- runDB conn (liftQ (Hashing.dbPatchHash newPatchWithIds)) @@ -518,7 +519,7 @@ migrateDeclComponent :: migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do declComponent :: [DD.Decl v a] <- (lift . lift $ getDeclComponent hash) >>= \case - Nothing -> error undefined -- not non-fatal! + Nothing -> error $ "Expected decl component for hash:" <> show hash Just dc -> pure dc let componentIDMap :: Map (Old Reference.Id) (DD.Decl v a) From 4114ae5bf4fa93ad3048027e9baeb71f686c560e Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 5 Nov 2021 12:54:46 -0600 Subject: [PATCH 080/297] Implement branchHashes_ --- .../U/Codebase/Sqlite/Branch/Full.hs | 13 ++++++++----- codebase2/util/src/U/Util/Map.hs | 11 ++++++++--- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 37f9898c6b..30769e8ff2 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -7,8 +7,7 @@ module U.Codebase.Sqlite.Branch.Full where -import Control.Lens (Traversal, Traversal') -import Data.Bifunctor (Bifunctor (bimap)) +import Control.Lens import qualified Data.Set as Set import U.Codebase.Reference (Reference') import U.Codebase.Referent (Referent') @@ -16,6 +15,7 @@ import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObje import U.Codebase.Sqlite.LocalIds (LocalBranchChildId, LocalDefnId, LocalPatchObjectId, LocalTextId) import qualified U.Util.Map as Map import Unison.Prelude +import qualified U.Codebase.Reference as Reference -- | -- @ @@ -49,8 +49,11 @@ data Branch' t h p c = Branch } deriving (Show, Generic) -branchHashes_ :: Traversal (Branch' t h p c) (Branch' t h' p c) h h' -branchHashes_ _f _ = undefined +branchHashes_ :: (Ord h', Ord t, Ord h) => Traversal (Branch' t h p c) (Branch' t h' p c) h h' +branchHashes_ f Branch{..}= do + newTerms <- for terms (Map.bitraversed both metadataSetFormatReferences_ . Reference.h_ %%~ f) + newTypes <- for types (Map.bitraversed id metadataSetFormatReferences_ . Reference.h_ %%~ f) + pure Branch{terms=newTerms, types=newTypes, patches, children} patches_ :: Traversal (Branch' t h p c) (Branch' t h p' c) p p' patches_ f Branch {..} = (\newPatches -> Branch terms types newPatches children) <$> traverse f patches @@ -71,7 +74,7 @@ data MetadataSetFormat' t h = Inline (Set (Reference' t h)) metadataSetFormatReferences_ :: (Ord t, Ord h) => - Traversal' (MetadataSetFormat' t h) (Reference' t h) + Traversal (MetadataSetFormat' t h) (MetadataSetFormat' t h') (Reference' t h) (Reference' t h') metadataSetFormatReferences_ f (Inline refs) = fmap (Inline . Set.fromList) . traverse f . Set.toList $ refs diff --git a/codebase2/util/src/U/Util/Map.hs b/codebase2/util/src/U/Util/Map.hs index e1ea68176d..1deee9396d 100644 --- a/codebase2/util/src/U/Util/Map.hs +++ b/codebase2/util/src/U/Util/Map.hs @@ -1,6 +1,8 @@ +{-# LANGUAGE RankNTypes #-} module U.Util.Map ( bimap, bitraverse, + bitraversed, swap, valuesVector, traverseKeys, @@ -8,7 +10,7 @@ module U.Util.Map ) where -import Control.Lens (traversed, (%%~), (&), _1) +import Control.Lens (traversed, (%%~), (&), _1, Traversal) import qualified Data.Bifunctor as B import qualified Data.Bitraversable as B import Data.Map (Map) @@ -22,6 +24,10 @@ bimap fa fb = Map.fromList . map (B.bimap fa fb) . Map.toList bitraverse :: (Applicative f, Ord a') => (a -> f a') -> (b -> f b') -> Map a b -> f (Map a' b') bitraverse fa fb = fmap Map.fromList . traverse (B.bitraverse fa fb) . Map.toList +bitraversed :: (Ord a', Ord k') => Traversal k k' a a' -> Traversal v v' a a' -> Traversal (Map k v) (Map k' v') a a' +bitraversed keyT valT f m = + bitraverse (keyT f) (valT f) m + -- | 'swap' throws away data if the input contains duplicate values swap :: Ord b => Map a b -> Map b a swap = @@ -32,8 +38,7 @@ valuesVector = Vector.fromList . Map.elems traverseKeys :: (Applicative f, Ord k') => (k -> f k') -> Map k v -> f (Map k' v) -traverseKeys f m = - Map.fromList <$> (Map.toList m & traversed . _1 %%~ f) +traverseKeys f = bitraverse f pure traverseKeysWith :: (Applicative f, Ord k') => (v -> v -> v) -> (k -> f k') -> Map k v -> f (Map k' v) traverseKeysWith combine f m = From 7cb1ab306e8910e25e74f290de1daa5be7d31229 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 5 Nov 2021 12:59:22 -0600 Subject: [PATCH 081/297] Reformat --- .../codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index 30769e8ff2..5c71630c60 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -10,12 +10,13 @@ module U.Codebase.Sqlite.Branch.Full where import Control.Lens import qualified Data.Set as Set import U.Codebase.Reference (Reference') +import qualified U.Codebase.Reference as Reference import U.Codebase.Referent (Referent') import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, ObjectId, PatchObjectId, TextId) import U.Codebase.Sqlite.LocalIds (LocalBranchChildId, LocalDefnId, LocalPatchObjectId, LocalTextId) import qualified U.Util.Map as Map +import qualified U.Util.Set as Set import Unison.Prelude -import qualified U.Codebase.Reference as Reference -- | -- @ @@ -50,10 +51,10 @@ data Branch' t h p c = Branch deriving (Show, Generic) branchHashes_ :: (Ord h', Ord t, Ord h) => Traversal (Branch' t h p c) (Branch' t h' p c) h h' -branchHashes_ f Branch{..}= do +branchHashes_ f Branch {..} = do newTerms <- for terms (Map.bitraversed both metadataSetFormatReferences_ . Reference.h_ %%~ f) newTypes <- for types (Map.bitraversed id metadataSetFormatReferences_ . Reference.h_ %%~ f) - pure Branch{terms=newTerms, types=newTypes, patches, children} + pure Branch {terms = newTerms, types = newTypes, patches, children} patches_ :: Traversal (Branch' t h p c) (Branch' t h p' c) p p' patches_ f Branch {..} = (\newPatches -> Branch terms types newPatches children) <$> traverse f patches @@ -73,10 +74,9 @@ data MetadataSetFormat' t h = Inline (Set (Reference' t h)) deriving (Show) metadataSetFormatReferences_ :: - (Ord t, Ord h) => + (Ord t, Ord h, Ord h') => Traversal (MetadataSetFormat' t h) (MetadataSetFormat' t h') (Reference' t h) (Reference' t h') -metadataSetFormatReferences_ f (Inline refs) = - fmap (Inline . Set.fromList) . traverse f . Set.toList $ refs +metadataSetFormatReferences_ f (Inline refs) = Inline <$> Set.traverse f refs quadmap :: forall t h p c t' h' p' c'. (Ord t', Ord h') => (t -> t') -> (h -> h') -> (p -> p') -> (c -> c') -> Branch' t h p c -> Branch' t' h' p' c' quadmap ft fh fp fc (Branch terms types patches children) = From 5990ab240c95eea0cbbbdd326aadadb18d3a84bc Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 5 Nov 2021 13:29:07 -0600 Subject: [PATCH 082/297] Remove unsafeInsidePrism --- .../SqliteCodebase/MigrateSchema12.hs | 82 +++++++++---------- 1 file changed, 40 insertions(+), 42 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 1fdd84cd3c..cac12ceb35 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -52,7 +52,6 @@ import U.Codebase.Sync (Sync (Sync)) import qualified U.Codebase.Sync as Sync import U.Codebase.WatchKind (WatchKind) import qualified U.Codebase.WatchKind as WK -import qualified U.Util.Hash as U.Util import U.Util.Monoid (foldMapM) import qualified U.Util.Set as Set import qualified Unison.ABT as ABT @@ -69,7 +68,6 @@ import qualified Unison.Hashing.V2.Convert as Convert import Unison.Pattern (Pattern) import qualified Unison.Pattern as Pattern import Unison.Prelude -import Unison.Reference (Pos) import qualified Unison.Reference as Reference import qualified Unison.Referent as Referent import qualified Unison.Referent' as Referent' @@ -113,16 +111,12 @@ migrateSchema12 conn codebase = do allDone = undefined in Sync.Progress {need, done, error, allDone} -type TypeIdentifier = (ObjectId, Pos) - type Old a = a type New a = a type ConstructorName v = v -type ComponentName v = v - type DeclName v = v data MigrationState = MigrationState @@ -388,9 +382,6 @@ migrateWatch Codebase {getWatch, putWatch} watchKind oldWatchId = fmap (either i lift . lift $ putWatch watchKindV1 newWatchId remappedTerm pure Sync.Done -uHash_ :: Iso' U.Util.Hash Hash -uHash_ = iso Cv.hash2to1 Cv.hash1to2 - uRefIdAsRefId_ :: Iso' (SomeReference (UReference.Id' Hash)) SomeReferenceId uRefIdAsRefId_ = mapping uRefAsRef_ @@ -403,13 +394,13 @@ uRefAsRef_ = iso intoRef intoURef -- Project an S.Referent'' into its SomeReferenceObjId's someReferent_ :: forall t h. - (forall ref. Prism' (SomeReference ref) ref) -> + (forall ref. Traversal' ref (SomeReference ref)) -> Traversal' (S.Branch.Full.Referent'' t h) (SomeReference (UReference.Id' h)) -someReferent_ typeOrTermPrism = - (UReferent._Ref . someReference_ typeOrTermPrism) +someReferent_ typeOrTermTraversal_ = + (UReferent._Ref . someReference_ typeOrTermTraversal_) `failing` ( UReferent._Con . asPair_ -- Need to unpack the embedded reference AND remap between mismatched Constructor ID types. - . unsafeInsidePrism _ConstructorReference + . asConstructorReference_ ) where asPair_ f (UReference.ReferenceDerived id', conId) = @@ -418,27 +409,27 @@ someReferent_ typeOrTermPrism = asPair_ _ (UReference.ReferenceBuiltin x, conId) = pure (UReference.ReferenceBuiltin x, conId) someReference_ :: - (forall ref. Prism' (SomeReference ref) ref) -> + (forall ref. Traversal' ref (SomeReference ref)) -> Traversal' (UReference.Reference' t h) (SomeReference (UReference.Id' h)) -someReference_ typeOrTermPrism = UReference._ReferenceDerived . unsafeInsidePrism typeOrTermPrism +someReference_ typeOrTermTraversal_ = UReference._ReferenceDerived . typeOrTermTraversal_ someMetadataSetFormat_ :: (Ord t, Ord h) => - (forall ref. Prism' (SomeReference ref) ref) -> + (forall ref. Traversal' ref (SomeReference ref)) -> Traversal' (S.Branch.Full.MetadataSetFormat' t h) (SomeReference (UReference.Id' h)) -someMetadataSetFormat_ typeOrTermPrism = - S.Branch.Full.metadataSetFormatReferences_ . someReference_ typeOrTermPrism +someMetadataSetFormat_ typeOrTermTraversal_ = + S.Branch.Full.metadataSetFormatReferences_ . someReference_ typeOrTermTraversal_ someReferentMetadata_ :: (Ord k, Ord t, Ord h) => - (forall ref. Prism' (SomeReference ref) ref) -> + (forall ref. Traversal' ref (SomeReference ref)) -> Traversal' k (SomeReference (UReference.Id' h)) -> Traversal' (Map k (S.Branch.Full.MetadataSetFormat' t h)) (SomeReference (UReference.Id' h)) -someReferentMetadata_ typeOrTermPrism keyTraversal f m = +someReferentMetadata_ typeOrTermTraversal_ keyTraversal f m = Map.toList m - & traversed . beside keyTraversal (someMetadataSetFormat_ typeOrTermPrism) %%~ f + & traversed . beside keyTraversal (someMetadataSetFormat_ typeOrTermTraversal_) %%~ f <&> Map.fromList branchSomeRefs_ :: (Ord t, Ord h) => Traversal' (S.Branch' t h p c) (SomeReference (UReference.Id' h)) @@ -450,7 +441,7 @@ branchSomeRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do patchSomeRefsH_ :: (Ord t, Ord h) => Traversal (S.Patch' t h o) (S.Patch' t h o) (SomeReference (UReference.Id' h)) (SomeReference (UReference.Id' h)) patchSomeRefsH_ f S.Patch {termEdits, typeEdits} = do - newTermEdits <- Map.fromList <$> (Map.toList termEdits & traversed . _1 . (someReferent_ _TermReference) %%~ f) + newTermEdits <- Map.fromList <$> (Map.toList termEdits & traversed . _1 . (someReferent_ asTermReference_) %%~ f) newTypeEdits <- Map.fromList <$> (Map.toList typeEdits & traversed . _1 . (someReference_ undefined) %%~ f) pure S.Patch {termEdits = newTermEdits, typeEdits = newTypeEdits} @@ -462,7 +453,7 @@ patchSomeRefsO_ f S.Patch {termEdits, typeEdits} = do termEditRefs_ :: Traversal' (TermEdit.TermEdit' t h) (SomeReference (UReference.Id' h)) termEditRefs_ f (TermEdit.Replace ref typing) = - TermEdit.Replace <$> (ref & someReferent_ _TermReference %%~ f) <*> pure typing + TermEdit.Replace <$> (ref & someReferent_ asTermReference_ %%~ f) <*> pure typing termEditRefs_ _f (TermEdit.Deprecate) = pure TermEdit.Deprecate typeEditRefs_ :: Traversal' (TypeEdit.TypeEdit' t h) (SomeReference (UReference.Id' h)) @@ -632,12 +623,7 @@ typeReferences_ = . ABT.baseFunctor_ -- Focus Type.F . Type._Ref -- Only the Ref constructor has references . Reference._DerivedId - . unsafeInsidePrism _TypeReference - --- | This is only lawful so long as your changes to 's' won't cause the prism to fail to match. -unsafeInsidePrism :: Prism' s a -> Lens' a s -unsafeInsidePrism p f a = do - fromMaybe a . preview p <$> f (review p a) + . asTypeReference_ termReferences_ :: (Monad m, Ord v) => LensLike' m (Term.Term v a) SomeReferenceId termReferences_ = @@ -647,17 +633,17 @@ termReferences_ = termFReferences_ :: (Ord tv, Monad m) => LensLike' m (Term.F tv ta pa a) SomeReferenceId termFReferences_ f t = - (t & Term._Ref . Reference._DerivedId . unsafeInsidePrism _TermReference %%~ f) + (t & Term._Ref . Reference._DerivedId . asTermReference_ %%~ f) >>= Term._Constructor . someRefCon_ %%~ f >>= Term._Request . someRefCon_ %%~ f >>= Term._Ann . _2 . typeReferences_ %%~ f >>= Term._Match . _2 . traversed . Term.matchPattern_ . patternReferences_ %%~ f - >>= Term._TermLink . referentAsSomeTerm_ %%~ f - >>= Term._TypeLink . Reference._DerivedId . unsafeInsidePrism _TypeReference %%~ f + >>= Term._TermLink . referentAsSomeTermReference_ %%~ f + >>= Term._TypeLink . Reference._DerivedId . asTypeReference_ %%~ f -- | Build a SomeConstructorReference someRefCon_ :: Traversal' (Reference.Reference, ConstructorId) SomeReferenceId -someRefCon_ = refConPair_ . unsafeInsidePrism _ConstructorReference +someRefCon_ = refConPair_ . asConstructorReference_ where refConPair_ :: Traversal' (Reference.Reference, ConstructorId) (Reference.Id, ConstructorId) refConPair_ f s = @@ -692,13 +678,13 @@ patternReferences_ f = \case Pattern.SequenceOp loc pat seqOp pat2 -> do Pattern.SequenceOp loc <$> patternReferences_ f pat <*> pure seqOp <*> patternReferences_ f pat2 -referentAsSomeTerm_ :: Traversal' Referent.Referent SomeReferenceId -referentAsSomeTerm_ f = \case +referentAsSomeTermReference_ :: Traversal' Referent.Referent SomeReferenceId +referentAsSomeTermReference_ f = \case (Referent'.Ref' (Reference.DerivedId refId)) -> do - newRefId <- refId & unsafeInsidePrism _TermReference %%~ f + newRefId <- refId & asTermReference_ %%~ f pure (Referent'.Ref' (Reference.DerivedId newRefId)) (Referent'.Con' (Reference.DerivedId refId) conId conType) -> - ((refId, conId) & unsafeInsidePrism _ConstructorReference %%~ f) + ((refId, conId) & asConstructorReference_ %%~ f) <&> (\(newRefId, newConId) -> (Referent'.Con' (Reference.DerivedId newRefId) newConId conType)) r -> pure r @@ -768,11 +754,23 @@ someRef_ = lens getter setter _TermReference :: Prism' (SomeReference ref) ref _TermReference = _Ctor @"TermReference" -_TypeReference :: Prism' (SomeReference ref) ref -_TypeReference = _Ctor @"TypeReference" - -_ConstructorReference :: Prism' (SomeReference ref) (ref, ConstructorId) -_ConstructorReference = _Ctor @"ConstructorReference" +-- | This is only safe as long as you don't change the constructor of your SomeReference +asTermReference_ :: Traversal' ref (SomeReference ref) +asTermReference_ f ref = f (TermReference ref) <&> \case + TermReference ref' -> ref' + _ -> error "asTermReference_: SomeReferenceId constructor was changed." + +-- | This is only safe as long as you don't change the constructor of your SomeReference +asTypeReference_ :: Traversal' ref (SomeReference ref) +asTypeReference_ f ref = f (TypeReference ref) <&> \case + TypeReference ref' -> ref' + _ -> error "asTypeReference_: SomeReferenceId constructor was changed." + +-- | This is only safe as long as you don't change the constructor of your SomeReference +asConstructorReference_ :: Traversal' (ref, ConstructorId) (SomeReference ref) +asConstructorReference_ f (ref, cId) = f (ConstructorReference ref cId) <&> \case + (ConstructorReference ref' cId) -> (ref', cId) + _ -> error "asConstructorReference_: SomeReferenceId constructor was changed." someReferenceIdToEntity :: SomeReferenceId -> Entity someReferenceIdToEntity = \case From e1bd091b77a2c01dc9662c6d69c07b036dc6ef8a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 5 Nov 2021 13:30:30 -0600 Subject: [PATCH 083/297] Reformat --- .../SqliteCodebase/MigrateSchema12.hs | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index cac12ceb35..11dea10c13 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -77,9 +77,6 @@ import qualified Unison.Type as Type import Unison.Var (Var) -- todo: --- * migrateBranch --- * migratePatch - -- * write a harness to call & seed algorithm, then do cleanup -- * may involve writing a `Progress` -- * raw DB things: @@ -756,21 +753,24 @@ _TermReference = _Ctor @"TermReference" -- | This is only safe as long as you don't change the constructor of your SomeReference asTermReference_ :: Traversal' ref (SomeReference ref) -asTermReference_ f ref = f (TermReference ref) <&> \case - TermReference ref' -> ref' - _ -> error "asTermReference_: SomeReferenceId constructor was changed." +asTermReference_ f ref = + f (TermReference ref) <&> \case + TermReference ref' -> ref' + _ -> error "asTermReference_: SomeReferenceId constructor was changed." -- | This is only safe as long as you don't change the constructor of your SomeReference asTypeReference_ :: Traversal' ref (SomeReference ref) -asTypeReference_ f ref = f (TypeReference ref) <&> \case - TypeReference ref' -> ref' - _ -> error "asTypeReference_: SomeReferenceId constructor was changed." +asTypeReference_ f ref = + f (TypeReference ref) <&> \case + TypeReference ref' -> ref' + _ -> error "asTypeReference_: SomeReferenceId constructor was changed." -- | This is only safe as long as you don't change the constructor of your SomeReference asConstructorReference_ :: Traversal' (ref, ConstructorId) (SomeReference ref) -asConstructorReference_ f (ref, cId) = f (ConstructorReference ref cId) <&> \case - (ConstructorReference ref' cId) -> (ref', cId) - _ -> error "asConstructorReference_: SomeReferenceId constructor was changed." +asConstructorReference_ f (ref, cId) = + f (ConstructorReference ref cId) <&> \case + (ConstructorReference ref' cId) -> (ref', cId) + _ -> error "asConstructorReference_: SomeReferenceId constructor was changed." someReferenceIdToEntity :: SomeReferenceId -> Entity someReferenceIdToEntity = \case From 8d47300f0e259f71944f0878197257f5e9ed4b00 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 8 Nov 2021 14:34:54 -0500 Subject: [PATCH 084/297] eliminate double 'runDB conn' things --- .../SqliteCodebase/MigrateSchema12.hs | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 11dea10c13..96f48a8e7a 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -168,11 +168,11 @@ liftQ :: Monad m => ReaderT Connection (ExceptT Q.Integrity m) a -> ReaderT Conn liftQ = mapReaderT lift migrateCausal :: MonadIO m => Connection -> CausalHashId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExceptT $ do - oldBranchHashId <- lift . liftQ $ Q.loadCausalValueHashId oldCausalHashId - oldCausalParentHashIds <- lift . liftQ $ Q.loadCausalParents oldCausalHashId +migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do + oldBranchHashId <- runDB conn . liftQ $ Q.loadCausalValueHashId oldCausalHashId + oldCausalParentHashIds <- runDB conn . liftQ $ Q.loadCausalParents oldCausalHashId - branchObjId <- lift . liftQ $ Q.expectObjectIdForAnyHashId (unBranchHashId oldBranchHashId) + branchObjId <- runDB conn . liftQ $ Q.expectObjectIdForAnyHashId (unBranchHashId oldBranchHashId) migratedObjIds <- gets objLookup -- If the branch for this causal hasn't been migrated, migrate it first. let unmigratedBranch = @@ -206,24 +206,25 @@ migrateCausal conn oldCausalHashId = runDB conn . fmap (either id id) . runExcep parents = Set.mapMonotonic Cv.hash2to1 newParentHashes } ) - newCausalHashId <- Q.saveCausalHash newCausalHash + newCausalHashId <- runDB conn (Q.saveCausalHash newCausalHash) let newCausal = DbCausal { selfHash = newCausalHashId, valueHash = BranchHashId $ view _2 (migratedObjIds Map.! branchObjId), parents = newParentHashIds } - Q.saveCausal (SC.selfHash newCausal) (SC.valueHash newCausal) - Q.saveCausalParents (SC.selfHash newCausal) (Set.toList $ SC.parents newCausal) + runDB conn do + Q.saveCausal (SC.selfHash newCausal) (SC.valueHash newCausal) + Q.saveCausalParents (SC.selfHash newCausal) (Set.toList $ SC.parents newCausal) field @"causalMapping" %= Map.insert oldCausalHashId (newCausalHash, newCausalHashId) pure Sync.Done migrateBranch :: MonadIO m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateBranch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ do +migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do oldBranch <- runDB conn (Ops.loadDbBranchByObjectId (BranchObjectId oldObjectId)) - oldBranchWithHashes <- traverseOf S.branchHashes_ (lift . fmap Cv.hash2to1 . Ops.loadHashByObjectId) oldBranch + oldBranchWithHashes <- runDB conn (traverseOf S.branchHashes_ (fmap Cv.hash2to1 . Ops.loadHashByObjectId) oldBranch) migratedRefs <- gets referenceMapping migratedObjects <- gets objLookup migratedCausals <- gets causalMapping @@ -298,7 +299,7 @@ migratePatch :: Connection -> Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) -migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ do +migratePatch conn oldObjectId = fmap (either id id) . runExceptT $ do oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) let hydrateHashes :: forall m. Q.EDB m => HashId -> m Hash hydrateHashes hashId = do @@ -308,7 +309,7 @@ migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ Cv.hash2to1 <$> Ops.loadHashByObjectId objId oldPatchWithHashes :: S.Patch' TextId Hash Hash <- - lift $ do + runDB conn do (oldPatch & S.patchH_ %%~ liftQ . hydrateHashes) >>= (S.patchO_ %%~ hydrateObjectIds) @@ -334,7 +335,7 @@ migratePatch conn oldObjectId = runDB conn . fmap (either id id) . runExceptT $ & patchSomeRefsO_ . uRefIdAsRefId_ %~ remapRef newPatchWithIds :: S.Patch <- - lift . liftQ $ do + runDB conn . liftQ $ do (newPatch & S.patchH_ %%~ hashToHashId) >>= (S.patchO_ %%~ hashToObjectId) From d5fcd214ebb3e0ba44dab2c2b5e8e397b3b73602 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 8 Nov 2021 13:20:24 -0600 Subject: [PATCH 085/297] Fill in term/type someReference traversals instead of undefined --- .../SqliteCodebase/MigrateSchema12.hs | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 96f48a8e7a..0f81abe2b3 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -331,7 +331,8 @@ migratePatch conn oldObjectId = fmap (either id id) . runExceptT $ do remapRef ref = Map.findWithDefault ref ref migratedReferences let newPatch = - oldPatchWithHashes & patchSomeRefsH_ . uRefIdAsRefId_ %~ remapRef + oldPatchWithHashes + & patchSomeRefsH_ . uRefIdAsRefId_ %~ remapRef & patchSomeRefsO_ . uRefIdAsRefId_ %~ remapRef newPatchWithIds :: S.Patch <- @@ -418,29 +419,27 @@ someMetadataSetFormat_ :: someMetadataSetFormat_ typeOrTermTraversal_ = S.Branch.Full.metadataSetFormatReferences_ . someReference_ typeOrTermTraversal_ -someReferentMetadata_ :: +someReferenceMetadata_ :: (Ord k, Ord t, Ord h) => - (forall ref. Traversal' ref (SomeReference ref)) -> Traversal' k (SomeReference (UReference.Id' h)) -> Traversal' (Map k (S.Branch.Full.MetadataSetFormat' t h)) (SomeReference (UReference.Id' h)) -someReferentMetadata_ typeOrTermTraversal_ keyTraversal f m = +someReferenceMetadata_ keyTraversal_ f m = Map.toList m - & traversed . beside keyTraversal (someMetadataSetFormat_ typeOrTermTraversal_) %%~ f + & traversed . beside keyTraversal_ (someMetadataSetFormat_ asTermReference_) %%~ f <&> Map.fromList branchSomeRefs_ :: (Ord t, Ord h) => Traversal' (S.Branch' t h p c) (SomeReference (UReference.Id' h)) branchSomeRefs_ f S.Branch.Full.Branch {children, patches, terms, types} = do - -- Chris: Chat with Arya about which of these undefined's match refs which are types vs terms, metadata is confusing - let newTypesMap = types & traversed . someReferentMetadata_ undefined (someReference_ undefined) %%~ f - let newTermsMap = terms & traversed . someReferentMetadata_ undefined (someReferent_ undefined) %%~ f + let newTypesMap = types & traversed . someReferenceMetadata_ (someReference_ asTypeReference_) %%~ f + let newTermsMap = terms & traversed . someReferenceMetadata_ (someReferent_ asTermReference_) %%~ f S.Branch.Full.Branch <$> newTermsMap <*> newTypesMap <*> pure patches <*> pure children patchSomeRefsH_ :: (Ord t, Ord h) => Traversal (S.Patch' t h o) (S.Patch' t h o) (SomeReference (UReference.Id' h)) (SomeReference (UReference.Id' h)) patchSomeRefsH_ f S.Patch {termEdits, typeEdits} = do newTermEdits <- Map.fromList <$> (Map.toList termEdits & traversed . _1 . (someReferent_ asTermReference_) %%~ f) - newTypeEdits <- Map.fromList <$> (Map.toList typeEdits & traversed . _1 . (someReference_ undefined) %%~ f) + newTypeEdits <- Map.fromList <$> (Map.toList typeEdits & traversed . _1 . (someReference_ asTypeReference_) %%~ f) pure S.Patch {termEdits = newTermEdits, typeEdits = newTypeEdits} patchSomeRefsO_ :: (Ord t, Ord h, Ord o) => Traversal' (S.Patch' t h o) (SomeReference (UReference.Id' o)) @@ -456,7 +455,7 @@ termEditRefs_ _f (TermEdit.Deprecate) = pure TermEdit.Deprecate typeEditRefs_ :: Traversal' (TypeEdit.TypeEdit' t h) (SomeReference (UReference.Id' h)) typeEditRefs_ f (TypeEdit.Replace ref) = - TypeEdit.Replace <$> (ref & someReference_ undefined %%~ f) + TypeEdit.Replace <$> (ref & someReference_ asTypeReference_ %%~ f) typeEditRefs_ _f (TypeEdit.Deprecate) = pure TypeEdit.Deprecate migrateTermComponent :: From 482f9c24be886d7819953297837ca90efa066077 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 8 Nov 2021 13:35:55 -0600 Subject: [PATCH 086/297] implement HashDataDecls vs HashDecls --- .../src/Unison/Hashing/V2/Convert.hs | 29 +++++++++++++++---- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 9d17cc60b8..f8bf15c0ec 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -4,8 +4,8 @@ module Unison.Hashing.V2.Convert ( ResolutionResult, tokensBranch0, + hashDataDecls, hashDecls, - hashDecls', hashPatch, hashClosedTerm, hashTermComponents, @@ -57,6 +57,8 @@ import Unison.Var (Var) import qualified Unison.ConstructorType as Memory.ConstructorType import Control.Monad.Trans.Writer.CPS (Writer) import qualified Control.Monad.Trans.Writer.CPS as Writer +import qualified Unison.ConstructorType as CT +import Data.Functor ((<&>)) typeToReference :: Var v => Memory.Type.Type v a -> Memory.Reference.Reference typeToReference = h2mReference . Hashing.Type.toReference . m2hType . Memory.Type.removeAllEffectVars @@ -198,11 +200,11 @@ h2mReferent getCT = \case let mRef = h2mReference ref in Memory.Referent.Con mRef n (getCT mRef) -hashDecls :: +hashDataDecls :: Var v => Map v (Memory.DD.DataDeclaration v a) -> ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] -hashDecls memDecls = do +hashDataDecls memDecls = do let hashingDecls = fmap m2hDecl memDecls hashingResult <- Hashing.DD.hashDecls hashingDecls pure $ map h2mDeclResult hashingResult @@ -210,12 +212,27 @@ hashDecls memDecls = do h2mDeclResult :: Ord v => (v, Hashing.Reference.Id, Hashing.DD.DataDeclaration v a) -> (v, Memory.Reference.Id, Memory.DD.DataDeclaration v a) h2mDeclResult (v, id, dd) = (v, h2mReferenceId id, h2mDecl dd) --- TODO: rename hashDecls to hashDataDecls, remove tick from this -hashDecls' :: +hashDecls :: Var v => Map v (Memory.DD.Decl v a) -> ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.Decl v a)] -hashDecls' = undefined +hashDecls memDecls = do + -- want to unwrap the decl before doing the rehashing, and then wrap it back up the same way + let howToReassemble = + memDecls <&> \case + Left {} -> CT.Effect + Right {} -> CT.Data + memDeclsAsDDs = Memory.DD.asDataDecl <$> memDecls + result <- hashDataDecls memDeclsAsDDs + pure $ + result <&> \(v, id', decl) -> + case Map.lookup v howToReassemble of + Nothing -> error "Unknown v in hashDecls'" + Just ct -> (v, id', retag ct decl) + where + retag :: CT.ConstructorType -> Memory.DD.DataDeclaration v a -> Memory.DD.Decl v a + retag CT.Effect = Left . Memory.DD.EffectDeclaration + retag CT.Data = Right m2hDecl :: Ord v => Memory.DD.DataDeclaration v a -> Hashing.DD.DataDeclaration v a m2hDecl (Memory.DD.DataDeclaration mod ann bound ctors) = From e742e8cc1a33aa285ad70761313ae81fdd1373c6 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 8 Nov 2021 13:42:09 -0600 Subject: [PATCH 087/297] Write a version of hashDecls which preserves declaration tagging --- parser-typechecker/src/Unison/Builtin/Decls.hs | 8 ++++---- .../src/Unison/Codebase/Editor/Propagate.hs | 6 +++--- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 2 +- parser-typechecker/src/Unison/UnisonFile/Names.hs | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/parser-typechecker/src/Unison/Builtin/Decls.hs b/parser-typechecker/src/Unison/Builtin/Decls.hs index b7eb67559a..cbf0db9c1b 100644 --- a/parser-typechecker/src/Unison/Builtin/Decls.hs +++ b/parser-typechecker/src/Unison/Builtin/Decls.hs @@ -15,7 +15,7 @@ import Unison.DataDeclaration Modifier (Structural, Unique), ) import qualified Unison.DataDeclaration as DD -import Unison.Hashing.V2.Convert (hashDecls) +import Unison.Hashing.V2.Convert (hashDataDecls) import qualified Unison.Pattern as Pattern import Unison.Reference (Reference) import qualified Unison.Reference as Reference @@ -121,10 +121,10 @@ failConstructorReferent = Referent.Con testResultRef failConstructorId CT.Data builtinDataDecls :: Var v => [(v, Reference.Id, DataDeclaration v ())] builtinDataDecls = rs1 ++ rs where - rs1 = case hashDecls $ Map.fromList + rs1 = case hashDataDecls $ Map.fromList [ (v "Link" , link) ] of Right a -> a; Left e -> error $ "builtinDataDecls: " <> show e - rs = case hashDecls $ Map.fromList + rs = case hashDataDecls $ Map.fromList [ (v "Unit" , unit) , (v "Tuple" , tuple) , (v "Optional" , opt) @@ -310,7 +310,7 @@ builtinDataDecls = rs1 ++ rs builtinEffectDecls :: Var v => [(v, Reference.Id, DD.EffectDeclaration v ())] builtinEffectDecls = - case hashDecls $ Map.fromList [ (v "Exception", exception) ] of + case hashDataDecls $ Map.fromList [ (v "Exception", exception) ] of Right a -> over _3 DD.EffectDeclaration <$> a Left e -> error $ "builtinEffectDecls: " <> show e where diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs index e3f5c70b5e..1db0e34661 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs @@ -324,7 +324,7 @@ propagate rootNames patch b = case validatePatch patch of declMap = over _2 (either Decl.toDataDecl id) <$> componentMap' -- TODO: kind-check the new components hashedDecls = (fmap . fmap) (over _2 DerivedId) - . Hashing.hashDecls + . Hashing.hashDataDecls $ view _2 <$> declMap hashedComponents' <- case hashedDecls of Left _ -> @@ -594,9 +594,9 @@ applyPropagate patch Edits {..} = do types = updateMetadatas id $ Star3.replaceFacts replaceType typeEdits _types - updateMetadatas :: + updateMetadatas :: Ord r => - (Reference -> r) -> + (Reference -> r) -> Star3.Star3 r NameSegment Metadata.Type (Metadata.Type, Metadata.Value) -> Star3.Star3 r NameSegment Metadata.Type (Metadata.Type, Metadata.Value) updateMetadatas ref s = clearPropagated $ Star3.mapD3 go s diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 0f81abe2b3..43cd6786ee 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -589,7 +589,7 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do remappedReferences & Map.elems & Map.fromList - & Convert.hashDecls' + & Convert.hashDecls & fromRight (error "unexpected resolution error") for_ newComponent $ \(declName, newReferenceId, dd) -> do diff --git a/parser-typechecker/src/Unison/UnisonFile/Names.hs b/parser-typechecker/src/Unison/UnisonFile/Names.hs index f26ff91893..de415a8b17 100644 --- a/parser-typechecker/src/Unison/UnisonFile/Names.hs +++ b/parser-typechecker/src/Unison/UnisonFile/Names.hs @@ -97,7 +97,7 @@ environmentFor names dataDecls0 effectDecls0 = do traverse (DD.withEffectDeclM (DD.Names.bindNames locallyBoundTypes names)) effectDecls0 let allDecls0 :: Map v (DataDeclaration v a) allDecls0 = Map.union dataDecls (toDataDecl <$> effectDecls) - hashDecls' :: [(v, Reference.Id, DataDeclaration v a)] <- Hashing.hashDecls allDecls0 + hashDecls' :: [(v, Reference.Id, DataDeclaration v a)] <- Hashing.hashDataDecls allDecls0 -- then we have to pick out the dataDecls from the effectDecls let allDecls = Map.fromList [ (v, (r, de)) | (v, r, de) <- hashDecls' ] From 6d39ecf8dde2ebccb45ab5e9e472641fb2331f50 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 8 Nov 2021 13:44:50 -0600 Subject: [PATCH 088/297] Remove unused migration combinators --- .../SqliteCodebase/MigrateSchema12.hs | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 43cd6786ee..a619331f84 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -685,30 +685,10 @@ referentAsSomeTermReference_ f = \case <&> (\(newRefId, newConId) -> (Referent'.Con' (Reference.DerivedId newRefId) newConId conType)) r -> pure r -remapReferences :: - Map (Old Reference.Id) (New Reference.Id) -> - Type.F (Type v a) -> - Type.F (Type v a) -remapReferences declMap = \case - (Type.Ref (Reference.DerivedId refId)) -> - Type.Ref . Reference.DerivedId $ - fromMaybe - (error $ "Expected reference to exist in decl mapping, but it wasn't found: " <> show refId) - (Map.lookup refId declMap) - x -> x - type SomeReferenceId = SomeReference Reference.Id type SomeReferenceObjId = SomeReference (UReference.Id' ObjectId) -objIdsToHashed :: MonadState MigrationState m => SomeReferenceObjId -> m SomeReferenceId -objIdsToHashed = - someRef_ %%~ \(UReference.Id objId pos) -> do - objMapping <- gets objLookup - case Map.lookup objId objMapping of - Nothing -> error $ "Expected object mapping for ID: " <> show objId - Just (_, _, hash) -> pure (Reference.Id hash pos) - remapObjIdRefs :: (Map (Old ObjectId) (New ObjectId, New HashId, New Hash)) -> (Map SomeReferenceId SomeReferenceId) -> From 6ee3b40f170d67aa995db4156d71ca22de45f9b4 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 8 Nov 2021 13:47:16 -0600 Subject: [PATCH 089/297] Fix tests --- parser-typechecker/tests/Unison/Test/DataDeclaration.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parser-typechecker/tests/Unison/Test/DataDeclaration.hs b/parser-typechecker/tests/Unison/Test/DataDeclaration.hs index 80d8763158..639d3c5f40 100644 --- a/parser-typechecker/tests/Unison/Test/DataDeclaration.hs +++ b/parser-typechecker/tests/Unison/Test/DataDeclaration.hs @@ -22,7 +22,7 @@ import qualified Unison.Var.RefNamed as Var test :: Test () test = scope "datadeclaration" $ - let Right hashes = Hashing.hashDecls . (snd <$>) . dataDeclarationsId $ file + let Right hashes = Hashing.hashDataDecls . (snd <$>) . dataDeclarationsId $ file hashMap = Map.fromList $ fmap (\(a,b,_) -> (a,b)) hashes hashOf k = Map.lookup (Var.named k) hashMap in tests [ From 9ceae3eb0fc198536d0ed56a290a7e78a2408858 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 8 Nov 2021 14:49:05 -0500 Subject: [PATCH 090/297] don't migrate any entity twice --- .../SqliteCodebase/MigrateSchema12.hs | 20 +++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index a619331f84..8e90291b85 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -94,7 +94,7 @@ migrateSchema12 conn codebase = do [WK.RegularWatch, WK.TestWatch] (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) `runReaderT` Env {db = conn, codebase} - `evalStateT` MigrationState Map.empty Map.empty Map.empty + `evalStateT` MigrationState Map.empty Map.empty Map.empty Set.empty where progress :: Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity progress = @@ -120,7 +120,9 @@ data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. { referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), causalMapping :: Map (Old CausalHashId) (New (CausalHash, CausalHashId)), - objLookup :: Map (Old ObjectId) (New (ObjectId, HashId, Hash)) + objLookup :: Map (Old ObjectId) (New (ObjectId, HashId, Hash)), + -- Remember the hashes of term/decls that we have already migrated to avoid migrating them twice. + migratedDefnHashes :: Set (Old Hash) } deriving (Generic) @@ -169,6 +171,8 @@ liftQ = mapReaderT lift migrateCausal :: MonadIO m => Connection -> CausalHashId -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do + whenM (Map.member oldCausalHashId <$> use (field @"causalMapping")) (throwE Sync.PreviouslyDone) + oldBranchHashId <- runDB conn . liftQ $ Q.loadCausalValueHashId oldCausalHashId oldCausalParentHashIds <- runDB conn . liftQ $ Q.loadCausalParents oldCausalHashId @@ -223,6 +227,8 @@ migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do migrateBranch :: MonadIO m => Connection -> ObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do + whenM (Map.member oldObjectId <$> use (field @"objLookup")) (throwE Sync.PreviouslyDone) + oldBranch <- runDB conn (Ops.loadDbBranchByObjectId (BranchObjectId oldObjectId)) oldBranchWithHashes <- runDB conn (traverseOf S.branchHashes_ (fmap Cv.hash2to1 . Ops.loadHashByObjectId) oldBranch) migratedRefs <- gets referenceMapping @@ -300,6 +306,8 @@ migratePatch :: Old PatchObjectId -> StateT MigrationState m (Sync.TrySyncResult Entity) migratePatch conn oldObjectId = fmap (either id id) . runExceptT $ do + whenM (Map.member (unPatchObjectId oldObjectId) <$> use (field @"objLookup")) (throwE Sync.PreviouslyDone) + oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) let hydrateHashes :: forall m. Q.EDB m => HashId -> m Hash hydrateHashes hashId = do @@ -465,6 +473,8 @@ migrateTermComponent :: Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do + whenM (Set.member hash <$> use (field @"migratedDefnHashes")) (throwE Sync.PreviouslyDone) + component <- (lift . lift $ getTermComponentWithTypes hash) >>= \case Nothing -> error $ "Hash was missing from codebase: " <> show hash @@ -523,6 +533,8 @@ migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do field @"referenceMapping" %= Map.insert (TermReference oldReferenceId) (TermReference newReferenceId) lift . lift $ putTerm newReferenceId trm typ + field @"migratedDefnHashes" %= Set.insert hash + pure Sync.Done migrateDeclComponent :: @@ -532,6 +544,8 @@ migrateDeclComponent :: Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do + whenM (Set.member hash <$> use (field @"migratedDefnHashes")) (throwE Sync.PreviouslyDone) + declComponent :: [DD.Decl v a] <- (lift . lift $ getDeclComponent hash) >>= \case Nothing -> error $ "Expected decl component for hash:" <> show hash @@ -612,6 +626,8 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do lift . lift $ putTypeDeclaration newReferenceId dd + field @"migratedDefnHashes" %= Set.insert hash + pure Sync.Done typeReferences_ :: (Monad m, Ord v) => LensLike' m (Type v a) SomeReferenceId From 1b6bbae5b705efb5a03e78f7f766fe70da99899c Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Tue, 9 Nov 2021 11:26:19 -0500 Subject: [PATCH 091/297] Try to only float a single copy of a definition --- parser-typechecker/src/Unison/Runtime/ANF.hs | 15 ++++++++++----- .../transcripts/bug-strange-closure.output.md | 3 ++- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/parser-typechecker/src/Unison/Runtime/ANF.hs b/parser-typechecker/src/Unison/Runtime/ANF.hs index b2401a8bec..23adc80ff7 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF.hs @@ -269,11 +269,16 @@ lamFloater :: (Var v, Monoid a) => Bool -> Term v a -> Maybe v -> a -> [v] -> Term v a -> FloatM v a v lamFloater closed tm mv a vs bd - = state $ \(cvs, ctx, dcmp) -> - let v = ABT.freshIn cvs $ fromMaybe (typed Var.Float) mv - in (v, ( Set.insert v cvs - , ctx <> [(v, lam' a vs bd)] - , floatDecomp closed v tm dcmp)) + = state $ \trip@(cvs, ctx, dcmp) -> case find p ctx of + Just (v, _) -> (v, trip) + Nothing -> + let v = ABT.freshIn cvs $ fromMaybe (typed Var.Float) mv + in (v, ( Set.insert v cvs + , ctx <> [(v, lam' a vs bd)] + , floatDecomp closed v tm dcmp)) + where + tgt = unannotate (lam' a vs bd) + p (_, flam) = unannotate flam == tgt floatDecomp :: Bool -> v -> Term v a -> [(v, Term v a)] -> [(v, Term v a)] diff --git a/unison-src/transcripts/bug-strange-closure.output.md b/unison-src/transcripts/bug-strange-closure.output.md index 1dfe955f0a..bf820e2523 100644 --- a/unison-src/transcripts/bug-strange-closure.output.md +++ b/unison-src/transcripts/bug-strange-closure.output.md @@ -2275,7 +2275,8 @@ rendered = Pretty.get (docFormatConsole doc.guide) 1 (Term.Term (Any - '(x -> + (_ + x -> sqr x))))), !Lit From 20146d233fc174bab02abcab2a1231008de17c77 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 9 Nov 2021 14:19:53 -0600 Subject: [PATCH 092/297] Update checklist --- .../SqliteCodebase/MigrateSchema12.hs | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 8e90291b85..80a2db7081 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -77,13 +77,19 @@ import qualified Unison.Type as Type import Unison.Var (Var) -- todo: --- * write a harness to call & seed algorithm, then do cleanup +-- * write a harness to call & seed algorithm -- * may involve writing a `Progress` -- * raw DB things: --- * overwrite object_id column in hash_object table to point at new objects --- * delete references to old objects in index tables (where else?) --- * delete old objects +-- * [ ] overwrite object_id column in hash_object table to point at new objects <-- mitchell has started +-- * [ ] delete references to old objects in index tables (where else?) +-- * [ ] delete old objects +-- -- * refer to github megaticket https://github.com/unisonweb/unison/issues/2471 +-- ☢️ [ ] incorporate type signature into hash of term <- chris/arya have started ☢️ +-- * [ ] Salt V2 hashes with version number +-- * [ ] Use V2 hashing for Causals +-- * [ ] Delete V1 Hashing to ensure it's unused +-- * [ ] confirm that pulls are handled ok migrateSchema12 :: forall a m v. (MonadIO m, Var v) => Connection -> Codebase m v a -> m () migrateSchema12 conn codebase = do @@ -99,13 +105,13 @@ migrateSchema12 conn codebase = do progress :: Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity progress = let need :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () - need = undefined + need e = liftIO $ putStrLn $ "Need: " ++ show e done :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () - done = undefined + done e = liftIO $ putStrLn $ "Done: " ++ show e error :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () - error = undefined + error e = liftIO $ putStrLn $ "Error: " ++ show e allDone :: ReaderT (Env m v a) (StateT MigrationState m) () - allDone = undefined + allDone = liftIO $ putStrLn $ "All Done" in Sync.Progress {need, done, error, allDone} type Old a = a From 8a449ef45347878b13358dab8b377e6f7512756d Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 9 Nov 2021 16:04:01 -0500 Subject: [PATCH 093/297] some post-migration migration/cleanup --- .../U/Codebase/Sqlite/Queries.hs | 49 +++++++++++++++++++ .../SqliteCodebase/MigrateSchema12.hs | 13 +++-- 2 files changed, 59 insertions(+), 3 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index cffecb472a..8c7bf8e68d 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -47,6 +47,7 @@ module U.Codebase.Sqlite.Queries ( expectObjectIdForAnyHashId, maybeObjectIdForPrimaryHashId, maybeObjectIdForAnyHashId, + recordObjectRehash, -- * object table saveObject, @@ -55,6 +56,7 @@ module U.Codebase.Sqlite.Queries ( loadObjectWithTypeById, loadObjectWithHashIdAndTypeById, updateObjectBlob, -- unused + deleteObject, -- * namespace_root table loadMaybeNamespaceRoot, @@ -84,6 +86,7 @@ module U.Codebase.Sqlite.Queries ( clearWatches, -- * indexes + deleteIndexesForObject, -- ** dependents index addToDependentsIndex, getDependentsForDependency, @@ -392,11 +395,34 @@ hashIdWithVersionForObject = query sql . Only where sql = [here| SELECT hash_id, hash_version FROM hash_object WHERE object_id = ? |] +-- | @recordObjectRehash old new@ records that object @old@ was rehashed and inserted as a new object, @new@. +-- +-- This function rewrites @old@'s @hash_object@ rows in place to point at the new object. +recordObjectRehash :: DB m => ObjectId -> ObjectId -> m () +recordObjectRehash old new = + execute sql (new, old) + where + sql = [here| + UPDATE hash_object + SET object_id = ? + WHERE object_id = ? + |] + updateObjectBlob :: DB m => ObjectId -> ByteString -> m () updateObjectBlob oId bs = execute sql (oId, bs) where sql = [here| UPDATE object SET bytes = ? WHERE id = ? |] +-- | Delete a row in the @object@ table. +deleteObject :: DB m => ObjectId -> m () +deleteObject oid = + execute + [here| + DELETE FROM object + WHERE id = ? + |] + (Only oid) + -- |Maybe we would generalize this to something other than NamespaceHash if we -- end up wanting to store other kinds of Causals here too. saveCausal :: DB m => CausalHashId -> BranchHashId -> m () @@ -635,6 +661,29 @@ getTypeMentionsReferencesForComponent r = fixupTypeIndexRow :: Reference' TextId HashId :. Referent.Id -> (Reference' TextId HashId, Referent.Id) fixupTypeIndexRow (rh :. ri) = (rh, ri) +-- | Delete all mentions of an object in index tables. +deleteIndexesForObject :: DB m => ObjectId -> m () +deleteIndexesForObject oid = do + execute + [here| + DELETE FROM dependents_index + WHERE dependency_object_id = ? + OR dependent_object_id = ? + |] + (oid, oid) + execute + [here| + DELETE FROM find_type_index + WHERE term_referent_object_id = ? + |] + (Only oid) + execute + [here| + DELETE FROM find_type_mentions_index + WHERE term_referent_object_id = ? + |] + (Only oid) + addToDependentsIndex :: DB m => Reference.Reference -> Reference.Id -> m () addToDependentsIndex dependency dependent = execute sql (dependency :. dependent) where sql = [here| diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 80a2db7081..bf51aba069 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -98,9 +98,16 @@ migrateSchema12 conn codebase = do foldMapM (\watchKind -> map (W watchKind) <$> Codebase.watches codebase (Cv.watchKind2to1 watchKind)) [WK.RegularWatch, WK.TestWatch] - (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) - `runReaderT` Env {db = conn, codebase} - `evalStateT` MigrationState Map.empty Map.empty Map.empty Set.empty + migrationState <- + (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) + `runReaderT` Env {db = conn, codebase} + `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty + ifor_ (objLookup migrationState) \old (new, _, _) -> do + (runDB conn . liftQ) do + Q.recordObjectRehash old new + Q.deleteIndexesForObject old + -- what about deleting old watches? + Q.deleteObject old where progress :: Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity progress = From 6dcb4861400172c0a17c068699c9d04d1baf696c Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 8 Nov 2021 15:25:00 -0600 Subject: [PATCH 094/297] Hash types into hashes --- parser-typechecker/src/Unison/Builtin.hs | 2 +- .../src/Unison/Builtin/Terms.hs | 25 ++++++------ .../src/Unison/Codebase/Editor/AuthorInfo.hs | 15 +++++--- .../src/Unison/Hashing/V2/Convert.hs | 38 +++++++++++++------ .../src/Unison/Hashing/V2/Term.hs | 26 ++++++++++++- parser-typechecker/src/Unison/UnisonFile.hs | 3 +- 6 files changed, 77 insertions(+), 32 deletions(-) diff --git a/parser-typechecker/src/Unison/Builtin.hs b/parser-typechecker/src/Unison/Builtin.hs index 53f078a37a..cbb4110d29 100644 --- a/parser-typechecker/src/Unison/Builtin.hs +++ b/parser-typechecker/src/Unison/Builtin.hs @@ -65,7 +65,7 @@ names0 = Names terms types where ((CT.Effect,) . (second . second) DD.toDataDecl <$> builtinEffectDecls) , ((_,vc,_), cid) <- DD.constructors' decl `zip` [0..]] <> Rel.fromList [ (Name.unsafeFromVar v, Referent.Ref (R.DerivedId i)) - | (v,i) <- Map.toList $ TD.builtinTermsRef @Symbol Intrinsic] + | (v,i) <- Map.toList $ TD.builtinTermsRef @Symbol ] types = Rel.fromList builtinTypes <> Rel.fromList [ (Name.unsafeFromVar v, R.DerivedId r) | (v,(r,_)) <- builtinDataDecls @Symbol ] <> diff --git a/parser-typechecker/src/Unison/Builtin/Terms.hs b/parser-typechecker/src/Unison/Builtin/Terms.hs index 9057c6d0cf..6772545e40 100644 --- a/parser-typechecker/src/Unison/Builtin/Terms.hs +++ b/parser-typechecker/src/Unison/Builtin/Terms.hs @@ -1,7 +1,10 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeApplications #-} -module Unison.Builtin.Terms where +module Unison.Builtin.Terms + ( builtinTermsRef + , builtinTermsSrc + ) where import Data.Map (Map) import qualified Data.Map as Map @@ -17,24 +20,24 @@ import Unison.Var (Var) import qualified Unison.Var as Var builtinTermsSrc :: Var v => a -> [(v, Term v a, Type v a)] -builtinTermsSrc a = +builtinTermsSrc ann = [ ( v "metadata.isPropagated", - Term.constructor a Decls.isPropagatedRef Decls.isPropagatedConstructorId, - Type.ref a Decls.isPropagatedRef + Term.constructor ann Decls.isPropagatedRef Decls.isPropagatedConstructorId, + Type.ref ann Decls.isPropagatedRef ), ( v "metadata.isTest", - Term.constructor a Decls.isTestRef Decls.isTestConstructorId, - Type.ref a Decls.isTestRef + Term.constructor ann Decls.isTestRef Decls.isTestConstructorId, + Type.ref ann Decls.isTestRef ) ] v :: Var v => Text -> v v = Var.named -builtinTermsRef :: Var v => a -> Map v Reference.Id -builtinTermsRef a = - fmap fst +builtinTermsRef :: Var v => Map v Reference.Id +builtinTermsRef = + fmap (\(refId, _, _) -> refId) . H.hashTermComponents . Map.fromList - . fmap (\(v, tm, _tp) -> (v, tm)) - $ builtinTermsSrc a + . fmap (\(v, tm, tp) -> (v, (tm, tp))) + $ builtinTermsSrc () diff --git a/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs b/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs index c76b232b2e..a511103c43 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs @@ -27,7 +27,7 @@ createAuthorInfo a t = createAuthorInfo' . unpack <$> liftIO (getRandomBytes 32) createAuthorInfo' :: [Word8] -> AuthorInfo v a createAuthorInfo' bytes = let [(guidRef, guidTerm)] = - hashAndWrangle "guid" $ + hashAndWrangle "guid" guidType $ Term.app a (Term.constructor a guidTypeRef 0) @@ -38,7 +38,7 @@ createAuthorInfo a t = createAuthorInfo' . unpack <$> liftIO (getRandomBytes 32) ) [(authorRef, authorTerm)] = - hashAndWrangle "author" $ + hashAndWrangle "author" authorType $ Term.apps (Term.constructor a authorTypeRef 0) [ (a, Term.ref a (Reference.DerivedId guidRef)), @@ -46,7 +46,7 @@ createAuthorInfo a t = createAuthorInfo' . unpack <$> liftIO (getRandomBytes 32) ] [(chRef, chTerm)] = - hashAndWrangle "copyrightHolder" $ + hashAndWrangle "copyrightHolder" chType $ Term.apps (Term.constructor a chTypeRef 0) [ (a, Term.ref a (Reference.DerivedId guidRef)), @@ -56,10 +56,13 @@ createAuthorInfo a t = createAuthorInfo' . unpack <$> liftIO (getRandomBytes 32) (guidRef, guidTerm, guidType) (authorRef, authorTerm, authorType) (chRef, chTerm, chType) - hashAndWrangle v tm = - Foldable.toList $ + hashAndWrangle :: Text + -> Type v a -> Term v a + -> [(Reference.Id, Term v a)] + hashAndWrangle v typ tm = + Foldable.toList $ fmap (\(id,tm,_tp) -> (id,tm)) $ H.hashTermComponents - (Map.fromList [(Var.named v, tm)]) + (Map.singleton (Var.named v) (tm, typ)) (chType, chTypeRef) = (Type.ref a chTypeRef, unsafeParse copyrightHolderHash) (authorType, authorTypeRef) = (Type.ref a authorTypeRef, unsafeParse authorHash) (guidType, guidTypeRef) = (Type.ref a guidTypeRef, unsafeParse guidHash) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index f8bf15c0ec..98dfa596eb 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -9,7 +9,6 @@ module Unison.Hashing.V2.Convert hashPatch, hashClosedTerm, hashTermComponents, - hashTermComponents', typeToReference, typeToReferenceMentions, ) @@ -59,6 +58,7 @@ import Control.Monad.Trans.Writer.CPS (Writer) import qualified Control.Monad.Trans.Writer.CPS as Writer import qualified Unison.ConstructorType as CT import Data.Functor ((<&>)) +import Data.Bitraversable (bitraverse) typeToReference :: Var v => Memory.Type.Type v a -> Memory.Reference.Reference typeToReference = h2mReference . Hashing.Type.toReference . m2hType . Memory.Type.removeAllEffectVars @@ -66,18 +66,34 @@ typeToReference = h2mReference . Hashing.Type.toReference . m2hType . Memory.Typ typeToReferenceMentions :: Var v => Memory.Type.Type v a -> Set Memory.Reference.Reference typeToReferenceMentions = Set.map h2mReference . Hashing.Type.toReferenceMentions . m2hType . Memory.Type.removeAllEffectVars -hashTermComponents :: Var v => Map v (Memory.Term.Term v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a) -hashTermComponents mTerms = - case Writer.runWriter (traverse m2hTerm mTerms) of - (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponents hTerms - where - h2mTermResult :: Ord v => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> (Hashing.Reference.Id, Hashing.Term.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) - h2mTermResult getCtorType (id, tm) = (h2mReferenceId id, h2mTerm getCtorType tm) - -- TODO: remove non-prime version -- include type in hash -hashTermComponents' :: Var v => Map v (Memory.Term.Term v a, Memory.Type.Type v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a) -hashTermComponents' = undefined +hashTermComponents :: + forall v a. + Var v => + Map v (Memory.Term.Term v a, Memory.Type.Type v a) -> + Map v (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a) +hashTermComponents vTerms = + case Writer.runWriter (traverse (bitraverse m2hTerm (pure . m2hType)) vTerms) of + (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponents hTerms + where + h2mTermResult :: + Ord v => + ( Memory.Reference.Reference -> + Memory.ConstructorType.ConstructorType + ) -> + (Hashing.Reference.Id, Hashing.Term.Term v a, Hashing.Type.Type v a) -> + (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a) + h2mTermResult getCtorType (id, tm, typ) = (h2mReferenceId id, h2mTerm getCtorType tm, h2mType typ) + + + +-- hashTermComponents' mTerms = +-- case Writer.runWriter (traverse m2hTerm mTerms) of +-- (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponents hTerms +-- where +-- h2mTermResult :: Ord v => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> (Hashing.Reference.Id, Hashing.Term.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) +-- h2mTermResult getCtorType (id, tm) = (h2mReferenceId id, h2mTerm getCtorType tm) hashClosedTerm :: Var v => Memory.Term.Term v a -> Memory.Reference.Id hashClosedTerm = h2mReferenceId . Hashing.Term.hashClosedTerm . fst . Writer.runWriter . m2hTerm diff --git a/parser-typechecker/src/Unison/Hashing/V2/Term.hs b/parser-typechecker/src/Unison/Hashing/V2/Term.hs index c1b63635fb..15a767468a 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Term.hs @@ -36,6 +36,7 @@ import Unison.Hashing.V2.Type (Type) import Unison.Prelude import Unison.Var (Var) import Prelude hiding (and, or) +import qualified Data.Zip as Zip data MatchCase loc a = MatchCase (Pattern loc) (Maybe a) a deriving (Show, Eq, Foldable, Functor, Generic, Generic1, Traversable) @@ -101,8 +102,29 @@ refId :: Ord v => a -> Reference.Id -> Term2 vt at ap v a refId a = ref a . Reference.DerivedId hashComponents :: - Var v => Map v (Term v a) -> Map v (Reference.Id, Term v a) -hashComponents = ReferenceUtil.hashComponents $ refId () + forall v a. + Var v => Map v (Term v a, Type v a) -> Map v (Reference.Id, Term v a, Type v a) +hashComponents terms = + Zip.zipWith keepType terms (ReferenceUtil.hashComponents (refId ()) terms') + where + terms' :: Map v (Term v a) + terms' = uncurry incorporateType <$> terms + + keepType :: ((Term v a, Type v a) -> (Reference.Id, Term v a) -> (Reference.Id, Term v a, Type v a)) + keepType (_, typ) (refId, trm) = (refId, trm, typ) + + incorporateType :: Term v a -> Type v a -> Term v a + incorporateType a@(ABT.out -> ABT.Tm (Ann e _tp)) typ = ABT.tm' (ABT.annotation a) (Ann e typ) + incorporateType e typ = ABT.tm' (ABT.annotation e) (Ann e typ) + + -- keep these until we decide if we want to add the appropriate smart constructors back into this module + -- incorporateType (Term.Ann' e _) typ = Term.ann () e typ + -- incorporateType e typ = Term.ann () e typ + + -- Need to insert an "Ann" node inside the 'Tm' ABT wrapper + -- iff there isn't already a top-level annotation. + -- What if there's a top-level Annotation but it doesn't match + -- the type that was provided? hashClosedTerm :: Var v => Term v a -> Reference.Id hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 diff --git a/parser-typechecker/src/Unison/UnisonFile.hs b/parser-typechecker/src/Unison/UnisonFile.hs index 9f03a64144..63d152466e 100644 --- a/parser-typechecker/src/Unison/UnisonFile.hs +++ b/parser-typechecker/src/Unison/UnisonFile.hs @@ -112,7 +112,8 @@ typecheckedUnisonFile datas effects tlcs watches = [(v,Nothing) | (v,_e,_t) <- join tlcs] ++ [(v, Just wk) | (wk, wkTerms) <- watches, (v, _e, _t) <- wkTerms ] -- good spot incorporate type of term into its hash, if not already present as an annotation (#2276) - hcs = Hashing.hashTermComponents $ Map.fromList $ (\(v, e, _t) -> (v, e)) <$> allTerms + -- hcs = Hashing.hashTermComponents $ Map.fromList $ (\(v, e, _t) -> (v, e)) <$> allTerms + hcs = Hashing.hashTermComponents' $ Map.fromList $ (\(v, e, t) -> (v, (e, t))) <$> allTerms in Map.fromList [ (v, (r, wk, e, t)) | (v, (r, e)) <- Map.toList hcs From a1d9b77605e77873f817b84c3e4f210d3dae1832 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 8 Nov 2021 16:22:19 -0600 Subject: [PATCH 095/297] Updates --- .../Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 2 +- parser-typechecker/src/Unison/DeclPrinter.hs | 10 +++++++--- parser-typechecker/src/Unison/UnisonFile.hs | 6 ++---- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index bf51aba069..bd08d297c3 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -539,7 +539,7 @@ migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do & Map.elems & fmap (\(v, trm, typ) -> (v, (trm, typ))) & Map.fromList - & Convert.hashTermComponents' + & Convert.hashTermComponents ifor newTermComponents $ \v (newReferenceId, trm, typ) -> do let oldReferenceId = vToOldReferenceMapping Map.! v diff --git a/parser-typechecker/src/Unison/DeclPrinter.hs b/parser-typechecker/src/Unison/DeclPrinter.hs index c20e41e6b3..fedb18d204 100644 --- a/parser-typechecker/src/Unison/DeclPrinter.hs +++ b/parser-typechecker/src/Unison/DeclPrinter.hs @@ -32,6 +32,7 @@ import Unison.Util.Pretty ( Pretty ) import qualified Unison.Util.Pretty as P import Unison.Var ( Var ) import qualified Unison.Var as Var +import qualified Unison.Term as Term type SyntaxText = S.SyntaxText' Reference @@ -139,10 +140,13 @@ fieldNames env r name dd = case DD.constructors dd of [(_, typ)] -> let vars :: [v] vars = [ Var.freshenId (fromIntegral n) (Var.named "_") | n <- [0..Type.arity typ - 1]] + accessorTypes :: [Type.Type v ()] + accessorTypes = () -- error "TODO: get types for accessors" + accessors :: [(v, Term.Term v ())] accessors = DD.generateRecordAccessors (map (,()) vars) (HQ.toVar name) r - hashes = Hashing.hashTermComponents (Map.fromList accessors) + hashes = Hashing.hashTermComponents (Map.fromList (zipWith (\(v, trm) typ -> (v, (trm, typ))) accessors accessorTypes)) names = [ (r, HQ.toString . PPE.termName env . Referent.Ref $ DerivedId r) - | r <- fst <$> Map.elems hashes ] + | r <- (\(refId, _trm, _typ) -> refId) <$> Map.elems hashes ] fieldNames = Map.fromList [ (r, f) | (r, n) <- names , typename <- pure (HQ.toString name) @@ -153,7 +157,7 @@ fieldNames env r name dd = case DD.constructors dd of in if Map.size fieldNames == length names then Just [ HQ.unsafeFromString name | v <- vars - , Just (ref, _) <- [Map.lookup (Var.namespaced [HQ.toVar name, v]) hashes] + , Just (ref, _, _) <- [Map.lookup (Var.namespaced [HQ.toVar name, v]) hashes] , Just name <- [Map.lookup ref fieldNames] ] else Nothing _ -> Nothing diff --git a/parser-typechecker/src/Unison/UnisonFile.hs b/parser-typechecker/src/Unison/UnisonFile.hs index 63d152466e..2a059348ea 100644 --- a/parser-typechecker/src/Unison/UnisonFile.hs +++ b/parser-typechecker/src/Unison/UnisonFile.hs @@ -111,12 +111,10 @@ typecheckedUnisonFile datas effects tlcs watches = watchKinds = Map.fromList $ [(v,Nothing) | (v,_e,_t) <- join tlcs] ++ [(v, Just wk) | (wk, wkTerms) <- watches, (v, _e, _t) <- wkTerms ] - -- good spot incorporate type of term into its hash, if not already present as an annotation (#2276) - -- hcs = Hashing.hashTermComponents $ Map.fromList $ (\(v, e, _t) -> (v, e)) <$> allTerms - hcs = Hashing.hashTermComponents' $ Map.fromList $ (\(v, e, t) -> (v, (e, t))) <$> allTerms + hcs = Hashing.hashTermComponents $ Map.fromList $ (\(v, e, t) -> (v, (e, t))) <$> allTerms in Map.fromList [ (v, (r, wk, e, t)) - | (v, (r, e)) <- Map.toList hcs + | (v, (r, e, _typ)) <- Map.toList hcs , Just t <- [Map.lookup v types] , wk <- [Map.findWithDefault (error $ show v ++ " missing from watchKinds") v watchKinds]] From 10683ad7665a8b344c40e221e79afea641384ec8 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 9 Nov 2021 15:07:13 -0600 Subject: [PATCH 096/297] Acquire and hash types for terms in runtime and field accessor lookups --- .../SqliteCodebase/MigrateSchema12.hs | 1 + parser-typechecker/src/Unison/DeclPrinter.hs | 27 ++++++++++++++++--- .../src/Unison/Hashing/V2/Convert.hs | 27 ++++++++++++------- .../src/Unison/Hashing/V2/Term.hs | 4 +++ .../src/Unison/Runtime/Interface.hs | 2 +- 5 files changed, 47 insertions(+), 14 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index bd08d297c3..a5247dc3d9 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -86,6 +86,7 @@ import Unison.Var (Var) -- -- * refer to github megaticket https://github.com/unisonweb/unison/issues/2471 -- ☢️ [ ] incorporate type signature into hash of term <- chris/arya have started ☢️ +-- [ ] store type annotation in the term -- * [ ] Salt V2 hashes with version number -- * [ ] Use V2 hashing for Causals -- * [ ] Delete V1 Hashing to ensure it's unused diff --git a/parser-typechecker/src/Unison/DeclPrinter.hs b/parser-typechecker/src/Unison/DeclPrinter.hs index fedb18d204..c256c9ccff 100644 --- a/parser-typechecker/src/Unison/DeclPrinter.hs +++ b/parser-typechecker/src/Unison/DeclPrinter.hs @@ -25,8 +25,12 @@ import Unison.PrettyPrintEnvDecl ( PrettyPrintEnvDecl(..) ) import qualified Unison.PrettyPrintEnv as PPE import qualified Unison.Referent as Referent import Unison.Reference ( Reference(DerivedId) ) +import qualified Unison.Result as Result import qualified Unison.Util.SyntaxText as S import qualified Unison.Type as Type +import qualified Unison.Typechecker as Typechecker +import Unison.Typechecker.TypeLookup (TypeLookup(TypeLookup)) +import qualified Unison.Typechecker.TypeLookup as TypeLookup import qualified Unison.TypePrinter as TypePrinter import Unison.Util.Pretty ( Pretty ) import qualified Unison.Util.Pretty as P @@ -140,11 +144,28 @@ fieldNames env r name dd = case DD.constructors dd of [(_, typ)] -> let vars :: [v] vars = [ Var.freshenId (fromIntegral n) (Var.named "_") | n <- [0..Type.arity typ - 1]] - accessorTypes :: [Type.Type v ()] - accessorTypes = () -- error "TODO: get types for accessors" accessors :: [(v, Term.Term v ())] accessors = DD.generateRecordAccessors (map (,()) vars) (HQ.toVar name) r - hashes = Hashing.hashTermComponents (Map.fromList (zipWith (\(v, trm) typ -> (v, (trm, typ))) accessors accessorTypes)) + accessorsWithTypes :: [(v, Term.Term v (), Type.Type v ())] + accessorsWithTypes = accessors <&> \(v, trm) -> + case Result.result (Typechecker.synthesize typecheckingEnv trm) of + Nothing -> error $ "Failed to typecheck record field: " <> show v + Just typ -> (v, trm, typ) + typeLookup :: TypeLookup v () + typeLookup = + TypeLookup + { TypeLookup.typeOfTerms = mempty, + TypeLookup.dataDecls = Map.singleton r (void dd), + TypeLookup.effectDecls = mempty + } + typecheckingEnv :: Typechecker.Env v () + typecheckingEnv = + Typechecker.Env + { Typechecker._ambientAbilities = mempty, + Typechecker._typeLookup = typeLookup, + Typechecker._termsByShortname = mempty + } + hashes = Hashing.hashTermComponents (Map.fromList . fmap (\(v, trm, typ) -> (v, (trm, typ))) $ accessorsWithTypes) names = [ (r, HQ.toString . PPE.termName env . Referent.Ref $ DerivedId r) | r <- (\(refId, _trm, _typ) -> refId) <$> Map.elems hashes ] fieldNames = Map.fromList diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 98dfa596eb..5664347ea4 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -9,6 +9,7 @@ module Unison.Hashing.V2.Convert hashPatch, hashClosedTerm, hashTermComponents, + hashTermComponentsWithoutTypes, typeToReference, typeToReferenceMentions, ) @@ -73,8 +74,8 @@ hashTermComponents :: Var v => Map v (Memory.Term.Term v a, Memory.Type.Type v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a) -hashTermComponents vTerms = - case Writer.runWriter (traverse (bitraverse m2hTerm (pure . m2hType)) vTerms) of +hashTermComponents mTerms = + case Writer.runWriter (traverse (bitraverse m2hTerm (pure . m2hType)) mTerms) of (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponents hTerms where h2mTermResult :: @@ -86,14 +87,20 @@ hashTermComponents vTerms = (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a) h2mTermResult getCtorType (id, tm, typ) = (h2mReferenceId id, h2mTerm getCtorType tm, h2mType typ) - - --- hashTermComponents' mTerms = --- case Writer.runWriter (traverse m2hTerm mTerms) of --- (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponents hTerms --- where --- h2mTermResult :: Ord v => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> (Hashing.Reference.Id, Hashing.Term.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) --- h2mTermResult getCtorType (id, tm) = (h2mReferenceId id, h2mTerm getCtorType tm) +-- | This shouldn't be used when storing terms in the codebase, as it doesn't incorporate the type into the hash. +-- this should only be used in cases where you just need a way to identify some terms that you have, but won't be +-- saving them. +hashTermComponentsWithoutTypes :: + forall v a. + Var v => + Map v (Memory.Term.Term v a) -> + Map v (Memory.Reference.Id, Memory.Term.Term v a) +hashTermComponentsWithoutTypes mTerms = + case Writer.runWriter (traverse m2hTerm mTerms) of + (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponentsWithoutTypes hTerms + where + h2mTermResult :: Ord v => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> (Hashing.Reference.Id, Hashing.Term.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) + h2mTermResult getCtorType (id, tm) = (h2mReferenceId id, h2mTerm getCtorType tm) hashClosedTerm :: Var v => Memory.Term.Term v a -> Memory.Reference.Id hashClosedTerm = h2mReferenceId . Hashing.Term.hashClosedTerm . fst . Writer.runWriter . m2hTerm diff --git a/parser-typechecker/src/Unison/Hashing/V2/Term.hs b/parser-typechecker/src/Unison/Hashing/V2/Term.hs index 15a767468a..9c14705d1e 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Term.hs @@ -15,6 +15,7 @@ module Unison.Hashing.V2.Term MatchCase (..), hashClosedTerm, hashComponents, + hashComponentsWithoutTypes, ) where @@ -126,6 +127,9 @@ hashComponents terms = -- What if there's a top-level Annotation but it doesn't match -- the type that was provided? +hashComponentsWithoutTypes :: Var v => Map v (Term v a) -> Map v (Reference.Id, Term v a) +hashComponentsWithoutTypes = ReferenceUtil.hashComponents $ refId () + hashClosedTerm :: Var v => Term v a -> Reference.Id hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 diff --git a/parser-typechecker/src/Unison/Runtime/Interface.hs b/parser-typechecker/src/Unison/Runtime/Interface.hs index 7f803d4977..09f63442b1 100644 --- a/parser-typechecker/src/Unison/Runtime/Interface.hs +++ b/parser-typechecker/src/Unison/Runtime/Interface.hs @@ -291,7 +291,7 @@ prepareEvaluation ppe tm ctx = do (rmn, rtms) | Tm.LetRecNamed' bs mn0 <- tm , hcs <- fmap (first RF.DerivedId) - . Hashing.hashTermComponents $ Map.fromList bs + . Hashing.hashTermComponentsWithoutTypes $ Map.fromList bs , mn <- Tm.substs (Map.toList $ Tm.ref () . fst <$> hcs) mn0 , rmn <- RF.DerivedId $ Hashing.hashClosedTerm mn = (rmn , (rmn, mn) : Map.elems hcs) From f22d9613e464b4e912c80814a38224e1b188c9ad Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 9 Nov 2021 16:00:44 -0600 Subject: [PATCH 097/297] Filter patch dependencies --- .../Codebase/SqliteCodebase/MigrateSchema12.hs | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index a5247dc3d9..43614796b9 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -88,8 +88,8 @@ import Unison.Var (Var) -- ☢️ [ ] incorporate type signature into hash of term <- chris/arya have started ☢️ -- [ ] store type annotation in the term -- * [ ] Salt V2 hashes with version number --- * [ ] Use V2 hashing for Causals --- * [ ] Delete V1 Hashing to ensure it's unused +-- * [ ] Refactor Causal helper functions to use V2 hashing +-- * [ ] Delete V1 Hashing to ensure it's unused -- * [ ] confirm that pulls are handled ok migrateSchema12 :: forall a m v. (MonadIO m, Var v) => Connection -> Codebase m v a -> m () @@ -335,12 +335,14 @@ migratePatch conn oldObjectId = fmap (either id id) . runExceptT $ do (oldPatch & S.patchH_ %%~ liftQ . hydrateHashes) >>= (S.patchO_ %%~ hydrateObjectIds) + migratedRefs <- gets referenceMapping + let isUnmigratedRef ref = Map.notMember ref migratedRefs -- 2. Determine whether all things the patch refers to are built. - let dependencies :: [Entity] - dependencies = - oldPatchWithHashes ^.. patchSomeRefsH_ . uRefIdAsRefId_ . to someReferenceIdToEntity - <> oldPatchWithHashes ^.. patchSomeRefsO_ . uRefIdAsRefId_ . to someReferenceIdToEntity - when (not . null $ dependencies) (throwE (Sync.Missing dependencies)) + let unmigratedDependencies :: [Entity] + unmigratedDependencies = + oldPatchWithHashes ^.. patchSomeRefsH_ . uRefIdAsRefId_ . filtered isUnmigratedRef . to someReferenceIdToEntity + <> oldPatchWithHashes ^.. patchSomeRefsO_ . uRefIdAsRefId_ . filtered isUnmigratedRef . to someReferenceIdToEntity + when (not . null $ unmigratedDependencies) (throwE (Sync.Missing unmigratedDependencies)) let hashToHashId :: forall m. Q.EDB m => Hash -> m HashId hashToHashId h = From 8d5b284d837068dfab69c5a5d1edefe722013af5 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 9 Nov 2021 16:30:40 -0600 Subject: [PATCH 098/297] Fix bad err messages --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 43614796b9..95a8c194c0 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -293,10 +293,10 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do Nothing -> error $ "Expected patch: " <> show patchObjId <> " to be migrated" Just (newPatchObjId, _, _) -> PatchObjectId newPatchObjId let remapCausalHashId causalHashId = case Map.lookup causalHashId migratedCausals of - Nothing -> error $ "Expected patch: " <> show causalHashId <> " to be migrated" + Nothing -> error $ "Expected causal hash id: " <> show causalHashId <> " to be migrated" Just (_, newCausalHashId) -> newCausalHashId - let remapBranchObjectId patchObjId = case Map.lookup (unBranchObjectId patchObjId) migratedObjects of - Nothing -> error $ "Expected patch: " <> show patchObjId <> " to be migrated" + let remapBranchObjectId objId = case Map.lookup (unBranchObjectId objId) migratedObjects of + Nothing -> error $ "Expected object: " <> show objId <> " to be migrated" Just (newBranchObjId, _, _) -> BranchObjectId newBranchObjId let newBranch :: S.DbBranch From 91fdf0f2679f1f4df269234c47a45af13cdba7dc Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Wed, 10 Nov 2021 11:51:52 -0500 Subject: [PATCH 099/297] update todo list --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 95a8c194c0..27154b11d1 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -85,11 +85,12 @@ import Unison.Var (Var) -- * [ ] delete old objects -- -- * refer to github megaticket https://github.com/unisonweb/unison/issues/2471 --- ☢️ [ ] incorporate type signature into hash of term <- chris/arya have started ☢️ --- [ ] store type annotation in the term --- * [ ] Salt V2 hashes with version number +-- ☢️ [x] incorporate type signature into hash of term <- chris/arya have started ☢️ +-- [x] store type annotation in the term -- * [ ] Refactor Causal helper functions to use V2 hashing +-- * [ ] I guess move Hashable to V2.Hashing pseudo-package -- * [ ] Delete V1 Hashing to ensure it's unused +-- * [ ] Salt V2 hashes with version number -- * [ ] confirm that pulls are handled ok migrateSchema12 :: forall a m v. (MonadIO m, Var v) => Connection -> Codebase m v a -> m () From ce15a7c37d552469fb3dbc075fd0eb6da6c594e3 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Wed, 10 Nov 2021 11:52:19 -0500 Subject: [PATCH 100/297] cleanup --- parser-typechecker/src/Unison/Hashing/V2/Term.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Term.hs b/parser-typechecker/src/Unison/Hashing/V2/Term.hs index 9c14705d1e..e106db4e26 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Term.hs @@ -112,7 +112,7 @@ hashComponents terms = terms' = uncurry incorporateType <$> terms keepType :: ((Term v a, Type v a) -> (Reference.Id, Term v a) -> (Reference.Id, Term v a, Type v a)) - keepType (_, typ) (refId, trm) = (refId, trm, typ) + keepType (_oldTrm, typ) (refId, trm) = (refId, trm, typ) incorporateType :: Term v a -> Type v a -> Term v a incorporateType a@(ABT.out -> ABT.Tm (Ann e _tp)) typ = ABT.tm' (ABT.annotation a) (Ann e typ) From 1c2a5c6fe434ecd1a7e159bcfc72d8c0fed8fd2b Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Wed, 10 Nov 2021 11:53:23 -0500 Subject: [PATCH 101/297] salt v2 hashes with version number this seems like an ok place to put this, but i'm not 100% set this module I guess should move into hashing v2 pseudopackage though, after updating how Causal hashes are computed, to also use v2 pseudopackage --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 2 +- unison-core/src/Unison/Hashable.hs | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 27154b11d1..70c8a771ee 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -90,7 +90,7 @@ import Unison.Var (Var) -- * [ ] Refactor Causal helper functions to use V2 hashing -- * [ ] I guess move Hashable to V2.Hashing pseudo-package -- * [ ] Delete V1 Hashing to ensure it's unused --- * [ ] Salt V2 hashes with version number +-- * [x] Salt V2 hashes with version number -- * [ ] confirm that pulls are handled ok migrateSchema12 :: forall a m v. (MonadIO m, Var v) => Connection -> Codebase m v a -> m () diff --git a/unison-core/src/Unison/Hashable.hs b/unison-core/src/Unison/Hashable.hs index d07e8b2d72..b6ab2248d6 100644 --- a/unison-core/src/Unison/Hashable.hs +++ b/unison-core/src/Unison/Hashable.hs @@ -29,7 +29,8 @@ accumulateToken :: (Accumulate h, Hashable t) => t -> Token h accumulateToken = Hashed . accumulate' accumulate' :: (Accumulate h, Hashable t) => t -> h -accumulate' = accumulate . tokens +accumulate' = accumulate . (hashVersion :). tokens + where hashVersion = Tag 2 class Hashable t where tokens :: Accumulate h => t -> [Token h] From 6f5bedabca714b881c6867a3037ae718a172caae Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 10 Nov 2021 16:56:56 -0600 Subject: [PATCH 102/297] Store object mappings for term and type components --- .../SqliteCodebase/MigrateSchema12.hs | 65 +++++++++++++------ 1 file changed, 46 insertions(+), 19 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 70c8a771ee..3146edef8d 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -135,7 +135,7 @@ data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. { referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), causalMapping :: Map (Old CausalHashId) (New (CausalHash, CausalHashId)), - objLookup :: Map (Old ObjectId) (New (ObjectId, HashId, Hash)), + objLookup :: Map (Old ObjectId) ((New ObjectId, New HashId, New Hash)), -- Remember the hashes of term/decls that we have already migrated to avoid migrating them twice. migratedDefnHashes :: Set (Old Hash) } @@ -157,11 +157,11 @@ migrationSync :: Sync (ReaderT (Env m v a) (StateT MigrationState m)) Entity migrationSync = Sync \case TermComponent hash -> do - Env {codebase} <- ask - lift (migrateTermComponent codebase hash) + Env {codebase, db} <- ask + lift (migrateTermComponent db codebase hash) DeclComponent hash -> do - Env {codebase} <- ask - lift (migrateDeclComponent codebase hash) + Env {codebase, db} <- ask + lift (migrateDeclComponent db codebase hash) BranchE objectId -> do Env {db} <- ask lift (migrateBranch db objectId) @@ -485,20 +485,21 @@ typeEditRefs_ _f (TypeEdit.Deprecate) = pure TypeEdit.Deprecate migrateTermComponent :: forall m v a. - (Ord v, Var v, Monad m) => + (Ord v, Var v, Monad m, MonadIO m) => + Connection -> Codebase m v a -> Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do - whenM (Set.member hash <$> use (field @"migratedDefnHashes")) (throwE Sync.PreviouslyDone) +migrateTermComponent conn Codebase {..} oldHash = fmap (either id id) . runExceptT $ do + whenM (Set.member oldHash <$> use (field @"migratedDefnHashes")) (throwE Sync.PreviouslyDone) component <- - (lift . lift $ getTermComponentWithTypes hash) >>= \case - Nothing -> error $ "Hash was missing from codebase: " <> show hash + (lift . lift $ getTermComponentWithTypes oldHash) >>= \case + Nothing -> error $ "Hash was missing from codebase: " <> show oldHash Just component -> pure component let componentIDMap :: Map (Old Reference.Id) (Term.Term v a, Type v a) - componentIDMap = Map.fromList $ Reference.componentFor hash component + componentIDMap = Map.fromList $ Reference.componentFor oldHash component let unhashed :: Map (Old Reference.Id) (v, Term.Term v a) unhashed = Term.unhashComponent (fst <$> componentIDMap) let vToOldReferenceMapping :: Map v (Old Reference.Id) @@ -550,26 +551,32 @@ migrateTermComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do field @"referenceMapping" %= Map.insert (TermReference oldReferenceId) (TermReference newReferenceId) lift . lift $ putTerm newReferenceId trm typ - field @"migratedDefnHashes" %= Set.insert hash + -- Need to get one of the new references to grab its hash, doesn't matter which one since + -- all hashes in the component are the same. + case newTermComponents ^? traversed . _1 . to Reference.idToHash of + Nothing -> pure () + Just newHash -> insertObjectMappingForHash conn oldHash newHash + field @"migratedDefnHashes" %= Set.insert oldHash pure Sync.Done migrateDeclComponent :: forall m v a. - (Ord v, Var v, Monad m) => + (Ord v, Var v, Monad m, MonadIO m) => + Connection -> Codebase m v a -> Unison.Hash -> StateT MigrationState m (Sync.TrySyncResult Entity) -migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do - whenM (Set.member hash <$> use (field @"migratedDefnHashes")) (throwE Sync.PreviouslyDone) +migrateDeclComponent conn Codebase {..} oldHash = fmap (either id id) . runExceptT $ do + whenM (Set.member oldHash <$> use (field @"migratedDefnHashes")) (throwE Sync.PreviouslyDone) declComponent :: [DD.Decl v a] <- - (lift . lift $ getDeclComponent hash) >>= \case - Nothing -> error $ "Expected decl component for hash:" <> show hash + (lift . lift $ getDeclComponent oldHash) >>= \case + Nothing -> error $ "Expected decl component for hash:" <> show oldHash Just dc -> pure dc let componentIDMap :: Map (Old Reference.Id) (DD.Decl v a) - componentIDMap = Map.fromList $ Reference.componentFor hash declComponent + componentIDMap = Map.fromList $ Reference.componentFor oldHash declComponent let unhashed :: Map (Old Reference.Id) (DeclName v, DD.Decl v a) unhashed = DD.unhashComponent componentIDMap @@ -643,10 +650,30 @@ migrateDeclComponent Codebase {..} hash = fmap (either id id) . runExceptT $ do lift . lift $ putTypeDeclaration newReferenceId dd - field @"migratedDefnHashes" %= Set.insert hash + -- Need to get one of the new references to grab its hash, doesn't matter which one since + -- all hashes in the component are the same. + case newComponent ^? traversed . _2 . to Reference.idToHash of + Nothing -> pure () + Just newHash -> insertObjectMappingForHash conn oldHash newHash + field @"migratedDefnHashes" %= Set.insert oldHash pure Sync.Done +insertObjectMappingForHash :: + (MonadIO m, MonadState MigrationState m) => + Connection -> + Old Hash -> + New Hash -> + m () +insertObjectMappingForHash conn oldHash newHash = do + (oldObjectId, newHashId, newObjectId) <- runDB conn . liftQ $ do + oldHashId <- Q.expectHashIdByHash . Cv.hash1to2 $ oldHash + oldObjectId <- Q.expectObjectIdForPrimaryHashId $ oldHashId + newHashId <- Q.expectHashIdByHash . Cv.hash1to2 $ newHash + newObjectId <- Q.expectObjectIdForPrimaryHashId $ newHashId + pure (oldObjectId, newHashId, newObjectId) + field @"objLookup" %= Map.insert oldObjectId (newObjectId, newHashId, newHash) + typeReferences_ :: (Monad m, Ord v) => LensLike' m (Type v a) SomeReferenceId typeReferences_ = ABT.rewriteDown_ -- Focus all terms From 2bd2a4260ec365ae85768196009258e82cf27f5b Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 10 Nov 2021 17:09:07 -0600 Subject: [PATCH 103/297] Store and use old hashes in objLookup --- .../SqliteCodebase/MigrateSchema12.hs | 46 ++++++++++--------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 3146edef8d..4be4e03372 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -104,12 +104,12 @@ migrateSchema12 conn codebase = do (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) `runReaderT` Env {db = conn, codebase} `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty - ifor_ (objLookup migrationState) \old (new, _, _) -> do + ifor_ (objLookup migrationState) \oldObjId (newObjId, _, _, _) -> do (runDB conn . liftQ) do - Q.recordObjectRehash old new - Q.deleteIndexesForObject old + Q.recordObjectRehash oldObjId newObjId + Q.deleteIndexesForObject oldObjId -- what about deleting old watches? - Q.deleteObject old + Q.deleteObject oldObjId where progress :: Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity progress = @@ -135,7 +135,9 @@ data MigrationState = MigrationState -- Mapping between old cycle-position -> new cycle-position for a given Decl object. { referenceMapping :: Map (Old SomeReferenceId) (New SomeReferenceId), causalMapping :: Map (Old CausalHashId) (New (CausalHash, CausalHashId)), - objLookup :: Map (Old ObjectId) ((New ObjectId, New HashId, New Hash)), + -- We also store the old hash for this object ID since we need a way to + -- convert Object Reference IDs into Hash Reference IDs so we can use the referenceMapping. + objLookup :: Map (Old ObjectId) (New ObjectId, New HashId, New Hash, Old Hash), -- Remember the hashes of term/decls that we have already migrated to avoid migrating them twice. migratedDefnHashes :: Set (Old Hash) } @@ -207,7 +209,7 @@ migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do let unmigratedEntities = unmigratedBranch <> unmigratedParents when (not . null $ unmigratedParents <> unmigratedBranch) (throwE $ Sync.Missing unmigratedEntities) - (_, _, newBranchHash) <- gets (\MigrationState {objLookup} -> objLookup Map.! branchObjId) + (_, _, newBranchHash, _) <- gets (\MigrationState {objLookup} -> objLookup Map.! branchObjId) let (newParentHashes, newParentHashIds) = oldCausalParentHashIds @@ -245,6 +247,7 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do whenM (Map.member oldObjectId <$> use (field @"objLookup")) (throwE Sync.PreviouslyDone) oldBranch <- runDB conn (Ops.loadDbBranchByObjectId (BranchObjectId oldObjectId)) + oldHash <- fmap Cv.hash2to1 . runDB conn $ Ops.loadHashByObjectId oldObjectId oldBranchWithHashes <- runDB conn (traverseOf S.branchHashes_ (fmap Cv.hash2to1 . Ops.loadHashByObjectId) oldBranch) migratedRefs <- gets referenceMapping migratedObjects <- gets objLookup @@ -292,13 +295,13 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do let remapPatchObjectId patchObjId = case Map.lookup (unPatchObjectId patchObjId) migratedObjects of Nothing -> error $ "Expected patch: " <> show patchObjId <> " to be migrated" - Just (newPatchObjId, _, _) -> PatchObjectId newPatchObjId + Just (newPatchObjId, _, _, _) -> PatchObjectId newPatchObjId let remapCausalHashId causalHashId = case Map.lookup causalHashId migratedCausals of Nothing -> error $ "Expected causal hash id: " <> show causalHashId <> " to be migrated" Just (_, newCausalHashId) -> newCausalHashId let remapBranchObjectId objId = case Map.lookup (unBranchObjectId objId) migratedObjects of Nothing -> error $ "Expected object: " <> show objId <> " to be migrated" - Just (newBranchObjId, _, _) -> BranchObjectId newBranchObjId + Just (newBranchObjId, _, _, _) -> BranchObjectId newBranchObjId let newBranch :: S.DbBranch newBranch = @@ -308,10 +311,10 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do & S.childrenHashes_ %~ (remapBranchObjectId *** remapCausalHashId) let (localBranchIds, localBranch) = S.LocalizeObject.localizeBranch newBranch - hash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranch)) - newHashId <- runDB conn (Ops.liftQ (Q.saveBranchHash (BranchHash (Cv.hash1to2 hash)))) + newHash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranch)) + newHashId <- runDB conn (Ops.liftQ (Q.saveBranchHash (BranchHash (Cv.hash1to2 newHash)))) newObjectId <- runDB conn (Ops.saveBranchObject newHashId localBranchIds localBranch) - field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, hash) + field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, newHash, oldHash) pure Sync.Done migratePatch :: @@ -323,6 +326,7 @@ migratePatch :: migratePatch conn oldObjectId = fmap (either id id) . runExceptT $ do whenM (Map.member (unPatchObjectId oldObjectId) <$> use (field @"objLookup")) (throwE Sync.PreviouslyDone) + oldHash <- fmap Cv.hash2to1 . runDB conn $ Ops.loadHashByObjectId (unPatchObjectId oldObjectId) oldPatch <- runDB conn (Ops.loadDbPatchById oldObjectId) let hydrateHashes :: forall m. Q.EDB m => HashId -> m Hash hydrateHashes hashId = do @@ -369,7 +373,7 @@ migratePatch conn oldObjectId = fmap (either id id) . runExceptT $ do newHash <- runDB conn (liftQ (Hashing.dbPatchHash newPatchWithIds)) newObjectId <- runDB conn (Ops.saveDbPatch (PatchHash (Cv.hash1to2 newHash)) (S.Patch.Format.Full localPatchIds localPatch)) newHashId <- runDB conn (liftQ (Q.expectHashIdByHash (Cv.hash1to2 newHash))) - field @"objLookup" %= Map.insert (unPatchObjectId oldObjectId) (unPatchObjectId newObjectId, newHashId, newHash) + field @"objLookup" %= Map.insert (unPatchObjectId oldObjectId) (unPatchObjectId newObjectId, newHashId, newHash, oldHash) pure Sync.Done -- | PLAN @@ -672,7 +676,7 @@ insertObjectMappingForHash conn oldHash newHash = do newHashId <- Q.expectHashIdByHash . Cv.hash1to2 $ newHash newObjectId <- Q.expectObjectIdForPrimaryHashId $ newHashId pure (oldObjectId, newHashId, newObjectId) - field @"objLookup" %= Map.insert oldObjectId (newObjectId, newHashId, newHash) + field @"objLookup" %= Map.insert oldObjectId (newObjectId, newHashId, newHash, oldHash) typeReferences_ :: (Monad m, Ord v) => LensLike' m (Type v a) SomeReferenceId typeReferences_ = @@ -750,7 +754,7 @@ type SomeReferenceId = SomeReference Reference.Id type SomeReferenceObjId = SomeReference (UReference.Id' ObjectId) remapObjIdRefs :: - (Map (Old ObjectId) (New ObjectId, New HashId, New Hash)) -> + (Map (Old ObjectId) (New ObjectId, New HashId, New Hash, Old Hash)) -> (Map SomeReferenceId SomeReferenceId) -> SomeReferenceObjId -> SomeReferenceObjId @@ -758,18 +762,18 @@ remapObjIdRefs objMapping refMapping someObjIdRef = newSomeObjId where oldObjId :: ObjectId oldObjId = someObjIdRef ^. someRef_ . UReference.idH - (newObjId, _, newHash) = + (newObjId, _, _, oldHash) = case Map.lookup oldObjId objMapping of Nothing -> error $ "Expected object mapping for ID: " <> show oldObjId Just found -> found - someRefId :: SomeReferenceId - someRefId = (someObjIdRef & someRef_ . UReference.idH .~ newHash) ^. uRefIdAsRefId_ - newRefId :: SomeReferenceId - newRefId = case Map.lookup someRefId refMapping of - Nothing -> error $ "Expected reference mapping for ID: " <> show someRefId + oldSomeRefId :: SomeReferenceId + oldSomeRefId = (someObjIdRef & someRef_ . UReference.idH .~ oldHash) ^. uRefIdAsRefId_ + newSomeRefId :: SomeReferenceId + newSomeRefId = case Map.lookup oldSomeRefId refMapping of + Nothing -> error $ "Expected reference mapping for ID: " <> show oldSomeRefId Just r -> r newSomeObjId :: SomeReference (UReference.Id' (New ObjectId)) - newSomeObjId = (newRefId ^. from uRefIdAsRefId_) & someRef_ . UReference.idH .~ newObjId + newSomeObjId = (newSomeRefId ^. from uRefIdAsRefId_) & someRef_ . UReference.idH .~ newObjId data SomeReference ref = TermReference ref From 20e81a5f5349149558b0093fd1788fefe6d7c99d Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 10 Nov 2021 17:20:29 -0600 Subject: [PATCH 104/297] Filter for UNmigrated causals, not migrated ones --- .../Codebase/SqliteCodebase/MigrateSchema12.hs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 4be4e03372..11241f3532 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -204,17 +204,17 @@ migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do migratedCausals <- gets causalMapping let unmigratedParents = oldCausalParentHashIds - & filter (`Map.member` migratedCausals) + & filter (`Map.notMember` migratedCausals) & fmap CausalE let unmigratedEntities = unmigratedBranch <> unmigratedParents when (not . null $ unmigratedParents <> unmigratedBranch) (throwE $ Sync.Missing unmigratedEntities) - (_, _, newBranchHash, _) <- gets (\MigrationState {objLookup} -> objLookup Map.! branchObjId) + (_, _, newBranchHash, _) <- gets (\MigrationState {objLookup} -> objLookup ^?! ix branchObjId) let (newParentHashes, newParentHashIds) = oldCausalParentHashIds & fmap - (\oldParentHashId -> migratedCausals Map.! oldParentHashId) + (\oldParentHashId -> migratedCausals ^?! ix oldParentHashId) & unzip & bimap (Set.fromList . map unCausalHash) Set.fromList @@ -231,7 +231,7 @@ migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do let newCausal = DbCausal { selfHash = newCausalHashId, - valueHash = BranchHashId $ view _2 (migratedObjIds Map.! branchObjId), + valueHash = BranchHashId $ view _2 (migratedObjIds ^?! ix branchObjId), parents = newParentHashIds } runDB conn do @@ -551,7 +551,7 @@ migrateTermComponent conn Codebase {..} oldHash = fmap (either id id) . runExcep & Convert.hashTermComponents ifor newTermComponents $ \v (newReferenceId, trm, typ) -> do - let oldReferenceId = vToOldReferenceMapping Map.! v + let oldReferenceId = vToOldReferenceMapping ^?! ix v field @"referenceMapping" %= Map.insert (TermReference oldReferenceId) (TermReference newReferenceId) lift . lift $ putTerm newReferenceId trm typ @@ -635,12 +635,12 @@ migrateDeclComponent conn Codebase {..} oldHash = fmap (either id id) . runExcep & fromRight (error "unexpected resolution error") for_ newComponent $ \(declName, newReferenceId, dd) -> do - let oldReferenceId = declNameToOldReference Map.! declName + let oldReferenceId = declNameToOldReference ^?! ix declName field @"referenceMapping" %= Map.insert (TypeReference oldReferenceId) (TypeReference newReferenceId) let oldConstructorIds :: Map (ConstructorName v) (Old ConstructorId) oldConstructorIds = - (componentIDMap Map.! oldReferenceId) + (componentIDMap ^?! ix oldReferenceId) & DD.asDataDecl & DD.constructors' & imap (\constructorId (_ann, constructorName, _type) -> (constructorName, fromIntegral constructorId)) @@ -649,7 +649,7 @@ migrateDeclComponent conn Codebase {..} oldHash = fmap (either id id) . runExcep ifor_ (DD.constructors' (DD.asDataDecl dd)) \(fromIntegral -> newConstructorId) (_ann, constructorName, _type) -> do field @"referenceMapping" %= Map.insert - (ConstructorReference oldReferenceId (oldConstructorIds Map.! constructorName)) + (ConstructorReference oldReferenceId (oldConstructorIds ^?! ix constructorName)) (ConstructorReference newReferenceId newConstructorId) lift . lift $ putTypeDeclaration newReferenceId dd From 8d01c48e5e63bd6933512c80dda201ed8c664ec6 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 10 Nov 2021 17:32:38 -0600 Subject: [PATCH 105/297] Ensure we also count references in types as dependencies. --- .../SqliteCodebase/MigrateSchema12.hs | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 11241f3532..a7169366ef 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -314,7 +314,7 @@ migrateBranch conn oldObjectId = fmap (either id id) . runExceptT $ do newHash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash newBranch)) newHashId <- runDB conn (Ops.liftQ (Q.saveBranchHash (BranchHash (Cv.hash1to2 newHash)))) newObjectId <- runDB conn (Ops.saveBranchObject newHashId localBranchIds localBranch) - field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, newHash, oldHash) + field @"objLookup" %= Map.insert oldObjectId (unBranchObjectId newObjectId, unBranchHashId newHashId, newHash, oldHash) pure Sync.Done migratePatch :: @@ -497,13 +497,13 @@ migrateTermComponent :: migrateTermComponent conn Codebase {..} oldHash = fmap (either id id) . runExceptT $ do whenM (Set.member oldHash <$> use (field @"migratedDefnHashes")) (throwE Sync.PreviouslyDone) - component <- + oldComponent <- (lift . lift $ getTermComponentWithTypes oldHash) >>= \case Nothing -> error $ "Hash was missing from codebase: " <> show oldHash - Just component -> pure component + Just c -> pure c let componentIDMap :: Map (Old Reference.Id) (Term.Term v a, Type v a) - componentIDMap = Map.fromList $ Reference.componentFor oldHash component + componentIDMap = Map.fromList $ Reference.componentFor oldHash oldComponent let unhashed :: Map (Old Reference.Id) (v, Term.Term v a) unhashed = Term.unhashComponent (fst <$> componentIDMap) let vToOldReferenceMapping :: Map v (Old Reference.Id) @@ -514,23 +514,23 @@ migrateTermComponent conn Codebase {..} oldHash = fmap (either id id) . runExcep & Map.fromList referencesMap <- gets referenceMapping - let getMigratedReference :: Old SomeReferenceId -> New SomeReferenceId - getMigratedReference ref = - Map.findWithDefault (error "unmigrated reference") ref referencesMap let allMissingReferences :: [Old SomeReferenceId] allMissingReferences = - unhashed - & foldSetter - ( traversed - . _2 - . termReferences_ - . filtered (`Map.notMember` referencesMap) - ) + let missingTermRefs = + unhashed & foldSetter (traversed . _2 . termReferences_) + missingTypeRefs = + componentIDMap + & foldSetter (traversed . _2 . typeReferences_) + in filter (`Map.notMember` referencesMap) (missingTermRefs <> missingTypeRefs) when (not . null $ allMissingReferences) $ throwE $ Sync.Missing . nubOrd $ (someReferenceIdToEntity <$> allMissingReferences) + let getMigratedReference :: Old SomeReferenceId -> New SomeReferenceId + getMigratedReference ref = + Map.findWithDefault (error $ "unmigrated reference" <> show ref) ref referencesMap + let remappedReferences :: Map (Old Reference.Id) (v, Term.Term v a, Type v a) = Zip.zipWith ( \(v, trm) (_, typ) -> From bb6574305dc4ae30dfb56bf36fa36635772f41d5 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 10 Nov 2021 18:43:07 -0600 Subject: [PATCH 106/297] New steps --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index a7169366ef..b7e9d3bf47 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -78,8 +78,11 @@ import Unison.Var (Var) -- todo: -- * write a harness to call & seed algorithm --- * may involve writing a `Progress` +-- * [ ] embed migration in a transaction/savepoint and ensure that we never leave the codebase in a +-- weird state even if we crash. +-- * [x] may involve writing a `Progress` -- * raw DB things: +-- * [ ] write new namespace root after migration. -- * [ ] overwrite object_id column in hash_object table to point at new objects <-- mitchell has started -- * [ ] delete references to old objects in index tables (where else?) -- * [ ] delete old objects From d136f684afe1da4488003595e0201870344b6ba6 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Wed, 10 Nov 2021 20:06:35 -0500 Subject: [PATCH 107/297] move hashing of Kind to hashing pseudopackage --- .../src/Unison/Hashing/V2/Convert.hs | 28 +++++++++++++------ .../src/Unison/Hashing/V2/Kind.hs | 15 ++++++++++ .../src/Unison/Hashing/V2/Type.hs | 2 +- .../unison-parser-typechecker.cabal | 1 + unison-core/src/Unison/ConstructorType.hs | 4 --- unison-core/src/Unison/Kind.hs | 8 ------ 6 files changed, 37 insertions(+), 21 deletions(-) create mode 100644 parser-typechecker/src/Unison/Hashing/V2/Kind.hs diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 5664347ea4..e85bdc1957 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -17,8 +17,12 @@ where import Control.Lens (over, _3) import qualified Control.Lens as Lens +import Control.Monad.Trans.Writer.CPS (Writer) +import qualified Control.Monad.Trans.Writer.CPS as Writer import Data.Bifunctor (bimap) +import Data.Bitraversable (bitraverse) import Data.Foldable (toList) +import Data.Functor ((<&>)) import Data.Map (Map) import qualified Data.Map as Map import Data.Set (Set) @@ -29,6 +33,8 @@ import qualified Unison.Codebase.Causal as Memory.Causal import qualified Unison.Codebase.Patch as Memory.Patch import qualified Unison.Codebase.TermEdit as Memory.TermEdit import qualified Unison.Codebase.TypeEdit as Memory.TypeEdit +import qualified Unison.ConstructorType as CT +import qualified Unison.ConstructorType as Memory.ConstructorType import qualified Unison.DataDeclaration as Memory.DD import Unison.Hash (Hash) import Unison.Hashable (Accumulate, Token) @@ -36,6 +42,7 @@ import qualified Unison.Hashable as H import qualified Unison.Hashing.V2.Branch as Hashing.Branch import qualified Unison.Hashing.V2.Causal as Hashing.Causal import qualified Unison.Hashing.V2.DataDeclaration as Hashing.DD +import qualified Unison.Hashing.V2.Kind as Hashing.Kind import qualified Unison.Hashing.V2.Patch as Hashing.Patch import qualified Unison.Hashing.V2.Pattern as Hashing.Pattern import qualified Unison.Hashing.V2.Reference as Hashing.Reference @@ -44,6 +51,7 @@ import qualified Unison.Hashing.V2.Term as Hashing.Term import qualified Unison.Hashing.V2.TermEdit as Hashing.TermEdit import qualified Unison.Hashing.V2.Type as Hashing.Type import qualified Unison.Hashing.V2.TypeEdit as Hashing.TypeEdit +import qualified Unison.Kind as Memory.Kind import Unison.NameSegment (NameSegment) import Unison.Names.ResolutionResult (ResolutionResult) import qualified Unison.Pattern as Memory.Pattern @@ -54,12 +62,6 @@ import qualified Unison.Type as Memory.Type import qualified Unison.Util.Relation as Relation import qualified Unison.Util.Star3 as Memory.Star3 import Unison.Var (Var) -import qualified Unison.ConstructorType as Memory.ConstructorType -import Control.Monad.Trans.Writer.CPS (Writer) -import qualified Control.Monad.Trans.Writer.CPS as Writer -import qualified Unison.ConstructorType as CT -import Data.Functor ((<&>)) -import Data.Bitraversable (bitraverse) typeToReference :: Var v => Memory.Type.Type v a -> Memory.Reference.Reference typeToReference = h2mReference . Hashing.Type.toReference . m2hType . Memory.Type.removeAllEffectVars @@ -265,13 +267,18 @@ m2hType :: Ord v => Memory.Type.Type v a -> Hashing.Type.Type v a m2hType = ABT.transform \case Memory.Type.Ref ref -> Hashing.Type.Ref (m2hReference ref) Memory.Type.Arrow a1 a1' -> Hashing.Type.Arrow a1 a1' - Memory.Type.Ann a1 ki -> Hashing.Type.Ann a1 ki + Memory.Type.Ann a1 ki -> Hashing.Type.Ann a1 (m2hKind ki) Memory.Type.App a1 a1' -> Hashing.Type.App a1 a1' Memory.Type.Effect a1 a1' -> Hashing.Type.Effect a1 a1' Memory.Type.Effects a1s -> Hashing.Type.Effects a1s Memory.Type.Forall a1 -> Hashing.Type.Forall a1 Memory.Type.IntroOuter a1 -> Hashing.Type.IntroOuter a1 +m2hKind :: Memory.Kind.Kind -> Hashing.Kind.Kind +m2hKind = \case + Memory.Kind.Star -> Hashing.Kind.Star + Memory.Kind.Arrow k1 k2 -> Hashing.Kind.Arrow (m2hKind k1) (m2hKind k2) + m2hReference :: Memory.Reference.Reference -> Hashing.Reference.Reference m2hReference = \case Memory.Reference.Builtin t -> Hashing.Reference.Builtin t @@ -298,13 +305,18 @@ h2mType :: Ord v => Hashing.Type.Type v a -> Memory.Type.Type v a h2mType = ABT.transform \case Hashing.Type.Ref ref -> Memory.Type.Ref (h2mReference ref) Hashing.Type.Arrow a1 a1' -> Memory.Type.Arrow a1 a1' - Hashing.Type.Ann a1 ki -> Memory.Type.Ann a1 ki + Hashing.Type.Ann a1 ki -> Memory.Type.Ann a1 (h2mKind ki) Hashing.Type.App a1 a1' -> Memory.Type.App a1 a1' Hashing.Type.Effect a1 a1' -> Memory.Type.Effect a1 a1' Hashing.Type.Effects a1s -> Memory.Type.Effects a1s Hashing.Type.Forall a1 -> Memory.Type.Forall a1 Hashing.Type.IntroOuter a1 -> Memory.Type.IntroOuter a1 +h2mKind :: Hashing.Kind.Kind -> Memory.Kind.Kind +h2mKind = \case + Hashing.Kind.Star -> Memory.Kind.Star + Hashing.Kind.Arrow k1 k2 -> Memory.Kind.Arrow (h2mKind k1) (h2mKind k2) + h2mReference :: Hashing.Reference.Reference -> Memory.Reference.Reference h2mReference = \case Hashing.Reference.Builtin t -> Memory.Reference.Builtin t diff --git a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs new file mode 100644 index 0000000000..6cdc6224f5 --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE DeriveGeneric #-} + +module Unison.Hashing.V2.Kind where + +import Unison.Prelude + +import Unison.Hashable (Hashable) +import qualified Unison.Hashable as Hashable + +data Kind = Star | Arrow Kind Kind deriving (Eq,Ord,Read,Show,Generic) + +instance Hashable Kind where + tokens k = case k of + Star -> [Hashable.Tag 0] + Arrow k1 k2 -> (Hashable.Tag 1 : Hashable.tokens k1) ++ Hashable.tokens k2 diff --git a/parser-typechecker/src/Unison/Hashing/V2/Type.hs b/parser-typechecker/src/Unison/Hashing/V2/Type.hs index 49fc2134a9..3813073454 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Type.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Type.hs @@ -32,7 +32,7 @@ import qualified Unison.Hashable as Hashable import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference -import qualified Unison.Kind as K +import qualified Unison.Hashing.V2.Kind as K import qualified Unison.Name as Name import qualified Unison.Names.ResolutionResult as Names import Unison.Prelude diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 6a714a63a2..adc0dab0fa 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -105,6 +105,7 @@ library Unison.Hashing.V2.Causal Unison.Hashing.V2.Convert Unison.Hashing.V2.DataDeclaration + Unison.Hashing.V2.Kind Unison.Hashing.V2.Patch Unison.Hashing.V2.Pattern Unison.Hashing.V2.Reference diff --git a/unison-core/src/Unison/ConstructorType.hs b/unison-core/src/Unison/ConstructorType.hs index 02a0f98f3b..a0e2b2940b 100644 --- a/unison-core/src/Unison/ConstructorType.hs +++ b/unison-core/src/Unison/ConstructorType.hs @@ -3,9 +3,5 @@ module Unison.ConstructorType where import Unison.Prelude -import Unison.Hashable (Hashable, Token(Tag), tokens) data ConstructorType = Data | Effect deriving (Eq, Ord, Show, Enum, Generic) - -instance Hashable ConstructorType where - tokens b = [Tag . fromIntegral $ fromEnum b] diff --git a/unison-core/src/Unison/Kind.hs b/unison-core/src/Unison/Kind.hs index 531ff42268..312ab9ee21 100644 --- a/unison-core/src/Unison/Kind.hs +++ b/unison-core/src/Unison/Kind.hs @@ -4,12 +4,4 @@ module Unison.Kind where import Unison.Prelude -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as Hashable - data Kind = Star | Arrow Kind Kind deriving (Eq,Ord,Read,Show,Generic) - -instance Hashable Kind where - tokens k = case k of - Star -> [Hashable.Tag 0] - Arrow k1 k2 -> (Hashable.Tag 1 : Hashable.tokens k1) ++ Hashable.tokens k2 From 49ba7037649286b8d87d2cf08a7b88b062b19138 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 11 Nov 2021 00:38:29 -0500 Subject: [PATCH 108/297] merge trunk and 2625 --- .github/workflows/ci.yaml | 10 +- CONTRIBUTORS.markdown | 2 + codebase2/util/bench/Main.hs | 4 +- codebase2/util/package.yaml | 1 + codebase2/util/src/U/Util/Base32Hex.hs | 27 +- codebase2/util/src/U/Util/Hash.hs | 33 +- codebase2/util/unison-util.cabal | 2 + contrib/cabal.project | 3 + editor-support/vim/autoload/unison.vim | 104 + editor-support/vim/doc/unison.txt | 125 + editor-support/vim/ftplugin/unison.vim | 7 + hie.yaml | 6 + parser-typechecker/.DS_Store | Bin 6148 -> 0 bytes parser-typechecker/package.yaml | 2 + parser-typechecker/src/Unison/Builtin.hs | 1 + .../src/Unison/Codebase/Branch.hs | 129 +- .../src/Unison/Codebase/Causal.hs | 21 + .../src/Unison/Codebase/Editor/HandleInput.hs | 3075 --------------- .../src/Unison/Codebase/PushBehavior.hs | 13 + .../src/Unison/CommandLine/OutputMessages.hs | 2084 ---------- parser-typechecker/src/Unison/DeclPrinter.hs | 6 +- parser-typechecker/src/Unison/Parser.hs | 4 +- .../src/Unison/PrettyPrintEnv.hs | 17 +- .../src/Unison/PrettyPrintEnv/Names.hs | 10 +- parser-typechecker/src/Unison/Runtime/ANF.hs | 27 +- .../src/Unison/Runtime/ANF/Serialize.hs | 13 +- .../src/Unison/Runtime/Builtin.hs | 91 +- .../src/Unison/Runtime/Decompile.hs | 3 +- .../src/Unison/Runtime/Foreign.hs | 4 +- .../src/Unison/Runtime/Foreign/Function.hs | 2 +- .../src/Unison/Runtime/MCode.hs | 5 +- .../src/Unison/Runtime/MCode/Serialize.hs | 9 +- .../src/Unison/Runtime/Machine.hs | 55 +- .../src/Unison/Runtime/Serialize.hs | 19 +- .../src/Unison/Server/Doc/AsHtml.hs | 35 +- .../src/Unison/Server/Syntax.hs | 1 + parser-typechecker/src/Unison/Util/Bytes.hs | 415 +- parser-typechecker/src/Unison/Util/Rope.hs | 236 ++ parser-typechecker/src/Unison/Util/Text.hs | 130 + parser-typechecker/tests/Suite.hs | 12 +- parser-typechecker/tests/Unison/Test/ANF.hs | 5 +- .../tests/Unison/Test/Codebase/Causal.hs | 3 +- .../tests/Unison/Test/DataDeclaration.hs | 5 +- parser-typechecker/tests/Unison/Test/Term.hs | 5 +- .../tests/Unison/Test/Util/Text.hs | 107 + .../unison-parser-typechecker.cabal | 36 +- .../IntegrationTests/ArgumentParsing.hs | 16 +- unison-cli/package.yaml | 111 +- .../src/Unison/Codebase/Editor/AuthorInfo.hs | 0 .../src/Unison/Codebase/Editor/Command.hs | 2 +- .../Unison/Codebase/Editor/HandleCommand.hs | 11 +- .../src/Unison/Codebase/Editor/HandleInput.hs | 3355 +++++++++++++++++ .../src/Unison/Codebase/Editor/Input.hs | 13 +- .../src/Unison/Codebase/Editor/Output.hs | 395 +- .../Codebase/Editor/Output/BranchDiff.hs | 0 .../Codebase/Editor/Output/DumpNamespace.hs | 0 .../src/Unison/Codebase/Editor/Propagate.hs | 0 .../Unison/Codebase/Editor/SlurpComponent.hs | 0 .../src/Unison/Codebase/Editor/SlurpResult.hs | 0 .../src/Unison/Codebase/Editor/TodoOutput.hs | 0 .../src/Unison/Codebase/Editor/UriParser.hs | 0 .../Unison/Codebase/Editor/VersionParser.hs | 0 .../src/Unison/Codebase/TranscriptParser.hs | 6 +- .../src/Unison/CommandLine.hs | 69 +- .../src/Unison/CommandLine/DisplayValues.hs | 0 .../src/Unison/CommandLine/FuzzySelect.hs | 0 .../src/Unison/CommandLine/Globbing.hs | 38 +- .../src/Unison/CommandLine/InputPattern.hs | 0 .../src/Unison/CommandLine/InputPatterns.hs | 93 +- .../src/Unison/CommandLine/Main.hs | 0 .../src/Unison/CommandLine/OutputMessages.hs | 2525 +++++++++++++ .../src/Unison/CommandLine/Welcome.hs | 0 unison-cli/tests/Main.hs | 30 + .../tests/Unison/Test/ClearCache.hs | 0 .../tests/Unison/Test/CommandLine.hs | 0 .../tests/Unison/Test/GitSync.hs | 38 +- .../tests/Unison/Test/Ucm.hs | 2 +- .../tests/Unison/Test/UriParser.hs | 0 .../tests/Unison/Test/VersionParser.hs | 0 unison-cli/unison-cli.cabal | 252 +- unison-cli/unison/ArgParse.hs | 65 +- unison-cli/unison/Main.hs | 20 +- unison-core/package.yaml | 2 +- unison-core/src/Unison/Hash.hs | 126 +- unison-core/src/Unison/HashQualified'.hs | 7 + unison-core/src/Unison/HashQualified.hs | 5 +- unison-core/src/Unison/Hashable.hs | 33 +- unison-core/src/Unison/NamesWithHistory.hs | 10 +- unison-core/unison-core1.cabal | 2 +- unison-src/transcripts/alias-many.output.md | 411 +- .../transcripts/builtins-merge.output.md | 2 +- unison-src/transcripts/diff.output.md | 14 +- .../transcripts/emptyCodebase.output.md | 4 +- unison-src/transcripts/fix2567.md | 18 + unison-src/transcripts/fix2567.output.md | 36 + unison-src/transcripts/globbing.md | 11 +- unison-src/transcripts/globbing.output.md | 38 +- unison-src/transcripts/io.md | 60 + unison-src/transcripts/io.output.md | 97 + unison-src/transcripts/merges.output.md | 12 +- unison-src/transcripts/reflog.output.md | 12 +- unison-src/transcripts/squash.output.md | 52 +- 102 files changed, 8467 insertions(+), 6430 deletions(-) create mode 100644 editor-support/vim/autoload/unison.vim create mode 100644 editor-support/vim/doc/unison.txt create mode 100644 editor-support/vim/ftplugin/unison.vim delete mode 100644 parser-typechecker/.DS_Store delete mode 100644 parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs create mode 100644 parser-typechecker/src/Unison/Codebase/PushBehavior.hs delete mode 100644 parser-typechecker/src/Unison/CommandLine/OutputMessages.hs create mode 100644 parser-typechecker/src/Unison/Util/Rope.hs create mode 100644 parser-typechecker/src/Unison/Util/Text.hs create mode 100644 parser-typechecker/tests/Unison/Test/Util/Text.hs rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/AuthorInfo.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/Command.hs (99%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/HandleCommand.hs (98%) create mode 100644 unison-cli/src/Unison/Codebase/Editor/HandleInput.hs rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/Input.hs (95%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/Output.hs (53%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/Output/BranchDiff.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/Output/DumpNamespace.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/Propagate.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/SlurpComponent.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/SlurpResult.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/TodoOutput.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/UriParser.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/Editor/VersionParser.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/Codebase/TranscriptParser.hs (98%) rename {parser-typechecker => unison-cli}/src/Unison/CommandLine.hs (85%) rename {parser-typechecker => unison-cli}/src/Unison/CommandLine/DisplayValues.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/CommandLine/FuzzySelect.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/CommandLine/Globbing.hs (87%) rename {parser-typechecker => unison-cli}/src/Unison/CommandLine/InputPattern.hs (100%) rename {parser-typechecker => unison-cli}/src/Unison/CommandLine/InputPatterns.hs (96%) rename {parser-typechecker => unison-cli}/src/Unison/CommandLine/Main.hs (100%) create mode 100644 unison-cli/src/Unison/CommandLine/OutputMessages.hs rename {parser-typechecker => unison-cli}/src/Unison/CommandLine/Welcome.hs (100%) create mode 100644 unison-cli/tests/Main.hs rename {parser-typechecker => unison-cli}/tests/Unison/Test/ClearCache.hs (100%) rename {parser-typechecker => unison-cli}/tests/Unison/Test/CommandLine.hs (100%) rename {parser-typechecker => unison-cli}/tests/Unison/Test/GitSync.hs (95%) rename {parser-typechecker => unison-cli}/tests/Unison/Test/Ucm.hs (97%) rename {parser-typechecker => unison-cli}/tests/Unison/Test/UriParser.hs (100%) rename {parser-typechecker => unison-cli}/tests/Unison/Test/VersionParser.hs (100%) create mode 100644 unison-src/transcripts/fix2567.md create mode 100644 unison-src/transcripts/fix2567.output.md diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b48ad32326..82dc2537be 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -91,11 +91,15 @@ jobs: - name: build dependencies run: stack --no-terminal build --fast --only-dependencies - name: build - run: stack --no-terminal build --fast + run: stack --no-terminal build --fast --no-run-tests --test # Run each test suite (tests and transcripts) - - name: tests + - name: unison-cli tests + run: stack --no-terminal build --fast --test unison-cli + - name: unison-parser-typechecker tests run: stack --no-terminal exec tests + - name: unison-util-relation tests + run: stack --no-terminal build --fast --test unison-util-relation - name: transcripts run: | stack --no-terminal exec transcripts @@ -105,5 +109,3 @@ jobs: run: stack --no-terminal exec unison transcript unison-src/transcripts-round-trip/main.md - name: integration-tests run: stack --no-terminal exec integration-tests - - name: other test suites - run: stack --no-terminal test unison-util-relation diff --git a/CONTRIBUTORS.markdown b/CONTRIBUTORS.markdown index fdb010366a..68040bd282 100644 --- a/CONTRIBUTORS.markdown +++ b/CONTRIBUTORS.markdown @@ -61,3 +61,5 @@ The format for this list: name, GitHub handle, and then optional blurb about wha * Evan Minsk (@iamevn) * Karthik Ravikanti (@plumenator) * Alberto Flores (@albertoefguerrero) +* Shawn Bachlet (@shawn-bachlet) +* Solomon Bothwell (@solomon-b) diff --git a/codebase2/util/bench/Main.hs b/codebase2/util/bench/Main.hs index 721fb4bc65..3280c31645 100644 --- a/codebase2/util/bench/Main.hs +++ b/codebase2/util/bench/Main.hs @@ -12,7 +12,7 @@ import qualified U.Util.Base32Hex as U.Base32Hex main :: IO () main = do - let textual = U.Base32Hex.UnsafeBase32Hex "kccnret7m1895ta8ncs3ct5pqmguqntvjlcsr270ug8mbqvkh07v983i12obpgsii0gbga2esk1423t6evr03f62hkkfllrrj7iil30" + let textual = U.Base32Hex.UnsafeFromText "kccnret7m1895ta8ncs3ct5pqmguqntvjlcsr270ug8mbqvkh07v983i12obpgsii0gbga2esk1423t6evr03f62hkkfllrrj7iil30" let binary = "\163\EM}\187\167\176P\146\245H\187\&86t\185\213\161\237_\191\157Y\205\136\224\244\DC1e\235\244\136\SI\244\160r\b\176\188\195\146\144 \184(N\229\STXA\SI\166w\246\SOH\188\194\141(\250\215{\153\229*\140" defaultMain @@ -29,7 +29,7 @@ sandi_fromByteString bs = -- The old implementation of `toByteString` which used `sandi` sandi_toByteString :: U.Base32Hex.Base32Hex -> ByteString -sandi_toByteString (U.Base32Hex.UnsafeBase32Hex txt) = +sandi_toByteString (U.Base32Hex.UnsafeFromText txt) = case Sandi.decode (Text.encodeUtf8 (Text.toUpper txt <> paddingChars)) of Left (_, _rem) -> error ("not base32: " <> Text.unpack txt) Right h -> h diff --git a/codebase2/util/package.yaml b/codebase2/util/package.yaml index 5271e63b7f..96d664f085 100644 --- a/codebase2/util/package.yaml +++ b/codebase2/util/package.yaml @@ -19,6 +19,7 @@ dependencies: - bytestring - containers - cryptonite + - extra - lens - memory - safe diff --git a/codebase2/util/src/U/Util/Base32Hex.hs b/codebase2/util/src/U/Util/Base32Hex.hs index 6da9fc06d4..e5ffdc0104 100644 --- a/codebase2/util/src/U/Util/Base32Hex.hs +++ b/codebase2/util/src/U/Util/Base32Hex.hs @@ -1,28 +1,47 @@ +{-# LANGUAGE ViewPatterns #-} + module U.Util.Base32Hex - ( Base32Hex (..), + ( Base32Hex (UnsafeFromText), fromByteString, toByteString, + fromText, + toText, + validChars, ) where import Data.ByteString (ByteString) import qualified Data.ByteString.Base32.Hex as Base32.Hex +import Data.Set (Set) +import qualified Data.Set as Set import Data.Text (Text) import qualified Data.Text as Text import qualified Data.Text.Encoding as Text -newtype Base32Hex = UnsafeBase32Hex {toText :: Text} +newtype Base32Hex = UnsafeFromText Text deriving (Eq, Ord, Show) +toText :: Base32Hex -> Text +toText (UnsafeFromText s) = s + -- | Return the lowercase unpadded base32Hex encoding of this 'ByteString'. -- Multibase prefix would be 'v', see https://github.com/multiformats/multibase fromByteString :: ByteString -> Base32Hex fromByteString = - UnsafeBase32Hex . Text.toLower . Base32.Hex.encodeBase32Unpadded + UnsafeFromText . Text.toLower . Base32.Hex.encodeBase32Unpadded -- | Produce a 'Hash' from a base32hex-encoded version of its binary representation toByteString :: Base32Hex -> ByteString -toByteString (UnsafeBase32Hex s) = +toByteString (UnsafeFromText s) = case Base32.Hex.decodeBase32Unpadded (Text.encodeUtf8 s) of Left _ -> error ("not base32: " <> Text.unpack s) Right h -> h + +fromText :: Text -> Maybe Base32Hex +fromText s = + if Base32.Hex.isBase32Hex . Text.encodeUtf8 . Text.toUpper $ s + then Just (UnsafeFromText s) + else Nothing + +validChars :: Set Char +validChars = Set.fromList $ ['0' .. '9'] ++ ['a' .. 'v'] diff --git a/codebase2/util/src/U/Util/Hash.hs b/codebase2/util/src/U/Util/Hash.hs index 91e3a47ef7..6bdee966d5 100644 --- a/codebase2/util/src/U/Util/Hash.hs +++ b/codebase2/util/src/U/Util/Hash.hs @@ -1,33 +1,40 @@ {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} -module U.Util.Hash where - --- (Hash, toBytes, base32Hex, base32Hexs, fromBase32Hex, fromBytes, unsafeFromBase32Hex, showBase32Hex, validBase32HexChars) where - --- import Unison.Prelude +module U.Util.Hash + ( Hash (Hash, toShort), + fromBase32Hex, + fromByteString, + toBase32Hex, + toByteString, + ) +where import Data.ByteString (ByteString) +import Data.ByteString.Short (ShortByteString, fromShort) import qualified Data.ByteString.Short as B.Short +import Data.Text (Text) import GHC.Generics (Generic) -import Data.ByteString.Short (fromShort, ShortByteString) -import qualified U.Util.Base32Hex as Base32Hex import U.Util.Base32Hex (Base32Hex) +import qualified U.Util.Base32Hex as Base32Hex -- | Hash which uniquely identifies a Unison type or term newtype Hash = Hash {toShort :: ShortByteString} deriving (Eq, Ord, Generic) toBase32Hex :: Hash -> Base32Hex -toBase32Hex = Base32Hex.fromByteString . toBytes +toBase32Hex = Base32Hex.fromByteString . toByteString + +toBase32HexText :: Hash -> Text +toBase32HexText = Base32Hex.toText . toBase32Hex fromBase32Hex :: Base32Hex -> Hash fromBase32Hex = Hash . B.Short.toShort . Base32Hex.toByteString -toBytes :: Hash -> ByteString -toBytes = fromShort . toShort +toByteString :: Hash -> ByteString +toByteString = fromShort . toShort -fromBytes :: ByteString -> Hash -fromBytes = Hash . B.Short.toShort +fromByteString :: ByteString -> Hash +fromByteString = Hash . B.Short.toShort instance Show Hash where - show h = "fromBase32Hex " ++ (show . Base32Hex.toText . toBase32Hex) h \ No newline at end of file + show h = (show . toBase32HexText) h diff --git a/codebase2/util/unison-util.cabal b/codebase2/util/unison-util.cabal index 12714daf39..a23de1d081 100644 --- a/codebase2/util/unison-util.cabal +++ b/codebase2/util/unison-util.cabal @@ -54,6 +54,7 @@ library , bytestring , containers , cryptonite + , extra , lens , memory , safe @@ -94,6 +95,7 @@ benchmark bench , containers , criterion , cryptonite + , extra , lens , memory , safe diff --git a/contrib/cabal.project b/contrib/cabal.project index d12a7d8c06..ac1976c6cb 100644 --- a/contrib/cabal.project +++ b/contrib/cabal.project @@ -2,6 +2,9 @@ packages: yaks/easytest parser-typechecker unison-core + unison-cli + lib/unison-prelude + lib/unison-util-relation codebase2/codebase codebase2/codebase-sqlite codebase2/codebase-sync diff --git a/editor-support/vim/autoload/unison.vim b/editor-support/vim/autoload/unison.vim new file mode 100644 index 0000000000..b76f45185f --- /dev/null +++ b/editor-support/vim/autoload/unison.vim @@ -0,0 +1,104 @@ +" Unison functionality for Vim, including type/term omnicompletion. +" +" Maintainer: Unison Computing +" Original Author: Cody Allen (ceedubs) + +if exists('g:autoloaded_unison') + finish +endif +let g:autoloaded_unison = 1 + +let s:required_config_value = "!REQUIRED!" + +" adapted from https://github.com/rust-lang/rust.vim/blob/4aa69b84c8a58fcec6b6dad6fe244b916b1cf830/autoload/rust.vim#L9-L18 +function! s:config(name, default) abort + let name = 'unison_' . a:name + " Local buffer variable with same name takes predeence over global + if has_key(b:, name) + return get(b:, name) + elseif has_key(g:, name) + return get(g:, name) + elseif a:default == s:required_config_value + throw 'Missing required configuration value: ' . name + else + return a:default + endif +endfunction + +function! s:curl_path() abort + return s:config('curl_path', "curl") +endfunction + +function! s:jq_path() abort + return s:config('jq_path', "jq") +endfunction + +function! unison#SetBufferDefaults() abort + if s:config('set_buffer_defaults', 1) + " Since Unison completion is fuzzy and not prefix-based, 'longest' doesn't + " work well, and 'noinsert' behaves a little better. + setlocal completeopt=menuone,noinsert,preview + + setlocal omnifunc=unison#Complete + endif +endfunction + +" Unison completion satisfying the standard vim completion signature, such +" that it can be assigned to omnifunc. +" vim will first call this to find the base input that should be completed, +" and then will call it again with the base input. +function! unison#Complete(findstart, base) abort + if a:findstart + " locate the start of the word + let line = getline('.') + let start = col('.') - 1 + while start > 0 && line[start - 1] !~ '\s' && line[start - 1] != '(' && line[start - 1] != ')' + let start -= 1 + endwhile + return start + else + return unison#CompleteForBase(a:base) + endif +endfunction + +" Return an array of completion items for the provided base input. For example +" base could be 'List.foldL', in which case the top result would probably be +" 'List.foldLeft'. +function! unison#CompleteForBase(base) abort + let resultLimit = s:config('complete_result_limit', 20) + let apiHost = s:config('api_host', 'localhost') + let apiPort = s:config('api_port', s:required_config_value) + let apiToken = s:config('api_token', s:required_config_value) + let apiUri = 'http://' . apiHost . ':' . apiPort . '/' . apiToken . '/api/find' + + let curlCommand = s:curl_path() . " -Gfs + \ --data-urlencode 'limit=" . resultLimit . "' + \ --data-urlencode 'query=" . a:base . "' " + \ . apiUri + + let jqFilter = ' + \ def prettyTermType: .termType|[(.[] | .segment)]|add; + \ def prettyTypeDef: if .tag == "BuiltinObject" then "builtin type " else "" end + (.contents|[(.[] | .segment)]|add); + \ def termToMatch: { + \ word: .bestFoundTermName, + \ info: (.namedTerm.termName + " : " + (.namedTerm|prettyTermType)), + \ menu: .namedTerm|prettyTermType + \ }; + \ def typeToMatch: { + \ word: .bestFoundTypeName, + \ info: (.namedType.typeName + " : " + (.typeDef|prettyTypeDef)), + \ menu: .typeDef|prettyTypeDef + \ }; + \ .[][1]|( + \ (select(.tag == "FoundTermResult")|.contents|termToMatch), + \ (select(.tag == "FoundTypeResult")|.contents|typeToMatch) + \ )' + + let command = curlCommand . " | " . s:jq_path() . " -c '" . jqFilter . "'" + let lines = system(command) + let resultObjects = split(lines, "\n") + call map(resultObjects, {_, val -> json_decode(val)}) + return resultObjects +endfunction + +" vim: set et sw=2 sts=2 ts=2: diff --git a/editor-support/vim/doc/unison.txt b/editor-support/vim/doc/unison.txt new file mode 100644 index 0000000000..57ecdd28dd --- /dev/null +++ b/editor-support/vim/doc/unison.txt @@ -0,0 +1,125 @@ +*unison.txt* Filetype plugin for the Unison programming language + +============================================================================== +CONTENTS *unison* + +1. Quick start |unison-quick-start| +2. Installation |unison-installation| +3. Configuration |unison-configuration| +4. Recommended Settings |unison-recommended-settings| + +============================================================================== +QUICK START *unison-quick-start* + +Install with your plugin manager of choice. For example with vim-plug: > + Plug 'unisonweb/unison', { 'branch': 'trunk', 'rtp': 'editor-support/vim' } +< +You should now have syntax highlighting for `.u` files. + +If you want advanced features such as code completion, you'll need to point +the plugin to a running Unison API server. + +In one terminal window run: > + ucm --port=6789 --token='local_ucm' +< +Feel free to substitute a port number and token of your choice. The token can +be whatever you want, but you should probably avoid characters that have +special meaning in shells and URLs. + +Now open a vim window and run the following commands: > + :let g:unison_api_port = 6789 + :let g:unison_api_token = 'local_ucm' +< +It's easiest if you always use the same port and token and add these commands +to your |vimrc|. + +If all went well you should be able to open a `scratch.u` file, enter insert +mode and start typing (ex: `map`), trigger omni completion (see |compl-omni|) +with `CTRL-X CTRL-O`, and see completion results for Unison types and terms! + + +See |unison-installation| and |unison-configuration| for troubleshooting and +more information. + +============================================================================== +INSTALLATION *unison-installation* + +Install with your plugin manager of choice. For example with vim-plug: > + Plug 'unisonweb/unison', { 'branch': 'trunk', 'rtp': 'editor-support/vim' } + +Note: This plugin requires both `curl` and `jq` to be installed. If they are +installed but not on the `PATH` of the running vim process, you can set them +explicitly with |unison_curl_path| and |unison_jq_path|. + +If you are a Nix user, you can use https://github.com/ceedubs/unison-nix/ to +install this plugin, and the dependencies will automatically be managed by +Nix. + +============================================================================== +CONFIGURATION *unison-configuration* + +All Unison configuration values allow buffer-local settings (ex: +`b:unison_api_port`) to override an global configuration (ex: +`g:unison_api_port`). + + +Required configuration values: *unison-configuration-required* + + *g:unison_api_port* +g:unison_api_port~ + + Example: > + let g:unison_api_port='5862' +< + + *g:unison_api_token* +g:unison_api_token~ + + Example: > + let g:unison_api_token='z8123l_acv2' +< + +Optional configuration values: *unison-configuration-optional* + + *g:unison_api_host* +g:unison_api_host~ + + Example: > + let g:unison_api_host='localhost' +< + + *g:unison_set_buffer_defaults* +g:unison_set_buffer_defaults~ + Set to 0 to disable default settings (such as 'omnifunc') + + Example: > + let g:unison_set_buffer_defaults=0 +< + *g:unison_curl_path* +g:unison_curl_path~ + The path to the `curl` executable. + + Example: > + let g:unison_curl_path='/usr/bin/curl' +< + + *g:unison_jq_path* +g:unison_jq_path~ + The path to the `jq` executable. + + Example: > + let g:unison_jq_path='/usr/local/bin/jq' +< +============================================================================== +RECOMMENDED SETTINGS *unison-recommended-settings* + +These settings aren't specific to Unison, but they will likely improve the +experience of the Unison support. + +> + " close preview window when completion is finished + autocmd CompleteDone * silent! pclose! +< + +============================================================================== + vim:tw=78:et:ft=help:norl: diff --git a/editor-support/vim/ftplugin/unison.vim b/editor-support/vim/ftplugin/unison.vim new file mode 100644 index 0000000000..ce410f0912 --- /dev/null +++ b/editor-support/vim/ftplugin/unison.vim @@ -0,0 +1,7 @@ +" Only do this when not done yet for this buffer +if exists("b:did_ftplugin") + finish +endif +let b:did_ftplugin = 1 + +call unison#SetBufferDefaults() diff --git a/hie.yaml b/hie.yaml index e0dbef5734..49496bc52b 100644 --- a/hie.yaml +++ b/hie.yaml @@ -45,6 +45,12 @@ cradle: - path: "parser-typechecker/tests" component: "unison-parser-typechecker:exe:tests" + - path: "unison-cli/src" + component: "unison-cli:lib" + + - path: "unison-cli/tests" + component: "unison-cli:test:tests" + - path: "unison-cli/transcripts" component: "unison-cli:exe:transcripts" diff --git a/parser-typechecker/.DS_Store b/parser-typechecker/.DS_Store deleted file mode 100644 index 949ebd51bf74f6fd18ae3e5f0e5326d13a9e4d68..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHK%}N6?5Kh`!vlX!i!5;VGt%v@I*n_aF_25ld(Su66ON(8oo6_A{w65%H=o|S2 zzK%0VS}4_n7ZE!HlW#JaNyxWLCSis4J2)9z?F@kSq$O z@hBL|U?rLye~|(Fc11Q}5er$&mcQS7FdBw&QmcLNT)wbT+!UfDO53GdZy={$;wO{X z^~cxPJNJ`8VC)MihYQ;eF2nw`Q{Fw1QR0VDf2@+jULQhkuEMA%r>>kty;#lT=>Z`u zVRg!t*{sp3RqbZ|D6QJFRs-yV=3$yzVsF2GeAc~xe40I{FRz+e4nLui9fNat17m5E z58gN$%IFUK^PG7sAu&J<5Cdz#fI0fa)*9@Uwnz*R13zN`&j%Y6(J`25R7VFiczwk2 z0wN07_?AGl4LSxhjW7bjbt<4v<>raObvpQM6XzJrH0pH5)ygoBS(%#`3RkOx-&Wy_ zI~u7a28e-W2J*UF#`=Hy{ri78iF(8UG4QV#;Dxr=c414VwytarYpo611x3NQOyeR2 j3{{FD7E5sjR0;TPGyolgnMUw{&_zJeKn*eQs|v diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index e2d3c72ae3..30a0f38fc3 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -48,6 +48,7 @@ library: - binary - bytes - bytestring + - bytestring-to-vector - cereal - containers >= 0.6.3 - configurator @@ -111,6 +112,7 @@ library: - temporary - terminal-size - text + - text-short - time - tls - transformers diff --git a/parser-typechecker/src/Unison/Builtin.hs b/parser-typechecker/src/Unison/Builtin.hs index cbb4110d29..c890edbbff 100644 --- a/parser-typechecker/src/Unison/Builtin.hs +++ b/parser-typechecker/src/Unison/Builtin.hs @@ -571,6 +571,7 @@ ioBuiltins = , ("IO.seekHandle.impl.v3", handle --> smode --> int --> iof unit) , ("IO.handlePosition.impl.v3", handle --> iof nat) , ("IO.getEnv.impl.v1", text --> iof text) + , ("IO.getArgs.impl.v1", unit --> iof (list text)) , ("IO.getBuffering.impl.v3", handle --> iof bmode) , ("IO.setBuffering.impl.v3", handle --> bmode --> iof unit) , ("IO.getBytes.impl.v3", handle --> nat --> iof bytes) diff --git a/parser-typechecker/src/Unison/Codebase/Branch.hs b/parser-typechecker/src/Unison/Codebase/Branch.hs index d3db1c6f77..7bb39a65f7 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch.hs @@ -119,10 +119,22 @@ deepTypeReferences :: Branch0 m -> Set Reference deepTypeReferences = R.dom . deepTypes terms :: Lens' (Branch0 m) (Star Referent NameSegment) -terms = lens _terms (\Branch0{..} x -> branch0 x _types _children _edits) +terms = + lens + _terms + \branch terms -> + branch {_terms = terms} + & deriveDeepTerms + & deriveDeepTermMetadata types :: Lens' (Branch0 m) (Star Reference NameSegment) -types = lens _types (\Branch0{..} x -> branch0 _terms x _children _edits) +types = + lens + _types + \branch types -> + branch {_types = types} + & deriveDeepTypes + & deriveDeepTypeMetadata children :: Lens' (Branch0 m) (Map NameSegment (Branch m)) children = lens _children (\Branch0{..} x -> branch0 _terms _types x _edits) @@ -141,54 +153,97 @@ branch0 terms types children edits = _types = types, _children = children, _edits = edits, - deepTerms = deepTerms', - deepTypes = deepTypes', - deepTermMetadata = deepTermMetadata', - deepTypeMetadata = deepTypeMetadata', - deepPaths = deepPaths', - deepEdits = deepEdits' + -- These are all overwritten immediately + deepTerms = R.empty, + deepTypes = R.empty, + deepTermMetadata = R4.empty, + deepTypeMetadata = R4.empty, + deepPaths = Set.empty, + deepEdits = Map.empty } + & deriveDeepTerms + & deriveDeepTypes + & deriveDeepTermMetadata + & deriveDeepTypeMetadata + & deriveDeepPaths + & deriveDeepEdits + +-- | Derive the 'deepTerms' field of a branch. +deriveDeepTerms :: Branch0 m -> Branch0 m +deriveDeepTerms branch = + branch {deepTerms = makeDeepTerms (_terms branch) (_children branch)} where - children' :: [(NameSegment, Branch m)] - children' = - Map.toList children - deepTerms' :: Relation Referent Name - deepTerms' = - R.mapRanMonotonic Name.fromSegment (Star3.d1 terms) <> foldMap go children' + makeDeepTerms :: Metadata.Star Referent NameSegment -> Map NameSegment (Branch m) -> Relation Referent Name + makeDeepTerms terms children = + R.mapRanMonotonic Name.fromSegment (Star3.d1 terms) <> ifoldMap go children where - go :: (NameSegment, Branch m) -> Relation Referent Name - go (n, b) = + go :: NameSegment -> Branch m -> Relation Referent Name + go n b = R.mapRan (Name.cons n) (deepTerms $ head b) - deepTypes' :: Relation Reference Name - deepTypes' = - R.mapRanMonotonic Name.fromSegment (Star3.d1 types) <> foldMap go children' + +-- | Derive the 'deepTypes' field of a branch. +deriveDeepTypes :: Branch0 m -> Branch0 m +deriveDeepTypes branch = + branch {deepTypes = makeDeepTypes (_types branch) (_children branch)} + where + makeDeepTypes :: Metadata.Star Reference NameSegment -> Map NameSegment (Branch m) -> Relation Reference Name + makeDeepTypes types children = + R.mapRanMonotonic Name.fromSegment (Star3.d1 types) <> ifoldMap go children where - go :: (NameSegment, Branch m) -> Relation Reference Name - go (n, b) = + go :: NameSegment -> Branch m -> Relation Reference Name + go n b = R.mapRan (Name.cons n) (deepTypes $ head b) - deepTermMetadata' :: Metadata.R4 Referent Name - deepTermMetadata' = - R4.mapD2Monotonic Name.fromSegment (Metadata.starToR4 terms) <> foldMap go children' + +-- | Derive the 'deepTermMetadata' field of a branch. +deriveDeepTermMetadata :: Branch0 m -> Branch0 m +deriveDeepTermMetadata branch = + branch {deepTermMetadata = makeDeepTermMetadata (_terms branch) (_children branch)} + where + makeDeepTermMetadata :: Metadata.Star Referent NameSegment -> Map NameSegment (Branch m) -> Metadata.R4 Referent Name + makeDeepTermMetadata terms children = + R4.mapD2Monotonic Name.fromSegment (Metadata.starToR4 terms) <> ifoldMap go children where - go (n, b) = + go :: NameSegment -> Branch m -> Metadata.R4 Referent Name + go n b = R4.mapD2 (Name.cons n) (deepTermMetadata $ head b) - deepTypeMetadata' :: Metadata.R4 Reference Name - deepTypeMetadata' = - R4.mapD2Monotonic Name.fromSegment (Metadata.starToR4 types) <> foldMap go children' + +-- | Derive the 'deepTypeMetadata' field of a branch. +deriveDeepTypeMetadata :: Branch0 m -> Branch0 m +deriveDeepTypeMetadata branch = + branch {deepTypeMetadata = makeDeepTypeMetadata (_types branch) (_children branch)} + where + makeDeepTypeMetadata :: Metadata.Star Reference NameSegment -> Map NameSegment (Branch m) -> Metadata.R4 Reference Name + makeDeepTypeMetadata types children = + R4.mapD2Monotonic Name.fromSegment (Metadata.starToR4 types) <> ifoldMap go children where - go (n, b) = + go :: NameSegment -> Branch m -> Metadata.R4 Reference Name + go n b = R4.mapD2 (Name.cons n) (deepTypeMetadata $ head b) - deepPaths' :: Set Path - deepPaths' = - Set.mapMonotonic Path.singleton (Map.keysSet children) <> foldMap go children' + +-- | Derive the 'deepPaths' field of a branch. +deriveDeepPaths :: Branch0 m -> Branch0 m +deriveDeepPaths branch = + branch {deepPaths = makeDeepPaths (_children branch)} + where + makeDeepPaths :: Map NameSegment (Branch m) -> Set Path + makeDeepPaths children = + Set.mapMonotonic Path.singleton (Map.keysSet children) <> ifoldMap go children where - go (n, b) = + go :: NameSegment -> Branch m -> Set Path + go n b = Set.map (Path.cons n) (deepPaths $ head b) - deepEdits' :: Map Name EditHash - deepEdits' = - Map.mapKeysMonotonic Name.fromSegment (Map.map fst edits) <> foldMap go children' + +-- | Derive the 'deepEdits' field of a branch. +deriveDeepEdits :: Branch0 m -> Branch0 m +deriveDeepEdits branch = + branch {deepEdits = makeDeepEdits (_edits branch) (_children branch)} + where + makeDeepEdits :: Map NameSegment (EditHash, m Patch) -> Map NameSegment (Branch m) -> Map Name EditHash + makeDeepEdits edits children = + Map.mapKeysMonotonic Name.fromSegment (Map.map fst edits) <> ifoldMap go children where - go (n, b) = + go :: NameSegment -> Branch m -> Map Name EditHash + go n b = Map.mapKeys (Name.cons n) (deepEdits $ head b) -- | a version of `deepEdits` that returns the `m Patch` as well. diff --git a/parser-typechecker/src/Unison/Codebase/Causal.hs b/parser-typechecker/src/Unison/Codebase/Causal.hs index ac39b716ea..29f6e4eef9 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal.hs @@ -27,13 +27,16 @@ module Unison.Codebase.Causal transform, unsafeMapHashPreserving, before, + beforeHash, ) where import Unison.Prelude +import qualified Control.Monad.Extra as Monad (anyM) import Control.Monad.State (StateT) import qualified Control.Monad.State as State +import qualified Control.Monad.Reader as Reader import qualified Data.Map as Map import Data.Sequence (ViewL (..)) import qualified Data.Sequence as Seq @@ -264,6 +267,24 @@ threeWayMerge' lca combine c1 c2 = do before :: Monad m => Causal m h e -> Causal m h e -> m Bool before a b = (== Just a) <$> lca a b +-- `True` if `h` is found in the history of `c` within `maxDepth` path length +-- from the tip of `c` +beforeHash :: forall m h e . Monad m => Word -> RawHash h -> Causal m h e -> m Bool +beforeHash maxDepth h c = + Reader.runReaderT (State.evalStateT (go c) Set.empty) (0 :: Word) + where + go c | h == currentHash c = pure True + go c = do + currentDepth :: Word <- Reader.ask + if currentDepth >= maxDepth + then pure False + else do + seen <- State.get + cs <- lift . lift $ toList <$> sequence (children c) + let unseens = filter (\c -> c `Set.notMember` seen) cs + State.modify' (<> Set.fromList cs) + Monad.anyM (Reader.local (1+) . go) unseens + hash :: Hashable e => e -> Hash hash = Hashable.accumulate' diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs b/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs deleted file mode 100644 index 100c3fa7ee..0000000000 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleInput.hs +++ /dev/null @@ -1,3075 +0,0 @@ -{-# LANGUAGE TemplateHaskell #-} - -module Unison.Codebase.Editor.HandleInput - ( loop - , loopState0 - , LoopState(..) - , currentPath - , parseSearchType - ) -where - -import Unison.Prelude - --- TODO: Don't import backend -import qualified Unison.Server.Backend as Backend -import Unison.Server.QueryResult -import Unison.Server.Backend (ShallowListEntry(..), TermEntry(..), TypeEntry(..)) -import qualified Unison.Codebase.MainTerm as MainTerm -import Unison.Codebase.Editor.Command as Command -import Unison.Codebase.Editor.Input -import Unison.Codebase.Editor.Output -import Unison.Codebase.Editor.DisplayObject -import qualified Unison.Codebase.Editor.Output as Output -import Unison.Codebase.Editor.SlurpResult (SlurpResult(..)) -import qualified Unison.Codebase.Editor.SlurpResult as Slurp -import Unison.Codebase.Editor.SlurpComponent (SlurpComponent(..)) -import qualified Unison.Codebase.Editor.SlurpComponent as SC -import Unison.Codebase.Editor.RemoteRepo (printNamespace, WriteRemotePath, writeToRead, writePathToRead) -import qualified Unison.CommandLine.InputPattern as InputPattern -import qualified Unison.CommandLine.InputPatterns as InputPatterns - -import Control.Lens -import Control.Monad.State ( StateT ) -import qualified Control.Monad.State as State -import Control.Monad.Except ( ExceptT(..), runExceptT, withExceptT) -import Data.Bifunctor ( second, first ) -import Data.Configurator () -import qualified Data.Foldable as Foldable -import qualified Data.List as List -import Data.List.Extra ( nubOrd ) -import qualified Data.Map as Map -import qualified Data.Text as Text -import qualified Text.Megaparsec as P -import qualified Data.Set as Set -import Data.Sequence ( Seq(..) ) -import qualified Unison.ABT as ABT -import qualified Unison.Codebase.BranchDiff as BranchDiff -import qualified Unison.Codebase.Editor.Output.BranchDiff as OBranchDiff -import Unison.Codebase.Branch ( Branch(..) - , Branch0(..) - ) -import qualified Unison.Codebase.Branch as Branch -import qualified Unison.Codebase.Branch.Merge as Branch -import qualified Unison.Codebase.Branch.Names as Branch -import qualified Unison.Codebase.BranchUtil as BranchUtil -import qualified Unison.Codebase.Causal as Causal -import qualified Unison.Codebase.Editor.Output.DumpNamespace as Output.DN -import qualified Unison.Codebase.Metadata as Metadata -import Unison.Codebase.Patch ( Patch(..) ) -import qualified Unison.Codebase.Patch as Patch -import Unison.Codebase.Path ( Path - , Path'(..) ) -import qualified Unison.Codebase.Path as Path -import qualified Unison.Codebase.Path.Parse as Path -import qualified Unison.Codebase.Reflog as Reflog -import Unison.Server.SearchResult ( SearchResult ) -import qualified Unison.Server.SearchResult as SR -import qualified Unison.Server.SearchResult' as SR' -import qualified Unison.Codebase.ShortBranchHash as SBH -import qualified Unison.Codebase.SyncMode as SyncMode -import qualified Unison.Builtin.Decls as DD -import qualified Unison.Runtime.IOSource as DD -import qualified Unison.DataDeclaration as DD -import qualified Unison.HashQualified as HQ -import qualified Unison.HashQualified' as HQ' -import qualified Unison.Name as Name -import Unison.Name ( Name ) -import Unison.NamesWithHistory ( NamesWithHistory(..) ) -import Unison.Names (Names(Names)) -import qualified Unison.Names as Names -import qualified Unison.NamesWithHistory as NamesWithHistory -import Unison.Parser.Ann (Ann(..)) -import Unison.Reference ( Reference(..) ) -import qualified Unison.Reference as Reference -import Unison.Referent ( Referent ) -import qualified Unison.Referent as Referent -import Unison.Result ( pattern Result ) -import qualified Unison.ShortHash as SH -import Unison.Term (Term) -import qualified Unison.Term as Term -import qualified Unison.Type as Type -import qualified Unison.Type.Names as Type -import qualified Unison.Result as Result -import qualified Unison.UnisonFile as UF -import qualified Unison.UnisonFile.Names as UF -import qualified Unison.Util.Find as Find -import Unison.Util.Free ( Free ) -import qualified Unison.Util.Free as Free -import Unison.Util.List ( uniqueBy ) -import qualified Unison.Util.Relation as R -import qualified Unison.Util.Relation4 as R4 -import U.Util.Timing (unsafeTime) -import Unison.Util.TransitiveClosure (transitiveClosure) -import Unison.Var ( Var ) -import qualified Unison.Var as Var -import qualified Unison.Codebase.TypeEdit as TypeEdit -import Unison.Codebase.TermEdit (TermEdit(..)) -import qualified Unison.Codebase.TermEdit as TermEdit -import qualified Unison.Codebase.TermEdit.Typing as TermEdit -import qualified Unison.Typechecker as Typechecker -import qualified Unison.WatchKind as WK -import qualified Unison.PrettyPrintEnv as PPE -import qualified Unison.PrettyPrintEnv.Names as PPE -import qualified Unison.PrettyPrintEnvDecl as PPE -import qualified Unison.PrettyPrintEnvDecl.Names as PPE -import Unison.Runtime.IOSource ( isTest ) -import qualified Unison.Runtime.IOSource as IOSource -import qualified Unison.Util.Monoid as Monoid -import Unison.UnisonFile (TypecheckedUnisonFile) -import qualified Unison.Codebase.Editor.TodoOutput as TO -import qualified Unison.Lexer as L -import qualified Unison.LabeledDependency as LD -import Unison.LabeledDependency (LabeledDependency) -import Unison.Type (Type) -import qualified Unison.Builtin as Builtin -import qualified Unison.Builtin.Terms as Builtin -import Unison.NameSegment (NameSegment(..)) -import qualified Unison.NameSegment as NameSegment -import Unison.Codebase.ShortBranchHash (ShortBranchHash) -import qualified Unison.Codebase.Editor.Propagate as Propagate -import qualified Unison.Codebase.Editor.UriParser as UriParser -import Data.Tuple.Extra (uncurry3) -import qualified Unison.CommandLine.DisplayValues as DisplayValues -import qualified Control.Error.Util as ErrorUtil -import Unison.Util.Monoid (intercalateMap) -import qualified Unison.Util.Star3 as Star3 -import qualified Unison.Util.Pretty as P -import qualified Unison.Util.Relation as Relation -import Data.List.NonEmpty (NonEmpty) -import qualified Data.List.NonEmpty as Nel -import Unison.Codebase.Editor.AuthorInfo (AuthorInfo(..)) -import qualified Unison.Hashing.V2.Convert as Hashing -import qualified Unison.Codebase.Verbosity as Verbosity -import qualified Unison.CommandLine.FuzzySelect as Fuzzy -import Data.Either.Extra (eitherToMaybe) - -type F m i v = Free (Command m i v) - --- type (Action m i v) a -type Action m i v = MaybeT (StateT (LoopState m v) (F m i v)) - -data LoopState m v - = LoopState - { _root :: Branch m - , _lastSavedRoot :: Branch m - -- the current position in the namespace - , _currentPathStack :: NonEmpty Path.Absolute - - -- TBD - -- , _activeEdits :: Set Branch.EditGuid - - -- The file name last modified, and whether to skip the next file - -- change event for that path (we skip file changes if the file has - -- just been modified programmatically) - , _latestFile :: Maybe (FilePath, SkipNextUpdate) - , _latestTypecheckedFile :: Maybe (UF.TypecheckedUnisonFile v Ann) - - -- The previous user input. Used to request confirmation of - -- questionable user commands. - , _lastInput :: Maybe Input - - -- A 1-indexed list of strings that can be referenced by index at the - -- CLI prompt. e.g. Given ["Foo.bat", "Foo.cat"], - -- `rename 2 Foo.foo` will rename `Foo.cat` to `Foo.foo`. - , _numberedArgs :: NumberedArgs - } - -type SkipNextUpdate = Bool -type InputDescription = Text - -makeLenses ''LoopState - --- replacing the old read/write scalar Lens with "peek" Getter for the NonEmpty -currentPath :: Getter (LoopState m v) Path.Absolute -currentPath = currentPathStack . to Nel.head - -loopState0 :: Branch m -> Path.Absolute -> LoopState m v -loopState0 b p = LoopState b b (pure p) Nothing Nothing Nothing [] - -type Action' m v = Action m (Either Event Input) v - -defaultPatchNameSegment :: NameSegment -defaultPatchNameSegment = "patch" - -prettyPrintEnvDecl :: NamesWithHistory -> Action' m v PPE.PrettyPrintEnvDecl -prettyPrintEnvDecl ns = eval CodebaseHashLength <&> (`PPE.fromNamesDecl` ns) - -loop :: forall m v . (Monad m, Var v) => Action m (Either Event Input) v () -loop = do - uf <- use latestTypecheckedFile - root' <- use root - currentPath' <- use currentPath - latestFile' <- use latestFile - currentBranch' <- getAt currentPath' - e <- eval Input - hqLength <- eval CodebaseHashLength - sbhLength <- eval BranchHashLength - let - currentPath'' = Path.unabsolute currentPath' - hqNameQuery q = eval $ HQNameQuery (Just currentPath'') root' q - sbh = SBH.fromHash sbhLength - root0 = Branch.head root' - currentBranch0 = Branch.head currentBranch' - defaultPatchPath :: PatchPath - defaultPatchPath = (Path' $ Left currentPath', defaultPatchNameSegment) - resolveSplit' :: (Path', a) -> (Path, a) - resolveSplit' = Path.fromAbsoluteSplit . Path.toAbsoluteSplit currentPath' - resolveToAbsolute :: Path' -> Path.Absolute - resolveToAbsolute = Path.resolve currentPath' - getAtSplit :: Path.Split -> Maybe (Branch m) - getAtSplit p = BranchUtil.getBranch p root0 - getAtSplit' :: Path.Split' -> Maybe (Branch m) - getAtSplit' = getAtSplit . resolveSplit' - getPatchAtSplit' :: Path.Split' -> Action' m v (Maybe Patch) - getPatchAtSplit' s = do - let (p, seg) = Path.toAbsoluteSplit currentPath' s - b <- getAt p - eval . Eval $ Branch.getMaybePatch seg (Branch.head b) - getHQ'TermsIncludingHistorical p = - getTermsIncludingHistorical (resolveSplit' p) root0 - - getHQ'Terms :: Path.HQSplit' -> Set Referent - getHQ'Terms p = BranchUtil.getTerm (resolveSplit' p) root0 - getHQ'Types :: Path.HQSplit' -> Set Reference - getHQ'Types p = BranchUtil.getType (resolveSplit' p) root0 - - basicPrettyPrintNames :: Names - basicPrettyPrintNames = - Backend.basicPrettyPrintNames root' (Backend.AllNames $ Path.unabsolute currentPath') - - resolveHHQS'Types :: HashOrHQSplit' -> Action' m v (Set Reference) - resolveHHQS'Types = either - (eval . TypeReferencesByShortHash) - (pure . getHQ'Types) - -- Term Refs and Cons - resolveHHQS'Referents = either - (eval . TermReferentsByShortHash) - (pure . getHQ'Terms) - getTypes :: Path.Split' -> Set Reference - getTypes = getHQ'Types . fmap HQ'.NameOnly - getTerms :: Path.Split' -> Set Referent - getTerms = getHQ'Terms . fmap HQ'.NameOnly - getPatchAt :: Path.Split' -> Action' m v Patch - getPatchAt patchPath' = do - let (p, seg) = Path.toAbsoluteSplit currentPath' patchPath' - b <- getAt p - eval . Eval $ Branch.getPatch seg (Branch.head b) - withFile ambient sourceName lexed@(text, tokens) k = do - let - getHQ = \case - L.Backticks s (Just sh) -> - Just (HQ.HashQualified (Name.unsafeFromString s) sh) - L.WordyId s (Just sh) -> - Just (HQ.HashQualified (Name.unsafeFromString s) sh) - L.SymbolyId s (Just sh) -> - Just (HQ.HashQualified (Name.unsafeFromString s) sh) - L.Hash sh -> Just (HQ.HashOnly sh) - _ -> Nothing - hqs = Set.fromList . mapMaybe (getHQ . L.payload) $ tokens - let parseNames = Backend.getCurrentParseNames (Backend.AllNames currentPath'') root' - latestFile .= Just (Text.unpack sourceName, False) - latestTypecheckedFile .= Nothing - Result notes r <- eval $ Typecheck ambient parseNames sourceName lexed - case r of - -- Parsing failed - Nothing -> respond $ - ParseErrors text [ err | Result.Parsing err <- toList notes ] - Just (Left errNames) -> do - ns <- makeShadowedPrintNamesFromHQ hqs errNames - ppe <- suffixifiedPPE ns - let tes = [ err | Result.TypeError err <- toList notes ] - cbs = [ bug - | Result.CompilerBug (Result.TypecheckerBug bug) - <- toList notes - ] - when (not $ null tes) . respond $ TypeErrors text ppe tes - when (not $ null cbs) . respond $ CompilerBugs text ppe cbs - Just (Right uf) -> k uf - loadUnisonFile sourceName text = do - let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) - withFile [] sourceName (text, lexed) $ \unisonFile -> do - sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames - names <- displayNames unisonFile - pped <- prettyPrintEnvDecl names - let ppe = PPE.suffixifiedPPE pped - eval . Notify $ Typechecked sourceName ppe sr unisonFile - unlessError' EvaluationFailure do - (bindings, e) <- ExceptT . eval . Evaluate ppe $ unisonFile - lift do - let e' = Map.map go e - go (ann, kind, _hash, _uneval, eval, isHit) = (ann, kind, eval, isHit) - unless (null e') $ - eval . Notify $ Evaluated text ppe bindings e' - latestTypecheckedFile .= Just unisonFile - - case e of - Left (IncomingRootBranch hashes) -> - eval . Notify $ WarnIncomingRootBranch - (SBH.fromHash sbhLength $ Branch.headHash root') - (Set.map (SBH.fromHash sbhLength) hashes) - Left (UnisonFileChanged sourceName text) -> - -- We skip this update if it was programmatically generated - if maybe False snd latestFile' - then modifying latestFile (fmap (const False) <$>) - else loadUnisonFile sourceName text - Right input -> - let - ifConfirmed = ifM (confirmedCommand input) - branchNotFound = respond . BranchNotFound - branchNotFound' = respond . BranchNotFound . Path.unsplit' - patchNotFound :: Path.Split' -> Action' m v () - patchNotFound s = respond $ PatchNotFound s - patchExists :: Path.Split' -> Action' m v () - patchExists s = respond $ PatchAlreadyExists s - typeNotFound = respond . TypeNotFound - typeNotFound' = respond . TypeNotFound' - termNotFound = respond . TermNotFound - termNotFound' = respond . TermNotFound' - nameConflicted src tms tys = respond (DeleteNameAmbiguous hqLength src tms tys) - typeConflicted src = nameConflicted src Set.empty - termConflicted src tms = nameConflicted src tms Set.empty - hashConflicted src = respond . HashAmbiguous src - typeReferences :: [SearchResult] -> [Reference] - typeReferences rs - = [ r | SR.Tp (SR.TypeResult _ r _) <- rs ] - termReferences :: [SearchResult] -> [Reference] - termReferences rs = - [ r | SR.Tm (SR.TermResult _ (Referent.Ref r) _) <- rs ] - termResults rs = [ r | SR.Tm r <- rs ] - typeResults rs = [ r | SR.Tp r <- rs ] - doRemoveReplacement from patchPath isTerm = do - let patchPath' = fromMaybe defaultPatchPath patchPath - patch <- getPatchAt patchPath' - QueryResult misses' hits <- hqNameQuery [from] - let tpRefs = Set.fromList $ typeReferences hits - tmRefs = Set.fromList $ termReferences hits - misses = Set.difference (Set.fromList misses') if isTerm - then Set.fromList $ SR.termName <$> termResults hits - else Set.fromList $ SR.typeName <$> typeResults hits - go :: Reference -> Action m (Either Event Input) v () - go fr = do - let termPatch = - over Patch.termEdits (R.deleteDom fr) patch - typePatch = - over Patch.typeEdits (R.deleteDom fr) patch - (patchPath'', patchName) = resolveSplit' patchPath' - -- Save the modified patch - stepAtM inputDescription - (patchPath'', - Branch.modifyPatches - patchName - (const (if isTerm then termPatch else typePatch))) - -- Say something - success - unless (Set.null misses) $ - respond $ SearchTermsNotFound (Set.toList misses) - traverse_ go (if isTerm then tmRefs else tpRefs) - branchExists dest _x = respond $ BranchAlreadyExists dest - branchExistsSplit = branchExists . Path.unsplit' - typeExists dest = respond . TypeAlreadyExists dest - termExists dest = respond . TermAlreadyExists dest - -- | try to get these as close as possible to the command that caused the change - inputDescription :: InputDescription - inputDescription = case input of - ForkLocalBranchI src dest -> "fork " <> hp' src <> " " <> p' dest - MergeLocalBranchI src dest mode -> case mode of - Branch.RegularMerge -> "merge " <> p' src <> " " <> p' dest - Branch.SquashMerge -> "merge.squash " <> p' src <> " " <> p' dest - ResetRootI src -> "reset-root " <> hp' src - AliasTermI src dest -> "alias.term " <> hhqs' src <> " " <> ps' dest - AliasTypeI src dest -> "alias.type " <> hhqs' src <> " " <> ps' dest - AliasManyI srcs dest -> - "alias.many " <> intercalateMap " " hqs srcs <> " " <> p' dest - MoveTermI src dest -> "move.term " <> hqs' src <> " " <> ps' dest - MoveTypeI src dest -> "move.type " <> hqs' src <> " " <> ps' dest - MoveBranchI src dest -> "move.namespace " <> ops' src <> " " <> ps' dest - MovePatchI src dest -> "move.patch " <> ps' src <> " " <> ps' dest - CopyPatchI src dest -> "copy.patch " <> ps' src <> " " <> ps' dest - DeleteI thing -> "delete " <> hqs' thing - DeleteTermI def -> "delete.term " <> hqs' def - DeleteTypeI def -> "delete.type " <> hqs' def - DeleteBranchI opath -> "delete.namespace " <> ops' opath - DeletePatchI path -> "delete.patch " <> ps' path - ReplaceI src target p -> - "replace " <> HQ.toText src <> " " - <> HQ.toText target <> " " - <> opatch p - ResolveTermNameI path -> "resolve.termName " <> hqs' path - ResolveTypeNameI path -> "resolve.typeName " <> hqs' path - AddI _selection -> "add" - UpdateI p _selection -> "update " <> opatch p - PropagatePatchI p scope -> "patch " <> ps' p <> " " <> p' scope - UndoI{} -> "undo" - UiI -> "ui" - DocsToHtmlI path dir -> "docs.to-html " <> Path.toText' path <> " " <> Text.pack dir - ExecuteI s -> "execute " <> Text.pack s - IOTestI hq -> "io.test " <> HQ.toText hq - LinkI md defs -> - "link " <> HQ.toText md <> " " <> intercalateMap " " hqs' defs - UnlinkI md defs -> - "unlink " <> HQ.toText md <> " " <> intercalateMap " " hqs' defs - UpdateBuiltinsI -> "builtins.update" - MergeBuiltinsI -> "builtins.merge" - MergeIOBuiltinsI -> "builtins.mergeio" - MakeStandaloneI out nm -> - "compile.output " <> Text.pack out <> " " <> HQ.toText nm - PullRemoteBranchI orepo dest _syncMode _ -> - (Text.pack . InputPattern.patternName - $ InputPatterns.patternFromInput input) - <> " " - -- todo: show the actual config-loaded namespace - <> maybe "(remote namespace from .unisonConfig)" - (uncurry3 printNamespace) orepo - <> " " - <> p' dest - CreateMessage{} -> wat - LoadI{} -> wat - PreviewAddI{} -> wat - PreviewUpdateI{} -> wat - CreateAuthorI (NameSegment id) name -> "create.author " <> id <> " " <> name - CreatePullRequestI{} -> wat - LoadPullRequestI base head dest -> - "pr.load " - <> uncurry3 printNamespace base - <> " " - <> uncurry3 printNamespace head - <> " " - <> p' dest - PushRemoteBranchI{} -> wat - PreviewMergeLocalBranchI{} -> wat - DiffNamespaceI{} -> wat - SwitchBranchI{} -> wat - UpI{} -> wat - PopBranchI{} -> wat - NamesI{} -> wat - TodoI{} -> wat - ListEditsI{} -> wat - ListDependenciesI{} -> wat - ListDependentsI{} -> wat - HistoryI{} -> wat - TestI{} -> wat - LinksI{} -> wat - SearchByNameI{} -> wat - FindShallowI{} -> wat - FindPatchI{} -> wat - ShowDefinitionI{} -> wat - DisplayI{} -> wat - DocsI{} -> wat - ShowDefinitionByPrefixI{} -> wat - ShowReflogI{} -> wat - DebugNumberedArgsI{} -> wat - DebugTypecheckedUnisonFileI{} -> wat - DebugDumpNamespacesI{} -> wat - DebugDumpNamespaceSimpleI{} -> wat - DebugClearWatchI {} -> wat - QuitI{} -> wat - DeprecateTermI{} -> undefined - DeprecateTypeI{} -> undefined - RemoveTermReplacementI src p -> - "delete.term-replacement" <> HQ.toText src <> " " <> opatch p - RemoveTypeReplacementI src p -> - "delete.type-replacement" <> HQ.toText src <> " " <> opatch p - where - hp' = either (Text.pack . show) p' - p' = Text.pack . show . resolveToAbsolute - ops' = maybe "." ps' - opatch = ps' . fromMaybe defaultPatchPath - wat = error $ show input ++ " is not expected to alter the branch" - hhqs' (Left sh) = SH.toText sh - hhqs' (Right x) = hqs' x - hqs' (p, hq) = - Monoid.unlessM (Path.isRoot' p) (p' p) <> "." <> Text.pack (show hq) - hqs (p, hq) = hqs' (Path' . Right . Path.Relative $ p, hq) - ps' = p' . Path.unsplit' - stepAt = Unison.Codebase.Editor.HandleInput.stepAt inputDescription - stepManyAt = Unison.Codebase.Editor.HandleInput.stepManyAt inputDescription - stepManyAtNoSync = - Unison.Codebase.Editor.HandleInput.stepManyAtNoSync - updateRoot = flip Unison.Codebase.Editor.HandleInput.updateRoot inputDescription - syncRoot = use root >>= updateRoot - updateAtM = Unison.Codebase.Editor.HandleInput.updateAtM inputDescription - unlessGitError = unlessError' (Output.GitError input) - importRemoteBranch ns mode = ExceptT . eval $ ImportRemoteBranch ns mode - viewRemoteBranch ns = ExceptT . eval $ ViewRemoteBranch ns - syncRemoteRootBranch repo b mode = - ExceptT . eval $ SyncRemoteRootBranch repo b mode - loadSearchResults = eval . LoadSearchResults - handleFailedDelete failed failedDependents = do - failed <- loadSearchResults $ SR.fromNames failed - failedDependents <- loadSearchResults $ SR.fromNames failedDependents - ppe <- fqnPPE =<< makePrintNamesFromLabeled' - (foldMap SR'.labeledDependencies $ failed <> failedDependents) - respond $ CantDelete ppe failed failedDependents - saveAndApplyPatch patchPath'' patchName patch' = do - stepAtM (inputDescription <> " (1/2)") - (patchPath'', - Branch.modifyPatches patchName (const patch')) - -- Apply the modified patch to the current path - -- since we might be able to propagate further. - void $ propagatePatch inputDescription patch' currentPath' - -- Say something - success - previewResponse sourceName sr uf = do - names <- displayNames uf - ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl names - respond $ Typechecked (Text.pack sourceName) ppe sr uf - - -- Add default metadata to all added types and terms in a slurp component. - -- - -- No-op if the slurp component is empty. - addDefaultMetadata - :: SlurpComponent v - -> Action m (Either Event Input) v () - addDefaultMetadata adds = - when (not (SC.isEmpty adds)) do - let addedVs = Set.toList $ SC.types adds <> SC.terms adds - addedNs = traverse (Path.hqSplitFromName' . Name.unsafeFromVar) addedVs - case addedNs of - Nothing -> - error $ "I couldn't parse a name I just added to the codebase! " - <> "-- Added names: " <> show addedVs - Just addedNames -> do - dm <- resolveDefaultMetadata currentPath' - case toList dm of - [] -> pure () - dm' -> do - let hqs = traverse InputPatterns.parseHashQualifiedName dm' - case hqs of - Left e -> respond $ ConfiguredMetadataParseError - (Path.absoluteToPath' currentPath') - (show dm') - e - Right defaultMeta -> - manageLinks True addedNames defaultMeta Metadata.insert - - -- Add/remove links between definitions and metadata. - -- `silent` controls whether this produces any output to the user. - -- `srcs` is (names of the) definitions to pass to `op` - -- `mdValues` is (names of the) metadata to pass to `op` - -- `op` is the operation to add/remove/alter metadata mappings. - -- e.g. `Metadata.insert` is passed to add metadata links. - manageLinks :: - Bool -> - [(Path', HQ'.HQSegment)] -> - [HQ.HashQualified Name] -> - ( forall r. - Ord r => - (r, Metadata.Type, Metadata.Value) -> - Branch.Star r NameSegment -> - Branch.Star r NameSegment - ) -> - Action m (Either Event Input) v () - manageLinks silent srcs mdValues op = do - runExceptT (for mdValues \val -> ExceptT (getMetadataFromName val)) >>= \case - Left output -> respond output - Right metadata -> do - before <- Branch.head <$> use root - traverse_ go metadata - if silent - then respond DefaultMetadataNotification - else do - after <- Branch.head <$> use root - (ppe, outputDiff) <- diffHelper before after - if OBranchDiff.isEmpty outputDiff - then respond NoOp - else - respondNumbered $ - ShowDiffNamespace - Path.absoluteEmpty - Path.absoluteEmpty - ppe - outputDiff - where - go :: (Metadata.Type, Metadata.Value) -> Action m (Either Event Input) v () - go (mdType, mdValue) = do - newRoot <- use root - let r0 = Branch.head newRoot - getTerms p = BranchUtil.getTerm (resolveSplit' p) r0 - getTypes p = BranchUtil.getType (resolveSplit' p) r0 - !srcle = toList . getTerms =<< srcs - !srclt = toList . getTypes =<< srcs - let step b0 = - let tmUpdates terms = foldl' go terms srcle - where - go terms src = op (src, mdType, mdValue) terms - tyUpdates types = foldl' go types srclt - where - go types src = op (src, mdType, mdValue) types - in over Branch.terms tmUpdates . over Branch.types tyUpdates $ b0 - steps = srcs <&> \(path, _hq) -> (Path.unabsolute (resolveToAbsolute path), step) - stepManyAtNoSync steps - - delete - :: (Path.HQSplit' -> Set Referent) -- compute matching terms - -> (Path.HQSplit' -> Set Reference) -- compute matching types - -> Path.HQSplit' - -> Action' m v () - delete getHQ'Terms getHQ'Types hq = do - let matchingTerms = toList (getHQ'Terms hq) - let matchingTypes = toList (getHQ'Types hq) - case (matchingTerms, matchingTypes) of - ([], []) -> respond (NameNotFound hq) - (Set.fromList -> tms, Set.fromList -> tys) -> goMany tms tys - where - resolvedPath = resolveSplit' (HQ'.toName <$> hq) - goMany tms tys = do - let rootNames = Branch.toNames root0 - name = Path.toName (Path.unsplit resolvedPath) - toRel :: Ord ref => Set ref -> R.Relation Name ref - toRel = R.fromList . fmap (name,) . toList - -- these names are relative to the root - toDelete = Names (toRel tms) (toRel tys) - (failed, failedDependents) <- - getEndangeredDependents (eval . GetDependents) toDelete rootNames - if failed == mempty then do - let makeDeleteTermNames = fmap (BranchUtil.makeDeleteTermName resolvedPath) . toList $ tms - let makeDeleteTypeNames = fmap (BranchUtil.makeDeleteTypeName resolvedPath) . toList $ tys - stepManyAt (makeDeleteTermNames ++ makeDeleteTypeNames) - root'' <- use root - diffHelper (Branch.head root') (Branch.head root'') >>= - respondNumbered . uncurry ShowDiffAfterDeleteDefinitions - else handleFailedDelete failed failedDependents - - in case input of - - CreateMessage pretty -> - respond $ PrintMessage pretty - - ShowReflogI -> do - entries <- convertEntries Nothing [] <$> eval LoadReflog - numberedArgs .= fmap (('#':) . SBH.toString . Output.hash) entries - respond $ ShowReflog entries - where - -- reverses & formats entries, adds synthetic entries when there is a - -- discontinuity in the reflog. - convertEntries :: Maybe Branch.Hash - -> [Output.ReflogEntry] - -> [Reflog.Entry Branch.Hash] - -> [Output.ReflogEntry] - convertEntries _ acc [] = acc - convertEntries Nothing acc entries@(Reflog.Entry old _ _ : _) = - convertEntries - (Just old) - (Output.ReflogEntry (SBH.fromHash sbhLength old) "(initial reflogged namespace)" : acc) - entries - convertEntries (Just lastHash) acc entries@(Reflog.Entry old new reason : rest) = - if lastHash /= old then - convertEntries - (Just old) - (Output.ReflogEntry (SBH.fromHash sbhLength old) "(external change)" : acc) - entries - else - convertEntries - (Just new) - (Output.ReflogEntry (SBH.fromHash sbhLength new) reason : acc) - rest - - ResetRootI src0 -> - case src0 of - Left hash -> unlessError do - newRoot <- resolveShortBranchHash hash - lift do - updateRoot newRoot - success - Right path' -> do - newRoot <- getAt $ resolveToAbsolute path' - if Branch.isEmpty newRoot then respond $ BranchNotFound path' - else do - updateRoot newRoot - success - ForkLocalBranchI src0 dest0 -> do - let tryUpdateDest srcb dest0 = do - let dest = resolveToAbsolute dest0 - -- if dest isn't empty: leave dest unchanged, and complain. - destb <- getAt dest - if Branch.isEmpty destb then do - ok <- updateAtM dest (const $ pure srcb) - if ok then success else respond $ BranchEmpty src0 - else respond $ BranchAlreadyExists dest0 - case src0 of - Left hash -> unlessError do - srcb <- resolveShortBranchHash hash - lift $ tryUpdateDest srcb dest0 - Right path' -> do - srcb <- getAt $ resolveToAbsolute path' - if Branch.isEmpty srcb then respond $ BranchNotFound path' - else tryUpdateDest srcb dest0 - MergeLocalBranchI src0 dest0 mergeMode -> do - let [src, dest] = resolveToAbsolute <$> [src0, dest0] - srcb <- getAt src - if Branch.isEmpty srcb then branchNotFound src0 - else do - let err = Just $ MergeAlreadyUpToDate src0 dest0 - mergeBranchAndPropagateDefaultPatch mergeMode inputDescription err srcb (Just dest0) dest - - PreviewMergeLocalBranchI src0 dest0 -> do - let [src, dest] = resolveToAbsolute <$> [src0, dest0] - srcb <- getAt src - if Branch.isEmpty srcb then branchNotFound src0 - else do - destb <- getAt dest - merged <- eval $ Merge Branch.RegularMerge srcb destb - if merged == destb - then respond (PreviewMergeAlreadyUpToDate src0 dest0) - else - diffHelper (Branch.head destb) (Branch.head merged) >>= - respondNumbered . uncurry (ShowDiffAfterMergePreview dest0 dest) - - DiffNamespaceI before0 after0 -> do - let [beforep, afterp] = - resolveToAbsolute <$> [before0, after0] - before <- Branch.head <$> getAt beforep - after <- Branch.head <$> getAt afterp - (ppe, outputDiff) <- diffHelper before after - respondNumbered $ ShowDiffNamespace beforep afterp ppe outputDiff - - CreatePullRequestI baseRepo headRepo -> unlessGitError do - (cleanupBase, baseBranch) <- viewRemoteBranch baseRepo - (cleanupHead, headBranch) <- viewRemoteBranch headRepo - lift do - merged <- eval $ Merge Branch.RegularMerge baseBranch headBranch - (ppe, diff) <- diffHelper (Branch.head baseBranch) (Branch.head merged) - respondNumbered $ ShowDiffAfterCreatePR baseRepo headRepo ppe diff - eval . Eval $ do - cleanupBase - cleanupHead - - LoadPullRequestI baseRepo headRepo dest0 -> do - let desta = resolveToAbsolute dest0 - let dest = Path.unabsolute desta - destb <- getAt desta - if Branch.isEmpty0 (Branch.head destb) then unlessGitError do - baseb <- importRemoteBranch baseRepo SyncMode.ShortCircuit - headb <- importRemoteBranch headRepo SyncMode.ShortCircuit - lift $ do - mergedb <- eval $ Merge Branch.RegularMerge baseb headb - squashedb <- eval $ Merge Branch.SquashMerge headb baseb - stepManyAt - [BranchUtil.makeSetBranch (dest, "base") baseb - ,BranchUtil.makeSetBranch (dest, "head") headb - ,BranchUtil.makeSetBranch (dest, "merged") mergedb - ,BranchUtil.makeSetBranch (dest, "squashed") squashedb] - let base = snoc dest0 "base" - head = snoc dest0 "head" - merged = snoc dest0 "merged" - squashed = snoc dest0 "squashed" - respond $ LoadPullRequest baseRepo headRepo base head merged squashed - loadPropagateDiffDefaultPatch - inputDescription - (Just merged) - (snoc desta "merged") - else - respond . BranchNotEmpty . Path.Path' . Left $ currentPath' - - - -- move the root to a sub-branch - MoveBranchI Nothing dest -> do - b <- use root - stepManyAt [ (Path.empty, const Branch.empty0) - , BranchUtil.makeSetBranch (resolveSplit' dest) b ] - success - - MoveBranchI (Just src) dest -> - maybe (branchNotFound' src) srcOk (getAtSplit' src) - where - srcOk b = maybe (destOk b) (branchExistsSplit dest) (getAtSplit' dest) - destOk b = do - stepManyAt - [ BranchUtil.makeSetBranch (resolveSplit' src) Branch.empty - , BranchUtil.makeSetBranch (resolveSplit' dest) b ] - success -- could give rando stats about new defns - - MovePatchI src dest -> do - psrc <- getPatchAtSplit' src - pdest <- getPatchAtSplit' dest - case (psrc, pdest) of - (Nothing, _) -> patchNotFound src - (_, Just _) -> patchExists dest - (Just p, Nothing) -> do - stepManyAt [ - BranchUtil.makeDeletePatch (resolveSplit' src), - BranchUtil.makeReplacePatch (resolveSplit' dest) p ] - success - - CopyPatchI src dest -> do - psrc <- getPatchAtSplit' src - pdest <- getPatchAtSplit' dest - case (psrc, pdest) of - (Nothing, _) -> patchNotFound src - (_, Just _) -> patchExists dest - (Just p, Nothing) -> do - stepAt (BranchUtil.makeReplacePatch (resolveSplit' dest) p) - success - - DeletePatchI src -> do - psrc <- getPatchAtSplit' src - case psrc of - Nothing -> patchNotFound src - Just _ -> do - stepAt (BranchUtil.makeDeletePatch (resolveSplit' src)) - success - - DeleteBranchI Nothing -> - ifConfirmed - (do - stepAt (Path.empty, const Branch.empty0) - respond DeletedEverything) - (respond DeleteEverythingConfirmation) - - DeleteBranchI (Just p) -> - maybe (branchNotFound' p) go $ getAtSplit' p - where - go (Branch.head -> b) = do - (failed, failedDependents) <- - let rootNames = Branch.toNames root0 - toDelete = Names.prefix0 - (Path.toName . Path.unsplit . resolveSplit' $ p) -- resolveSplit' incorporates currentPath - (Branch.toNames b) - in getEndangeredDependents (eval . GetDependents) toDelete rootNames - if failed == mempty then do - stepAt $ BranchUtil.makeSetBranch (resolveSplit' p) Branch.empty - -- Looks similar to the 'toDelete' above... investigate me! ;) - diffHelper b Branch.empty0 >>= - respondNumbered - . uncurry (ShowDiffAfterDeleteBranch - $ resolveToAbsolute (Path.unsplit' p)) - else handleFailedDelete failed failedDependents - SwitchBranchI maybePath' -> do - mpath' <- case maybePath' of - Nothing -> fuzzySelectNamespace root0 <&> \case - [] -> Nothing - -- Shouldn't be possible to get multiple paths here, we can just take - -- the first. - (p:_) -> Just p - Just p -> pure $ Just p - case mpath' of - Nothing -> pure () - Just path' -> do - let path = resolveToAbsolute path' - currentPathStack %= Nel.cons path - branch' <- getAt path - when (Branch.isEmpty branch') (respond $ CreatedNewBranch path) - - UpI -> use currentPath >>= \p -> case Path.unsnoc (Path.unabsolute p) of - Nothing -> pure () - Just (path,_) -> currentPathStack %= Nel.cons (Path.Absolute path) - - PopBranchI -> use (currentPathStack . to Nel.uncons) >>= \case - (_, Nothing) -> respond StartOfCurrentPathHistory - (_, Just t) -> currentPathStack .= t - - HistoryI resultsCap diffCap from -> case from of - Left hash -> unlessError do - b <- resolveShortBranchHash hash - lift $ doHistory 0 b [] - Right path' -> do - let path = resolveToAbsolute path' - branch' <- getAt path - if Branch.isEmpty branch' then respond $ CreatedNewBranch path - else doHistory 0 branch' [] - where - doHistory !n b acc = - if maybe False (n >=) resultsCap then - respond $ History diffCap acc (PageEnd (sbh $ Branch.headHash b) n) - else case Branch._history b of - Causal.One{} -> - respond $ History diffCap acc (EndOfLog . sbh $ Branch.headHash b) - Causal.Merge{Causal.tails} -> - respond $ History diffCap acc (MergeTail (sbh $ Branch.headHash b) . map sbh $ Map.keys tails) - Causal.Cons{Causal.tail} -> do - b' <- fmap Branch.Branch . eval . Eval $ snd tail - let elem = (sbh $ Branch.headHash b, Branch.namesDiff b' b) - doHistory (n+1) b' (elem : acc) - - UndoI -> do - prev <- eval . Eval $ Branch.uncons root' - case prev of - Nothing -> - respond . CantUndo $ if Branch.isOne root' then CantUndoPastStart - else CantUndoPastMerge - Just (_, prev) -> do - updateRoot prev - diffHelper (Branch.head prev) (Branch.head root') >>= - respondNumbered . uncurry Output.ShowDiffAfterUndo - - UiI -> eval UI - - DocsToHtmlI namespacePath' sourceDirectory -> do - let absPath = Path.unabsolute $ resolveToAbsolute namespacePath' - eval (DocsToHtml root' absPath sourceDirectory) - - AliasTermI src dest -> do - referents <- resolveHHQS'Referents src - case (toList referents, toList (getTerms dest)) of - ([r], []) -> do - stepAt (BranchUtil.makeAddTermName (resolveSplit' dest) r (oldMD r)) - success - ([_], rs@(_:_)) -> termExists dest (Set.fromList rs) - ([], _) -> either termNotFound' termNotFound src - (rs, _) -> - either hashConflicted termConflicted src (Set.fromList rs) - where - oldMD r = either (const mempty) - (\src -> - let p = resolveSplit' src in - BranchUtil.getTermMetadataAt p r root0) - src - - AliasTypeI src dest -> do - refs <- resolveHHQS'Types src - case (toList refs, toList (getTypes dest)) of - ([r], []) -> do - stepAt (BranchUtil.makeAddTypeName (resolveSplit' dest) r (oldMD r)) - success - ([_], rs@(_:_)) -> typeExists dest (Set.fromList rs) - ([], _) -> either typeNotFound' typeNotFound src - (rs, _) -> - either - (\src -> hashConflicted src . Set.map Referent.Ref) - typeConflicted - src - (Set.fromList rs) - - - where - oldMD r = - either (const mempty) - (\src -> - let p = resolveSplit' src in - BranchUtil.getTypeMetadataAt p r root0) - src - - -- this implementation will happily produce name conflicts, - -- but will surface them in a normal diff at the end of the operation. - AliasManyI srcs dest' -> do - let destAbs = resolveToAbsolute dest' - old <- getAt destAbs - let (unknown, actions) = foldl' go mempty srcs - stepManyAt actions - new <- getAt destAbs - diffHelper (Branch.head old) (Branch.head new) >>= - respondNumbered . uncurry (ShowDiffAfterModifyBranch dest' destAbs) - unless (null unknown) $ - respond . SearchTermsNotFound . fmap fixupOutput $ unknown - where - -- a list of missing sources (if any) and the actions that do the work - go :: ([Path.HQSplit], [(Path, Branch0 m -> Branch0 m)]) - -> Path.HQSplit - -> ([Path.HQSplit], [(Path, Branch0 m -> Branch0 m)]) - go (missingSrcs, actions) hqsrc = - let - src :: Path.Split - src = second HQ'.toName hqsrc - proposedDest :: Path.Split - proposedDest = second HQ'.toName hqProposedDest - hqProposedDest :: Path.HQSplit - hqProposedDest = first Path.unabsolute $ - Path.resolve (resolveToAbsolute dest') hqsrc - -- `Nothing` if src doesn't exist - doType :: Maybe [(Path, Branch0 m -> Branch0 m)] - doType = case ( BranchUtil.getType hqsrc currentBranch0 - , BranchUtil.getType hqProposedDest root0 - ) of - (null -> True, _) -> Nothing -- missing src - (rsrcs, existing) -> -- happy path - Just . map addAlias . toList $ Set.difference rsrcs existing - where - addAlias r = BranchUtil.makeAddTypeName proposedDest r (oldMD r) - oldMD r = BranchUtil.getTypeMetadataAt src r currentBranch0 - doTerm :: Maybe [(Path, Branch0 m -> Branch0 m)] - doTerm = case ( BranchUtil.getTerm hqsrc currentBranch0 - , BranchUtil.getTerm hqProposedDest root0 - ) of - (null -> True, _) -> Nothing -- missing src - (rsrcs, existing) -> - Just . map addAlias . toList $ Set.difference rsrcs existing - where - addAlias r = BranchUtil.makeAddTermName proposedDest r (oldMD r) - oldMD r = BranchUtil.getTermMetadataAt src r currentBranch0 - in case (doType, doTerm) of - (Nothing, Nothing) -> (missingSrcs :> hqsrc, actions) - (Just as, Nothing) -> (missingSrcs, actions ++ as) - (Nothing, Just as) -> (missingSrcs, actions ++ as) - (Just as1, Just as2) -> (missingSrcs, actions ++ as1 ++ as2) - - fixupOutput :: Path.HQSplit -> HQ.HashQualified Name - fixupOutput = fmap Path.toName . HQ'.toHQ . Path.unsplitHQ - - NamesI thing -> do - ns0 <- basicParseNames - let ns = NamesWithHistory ns0 mempty - terms = NamesWithHistory.lookupHQTerm thing ns - types = NamesWithHistory.lookupHQType thing ns - printNames = NamesWithHistory basicPrettyPrintNames mempty - terms' :: Set (Referent, Set (HQ'.HashQualified Name)) - terms' = Set.map go terms where - go r = (r, NamesWithHistory.termName hqLength r printNames) - types' :: Set (Reference, Set (HQ'.HashQualified Name)) - types' = Set.map go types where - go r = (r, NamesWithHistory.typeName hqLength r printNames) - respond $ ListNames hqLength (toList types') (toList terms') - - LinkI mdValue srcs -> do - manageLinks False srcs [mdValue] Metadata.insert - syncRoot - - UnlinkI mdValue srcs -> do - manageLinks False srcs [mdValue] Metadata.delete - syncRoot - - -- > links List.map (.Docs .English) - -- > links List.map -- give me all the - -- > links Optional License - LinksI src mdTypeStr -> unlessError do - (ppe, out) <- getLinks (show input) src (Right mdTypeStr) - lift do - numberedArgs .= fmap (HQ.toString . view _1) out - respond $ ListOfLinks ppe out - - DocsI srcs -> do - srcs' <- case srcs of - [] -> fuzzySelectTermsAndTypes root0 - -- HQ names should always parse as a valid split, so we just discard any - -- that don't to satisfy the type-checker. - <&> mapMaybe (eitherToMaybe . Path.parseHQSplit' . HQ.toString) - xs -> pure xs - for_ srcs' (docsI (show input) basicPrettyPrintNames ) - - CreateAuthorI authorNameSegment authorFullName -> do - initialBranch <- getAt currentPath' - AuthorInfo - guid@(guidRef, _, _) - author@(authorRef, _, _) - copyrightHolder@(copyrightHolderRef, _, _) <- - eval $ CreateAuthorInfo authorFullName - -- add the new definitions to the codebase and to the namespace - traverse_ (eval . uncurry3 PutTerm) [guid, author, copyrightHolder] - stepManyAt - [ BranchUtil.makeAddTermName (resolveSplit' authorPath) (d authorRef) mempty - , BranchUtil.makeAddTermName (resolveSplit' copyrightHolderPath) (d copyrightHolderRef) mempty - , BranchUtil.makeAddTermName (resolveSplit' guidPath) (d guidRef) mempty - ] - finalBranch <- getAt currentPath' - -- print some output - diffHelper (Branch.head initialBranch) (Branch.head finalBranch) >>= - respondNumbered - . uncurry (ShowDiffAfterCreateAuthor - authorNameSegment - (Path.unsplit' base) - currentPath') - where - d :: Reference.Id -> Referent - d = Referent.Ref . Reference.DerivedId - base :: Path.Split' = (Path.relativeEmpty', "metadata") - authorPath = base |> "authors" |> authorNameSegment - copyrightHolderPath = base |> "copyrightHolders" |> authorNameSegment - guidPath = authorPath |> "guid" - - MoveTermI src dest -> - case (toList (getHQ'Terms src), toList (getTerms dest)) of - ([r], []) -> do - stepManyAt - [ BranchUtil.makeDeleteTermName p r - , BranchUtil.makeAddTermName (resolveSplit' dest) r (mdSrc r)] - success - ([_], rs) -> termExists dest (Set.fromList rs) - ([], _) -> termNotFound src - (rs, _) -> termConflicted src (Set.fromList rs) - where p = resolveSplit' (HQ'.toName <$> src) - mdSrc r = BranchUtil.getTermMetadataAt p r root0 - - MoveTypeI src dest -> - case (toList (getHQ'Types src), toList (getTypes dest)) of - ([r], []) -> do - stepManyAt - [ BranchUtil.makeDeleteTypeName p r - , BranchUtil.makeAddTypeName (resolveSplit' dest) r (mdSrc r) ] - success - ([_], rs) -> typeExists dest (Set.fromList rs) - ([], _) -> typeNotFound src - (rs, _) -> typeConflicted src (Set.fromList rs) - where - p = resolveSplit' (HQ'.toName <$> src) - mdSrc r = BranchUtil.getTypeMetadataAt p r root0 - - DeleteI hq -> delete getHQ'Terms getHQ'Types hq - DeleteTypeI hq -> delete (const Set.empty) getHQ'Types hq - DeleteTermI hq -> delete getHQ'Terms (const Set.empty) hq - - DisplayI outputLoc names' -> do - names <- case names' of - [] -> fuzzySelectTermsAndTypes root0 - ns -> pure ns - traverse_ (displayI basicPrettyPrintNames outputLoc) names - - ShowDefinitionI outputLoc query -> handleShowDefinition outputLoc query - FindPatchI -> do - let patches = - [ Path.toName $ Path.snoc p seg - | (p, b) <- Branch.toList0 currentBranch0 - , (seg, _) <- Map.toList (Branch._edits b) ] - respond $ ListOfPatches $ Set.fromList patches - numberedArgs .= fmap Name.toString patches - - FindShallowI pathArg -> do - let pathArgAbs = resolveToAbsolute pathArg - ppe = Backend.basicSuffixifiedNames - sbhLength - root' - (Backend.AllNames $ Path.fromPath' pathArg) - res <- eval $ FindShallow pathArgAbs - case res of - Left e -> handleBackendError e - Right entries -> do - -- caching the result as an absolute path, for easier jumping around - numberedArgs .= fmap entryToHQString entries - respond $ ListShallow ppe entries - where - entryToHQString :: ShallowListEntry v Ann -> String - entryToHQString e = - fixup $ case e of - ShallowTypeEntry (TypeEntry _ hq _) -> HQ'.toString hq - ShallowTermEntry (TermEntry _ hq _ _) -> HQ'.toString hq - ShallowBranchEntry ns _ _ -> NameSegment.toString ns - ShallowPatchEntry ns -> NameSegment.toString ns - where - fixup s = case pathArgStr of - "" -> s - p | last p == '.' -> p ++ s - p -> p ++ "." ++ s - pathArgStr = show pathArg - - SearchByNameI isVerbose _showAll ws -> do - let prettyPrintNames = basicPrettyPrintNames - unlessError do - results <- case ws of - -- no query, list everything - [] -> pure . listBranch $ Branch.head currentBranch' - - -- type query - ":" : ws -> - ExceptT (parseSearchType (show input) (unwords ws)) >>= \typ -> - ExceptT $ do - let named = Branch.deepReferents root0 - matches <- - fmap (filter (`Set.member` named) . toList) $ - eval $ GetTermsOfType typ - matches <- - if null matches then do - respond NoExactTypeMatches - fmap (filter (`Set.member` named) . toList) $ - eval $ GetTermsMentioningType typ - else pure matches - let results = - -- in verbose mode, aliases are shown, so we collapse all - -- aliases to a single search result; in non-verbose mode, - -- a separate result may be shown for each alias - (if isVerbose then uniqueBy SR.toReferent else id) $ - searchResultsFor prettyPrintNames matches [] - pure . pure $ results - - -- name query - (map HQ.unsafeFromString -> qs) -> do - let ns = basicPrettyPrintNames - let srs = searchBranchScored ns fuzzyNameDistance qs - pure $ uniqueBy SR.toReferent srs - lift do - numberedArgs .= fmap searchResultToHQString results - results' <- loadSearchResults results - ppe <- suffixifiedPPE =<< - makePrintNamesFromLabeled' - (foldMap SR'.labeledDependencies results') - respond $ ListOfDefinitions ppe isVerbose results' - - ResolveTypeNameI hq -> - zeroOneOrMore (getHQ'Types hq) (typeNotFound hq) go (typeConflicted hq) - where - conflicted = getHQ'Types (fmap HQ'.toNameOnly hq) - makeDelete = - BranchUtil.makeDeleteTypeName (resolveSplit' (HQ'.toName <$> hq)) - go r = stepManyAt . fmap makeDelete . toList . Set.delete r $ conflicted - - ResolveTermNameI hq -> do - refs <- getHQ'TermsIncludingHistorical hq - zeroOneOrMore refs (termNotFound hq) go (termConflicted hq) - where - conflicted = getHQ'Terms (fmap HQ'.toNameOnly hq) - makeDelete = - BranchUtil.makeDeleteTermName (resolveSplit' (HQ'.toName <$> hq)) - go r = stepManyAt . fmap makeDelete . toList . Set.delete r $ conflicted - - ReplaceI from to patchPath -> do - let patchPath' = fromMaybe defaultPatchPath patchPath - patch <- getPatchAt patchPath' - QueryResult fromMisses' fromHits <- hqNameQuery [from] - QueryResult toMisses' toHits <- hqNameQuery [to] - let termsFromRefs = termReferences fromHits - termsToRefs = termReferences toHits - typesFromRefs = typeReferences fromHits - typesToRefs = typeReferences toHits - --- Here are all the kinds of misses - --- [X] [X] - --- [Type] [Term] - --- [Term] [Type] - --- [Type] [X] - --- [Term] [X] - --- [X] [Type] - --- [X] [Term] - -- Type hits are term misses - termFromMisses = fromMisses' - <> (SR.typeName <$> typeResults fromHits) - termToMisses = toMisses' - <> (SR.typeName <$> typeResults toHits) - -- Term hits are type misses - typeFromMisses = fromMisses' - <> (SR.termName <$> termResults fromHits) - typeToMisses = toMisses' - <> (SR.termName <$> termResults toHits) - - termMisses = termFromMisses <> termToMisses - typeMisses = typeFromMisses <> typeToMisses - - replaceTerms :: Reference - -> Reference - -> Action m (Either Event Input) v () - replaceTerms fr tr = do - mft <- eval $ LoadTypeOfTerm fr - mtt <- eval $ LoadTypeOfTerm tr - let termNotFound = respond . TermNotFound' - . SH.take hqLength - . Reference.toShortHash - case (mft, mtt) of - (Nothing, _) -> termNotFound fr - (_, Nothing) -> termNotFound tr - (Just ft, Just tt) -> do - let - patch' = - -- The modified patch - over Patch.termEdits - (R.insert fr (Replace tr (TermEdit.typing tt ft)) - . R.deleteDom fr) - patch - (patchPath'', patchName) = resolveSplit' patchPath' - saveAndApplyPatch patchPath'' patchName patch' - - replaceTypes :: Reference - -> Reference - -> Action m (Either Event Input) v () - replaceTypes fr tr = do - let patch' = - -- The modified patch - over Patch.typeEdits - (R.insert fr (TypeEdit.Replace tr) . R.deleteDom fr) patch - (patchPath'', patchName) = resolveSplit' patchPath' - saveAndApplyPatch patchPath'' patchName patch' - - ambiguous t rs = - let rs' = Set.map Referent.Ref $ Set.fromList rs - in case t of - HQ.HashOnly h -> - hashConflicted h rs' - (Path.parseHQSplit' . HQ.toString -> Right n) -> - termConflicted n rs' - _ -> respond . BadName $ HQ.toString t - - mismatch typeName termName = respond $ TypeTermMismatch typeName termName - - - case (termsFromRefs, termsToRefs, typesFromRefs, typesToRefs) of - ([], [], [], []) -> respond $ SearchTermsNotFound termMisses - ([_], [], [], [_]) -> mismatch to from - ([], [_], [_], []) -> mismatch from to - ([_], [], _, _) -> respond $ SearchTermsNotFound termMisses - ([], [_], _, _) -> respond $ SearchTermsNotFound termMisses - (_, _, [_], []) -> respond $ SearchTermsNotFound typeMisses - (_, _, [], [_]) -> respond $ SearchTermsNotFound typeMisses - ([fr], [tr], [], []) -> replaceTerms fr tr - ([], [], [fr], [tr]) -> replaceTypes fr tr - (froms, [_], [], []) -> ambiguous from froms - ([], [], froms, [_]) -> ambiguous from froms - ([_], tos, [], []) -> ambiguous to tos - ([], [], [_], tos) -> ambiguous to tos - (_, _, _, _) -> error "unpossible" - - LoadI maybePath -> - case maybePath <|> (fst <$> latestFile') of - Nothing -> respond NoUnisonFile - Just path -> do - res <- eval . LoadSource . Text.pack $ path - case res of - InvalidSourceNameError -> respond $ InvalidSourceName path - LoadError -> respond $ SourceLoadFailed path - LoadSuccess contents -> loadUnisonFile (Text.pack path) contents - - AddI hqs -> - case uf of - Nothing -> respond NoUnisonFile - Just uf -> do - sr <- - Slurp.disallowUpdates - . applySelection hqs uf - . toSlurpResult currentPath' uf - <$> slurpResultNames - let adds = Slurp.adds sr - stepAtNoSync (Path.unabsolute currentPath', doSlurpAdds adds uf) - eval . AddDefsToCodebase . filterBySlurpResult sr $ uf - ppe <- prettyPrintEnvDecl =<< displayNames uf - respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr - addDefaultMetadata adds - syncRoot - - PreviewAddI hqs -> case (latestFile', uf) of - (Just (sourceName, _), Just uf) -> do - sr <- Slurp.disallowUpdates - . applySelection hqs uf - . toSlurpResult currentPath' uf - <$> slurpResultNames - previewResponse sourceName sr uf - _ -> respond NoUnisonFile - - UpdateI maybePatchPath hqs -> case uf of - Nothing -> respond NoUnisonFile - Just uf -> do - let patchPath = fromMaybe defaultPatchPath maybePatchPath - slurpCheckNames <- slurpResultNames - currentPathNames <- currentPathNames - let sr = applySelection hqs uf - . toSlurpResult currentPath' uf - $ slurpCheckNames - addsAndUpdates = Slurp.updates sr <> Slurp.adds sr - fileNames = UF.typecheckedToNames uf - -- todo: display some error if typeEdits or termEdits itself contains a loop - typeEdits :: Map Name (Reference, Reference) - typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) where - f v = case (toList (Names.typesNamed slurpCheckNames n) - ,toList (Names.typesNamed fileNames n)) of - ([old],[new]) -> (n, (old, new)) - _ -> error $ "Expected unique matches for " - ++ Var.nameStr v ++ " but got: " - ++ show otherwise - where n = Name.unsafeFromVar v - hashTerms :: Map Reference (Type v Ann) - hashTerms = Map.fromList (toList hashTerms0) where - hashTerms0 = (\(r, _wk, _tm, typ) -> (r, typ)) <$> UF.hashTerms uf - termEdits :: Map Name (Reference, Reference) - termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) where - g v = case ( toList (Names.refTermsNamed slurpCheckNames n) - , toList (Names.refTermsNamed fileNames n)) of - ([old], [new]) -> (n, (old, new)) - _ -> error $ "Expected unique matches for " - ++ Var.nameStr v ++ " but got: " - ++ show otherwise - where n = Name.unsafeFromVar v - termDeprecations :: [(Name, Referent)] - termDeprecations = - [ (n, r) | (oldTypeRef,_) <- Map.elems typeEdits - , (n, r) <- Names.constructorsForType oldTypeRef currentPathNames ] - - ye'ol'Patch <- getPatchAt patchPath - -- If `uf` updates a -> a', we want to replace all (a0 -> a) in patch - -- with (a0 -> a') in patch'. - -- So for all (a0 -> a) in patch, for all (a -> a') in `uf`, - -- we must know the type of a0, a, a'. - let - -- we need: - -- all of the `old` references from the `new` edits, - -- plus all of the `old` references for edits from patch we're replacing - collectOldForTyping :: [(Reference, Reference)] -> Patch -> Set Reference - collectOldForTyping new old = foldl' f mempty (new ++ fromOld) where - f acc (r, _r') = Set.insert r acc - newLHS = Set.fromList . fmap fst $ new - fromOld :: [(Reference, Reference)] - fromOld = [ (r,r') | (r, TermEdit.Replace r' _) <- R.toList . Patch._termEdits $ old - , Set.member r' newLHS ] - neededTypes = collectOldForTyping (toList termEdits) ye'ol'Patch - - allTypes :: Map Reference (Type v Ann) <- - fmap Map.fromList . for (toList neededTypes) $ \r -> - (r,) . fromMaybe (Type.builtin External "unknown type") - <$> (eval . LoadTypeOfTerm) r - - let typing r1 r2 = case (Map.lookup r1 allTypes, Map.lookup r2 hashTerms) of - (Just t1, Just t2) - | Typechecker.isEqual t1 t2 -> TermEdit.Same - | Typechecker.isSubtype t1 t2 -> TermEdit.Subtype - | otherwise -> TermEdit.Different - e -> error $ "compiler bug: typing map not constructed properly\n" <> - "typing " <> show r1 <> " " <> show r2 <> " : " <> show e - - let updatePatch :: Patch -> Patch - updatePatch p = foldl' step2 p' termEdits - where - p' = foldl' step1 p typeEdits - step1 p (r,r') = Patch.updateType r (TypeEdit.Replace r') p - step2 p (r,r') = Patch.updateTerm typing r (TermEdit.Replace r' (typing r r')) p - (p, seg) = Path.toAbsoluteSplit currentPath' patchPath - updatePatches :: Branch0 m -> m (Branch0 m) - updatePatches = Branch.modifyPatches seg updatePatch - - when (Slurp.isNonempty sr) $ do - -- take a look at the `updates` from the SlurpResult - -- and make a patch diff to record a replacement from the old to new references - stepManyAtMNoSync - [( Path.unabsolute currentPath' - , pure . doSlurpUpdates typeEdits termEdits termDeprecations) - ,( Path.unabsolute currentPath' - , pure . doSlurpAdds addsAndUpdates uf) - ,( Path.unabsolute p, updatePatches )] - eval . AddDefsToCodebase . filterBySlurpResult sr $ uf - ppe <- prettyPrintEnvDecl =<< displayNames uf - respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr - -- propagatePatch prints TodoOutput - void $ propagatePatchNoSync (updatePatch ye'ol'Patch) currentPath' - addDefaultMetadata addsAndUpdates - syncRoot - - PreviewUpdateI hqs -> case (latestFile', uf) of - (Just (sourceName, _), Just uf) -> do - sr <- applySelection hqs uf - . toSlurpResult currentPath' uf - <$> slurpResultNames - previewResponse sourceName sr uf - _ -> respond NoUnisonFile - - TodoI patchPath branchPath' -> do - patch <- getPatchAt (fromMaybe defaultPatchPath patchPath) - doShowTodoOutput patch $ resolveToAbsolute branchPath' - - TestI showOk showFail -> do - let - testTerms = Map.keys . R4.d1 . uncurry R4.selectD34 isTest - . Branch.deepTermMetadata $ currentBranch0 - testRefs = Set.fromList [ r | Referent.Ref r <- toList testTerms ] - oks results = - [ (r, msg) - | (r, Term.List' ts) <- Map.toList results - , Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts - , cid == DD.okConstructorId && ref == DD.testResultRef ] - fails results = - [ (r, msg) - | (r, Term.List' ts) <- Map.toList results - , Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts - , cid == DD.failConstructorId && ref == DD.testResultRef ] - cachedTests <- fmap Map.fromList . eval $ LoadWatches WK.TestWatch testRefs - let stats = Output.CachedTests (Set.size testRefs) (Map.size cachedTests) - names <- makePrintNamesFromLabeled' $ - LD.referents testTerms <> - LD.referents [ DD.okConstructorReferent, DD.failConstructorReferent ] - ppe <- fqnPPE names - respond $ TestResults stats ppe showOk showFail - (oks cachedTests) (fails cachedTests) - let toCompute = Set.difference testRefs (Map.keysSet cachedTests) - unless (Set.null toCompute) $ do - let total = Set.size toCompute - computedTests <- fmap join . for (toList toCompute `zip` [1..]) $ \(r,n) -> - case r of - Reference.DerivedId rid -> do - tm <- eval $ LoadTerm rid - case tm of - Nothing -> [] <$ respond (TermNotFound' . SH.take hqLength . Reference.toShortHash $ Reference.DerivedId rid) - Just tm -> do - respond $ TestIncrementalOutputStart ppe (n,total) r tm - -- v don't cache; test cache populated below - tm' <- eval $ Evaluate1 ppe False tm - case tm' of - Left e -> respond (EvaluationFailure e) $> [] - Right tm' -> do - -- After evaluation, cache the result of the test - eval $ PutWatch WK.TestWatch rid tm' - respond $ TestIncrementalOutputEnd ppe (n,total) r tm' - pure [(r, tm')] - r -> error $ "unpossible, tests can't be builtins: " <> show r - - let m = Map.fromList computedTests - respond $ TestResults Output.NewlyComputed ppe showOk showFail (oks m) (fails m) - - -- ListBranchesI -> - -- eval ListBranches >>= respond . ListOfBranches currentBranchName' - -- DeleteBranchI branchNames -> withBranches branchNames $ \bnbs -> do - -- uniqueToDelete <- prettyUniqueDefinitions bnbs - -- let deleteBranches b = - -- traverse (eval . DeleteBranch) b >> respond (Success input) - -- if (currentBranchName' `elem` branchNames) - -- then respond DeletingCurrentBranch - -- else if null uniqueToDelete - -- then deleteBranches branchNames - -- else ifM (confirmedCommand input) - -- (deleteBranches branchNames) - -- (respond . DeleteBranchConfirmation $ uniqueToDelete) - - PropagatePatchI patchPath scopePath -> do - patch <- getPatchAt patchPath - updated <- propagatePatch inputDescription patch (resolveToAbsolute scopePath) - unless updated (respond $ NothingToPatch patchPath scopePath) - - ExecuteI main -> addRunMain main uf >>= \case - NoTermWithThatName -> do - ppe <- suffixifiedPPE (NamesWithHistory.NamesWithHistory basicPrettyPrintNames mempty) - mainType <- eval RuntimeMain - respond $ NoMainFunction main ppe [mainType] - TermHasBadType ty -> do - ppe <- suffixifiedPPE (NamesWithHistory.NamesWithHistory basicPrettyPrintNames mempty) - mainType <- eval RuntimeMain - respond $ BadMainFunction main ty ppe [mainType] - RunMainSuccess unisonFile -> do - ppe <- executePPE unisonFile - e <- eval $ Execute ppe unisonFile - - case e of - Left e -> respond $ EvaluationFailure e - Right _ -> pure () -- TODO - - MakeStandaloneI output main -> do - mainType <- eval RuntimeMain - parseNames <- - flip NamesWithHistory.NamesWithHistory mempty <$> basicPrettyPrintNamesA - ppe <- suffixifiedPPE parseNames - let resolved = toList $ NamesWithHistory.lookupHQTerm main parseNames - smain = HQ.toString main - filtered <- catMaybes <$> - traverse (\r -> fmap (r,) <$> loadTypeOfTerm r) resolved - case filtered of - [(Referent.Ref ref, ty)] - | Typechecker.isSubtype ty mainType -> - eval (MakeStandalone ppe ref output) >>= \case - Just err -> respond $ EvaluationFailure err - Nothing -> pure () - | otherwise -> - respond $ BadMainFunction smain ty ppe [mainType] - _ -> respond $ NoMainFunction smain ppe [mainType] - - IOTestI main -> do - -- todo - allow this to run tests from scratch file, using addRunMain - testType <- eval RuntimeTest - parseNames <- (`NamesWithHistory.NamesWithHistory` mempty) <$> basicPrettyPrintNamesA - ppe <- suffixifiedPPE parseNames - -- use suffixed names for resolving the argument to display - let - oks results = - [ (r, msg) - | (r, Term.List' ts) <- results - , Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts - , cid == DD.okConstructorId && ref == DD.testResultRef ] - fails results = - [ (r, msg) - | (r, Term.List' ts) <- results - , Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts - , cid == DD.failConstructorId && ref == DD.testResultRef ] - - results = NamesWithHistory.lookupHQTerm main parseNames in - case toList results of - [Referent.Ref ref] -> do - typ <- loadTypeOfTerm (Referent.Ref ref) - case typ of - Just typ | Typechecker.isSubtype typ testType -> do - let a = ABT.annotation tm - tm = DD.forceTerm a a (Term.ref a ref) in do - -- v Don't cache IO tests - tm' <- eval $ Evaluate1 ppe False tm - case tm' of - Left e -> respond (EvaluationFailure e) - Right tm' -> - respond $ TestResults Output.NewlyComputed ppe True True (oks [(ref, tm')]) (fails [(ref, tm')]) - _ -> respond $ NoMainFunction (HQ.toString main) ppe [testType] - _ -> respond $ NoMainFunction (HQ.toString main) ppe [testType] - - -- UpdateBuiltinsI -> do - -- stepAt updateBuiltins - -- checkTodo - - MergeBuiltinsI -> do - -- these were added once, but maybe they've changed and need to be - -- added again. - let uf = UF.typecheckedUnisonFile (Map.fromList Builtin.builtinDataDecls) - (Map.fromList Builtin.builtinEffectDecls) - [Builtin.builtinTermsSrc Intrinsic] - mempty - eval $ AddDefsToCodebase uf - -- add the names; note, there are more names than definitions - -- due to builtin terms; so we don't just reuse `uf` above. - let srcb = BranchUtil.fromNames Builtin.names0 - _ <- updateAtM (currentPath' `snoc` "builtin") $ \destb -> - eval $ Merge Branch.RegularMerge srcb destb - success - - MergeIOBuiltinsI -> do - -- these were added once, but maybe they've changed and need to be - -- added again. - let uf = UF.typecheckedUnisonFile (Map.fromList Builtin.builtinDataDecls) - (Map.fromList Builtin.builtinEffectDecls) - [Builtin.builtinTermsSrc Intrinsic] - mempty - eval $ AddDefsToCodebase uf - -- these have not necessarily been added yet - eval $ AddDefsToCodebase IOSource.typecheckedFile' - - -- add the names; note, there are more names than definitions - -- due to builtin terms; so we don't just reuse `uf` above. - let names0 = Builtin.names0 - <> UF.typecheckedToNames @v IOSource.typecheckedFile' - let srcb = BranchUtil.fromNames names0 - _ <- updateAtM (currentPath' `snoc` "builtin") $ \destb -> - eval $ Merge Branch.RegularMerge srcb destb - - success - - ListEditsI maybePath -> do - let (p, seg) = - maybe (Path.toAbsoluteSplit currentPath' defaultPatchPath) - (Path.toAbsoluteSplit currentPath') - maybePath - patch <- eval . Eval . Branch.getPatch seg . Branch.head =<< getAt p - ppe <- suffixifiedPPE =<< - makePrintNamesFromLabeled' (Patch.labeledDependencies patch) - respond $ ListEdits patch ppe - - PullRemoteBranchI mayRepo path syncMode verbosity -> unlessError do - ns <- maybe (writePathToRead <$> resolveConfiguredGitUrl Pull path) pure mayRepo - lift $ unlessGitError do - b <- importRemoteBranch ns syncMode - let msg = Just $ PullAlreadyUpToDate ns path - let destAbs = resolveToAbsolute path - let printDiffPath = if Verbosity.isSilent verbosity then Nothing else Just path - lift $ mergeBranchAndPropagateDefaultPatch Branch.RegularMerge inputDescription msg b printDiffPath destAbs - - PushRemoteBranchI mayRepo path syncMode -> do - let srcAbs = resolveToAbsolute path - srcb <- getAt srcAbs - unlessError do - (repo, remotePath) <- maybe (resolveConfiguredGitUrl Push path) pure mayRepo - lift $ unlessGitError do - (cleanup, remoteRoot) <- unsafeTime "Push viewRemoteBranch" $ - viewRemoteBranch (writeToRead repo, Nothing, Path.empty) - -- We don't merge `srcb` with the remote namespace, `r`, we just - -- replace it. The push will be rejected if this rewinds time - -- or misses any new updates in `r` that aren't in `srcb` already. - let newRemoteRoot = Branch.modifyAt remotePath (const srcb) remoteRoot - unsafeTime "Push syncRemoteRootBranch" $ - syncRemoteRootBranch repo newRemoteRoot syncMode - lift . eval $ Eval cleanup - lift $ respond Success - ListDependentsI hq -> -- todo: add flag to handle transitive efficiently - resolveHQToLabeledDependencies hq >>= \lds -> - if null lds - then respond $ LabeledReferenceNotFound hq - else for_ lds $ \ld -> do - dependents <- let - tp r = eval $ GetDependents r - tm (Referent.Ref r) = eval $ GetDependents r - tm (Referent.Con r _i _ct) = eval $ GetDependents r - in LD.fold tp tm ld - (missing, names0) <- eval . Eval $ Branch.findHistoricalRefs' dependents root' - let types = R.toList $ Names.types names0 - let terms = fmap (second Referent.toReference) $ R.toList $ Names.terms names0 - let names = types <> terms - numberedArgs .= fmap (Text.unpack . Reference.toText) ((fmap snd names) <> toList missing) - respond $ ListDependents hqLength ld names missing - ListDependenciesI hq -> -- todo: add flag to handle transitive efficiently - resolveHQToLabeledDependencies hq >>= \lds -> - if null lds - then respond $ LabeledReferenceNotFound hq - else for_ lds $ \ld -> do - dependencies :: Set Reference <- let - tp r@(Reference.DerivedId i) = eval (LoadType i) <&> \case - Nothing -> error $ "What happened to " ++ show i ++ "?" - Just decl -> Set.delete r . DD.dependencies $ DD.asDataDecl decl - tp _ = pure mempty - tm (Referent.Ref r@(Reference.DerivedId i)) = eval (LoadTerm i) <&> \case - Nothing -> error $ "What happened to " ++ show i ++ "?" - Just tm -> Set.delete r $ Term.dependencies tm - tm con@(Referent.Con (Reference.DerivedId i) cid _ct) = eval (LoadType i) <&> \case - Nothing -> error $ "What happened to " ++ show i ++ "?" - Just decl -> case DD.typeOfConstructor (DD.asDataDecl decl) cid of - Nothing -> error $ "What happened to " ++ show con ++ "?" - Just tp -> Type.dependencies tp - tm _ = pure mempty - in LD.fold tp tm ld - (missing, names0) <- eval . Eval $ Branch.findHistoricalRefs' dependencies root' - let types = R.toList $ Names.types names0 - let terms = fmap (second Referent.toReference) $ R.toList $ Names.terms names0 - let names = types <> terms - numberedArgs .= fmap (Text.unpack . Reference.toText) ((fmap snd names) <> toList missing) - respond $ ListDependencies hqLength ld names missing - DebugNumberedArgsI -> use numberedArgs >>= respond . DumpNumberedArgs - DebugTypecheckedUnisonFileI -> case uf of - Nothing -> respond NoUnisonFile - Just uf -> let - datas, effects, terms :: [(Name, Reference.Id)] - datas = [ (Name.unsafeFromVar v, r) | (v, (r, _d)) <- Map.toList $ UF.dataDeclarationsId' uf ] - effects = [ (Name.unsafeFromVar v, r) | (v, (r, _e)) <- Map.toList $ UF.effectDeclarationsId' uf ] - terms = [ (Name.unsafeFromVar v, r) | (v, (r, _wk, _tm, _tp)) <- Map.toList $ UF.hashTermsId uf ] - in eval . Notify $ DumpUnisonFileHashes hqLength datas effects terms - DebugDumpNamespacesI -> do - let seen h = State.gets (Set.member h) - set h = State.modify (Set.insert h) - getCausal b = (Branch.headHash b, pure $ Branch._history b) - goCausal :: forall m. Monad m => [(Branch.Hash, m (Branch.UnwrappedBranch m))] -> StateT (Set Branch.Hash) m () - goCausal [] = pure () - goCausal ((h, mc) : queue) = do - ifM (seen h) (goCausal queue) do - lift mc >>= \case - Causal.One h b -> goBranch h b mempty queue - Causal.Cons h b tail -> goBranch h b [fst tail] (tail : queue) - Causal.Merge h b (Map.toList -> tails) -> goBranch h b (map fst tails) (tails ++ queue) - goBranch :: forall m. Monad m => Branch.Hash -> Branch0 m -> [Branch.Hash] -> [(Branch.Hash, m (Branch.UnwrappedBranch m))] -> StateT (Set Branch.Hash) m () - goBranch h b (Set.fromList -> causalParents) queue = case b of - Branch0 terms0 types0 children0 patches0 _ _ _ _ _ _ -> let - wrangleMetadata :: (Ord r, Ord n) => Metadata.Star r n -> r -> (r, (Set n, Set Metadata.Value)) - wrangleMetadata s r = - (r, (R.lookupDom r $ Star3.d1 s, Set.map snd . R.lookupDom r $ Star3.d3 s)) - terms = Map.fromList . map (wrangleMetadata terms0) . Foldable.toList $ Star3.fact terms0 - types = Map.fromList . map (wrangleMetadata types0) . Foldable.toList $ Star3.fact types0 - patches = fmap fst patches0 - children = fmap Branch.headHash children0 - in do - let d = Output.DN.DumpNamespace terms types patches children causalParents - -- the alternate implementation that doesn't rely on `traceM` blows up - traceM $ P.toPlain 200 (prettyDump (h, d)) - set h - goCausal (map getCausal (Foldable.toList children0) ++ queue) - prettyDump (h, Output.DN.DumpNamespace terms types patches children causalParents) = - P.lit "Namespace " <> P.shown h <> P.newline <> (P.indentN 2 $ P.linesNonEmpty [ - Monoid.unlessM (null causalParents) $ P.lit "Causal Parents:" <> P.newline <> P.indentN 2 (P.lines (map P.shown $ Set.toList causalParents)) - , Monoid.unlessM (null terms) $ P.lit "Terms:" <> P.newline <> P.indentN 2 (P.lines (map (prettyDefn Referent.toText) $ Map.toList terms)) - , Monoid.unlessM (null types) $ P.lit "Types:" <> P.newline <> P.indentN 2 (P.lines (map (prettyDefn Reference.toText) $ Map.toList types)) - , Monoid.unlessM (null patches) $ P.lit "Patches:" <> P.newline <> P.indentN 2 (P.column2 (map (bimap P.shown P.shown) $ Map.toList patches)) - , Monoid.unlessM (null children) $ P.lit "Children:" <> P.newline <> P.indentN 2 (P.column2 (map (bimap P.shown P.shown) $ Map.toList children)) - ]) - where - prettyLinks renderR r [] = P.indentN 2 $ P.text (renderR r) - prettyLinks renderR r links = P.indentN 2 (P.lines (P.text (renderR r) : (links <&> \r -> "+ " <> P.text (Reference.toText r)))) - prettyDefn renderR (r, (Foldable.toList -> names, Foldable.toList -> links)) = - P.lines (P.shown <$> if null names then [NameSegment ""] else names) <> P.newline <> prettyLinks renderR r links - void . eval . Eval . flip State.execStateT mempty $ goCausal [getCausal root'] - DebugDumpNamespaceSimpleI -> do - for_ (Relation.toList . Branch.deepTypes . Branch.head $ root') \(r, name) -> - traceM $ show name ++ ",Type," ++ Text.unpack (Reference.toText r) - for_ (Relation.toList . Branch.deepTerms . Branch.head $ root') \(r, name) -> - traceM $ show name ++ ",Term," ++ Text.unpack (Referent.toText r) - DebugClearWatchI {} -> eval ClearWatchCache - DeprecateTermI {} -> notImplemented - DeprecateTypeI {} -> notImplemented - RemoveTermReplacementI from patchPath -> - doRemoveReplacement from patchPath True - RemoveTypeReplacementI from patchPath -> - doRemoveReplacement from patchPath False - ShowDefinitionByPrefixI {} -> notImplemented - UpdateBuiltinsI -> notImplemented - QuitI -> MaybeT $ pure Nothing - where - notImplemented = eval $ Notify NotImplemented - success = respond Success - - resolveDefaultMetadata :: Path.Absolute -> Action' m v [String] - resolveDefaultMetadata path = do - let superpaths = Path.ancestors path - xs <- for - superpaths - (\path -> do - mayNames <- - eval . ConfigLookup @[String] $ configKey "DefaultMetadata" path - pure . join $ toList mayNames - ) - pure . join $ toList xs - - configKey k p = - Text.intercalate "." . toList $ k :<| fmap - NameSegment.toText - (Path.toSeq $ Path.unabsolute p) - - -- Takes a maybe (namespace address triple); returns it as-is if `Just`; - -- otherwise, tries to load a value from .unisonConfig, and complains - -- if needed. - resolveConfiguredGitUrl - :: PushPull - -> Path' - -> ExceptT (Output v) (Action' m v) WriteRemotePath - resolveConfiguredGitUrl pushPull destPath' = ExceptT do - let destPath = resolveToAbsolute destPath' - let configKey = gitUrlKey destPath - (eval . ConfigLookup) configKey >>= \case - Just url -> - case P.parse UriParser.writeRepoPath (Text.unpack configKey) url of - Left e -> - pure . Left $ - ConfiguredGitUrlParseError pushPull destPath' url (show e) - Right ns -> - pure . Right $ ns - Nothing -> - pure . Left $ NoConfiguredGitUrl pushPull destPath' - - gitUrlKey = configKey "GitUrl" - - case e of - Right input -> lastInput .= Just input - _ -> pure () - --- | Handle a @ShowDefinitionI@ input command, i.e. `view` or `edit`. -handleShowDefinition :: forall m v. Functor m => OutputLocation -> [HQ.HashQualified Name] -> Action' m v () -handleShowDefinition outputLoc inputQuery = do - -- If the query is empty, run a fuzzy search. - query <- - if null inputQuery - then do - branch <- fuzzyBranch - fuzzySelectTermsAndTypes branch - else pure inputQuery - currentPath' <- Path.unabsolute <$> use currentPath - root' <- use root - hqLength <- eval CodebaseHashLength - Backend.DefinitionResults terms types misses <- - eval (GetDefinitionsBySuffixes (Just currentPath') root' includeCycles query) - outputPath <- getOutputPath - when (not (null types && null terms)) do - let printNames = Backend.getCurrentPrettyNames (Backend.AllNames currentPath') root' - let ppe = PPE.fromNamesDecl hqLength printNames - respond (DisplayDefinitions outputPath ppe types terms) - when (not (null misses)) (respond (SearchTermsNotFound misses)) - -- We set latestFile to be programmatically generated, if we - -- are viewing these definitions to a file - this will skip the - -- next update for that file (which will happen immediately) - latestFile .= ((,True) <$> outputPath) - where - -- `view`: fuzzy find globally; `edit`: fuzzy find local to current branch - fuzzyBranch :: Action' m v (Branch0 m) - fuzzyBranch = - case outputLoc of - ConsoleLocation {} -> Branch.head <$> use root - -- fuzzy finding for 'edit's are local to the current branch - LatestFileLocation {} -> currentBranch0 - FileLocation {} -> currentBranch0 - where - currentBranch0 = do - currentPath' <- use currentPath - currentBranch <- getAt currentPath' - pure (Branch.head currentBranch) - -- `view`: don't include cycles; `edit`: include cycles - includeCycles = - case outputLoc of - ConsoleLocation -> Backend.DontIncludeCycles - FileLocation _ -> Backend.IncludeCycles - LatestFileLocation -> Backend.IncludeCycles - - -- Get the file path to send the definition(s) to. `Nothing` means the terminal. - getOutputPath :: Action' m v (Maybe FilePath) - getOutputPath = - case outputLoc of - ConsoleLocation -> pure Nothing - FileLocation path -> pure (Just path) - LatestFileLocation -> - use latestFile <&> \case - Nothing -> Just "scratch.u" - Just (path, _) -> Just path - --- todo: compare to `getHQTerms` / `getHQTypes`. Is one universally better? -resolveHQToLabeledDependencies :: Functor m => HQ.HashQualified Name -> Action' m v (Set LabeledDependency) -resolveHQToLabeledDependencies = \case - HQ.NameOnly n -> do - parseNames <- basicParseNames - let terms, types :: Set LabeledDependency - terms = Set.map LD.referent . Name.searchBySuffix n $ Names.terms parseNames - types = Set.map LD.typeRef . Name.searchBySuffix n $ Names.types parseNames - pure $ terms <> types - -- rationale: the hash should be unique enough that the name never helps - HQ.HashQualified _n sh -> resolveHashOnly sh - HQ.HashOnly sh -> resolveHashOnly sh - where - resolveHashOnly sh = do - terms <- eval $ TermReferentsByShortHash sh - types <- eval $ TypeReferencesByShortHash sh - pure $ Set.map LD.referent terms <> Set.map LD.typeRef types - -doDisplay :: Var v => OutputLocation -> NamesWithHistory -> Term v () -> Action' m v () -doDisplay outputLoc names tm = do - ppe <- prettyPrintEnvDecl names - tf <- use latestTypecheckedFile - let (tms, typs) = maybe mempty UF.indexByReference tf - latestFile' <- use latestFile - let - loc = case outputLoc of - ConsoleLocation -> Nothing - FileLocation path -> Just path - LatestFileLocation -> fmap fst latestFile' <|> Just "scratch.u" - useCache = True - evalTerm tm = fmap ErrorUtil.hush . fmap (fmap Term.unannotate) . eval $ - Evaluate1 (PPE.suffixifiedPPE ppe) useCache (Term.amap (const External) tm) - loadTerm (Reference.DerivedId r) = case Map.lookup r tms of - Nothing -> fmap (fmap Term.unannotate) . eval $ LoadTerm r - Just (tm,_) -> pure (Just $ Term.unannotate tm) - loadTerm _ = pure Nothing - loadDecl (Reference.DerivedId r) = case Map.lookup r typs of - Nothing -> fmap (fmap $ DD.amap (const ())) . eval $ LoadType r - Just decl -> pure (Just $ DD.amap (const ()) decl) - loadDecl _ = pure Nothing - loadTypeOfTerm' (Referent.Ref (Reference.DerivedId r)) - | Just (_,ty) <- Map.lookup r tms = pure $ Just (void ty) - loadTypeOfTerm' r = fmap (fmap void) . loadTypeOfTerm $ r - rendered <- DisplayValues.displayTerm ppe loadTerm loadTypeOfTerm' evalTerm loadDecl tm - respond $ DisplayRendered loc rendered - -getLinks :: (Var v, Monad m) - => SrcLoc - -> Path.HQSplit' - -> Either (Set Reference) (Maybe String) - -> ExceptT (Output v) - (Action' m v) - (PPE.PrettyPrintEnv, - -- e.g. ("Foo.doc", #foodoc, Just (#builtin.Doc) - [(HQ.HashQualified Name, Reference, Maybe (Type v Ann))]) -getLinks srcLoc src mdTypeStr = ExceptT $ do - let go = fmap Right . getLinks' src - case mdTypeStr of - Left s -> go (Just s) - Right Nothing -> go Nothing - Right (Just mdTypeStr) -> parseType srcLoc mdTypeStr >>= \case - Left e -> pure $ Left e - Right typ -> go . Just . Set.singleton $ Hashing.typeToReference typ - -getLinks' :: (Var v, Monad m) - => Path.HQSplit' -- definition to print metadata of - -> Maybe (Set Reference) -- return all metadata if empty - -> Action' m v (PPE.PrettyPrintEnv, - -- e.g. ("Foo.doc", #foodoc, Just (#builtin.Doc) - [(HQ.HashQualified Name, Reference, Maybe (Type v Ann))]) -getLinks' src selection0 = do - root0 <- Branch.head <$> use root - currentPath' <- use currentPath - let resolveSplit' = Path.fromAbsoluteSplit . Path.toAbsoluteSplit currentPath' - p = resolveSplit' src -- ex: the (parent,hqsegment) of `List.map` - `List` - -- all metadata (type+value) associated with name `src` - allMd = R4.d34 (BranchUtil.getTermMetadataHQNamed p root0) - <> R4.d34 (BranchUtil.getTypeMetadataHQNamed p root0) - allMd' = maybe allMd (`R.restrictDom` allMd) selection0 - -- then list the values after filtering by type - allRefs :: Set Reference = R.ran allMd' - sigs <- for (toList allRefs) (loadTypeOfTerm . Referent.Ref) - let deps = Set.map LD.termRef allRefs <> - Set.unions [ Set.map LD.typeRef . Type.dependencies $ t | Just t <- sigs ] - ppe <- prettyPrintEnvDecl =<< makePrintNamesFromLabeled' deps - let ppeDecl = PPE.unsuffixifiedPPE ppe - let sortedSigs = sortOn snd (toList allRefs `zip` sigs) - let out = [(PPE.termName ppeDecl (Referent.Ref r), r, t) | (r, t) <- sortedSigs ] - pure (PPE.suffixifiedPPE ppe, out) - -resolveShortBranchHash :: - ShortBranchHash -> ExceptT (Output v) (Action' m v) (Branch m) -resolveShortBranchHash hash = ExceptT do - hashSet <- eval $ BranchHashesByPrefix hash - len <- eval BranchHashLength - case Set.toList hashSet of - [] -> pure . Left $ NoBranchWithHash hash - [h] -> fmap Right . eval $ LoadLocalBranch h - _ -> pure . Left $ BranchHashAmbiguous hash (Set.map (SBH.fromHash len) hashSet) - --- Returns True if the operation changed the namespace, False otherwise. -propagatePatchNoSync - :: (Monad m, Var v) - => Patch - -> Path.Absolute - -> Action' m v Bool -propagatePatchNoSync patch scopePath = do - r <- use root - let nroot = Branch.toNames (Branch.head r) - stepAtMNoSync' (Path.unabsolute scopePath, - lift . lift . Propagate.propagateAndApply nroot patch) - --- Returns True if the operation changed the namespace, False otherwise. -propagatePatch :: (Monad m, Var v) => - InputDescription -> Patch -> Path.Absolute -> Action' m v Bool -propagatePatch inputDescription patch scopePath = do - r <- use root - let nroot = Branch.toNames (Branch.head r) - stepAtM' (inputDescription <> " (applying patch)") - (Path.unabsolute scopePath, - lift . lift . Propagate.propagateAndApply nroot patch) - --- | Create the args needed for showTodoOutput and call it -doShowTodoOutput :: Monad m => Patch -> Path.Absolute -> Action' m v () -doShowTodoOutput patch scopePath = do - scope <- getAt scopePath - let names0 = Branch.toNames (Branch.head scope) - -- only needs the local references to check for obsolete defs - let getPpe = do - names <- makePrintNamesFromLabeled' (Patch.labeledDependencies patch) - prettyPrintEnvDecl names - showTodoOutput getPpe patch names0 - --- | Show todo output if there are any conflicts or edits. -showTodoOutput - :: Action' m v PPE.PrettyPrintEnvDecl - -- ^ Action that fetches the pretty print env. It's expensive because it - -- involves looking up historical names, so only call it if necessary. - -> Patch - -> Names - -> Action' m v () -showTodoOutput getPpe patch names0 = do - todo <- checkTodo patch names0 - if TO.noConflicts todo && TO.noEdits todo - then respond NoConflictsOrEdits - else do - numberedArgs .= - (Text.unpack . Reference.toText . view _2 <$> - fst (TO.todoFrontierDependents todo)) - ppe <- getPpe - respond $ TodoOutput ppe todo - -checkTodo :: Patch -> Names -> Action m i v (TO.TodoOutput v Ann) -checkTodo patch names0 = do - let shouldUpdate = Names.contains names0 - f <- Propagate.computeFrontier (eval . GetDependents) patch shouldUpdate - let dirty = R.dom f - frontier = R.ran f - (frontierTerms, frontierTypes) <- loadDisplayInfo frontier - (dirtyTerms, dirtyTypes) <- loadDisplayInfo dirty - -- todo: something more intelligent here? - let scoreFn = const 1 - remainingTransitive <- - frontierTransitiveDependents (eval . GetDependents) names0 frontier - let - scoredDirtyTerms = - List.sortOn (view _1) [ (scoreFn r, r, t) | (r,t) <- dirtyTerms ] - scoredDirtyTypes = - List.sortOn (view _1) [ (scoreFn r, r, t) | (r,t) <- dirtyTypes ] - pure $ - TO.TodoOutput - (Set.size remainingTransitive) - (frontierTerms, frontierTypes) - (scoredDirtyTerms, scoredDirtyTypes) - (Names.conflicts names0) - (Patch.conflicts patch) - where - frontierTransitiveDependents :: - Monad m => (Reference -> m (Set Reference)) -> Names -> Set Reference -> m (Set Reference) - frontierTransitiveDependents dependents names0 rs = do - let branchDependents r = Set.filter (Names.contains names0) <$> dependents r - tdeps <- transitiveClosure branchDependents rs - -- we don't want the frontier in the result - pure $ tdeps `Set.difference` rs - -eval :: Command m i v a -> Action m i v a -eval = lift . lift . Free.eval - -confirmedCommand :: Input -> Action m i v Bool -confirmedCommand i = do - i0 <- use lastInput - pure $ Just i == i0 - -listBranch :: Branch0 m -> [SearchResult] -listBranch (Branch.toNames -> b) = - List.sortOn (\s -> (SR.name s, s)) (SR.fromNames b) - --- | restores the full hash to these search results, for _numberedArgs purposes -searchResultToHQString :: SearchResult -> String -searchResultToHQString = \case - SR.Tm' n r _ -> HQ.toString $ HQ.requalify n r - SR.Tp' n r _ -> HQ.toString $ HQ.requalify n (Referent.Ref r) - _ -> error "impossible match failure" - --- Return a list of definitions whose names fuzzy match the given queries. -fuzzyNameDistance :: Name -> Name -> Maybe Int -fuzzyNameDistance (Name.toString -> q) (Name.toString -> n) = - Find.simpleFuzzyScore q n - --- return `name` and `name....` -_searchBranchPrefix :: Branch m -> Name -> [SearchResult] -_searchBranchPrefix b n = case Path.unsnoc (Path.fromName n) of - Nothing -> [] - Just (init, last) -> case Branch.getAt init b of - Nothing -> [] - Just b -> SR.fromNames . Names.prefix0 n $ names0 - where - lastName = Path.toName (Path.singleton last) - subnames = Branch.toNames . Branch.head $ - Branch.getAt' (Path.singleton last) b - rootnames = - Names.filter (== lastName) . - Branch.toNames . set Branch.children mempty $ Branch.head b - names0 = rootnames <> Names.prefix0 lastName subnames - -searchResultsFor :: Names -> [Referent] -> [Reference] -> [SearchResult] -searchResultsFor ns terms types = - [ SR.termSearchResult ns name ref - | ref <- terms - , name <- toList (Names.namesForReferent ns ref) - ] <> - [ SR.typeSearchResult ns name ref - | ref <- types - , name <- toList (Names.namesForReference ns ref) - ] - -searchBranchScored :: forall score. (Ord score) - => Names - -> (Name -> Name -> Maybe score) - -> [HQ.HashQualified Name] - -> [SearchResult] -searchBranchScored names0 score queries = - nubOrd . fmap snd . toList $ searchTermNamespace <> searchTypeNamespace - where - searchTermNamespace = foldMap do1query queries - where - do1query :: HQ.HashQualified Name -> Set (Maybe score, SearchResult) - do1query q = foldMap (score1hq q) (R.toList . Names.terms $ names0) - score1hq :: HQ.HashQualified Name -> (Name, Referent) -> Set (Maybe score, SearchResult) - score1hq query (name, ref) = case query of - HQ.NameOnly qn -> - pair qn - HQ.HashQualified qn h | h `SH.isPrefixOf` Referent.toShortHash ref -> - pair qn - HQ.HashOnly h | h `SH.isPrefixOf` Referent.toShortHash ref -> - Set.singleton (Nothing, result) - _ -> mempty - where - result = SR.termSearchResult names0 name ref - pair qn = case score qn name of - Just score -> Set.singleton (Just score, result) - Nothing -> mempty - searchTypeNamespace = foldMap do1query queries - where - do1query :: HQ.HashQualified Name -> Set (Maybe score, SearchResult) - do1query q = foldMap (score1hq q) (R.toList . Names.types $ names0) - score1hq :: HQ.HashQualified Name -> (Name, Reference) -> Set (Maybe score, SearchResult) - score1hq query (name, ref) = case query of - HQ.NameOnly qn -> - pair qn - HQ.HashQualified qn h | h `SH.isPrefixOf` Reference.toShortHash ref -> - pair qn - HQ.HashOnly h | h `SH.isPrefixOf` Reference.toShortHash ref -> - Set.singleton (Nothing, result) - _ -> mempty - where - result = SR.typeSearchResult names0 name ref - pair qn = case score qn name of - Just score -> Set.singleton (Just score, result) - Nothing -> mempty - -handleBackendError :: Backend.BackendError -> Action m i v () -handleBackendError = \case - Backend.NoSuchNamespace path -> - respond . BranchNotFound $ Path.absoluteToPath' path - Backend.BadRootBranch e -> respond $ BadRootBranch e - Backend.NoBranchForHash h -> do - sbhLength <- eval BranchHashLength - respond . NoBranchWithHash $ SBH.fromHash sbhLength h - Backend.CouldntLoadBranch h -> do - respond . CouldntLoadBranch $ h - Backend.CouldntExpandBranchHash sbh -> respond $ NoBranchWithHash sbh - Backend.AmbiguousBranchHash h hashes -> - respond $ BranchHashAmbiguous h hashes - Backend.MissingSignatureForTerm r -> - respond $ TermMissingType r - -respond :: Output v -> Action m i v () -respond output = eval $ Notify output - -respondNumbered :: NumberedOutput v -> Action m i v () -respondNumbered output = do - args <- eval $ NotifyNumbered output - unless (null args) $ - numberedArgs .= toList args - -unlessError :: ExceptT (Output v) (Action' m v) () -> Action' m v () -unlessError ma = runExceptT ma >>= either (eval . Notify) pure - -unlessError' :: (e -> Output v) -> ExceptT e (Action' m v) () -> Action' m v () -unlessError' f ma = unlessError $ withExceptT f ma - --- | supply `dest0` if you want to print diff messages --- supply unchangedMessage if you want to display it if merge had no effect -mergeBranchAndPropagateDefaultPatch :: (Monad m, Var v) => Branch.MergeMode -> - InputDescription -> Maybe (Output v) -> Branch m -> Maybe Path.Path' -> Path.Absolute -> Action' m v () -mergeBranchAndPropagateDefaultPatch mode inputDescription unchangedMessage srcb dest0 dest = - ifM (mergeBranch mode inputDescription srcb dest0 dest) - (loadPropagateDiffDefaultPatch inputDescription dest0 dest) - (for_ unchangedMessage respond) - where - mergeBranch :: (Monad m, Var v) => - Branch.MergeMode -> InputDescription -> Branch m -> Maybe Path.Path' -> Path.Absolute -> Action' m v Bool - mergeBranch mode inputDescription srcb dest0 dest = unsafeTime "Merge Branch" $ do - destb <- getAt dest - merged <- eval $ Merge mode srcb destb - b <- updateAtM inputDescription dest (const $ pure merged) - for_ dest0 $ \dest0 -> - diffHelper (Branch.head destb) (Branch.head merged) >>= - respondNumbered . uncurry (ShowDiffAfterMerge dest0 dest) - pure b - -loadPropagateDiffDefaultPatch :: (Monad m, Var v) => - InputDescription -> Maybe Path.Path' -> Path.Absolute -> Action' m v () -loadPropagateDiffDefaultPatch inputDescription dest0 dest = unsafeTime "Propagate Default Patch" $ do - original <- getAt dest - patch <- eval . Eval $ Branch.getPatch defaultPatchNameSegment (Branch.head original) - patchDidChange <- propagatePatch inputDescription patch dest - when patchDidChange . for_ dest0 $ \dest0 -> do - patched <- getAt dest - let patchPath = snoc dest0 defaultPatchNameSegment - diffHelper (Branch.head original) (Branch.head patched) >>= - respondNumbered . uncurry (ShowDiffAfterMergePropagate dest0 dest patchPath) - --- | Get metadata type/value from a name. --- --- May fail with either: --- --- * 'MetadataMissingType', if the given name is associated with a single reference, but that reference doesn't have a --- type. --- * 'MetadataAmbiguous', if the given name is associated with more than one reference. -getMetadataFromName :: - Var v => - HQ.HashQualified Name -> - Action m (Either Event Input) v (Either (Output v) (Metadata.Type, Metadata.Value)) -getMetadataFromName name = do - (Set.toList <$> getHQTerms name) >>= \case - [ref@(Referent.Ref val)] -> - eval (LoadTypeOfTerm val) >>= \case - Nothing -> do - ppe <- getPPE - pure (Left (MetadataMissingType ppe ref)) - Just ty -> pure (Right (Hashing.typeToReference ty, val)) - -- FIXME: we want a different error message if the given name is associated with a data constructor (`Con`). - refs -> do - ppe <- getPPE - pure (Left (MetadataAmbiguous name ppe refs)) - where - getPPE :: Action m (Either Event Input) v PPE.PrettyPrintEnv - getPPE = do - currentPath' <- use currentPath - sbhLength <- eval BranchHashLength - Backend.basicSuffixifiedNames sbhLength <$> use root <*> pure (Backend.AllNames $ Path.unabsolute currentPath') - --- | Get the set of terms related to a hash-qualified name. -getHQTerms :: HQ.HashQualified Name -> Action' m v (Set Referent) -getHQTerms = \case - HQ.NameOnly n -> do - root0 <- Branch.head <$> use root - currentPath' <- use currentPath - -- absolute-ify the name, then lookup in deepTerms of root - let path = - n - & Path.fromName' - & Path.resolve currentPath' - & Path.unabsolute - & Path.toName - pure $ R.lookupRan path (Branch.deepTerms root0) - HQ.HashOnly sh -> hashOnly sh - HQ.HashQualified _ sh -> hashOnly sh - where - hashOnly sh = eval $ TermReferentsByShortHash sh - - -getAt :: Functor m => Path.Absolute -> Action m i v (Branch m) -getAt (Path.Absolute p) = - use root <&> fromMaybe Branch.empty . Branch.getAt p - --- Update a branch at the given path, returning `True` if --- an update occurred and false otherwise -updateAtM :: Applicative m - => InputDescription - -> Path.Absolute - -> (Branch m -> Action m i v (Branch m)) - -> Action m i v Bool -updateAtM reason (Path.Absolute p) f = do - b <- use lastSavedRoot - b' <- Branch.modifyAtM p f b - updateRoot b' reason - pure $ b /= b' - -stepAt - :: forall m i v - . Monad m - => InputDescription - -> (Path, Branch0 m -> Branch0 m) - -> Action m i v () -stepAt cause = stepManyAt @m @[] cause . pure - -stepAtNoSync :: forall m i v. Monad m - => (Path, Branch0 m -> Branch0 m) - -> Action m i v () -stepAtNoSync = stepManyAtNoSync @m @[] . pure - -stepAtM :: forall m i v. Monad m - => InputDescription - -> (Path, Branch0 m -> m (Branch0 m)) - -> Action m i v () -stepAtM cause = stepManyAtM @m @[] cause . pure - -stepAtM' - :: forall m i v - . Monad m - => InputDescription - -> (Path, Branch0 m -> Action m i v (Branch0 m)) - -> Action m i v Bool -stepAtM' cause = stepManyAtM' @m @[] cause . pure - -stepAtMNoSync' - :: forall m i v - . Monad m - => (Path, Branch0 m -> Action m i v (Branch0 m)) - -> Action m i v Bool -stepAtMNoSync' = stepManyAtMNoSync' @m @[] . pure - -stepManyAt - :: (Monad m, Foldable f) - => InputDescription - -> f (Path, Branch0 m -> Branch0 m) - -> Action m i v () -stepManyAt reason actions = do - stepManyAtNoSync actions - b <- use root - updateRoot b reason - --- Like stepManyAt, but doesn't update the root -stepManyAtNoSync - :: (Monad m, Foldable f) - => f (Path, Branch0 m -> Branch0 m) - -> Action m i v () -stepManyAtNoSync actions = do - b <- use root - let new = Branch.stepManyAt actions b - root .= new - -stepManyAtM :: (Monad m, Foldable f) - => InputDescription - -> f (Path, Branch0 m -> m (Branch0 m)) - -> Action m i v () -stepManyAtM reason actions = do - stepManyAtMNoSync actions - b <- use root - updateRoot b reason - -stepManyAtMNoSync :: (Monad m, Foldable f) - => f (Path, Branch0 m -> m (Branch0 m)) - -> Action m i v () -stepManyAtMNoSync actions = do - b <- use root - b' <- eval . Eval $ Branch.stepManyAtM actions b - root .= b' - -stepManyAtM' :: (Monad m, Foldable f) - => InputDescription - -> f (Path, Branch0 m -> Action m i v (Branch0 m)) - -> Action m i v Bool -stepManyAtM' reason actions = do - b <- use root - b' <- Branch.stepManyAtM actions b - updateRoot b' reason - pure (b /= b') - -stepManyAtMNoSync' :: (Monad m, Foldable f) - => f (Path, Branch0 m -> Action m i v (Branch0 m)) - -> Action m i v Bool -stepManyAtMNoSync' actions = do - b <- use root - b' <- Branch.stepManyAtM actions b - root .= b' - pure (b /= b') - -updateRoot :: Branch m -> InputDescription -> Action m i v () -updateRoot new reason = do - old <- use lastSavedRoot - when (old /= new) $ do - root .= new - eval $ SyncLocalRootBranch new - eval $ AppendToReflog reason old new - lastSavedRoot .= new - --- cata for 0, 1, or more elements of a Foldable --- tries to match as lazily as possible -zeroOneOrMore :: Foldable f => f a -> b -> (a -> b) -> (f a -> b) -> b -zeroOneOrMore f zero one more = case toList f of - _ : _ : _ -> more f - a : _ -> one a - _ -> zero - --- Goal: If `remaining = root - toBeDeleted` contains definitions X which --- depend on definitions Y not in `remaining` (which should also be in --- `toBeDeleted`), then complain by returning (Y, X). -getEndangeredDependents :: forall m. Monad m - => (Reference -> m (Set Reference)) - -> Names - -> Names - -> m (Names, Names) -getEndangeredDependents getDependents toDelete root = do - let remaining = root `Names.difference` toDelete - toDelete', remaining', extinct :: Set Reference - toDelete' = Names.allReferences toDelete - remaining' = Names.allReferences remaining -- left over after delete - extinct = toDelete' `Set.difference` remaining' -- deleting and not left over - accumulateDependents m r = getDependents r <&> \ds -> Map.insert r ds m - dependentsOfExtinct :: Map Reference (Set Reference) <- - foldM accumulateDependents mempty extinct - let orphaned, endangered, failed :: Set Reference - orphaned = fold dependentsOfExtinct - endangered = orphaned `Set.intersection` remaining' - failed = Set.filter hasEndangeredDependent extinct - hasEndangeredDependent r = any (`Set.member` endangered) - (dependentsOfExtinct Map.! r) - pure ( Names.restrictReferences failed toDelete - , Names.restrictReferences endangered root `Names.difference` toDelete) - --- Applies the selection filter to the adds/updates of a slurp result, --- meaning that adds/updates should only contain the selection or its transitive --- dependencies, any unselected transitive dependencies of the selection will --- be added to `extraDefinitions`. -applySelection - :: forall v a - . Var v - => [HQ'.HashQualified Name] - -> UF.TypecheckedUnisonFile v a - -> SlurpResult v - -> SlurpResult v -applySelection [] _ = id -applySelection hqs file = \sr@SlurpResult{adds, updates} -> - sr { adds = adds `SC.intersection` closed - , updates = updates `SC.intersection` closed - , extraDefinitions = closed `SC.difference` selection - } - where - selectedNames = - Names.filterByHQs (Set.fromList hqs) (UF.typecheckedToNames file) - selection, closed :: SlurpComponent v - selection = SlurpComponent selectedTypes selectedTerms - closed = SC.closeWithDependencies file selection - selectedTypes, selectedTerms :: Set v - selectedTypes = Set.map var $ R.dom (Names.types selectedNames) - selectedTerms = Set.map var $ R.dom (Names.terms selectedNames) - -var :: Var v => Name -> v -var name = Var.named (Name.toText name) - -toSlurpResult - :: forall v - . Var v - => Path.Absolute - -> UF.TypecheckedUnisonFile v Ann - -> Names - -> SlurpResult v -toSlurpResult currentPath uf existingNames = - Slurp.subtractComponent (conflicts <> ctorCollisions) $ SlurpResult - uf - mempty - adds - dups - mempty - conflicts - updates - termCtorCollisions - ctorTermCollisions - termAliases - typeAliases - mempty - where - fileNames = UF.typecheckedToNames uf - - sc :: R.Relation Name Referent -> R.Relation Name Reference -> SlurpComponent v - sc terms types = SlurpComponent { terms = Set.map var (R.dom terms) - , types = Set.map var (R.dom types) } - - -- conflict (n,r) if n is conflicted in names0 - conflicts :: SlurpComponent v - conflicts = sc terms types where - terms = R.filterDom (conflicted . Names.termsNamed existingNames) - (Names.terms fileNames) - types = R.filterDom (conflicted . Names.typesNamed existingNames) - (Names.types fileNames) - conflicted s = Set.size s > 1 - - ctorCollisions :: SlurpComponent v - ctorCollisions = - mempty { SC.terms = termCtorCollisions <> ctorTermCollisions } - - -- termCtorCollision (n,r) if (n, r' /= r) exists in existingNames and - -- r is Ref and r' is Con - termCtorCollisions :: Set v - termCtorCollisions = Set.fromList - [ var n - | (n, Referent.Ref{}) <- R.toList (Names.terms fileNames) - , [r@Referent.Con{}] <- [toList $ Names.termsNamed existingNames n] - -- ignore collisions w/ ctors of types being updated - , Set.notMember (Referent.toReference r) typesToUpdate - ] - - -- the set of typerefs that are being updated by this file - typesToUpdate :: Set Reference - typesToUpdate = Set.fromList - [ r - | (n, r') <- R.toList (Names.types fileNames) - , r <- toList (Names.typesNamed existingNames n) - , r /= r' - ] - - -- ctorTermCollisions (n,r) if (n, r' /= r) exists in names0 and r is Con - -- and r' is Ref except we relaxed it to where r' can be Con or Ref - -- what if (n,r) and (n,r' /= r) exists in names and r, r' are Con - ctorTermCollisions :: Set v - ctorTermCollisions = Set.fromList - [ var n - | (n, Referent.Con{}) <- R.toList (Names.terms fileNames) - , r <- toList $ Names.termsNamed existingNames n - -- ignore collisions w/ ctors of types being updated - , Set.notMember (Referent.toReference r) typesToUpdate - , Set.notMember (var n) (terms dups) - ] - - -- duplicate (n,r) if (n,r) exists in names0 - dups :: SlurpComponent v - dups = sc terms types where - terms = R.intersection (Names.terms existingNames) (Names.terms fileNames) - types = R.intersection (Names.types existingNames) (Names.types fileNames) - - -- update (n,r) if (n,r' /= r) exists in existingNames and r, r' are Ref - updates :: SlurpComponent v - updates = SlurpComponent (Set.fromList types) (Set.fromList terms) where - terms = - [ var n - | (n, r'@Referent.Ref{}) <- R.toList (Names.terms fileNames) - , [r@Referent.Ref{}] <- [toList $ Names.termsNamed existingNames n] - , r' /= r - ] - types = - [ var n - | (n, r') <- R.toList (Names.types fileNames) - , [r] <- [toList $ Names.typesNamed existingNames n] - , r' /= r - ] - - buildAliases - :: R.Relation Name Referent - -> R.Relation Name Referent - -> Set v - -> Map v Slurp.Aliases - buildAliases existingNames namesFromFile duplicates = Map.fromList - [ ( var n - , if null aliasesOfOld - then Slurp.AddAliases aliasesOfNew - else Slurp.UpdateAliases aliasesOfOld aliasesOfNew - ) - | (n, r@Referent.Ref{}) <- R.toList namesFromFile - -- All the refs whose names include `n`, and are not `r` - , let - refs = Set.delete r $ R.lookupDom n existingNames - aliasesOfNew = - Set.map (Path.unprefixName currentPath) . Set.delete n $ - R.lookupRan r existingNames - aliasesOfOld = - Set.map (Path.unprefixName currentPath) . Set.delete n . R.dom $ - R.restrictRan existingNames refs - , not (null aliasesOfNew && null aliasesOfOld) - , Set.notMember (var n) duplicates - ] - - termAliases :: Map v Slurp.Aliases - termAliases = buildAliases (Names.terms existingNames) - (Names.terms fileNames) - (SC.terms dups) - - typeAliases :: Map v Slurp.Aliases - typeAliases = buildAliases (R.mapRan Referent.Ref $ Names.types existingNames) - (R.mapRan Referent.Ref $ Names.types fileNames) - (SC.types dups) - - -- (n,r) is in `adds` if n isn't in existingNames - adds = sc terms types where - terms = addTerms (Names.terms existingNames) (Names.terms fileNames) - types = addTypes (Names.types existingNames) (Names.types fileNames) - addTerms existingNames = R.filter go where - go (n, Referent.Ref{}) = (not . R.memberDom n) existingNames - go _ = False - addTypes existingNames = R.filter go where - go (n, _) = (not . R.memberDom n) existingNames - -displayI :: (Monad m, Var v) => Names - -> OutputLocation - -> HQ.HashQualified Name - -> Action m (Either Event Input) v () -displayI prettyPrintNames outputLoc hq = do - uf <- use latestTypecheckedFile >>= addWatch (HQ.toString hq) - case uf of - Nothing -> do - let parseNames = (`NamesWithHistory.NamesWithHistory` mempty) prettyPrintNames - results = NamesWithHistory.lookupHQTerm hq parseNames - if Set.null results then - respond $ SearchTermsNotFound [hq] - else if Set.size results > 1 then - respond $ TermAmbiguous hq results - -- ... but use the unsuffixed names for display - else do - let tm = Term.fromReferent External $ Set.findMin results - pped <- prettyPrintEnvDecl parseNames - tm <- eval $ Evaluate1 (PPE.suffixifiedPPE pped) True tm - case tm of - Left e -> respond (EvaluationFailure e) - Right tm -> doDisplay outputLoc parseNames (Term.unannotate tm) - Just (toDisplay, unisonFile) -> do - ppe <- executePPE unisonFile - unlessError' EvaluationFailure do - evalResult <- ExceptT . eval . Evaluate ppe $ unisonFile - case Command.lookupEvalResult toDisplay evalResult of - Nothing -> error $ "Evaluation dropped a watch expression: " <> HQ.toString hq - Just tm -> lift do - ns <- displayNames unisonFile - doDisplay outputLoc ns tm - - -docsI :: - (Ord v, Monad m, Var v) => - SrcLoc -> - Names -> - Path.HQSplit' -> - Action m (Either Event Input) v () -docsI srcLoc prettyPrintNames src = do - fileByName where - {- Given `docs foo`, we look for docs in 3 places, in this order: - (fileByName) First check the file for `foo.doc`, and if found do `display foo.doc` - (codebaseByMetadata) Next check for doc metadata linked to `foo` in the codebase - (codebaseByName) Lastly check for `foo.doc` in the codebase and if found do `display foo.doc` - -} - hq :: HQ.HashQualified Name - hq = let - hq' :: HQ'.HashQualified Name - hq' = Name.convert @Path.Path' @Name <$> Name.convert src - in Name.convert hq' - - dotDoc :: HQ.HashQualified Name - dotDoc = hq <&> \n -> Name.joinDot n "doc" - - fileByName = do - ns <- maybe mempty UF.typecheckedToNames <$> use latestTypecheckedFile - fnames <- pure $ NamesWithHistory.NamesWithHistory ns mempty - case NamesWithHistory.lookupHQTerm dotDoc fnames of - s | Set.size s == 1 -> do - -- the displayI command expects full term names, so we resolve - -- the hash back to its full name in the file - fname' <- pure $ NamesWithHistory.longestTermName 10 (Set.findMin s) fnames - displayI prettyPrintNames ConsoleLocation fname' - _ -> codebaseByMetadata - - codebaseByMetadata = unlessError do - (ppe, out) <- getLinks srcLoc src (Left $ Set.fromList [DD.docRef, DD.doc2Ref]) - lift case out of - [] -> codebaseByName - [(_name, ref, _tm)] -> do - len <- eval BranchHashLength - let names = NamesWithHistory.NamesWithHistory prettyPrintNames mempty - let tm = Term.ref External ref - tm <- eval $ Evaluate1 (PPE.fromNames len names) True tm - case tm of - Left e -> respond (EvaluationFailure e) - Right tm -> doDisplay ConsoleLocation names (Term.unannotate tm) - out -> do - numberedArgs .= fmap (HQ.toString . view _1) out - respond $ ListOfLinks ppe out - - codebaseByName = do - parseNames <- basicParseNames - case NamesWithHistory.lookupHQTerm dotDoc (NamesWithHistory.NamesWithHistory parseNames mempty) of - s | Set.size s == 1 -> displayI prettyPrintNames ConsoleLocation dotDoc - | Set.size s == 0 -> respond $ ListOfLinks mempty [] - | otherwise -> -- todo: return a list of links here too - respond $ ListOfLinks mempty [] - - -filterBySlurpResult :: Ord v - => SlurpResult v - -> UF.TypecheckedUnisonFile v Ann - -> UF.TypecheckedUnisonFile v Ann -filterBySlurpResult SlurpResult{adds, updates} - (UF.TypecheckedUnisonFileId - dataDeclarations' - effectDeclarations' - topLevelComponents' - watchComponents - hashTerms) = - UF.TypecheckedUnisonFileId datas effects tlcs watches hashTerms' - where - keep = updates <> adds - keepTerms = SC.terms keep - keepTypes = SC.types keep - hashTerms' = Map.restrictKeys hashTerms keepTerms - datas = Map.restrictKeys dataDeclarations' keepTypes - effects = Map.restrictKeys effectDeclarations' keepTypes - tlcs = filter (not.null) $ fmap (List.filter filterTLC) topLevelComponents' - watches = filter (not.null.snd) $ fmap (second (List.filter filterTLC)) watchComponents - filterTLC (v,_,_) = Set.member v keepTerms - --- updates the namespace for adding `slurp` -doSlurpAdds :: forall m v. (Monad m, Var v) - => SlurpComponent v - -> UF.TypecheckedUnisonFile v Ann - -> (Branch0 m -> Branch0 m) -doSlurpAdds slurp uf = Branch.stepManyAt0 (typeActions <> termActions) - where - typeActions = map doType . toList $ SC.types slurp - termActions = map doTerm . toList $ - SC.terms slurp <> Slurp.constructorsFor (SC.types slurp) uf - names = UF.typecheckedToNames uf - tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf) - (isTestType, isTestValue) = isTest - md v = - if Set.member v tests then Metadata.singleton isTestType isTestValue - else Metadata.empty - doTerm :: v -> (Path, Branch0 m -> Branch0 m) - doTerm v = case toList (Names.termsNamed names (Name.unsafeFromVar v)) of - [] -> errorMissingVar v - [r] -> case Path.splitFromName (Name.unsafeFromVar v) of - Nothing -> errorEmptyVar - Just split -> BranchUtil.makeAddTermName split r (md v) - wha -> error $ "Unison bug, typechecked file w/ multiple terms named " - <> Var.nameStr v <> ": " <> show wha - doType :: v -> (Path, Branch0 m -> Branch0 m) - doType v = case toList (Names.typesNamed names (Name.unsafeFromVar v)) of - [] -> errorMissingVar v - [r] -> case Path.splitFromName (Name.unsafeFromVar v) of - Nothing -> errorEmptyVar - Just split -> BranchUtil.makeAddTypeName split r Metadata.empty - wha -> error $ "Unison bug, typechecked file w/ multiple types named " - <> Var.nameStr v <> ": " <> show wha - errorEmptyVar = error "encountered an empty var name" - errorMissingVar v = error $ "expected to find " ++ show v ++ " in " ++ show uf - -doSlurpUpdates :: Monad m - => Map Name (Reference, Reference) - -> Map Name (Reference, Reference) - -> [(Name, Referent)] - -> (Branch0 m -> Branch0 m) -doSlurpUpdates typeEdits termEdits deprecated b0 = - Branch.stepManyAt0 (typeActions <> termActions <> deprecateActions) b0 - where - typeActions = join . map doType . Map.toList $ typeEdits - termActions = join . map doTerm . Map.toList $ termEdits - deprecateActions = join . map doDeprecate $ deprecated where - doDeprecate (n, r) = case Path.splitFromName n of - Nothing -> errorEmptyVar - Just split -> [BranchUtil.makeDeleteTermName split r] - - -- we copy over the metadata on the old thing - -- todo: if the thing being updated, m, is metadata for something x in b0 - -- update x's md to reference `m` - doType, doTerm :: - (Name, (Reference, Reference)) -> [(Path, Branch0 m -> Branch0 m)] - doType (n, (old, new)) = case Path.splitFromName n of - Nothing -> errorEmptyVar - Just split -> [ BranchUtil.makeDeleteTypeName split old - , BranchUtil.makeAddTypeName split new oldMd ] - where - oldMd = BranchUtil.getTypeMetadataAt split old b0 - doTerm (n, (old, new)) = case Path.splitFromName n of - Nothing -> errorEmptyVar - Just split -> [ BranchUtil.makeDeleteTermName split (Referent.Ref old) - , BranchUtil.makeAddTermName split (Referent.Ref new) oldMd ] - where - -- oldMd is the metadata linked to the old definition - -- we relink it to the new definition - oldMd = BranchUtil.getTermMetadataAt split (Referent.Ref old) b0 - errorEmptyVar = error "encountered an empty var name" - -loadDisplayInfo :: - Set Reference -> Action m i v ([(Reference, Maybe (Type v Ann))] - ,[(Reference, DisplayObject () (DD.Decl v Ann))]) -loadDisplayInfo refs = do - termRefs <- filterM (eval . IsTerm) (toList refs) - typeRefs <- filterM (eval . IsType) (toList refs) - terms <- forM termRefs $ \r -> (r,) <$> eval (LoadTypeOfTerm r) - types <- forM typeRefs $ \r -> (r,) <$> loadTypeDisplayObject r - pure (terms, types) - --- Any absolute names in the input which have `currentPath` as a prefix --- are converted to names relative to current path. all other names are --- converted to absolute names. For example: --- --- e.g. if currentPath = .foo.bar --- then name foo.bar.baz becomes baz --- name cat.dog becomes .cat.dog -fixupNamesRelative :: Path.Absolute -> Names -> Names -fixupNamesRelative currentPath' = Names.map fixName where - prefix = Path.toName (Path.unabsolute currentPath') - fixName n = if currentPath' == Path.absoluteEmpty then n else - fromMaybe (Name.makeAbsolute n) (Name.stripNamePrefix prefix n) - -makeHistoricalParsingNames :: - Monad m => Set (HQ.HashQualified Name) -> Action' m v NamesWithHistory -makeHistoricalParsingNames lexedHQs = do - rawHistoricalNames <- findHistoricalHQs lexedHQs - basicNames <- basicParseNames - currentPath <- use currentPath - pure $ NamesWithHistory basicNames - (Names.makeAbsolute rawHistoricalNames <> - fixupNamesRelative currentPath rawHistoricalNames) - -loadTypeDisplayObject - :: Reference -> Action m i v (DisplayObject () (DD.Decl v Ann)) -loadTypeDisplayObject = \case - Reference.Builtin _ -> pure (BuiltinObject ()) - Reference.DerivedId id -> - maybe (MissingObject $ Reference.idToShortHash id) UserObject - <$> eval (LoadType id) - -lexedSource :: Monad m => SourceName -> Source -> Action' m v (NamesWithHistory, LexedSource) -lexedSource name src = do - let tokens = L.lexer (Text.unpack name) (Text.unpack src) - getHQ = \case - L.Backticks s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) - L.WordyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) - L.SymbolyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) - L.Hash sh -> Just (HQ.HashOnly sh) - _ -> Nothing - hqs = Set.fromList . mapMaybe (getHQ . L.payload) $ tokens - parseNames <- makeHistoricalParsingNames hqs - pure (parseNames, (src, tokens)) - -suffixifiedPPE :: NamesWithHistory -> Action' m v PPE.PrettyPrintEnv -suffixifiedPPE ns = eval CodebaseHashLength <&> (`PPE.fromSuffixNames` ns) - -fqnPPE :: NamesWithHistory -> Action' m v PPE.PrettyPrintEnv -fqnPPE ns = eval CodebaseHashLength <&> (`PPE.fromNames` ns) - -parseSearchType :: (Monad m, Var v) - => SrcLoc -> String -> Action' m v (Either (Output v) (Type v Ann)) -parseSearchType srcLoc typ = fmap Type.removeAllEffectVars <$> parseType srcLoc typ - --- | A description of where the given parse was triggered from, for error messaging purposes. -type SrcLoc = String -parseType :: (Monad m, Var v) - => SrcLoc -> String -> Action' m v (Either (Output v) (Type v Ann)) -parseType input src = do - -- `show Input` is the name of the "file" being lexed - (names0, lexed) <- lexedSource (Text.pack input) (Text.pack src) - parseNames <- basicParseNames - let names = NamesWithHistory.push (NamesWithHistory.currentNames names0) - (NamesWithHistory.NamesWithHistory parseNames (NamesWithHistory.oldNames names0)) - e <- eval $ ParseType names lexed - pure $ case e of - Left err -> Left $ TypeParseError src err - Right typ -> case Type.bindNames mempty (NamesWithHistory.currentNames names) - $ Type.generalizeLowercase mempty typ of - Left es -> Left $ ParseResolutionFailures src (toList es) - Right typ -> Right typ - -makeShadowedPrintNamesFromLabeled - :: Monad m => Set LabeledDependency -> Names -> Action' m v NamesWithHistory -makeShadowedPrintNamesFromLabeled deps shadowing = - NamesWithHistory.shadowing shadowing <$> makePrintNamesFromLabeled' deps - -makePrintNamesFromLabeled' - :: Monad m => Set LabeledDependency -> Action' m v NamesWithHistory -makePrintNamesFromLabeled' deps = do - root <- use root - currentPath <- use currentPath - (_missing, rawHistoricalNames) <- eval . Eval $ Branch.findHistoricalRefs - deps - root - basicNames <- basicPrettyPrintNamesA - pure $ NamesWithHistory basicNames (fixupNamesRelative currentPath rawHistoricalNames) - -getTermsIncludingHistorical - :: Monad m => Path.HQSplit -> Branch0 m -> Action' m v (Set Referent) -getTermsIncludingHistorical (p, hq) b = case Set.toList refs of - [] -> case hq of - HQ'.HashQualified n hs -> do - names <- findHistoricalHQs - $ Set.fromList [HQ.HashQualified (Name.unsafeFromText (NameSegment.toText n)) hs] - pure . R.ran $ Names.terms names - _ -> pure Set.empty - _ -> pure refs - where refs = BranchUtil.getTerm (p, hq) b - --- discards inputs that aren't hashqualified; --- I'd enforce it with finer-grained types if we had them. -findHistoricalHQs :: Monad m => Set (HQ.HashQualified Name) -> Action' m v Names -findHistoricalHQs lexedHQs0 = do - root <- use root - currentPath <- use currentPath - let - -- omg this nightmare name-to-path parsing code is littered everywhere. - -- We need to refactor so that the absolute-ness of a name isn't represented - -- by magical text combinations. - -- Anyway, this function takes a name, tries to determine whether it is - -- relative or absolute, and tries to return the corresponding name that is - -- /relative/ to the root. - preprocess n = case Name.toString n of - -- some absolute name that isn't just "." - '.' : t@(_:_) -> Name.unsafeFromString t - -- something in current path - _ -> if Path.isRoot currentPath then n - else Name.joinDot (Path.toName . Path.unabsolute $ currentPath) n - - lexedHQs = Set.map (fmap preprocess) . Set.filter HQ.hasHash $ lexedHQs0 - (_missing, rawHistoricalNames) <- eval . Eval $ Branch.findHistoricalHQs lexedHQs root - pure rawHistoricalNames - -basicPrettyPrintNamesA :: Functor m => Action' m v Names -basicPrettyPrintNamesA = snd <$> basicNames' - -makeShadowedPrintNamesFromHQ :: Monad m => Set (HQ.HashQualified Name) -> Names -> Action' m v NamesWithHistory -makeShadowedPrintNamesFromHQ lexedHQs shadowing = do - rawHistoricalNames <- findHistoricalHQs lexedHQs - basicNames <- basicPrettyPrintNamesA - currentPath <- use currentPath - -- The basic names go into "current", but are shadowed by "shadowing". - -- They go again into "historical" as a hack that makes them available HQ-ed. - pure $ - NamesWithHistory.shadowing - shadowing - (NamesWithHistory basicNames (fixupNamesRelative currentPath rawHistoricalNames)) - -basicParseNames, slurpResultNames :: Functor m => Action' m v Names -basicParseNames = fst <$> basicNames' --- we check the file against everything in the current path -slurpResultNames = currentPathNames - -currentPathNames :: Functor m => Action' m v Names -currentPathNames = do - currentPath' <- use currentPath - currentBranch' <- getAt currentPath' - pure $ Branch.toNames (Branch.head currentBranch') - --- implementation detail of basicParseNames and basicPrettyPrintNames -basicNames' :: Functor m => Action' m v (Names, Names) -basicNames' = do - root' <- use root - currentPath' <- use currentPath - pure $ Backend.basicNames' root' (Backend.AllNames $ Path.unabsolute currentPath') - -data AddRunMainResult v - = NoTermWithThatName - | TermHasBadType (Type v Ann) - | RunMainSuccess (TypecheckedUnisonFile v Ann) - --- Adds a watch expression of the given name to the file, if --- it would resolve to a TLD in the file. Returns the freshened --- variable name and the new typechecked file. --- --- Otherwise, returns `Nothing`. -addWatch - :: (Monad m, Var v) - => String - -> Maybe (TypecheckedUnisonFile v Ann) - -> Action' m v (Maybe (v, TypecheckedUnisonFile v Ann)) -addWatch _watchName Nothing = pure Nothing -addWatch watchName (Just uf) = do - let components = join $ UF.topLevelComponents uf - let mainComponent = filter ((\v -> Var.nameStr v == watchName) . view _1) components - case mainComponent of - [(v, tm, ty)] -> pure . pure $ let - v2 = Var.freshIn (Set.fromList [v]) v - a = ABT.annotation tm - in (v2, UF.typecheckedUnisonFile - (UF.dataDeclarationsId' uf) - (UF.effectDeclarationsId' uf) - (UF.topLevelComponents' uf) - (UF.watchComponents uf <> [(WK.RegularWatch, [(v2, Term.var a v, ty)])])) - _ -> addWatch watchName Nothing - --- Given a typechecked file with a main function called `mainName` --- of the type `'{IO} ()`, adds an extra binding which --- forces the `main` function. --- --- If that function doesn't exist in the typechecked file, the --- codebase is consulted. -addRunMain - :: (Monad m, Var v) - => String - -> Maybe (TypecheckedUnisonFile v Ann) - -> Action' m v (AddRunMainResult v) -addRunMain mainName Nothing = do - parseNames <- basicParseNames - let loadTypeOfTerm ref = eval $ LoadTypeOfTerm ref - mainType <- eval RuntimeMain - mainToFile <$> - MainTerm.getMainTerm loadTypeOfTerm parseNames mainName mainType - where - mainToFile (MainTerm.NotAFunctionName _) = NoTermWithThatName - mainToFile (MainTerm.NotFound _) = NoTermWithThatName - mainToFile (MainTerm.BadType _ ty) = maybe NoTermWithThatName TermHasBadType ty - mainToFile (MainTerm.Success hq tm typ) = RunMainSuccess $ - let v = Var.named (HQ.toText hq) in - UF.typecheckedUnisonFile mempty mempty mempty [("main",[(v, tm, typ)])] -- mempty -addRunMain mainName (Just uf) = do - let components = join $ UF.topLevelComponents uf - let mainComponent = filter ((\v -> Var.nameStr v == mainName) . view _1) components - mainType <- eval RuntimeMain - case mainComponent of - [(v, tm, ty)] -> pure $ let - v2 = Var.freshIn (Set.fromList [v]) v - a = ABT.annotation tm - in - if Typechecker.isSubtype ty mainType then RunMainSuccess $ let - runMain = DD.forceTerm a a (Term.var a v) - in UF.typecheckedUnisonFile - (UF.dataDeclarationsId' uf) - (UF.effectDeclarationsId' uf) - (UF.topLevelComponents' uf) - (UF.watchComponents uf <> [("main", [(v2, runMain, mainType)])]) - else TermHasBadType ty - _ -> addRunMain mainName Nothing - -executePPE - :: (Var v, Monad m) - => TypecheckedUnisonFile v a - -> Action' m v PPE.PrettyPrintEnv -executePPE unisonFile = - suffixifiedPPE =<< displayNames unisonFile - --- Produce a `Names` needed to display all the hashes used in the given file. -displayNames :: (Var v, Monad m) - => TypecheckedUnisonFile v a - -> Action' m v NamesWithHistory -displayNames unisonFile = - -- voodoo - makeShadowedPrintNamesFromLabeled - (UF.termSignatureExternalLabeledDependencies unisonFile) - (UF.typecheckedToNames unisonFile) - -diffHelper :: Monad m - => Branch0 m - -> Branch0 m - -> Action' m v (PPE.PrettyPrintEnv, OBranchDiff.BranchDiffOutput v Ann) -diffHelper before after = do - hqLength <- eval CodebaseHashLength - diff <- eval . Eval $ BranchDiff.diff0 before after - names0 <- basicPrettyPrintNamesA - ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl (NamesWithHistory names0 mempty) - (ppe,) <$> - OBranchDiff.toOutput - loadTypeOfTerm - declOrBuiltin - hqLength - (Branch.toNames before) - (Branch.toNames after) - ppe - diff - -loadTypeOfTerm :: Referent -> Action m i v (Maybe (Type v Ann)) -loadTypeOfTerm (Referent.Ref r) = eval $ LoadTypeOfTerm r -loadTypeOfTerm (Referent.Con (Reference.DerivedId r) cid _) = do - decl <- eval $ LoadType r - case decl of - Just (either DD.toDataDecl id -> dd) -> pure $ DD.typeOfConstructor dd cid - Nothing -> pure Nothing -loadTypeOfTerm Referent.Con{} = error $ - reportBug "924628772" "Attempt to load a type declaration which is a builtin!" - -declOrBuiltin :: Reference -> Action m i v (Maybe (DD.DeclOrBuiltin v Ann)) -declOrBuiltin r = case r of - Reference.Builtin{} -> - pure . fmap DD.Builtin $ Map.lookup r Builtin.builtinConstructorType - Reference.DerivedId id -> - fmap DD.Decl <$> eval (LoadType id) - -fuzzySelectTermsAndTypes :: Branch0 m -> Action m (Either Event Input) v [HQ.HashQualified Name] -fuzzySelectTermsAndTypes searchBranch0 = do - let termsAndTypes = - Relation.dom (Names.hashQualifyTermsRelation (Relation.swap $ Branch.deepTerms searchBranch0)) - <> Relation.dom (Names.hashQualifyTypesRelation (Relation.swap $ Branch.deepTypes searchBranch0)) - fromMaybe [] <$> eval (FuzzySelect Fuzzy.defaultOptions HQ.toText (Set.toList termsAndTypes)) - -fuzzySelectNamespace :: Branch0 m -> Action m (Either Event Input) v [Path'] -fuzzySelectNamespace searchBranch0 = - do - fmap Path.toPath' - . fromMaybe [] - <$> eval - ( FuzzySelect - Fuzzy.defaultOptions {Fuzzy.allowMultiSelect = False} - Path.toText - (Set.toList $ Branch.deepPaths searchBranch0) - ) diff --git a/parser-typechecker/src/Unison/Codebase/PushBehavior.hs b/parser-typechecker/src/Unison/Codebase/PushBehavior.hs new file mode 100644 index 0000000000..e336a9c457 --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/PushBehavior.hs @@ -0,0 +1,13 @@ +-- | This module defines the 'PushBehavior' type. +module Unison.Codebase.PushBehavior + ( PushBehavior (..), + ) +where + +-- | How a `push` behaves. +data PushBehavior + = -- | The namespace being pushed to is required to be empty. + RequireEmpty + | -- | The namespace being pushed to is required to be non-empty + RequireNonEmpty + deriving stock (Eq, Show) diff --git a/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs b/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs deleted file mode 100644 index 93c8e353e4..0000000000 --- a/parser-typechecker/src/Unison/CommandLine/OutputMessages.hs +++ /dev/null @@ -1,2084 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-} - -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PartialTypeSignatures #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ViewPatterns #-} - - -module Unison.CommandLine.OutputMessages where - -import Unison.Prelude hiding (unlessM) - -import qualified Unison.Codebase as Codebase -import Unison.Codebase.Editor.Output -import qualified Unison.Codebase.Editor.Output as E -import qualified Unison.Codebase.Editor.Output as Output -import qualified Unison.Codebase.Editor.TodoOutput as TO -import qualified Unison.Codebase.Editor.Output.BranchDiff as OBD -import qualified Unison.Server.SearchResult' as SR' -import Unison.Server.Backend (ShallowListEntry(..), TermEntry(..), TypeEntry(..)) - -import Control.Lens -import qualified Control.Monad.State.Strict as State -import Data.Bifunctor (first, second) -import Data.List (sort, stripPrefix) -import Data.List.Extra (nubOrdOn, nubOrd, notNull) -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.Sequence as Seq -import qualified Data.Text as Text -import Data.Text.IO (readFile, writeFile) -import Data.Tuple.Extra (dupe, uncurry3) -import Prelude hiding (readFile, writeFile) -import System.Directory ( canonicalizePath - , doesFileExist - , getHomeDirectory - ) -import qualified Unison.ABT as ABT -import qualified Unison.UnisonFile as UF -import Unison.Codebase.Type (GitError(GitSqliteCodebaseError, GitProtocolError, GitCodebaseError)) -import Unison.Codebase.GitError -import qualified Unison.Codebase.Path as Path -import qualified Unison.Codebase.Patch as Patch -import Unison.Codebase.Patch (Patch(..)) -import qualified Unison.Codebase.ShortBranchHash as SBH -import qualified Unison.Codebase.TermEdit as TermEdit -import qualified Unison.Codebase.TypeEdit as TypeEdit -import Unison.CommandLine ( bigproblem - , tip - , note - ) -import Unison.PrettyTerminal ( clearCurrentLine - , putPretty' - ) -import qualified Unison.CommandLine.InputPattern as IP1 -import Unison.CommandLine.InputPatterns (makeExample, makeExample') -import qualified Unison.CommandLine.InputPatterns as IP -import qualified Unison.Builtin.Decls as DD -import qualified Unison.DataDeclaration as DD -import qualified Unison.DeclPrinter as DeclPrinter -import qualified Unison.HashQualified as HQ -import qualified Unison.HashQualified' as HQ' -import Unison.Name (Name) -import qualified Unison.Name as Name -import Unison.NamePrinter (prettyHashQualified, - prettyReference, prettyReferent, - prettyLabeledDependency, - prettyNamedReference, - prettyNamedReferent, - prettyName, prettyShortHash, - styleHashQualified, - styleHashQualified', prettyHashQualified') -import Unison.Names (Names(..)) -import qualified Unison.Names as Names -import qualified Unison.NamesWithHistory as Names -import Unison.Parser.Ann (Ann, startingLine) -import qualified Unison.PrettyPrintEnv as PPE -import qualified Unison.PrettyPrintEnv.Util as PPE -import qualified Unison.PrettyPrintEnvDecl as PPE -import qualified Unison.Codebase.Runtime as Runtime -import Unison.PrintError ( prettyParseError - , printNoteWithSource - , prettyResolutionFailures - , renderCompilerBug - ) -import qualified Unison.Reference as Reference -import Unison.Reference ( Reference ) -import qualified Unison.Referent as Referent -import Unison.Referent ( Referent ) -import qualified Unison.Result as Result -import qualified Unison.Term as Term -import Unison.Term (Term) -import Unison.Type (Type) -import qualified Unison.TermPrinter as TermPrinter -import qualified Unison.TypePrinter as TypePrinter -import Unison.Util.Monoid ( intercalateMap - , unlessM - ) -import qualified Unison.Util.Pretty as P -import qualified Unison.Util.Relation as R -import Unison.Var (Var) -import qualified Unison.Var as Var -import qualified Unison.Codebase.Editor.SlurpResult as SlurpResult -import Unison.Codebase.Editor.DisplayObject (DisplayObject(MissingObject, BuiltinObject, UserObject)) -import qualified Unison.Hash as Hash -import qualified Unison.Codebase.Causal as Causal -import qualified Unison.Codebase.Editor.RemoteRepo as RemoteRepo -import qualified Unison.Util.List as List -import Data.Tuple (swap) -import Unison.Codebase.ShortBranchHash (ShortBranchHash) -import qualified Unison.ShortHash as SH -import Unison.LabeledDependency as LD -import Unison.Codebase.Editor.RemoteRepo (ReadRepo, WriteRepo) -import U.Codebase.Sqlite.DbId (SchemaVersion(SchemaVersion)) -import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError(UnrecognizedSchemaVersion, GitCouldntParseRootBranchHash)) -import qualified Unison.Referent' as Referent -import qualified Unison.WatchKind as WK -import qualified Unison.Codebase.Editor.Input as Input - -type Pretty = P.Pretty P.ColorText - -shortenDirectory :: FilePath -> IO FilePath -shortenDirectory dir = do - home <- getHomeDirectory - pure $ case stripPrefix home dir of - Just d -> "~" <> d - Nothing -> dir - -renderFileName :: FilePath -> IO Pretty -renderFileName dir = P.group . P.blue . fromString <$> shortenDirectory dir - -notifyNumbered :: Var v => NumberedOutput v -> (Pretty, NumberedArgs) -notifyNumbered o = case o of - ShowDiffNamespace oldPrefix newPrefix ppe diffOutput -> - showDiffNamespace ShowNumbers ppe oldPrefix newPrefix diffOutput - - ShowDiffAfterDeleteDefinitions ppe diff -> - first (\p -> P.lines - [ p - , "" - , undoTip - ]) (showDiffNamespace ShowNumbers ppe e e diff) - - ShowDiffAfterDeleteBranch bAbs ppe diff -> - first (\p -> P.lines - [ p - , "" - , undoTip - ]) (showDiffNamespace ShowNumbers ppe bAbs bAbs diff) - - ShowDiffAfterModifyBranch b' _ _ (OBD.isEmpty -> True) -> - (P.wrap $ "Nothing changed in" <> prettyPath' b' <> ".", mempty) - ShowDiffAfterModifyBranch b' bAbs ppe diff -> - first (\p -> P.lines - [ P.wrap $ "Here's what changed in" <> prettyPath' b' <> ":" - , "" - , p - , "" - , undoTip - ]) (showDiffNamespace ShowNumbers ppe bAbs bAbs diff) - - ShowDiffAfterMerge _ _ _ (OBD.isEmpty -> True) -> - (P.wrap $ "Nothing changed as a result of the merge.", mempty) - ShowDiffAfterMerge dest' destAbs ppe diffOutput -> - first (\p -> P.lines [ - P.wrap $ "Here's what's changed in " <> prettyPath' dest' <> "after the merge:" - , "" - , p - , "" - , tip $ "You can use " <> IP.makeExample' IP.todo - <> "to see if this generated any work to do in this namespace" - <> "and " <> IP.makeExample' IP.test <> "to run the tests." - <> "Or you can use" <> IP.makeExample' IP.undo <> " or" - <> IP.makeExample' IP.viewReflog <> " to undo the results of this merge." - ]) (showDiffNamespace ShowNumbers ppe destAbs destAbs diffOutput) - - ShowDiffAfterMergePropagate dest' destAbs patchPath' ppe diffOutput -> - first (\p -> P.lines [ - P.wrap $ "Here's what's changed in " <> prettyPath' dest' - <> "after applying the patch at " <> P.group (prettyPath' patchPath' <> ":") - , "" - , p - , "" - , tip $ "You can use " - <> IP.makeExample IP.todo [prettyPath' patchPath', prettyPath' dest'] - <> "to see if this generated any work to do in this namespace" - <> "and " <> IP.makeExample' IP.test <> "to run the tests." - <> "Or you can use" <> IP.makeExample' IP.undo <> " or" - <> IP.makeExample' IP.viewReflog <> " to undo the results of this merge." - ]) (showDiffNamespace ShowNumbers ppe destAbs destAbs diffOutput) - - ShowDiffAfterMergePreview dest' destAbs ppe diffOutput -> - first (\p -> P.lines [ - P.wrap $ "Here's what would change in " <> prettyPath' dest' <> "after the merge:" - , "" - , p - ]) (showDiffNamespace ShowNumbers ppe destAbs destAbs diffOutput) - - ShowDiffAfterUndo ppe diffOutput -> - first (\p -> P.lines ["Here are the changes I undid", "", p ]) - (showDiffNamespace ShowNumbers ppe e e diffOutput) - - ShowDiffAfterPull dest' destAbs ppe diff -> - if OBD.isEmpty diff then - ("✅ Looks like " <> prettyPath' dest' <> " is up to date.", mempty) - else - first (\p -> P.lines [ - P.wrap $ "Here's what's changed in " <> prettyPath' dest' <> "after the pull:", "", - p, "", - undoTip - ]) - (showDiffNamespace ShowNumbers ppe destAbs destAbs diff) - ShowDiffAfterCreatePR baseRepo headRepo ppe diff -> - if OBD.isEmpty diff then - (P.wrap $ "Looks like there's no difference between " - <> prettyRemoteNamespace baseRepo - <> "and" - <> prettyRemoteNamespace headRepo <> "." - ,mempty) - else first (\p -> - (P.lines - [P.wrap $ "The changes summarized below are available for you to review," - <> "using the following command:" - ,"" - ,P.indentN 2 $ - IP.makeExampleNoBackticks - IP.loadPullRequest [(prettyRemoteNamespace baseRepo) - ,(prettyRemoteNamespace headRepo)] - ,"" - ,p])) (showDiffNamespace HideNumbers ppe e e diff) - -- todo: these numbers aren't going to work, - -- since the content isn't necessarily here. - -- Should we have a mode with no numbers? :P - - ShowDiffAfterCreateAuthor authorNS authorPath' bAbs ppe diff -> - first (\p -> P.lines - [ p - , "" - , tip $ "Add" <> prettyName "License" <> "values for" - <> prettyName (Name.fromSegment authorNS) - <> "under" <> P.group (prettyPath' authorPath' <> ".") - ]) (showDiffNamespace ShowNumbers ppe bAbs bAbs diff) - where - e = Path.absoluteEmpty - undoTip = tip $ "You can use" <> IP.makeExample' IP.undo - <> "or" <> IP.makeExample' IP.viewReflog - <> "to undo this change." - -prettyRemoteNamespace :: (RemoteRepo.ReadRepo, - Maybe ShortBranchHash, Path.Path) - -> Pretty -prettyRemoteNamespace = - P.group . P.text . uncurry3 RemoteRepo.printNamespace - -notifyUser :: forall v . Var v => FilePath -> Output v -> IO Pretty -notifyUser dir o = case o of - Success -> pure $ P.bold "Done." - PrintMessage pretty -> do - pure pretty - BadRootBranch e -> case e of - Codebase.NoRootBranch -> - pure . P.fatalCallout $ "I couldn't find the codebase root!" - Codebase.CouldntParseRootBranch s -> - pure - . P.warnCallout - $ "I coulnd't parse a valid namespace from " - <> P.string (show s) - <> "." - Codebase.CouldntLoadRootBranch h -> - pure - . P.warnCallout - $ "I couldn't find a root namespace with the hash " - <> prettySBH (SBH.fullFromHash h) - <> "." - CouldntLoadBranch h -> - pure . P.fatalCallout . P.wrap $ "I have reason to believe that" - <> P.shown h <> "exists in the codebase, but there was a failure" - <> "when I tried to load it." - WarnIncomingRootBranch current hashes -> pure $ - if null hashes then P.wrap $ - "Please let someone know I generated an empty IncomingRootBranch" - <> " event, which shouldn't be possible!" - else P.lines - [ P.wrap $ (if length hashes == 1 then "A" else "Some") - <> "codebase" <> P.plural hashes "root" <> "appeared unexpectedly" - <> "with" <> P.group (P.plural hashes "hash" <> ":") - , "" - , (P.indentN 2 . P.oxfordCommas) - (map prettySBH $ toList hashes) - , "" - , P.wrap $ "and I'm not sure what to do about it." - <> "The last root namespace hash that I knew about was:" - , "" - , P.indentN 2 $ prettySBH current - , "" - , P.wrap $ "Now might be a good time to make a backup of your codebase. 😬" - , "" - , P.wrap $ "After that, you might try using the" <> makeExample' IP.forkLocal - <> "command to inspect the namespaces listed above, and decide which" - <> "one you want as your root." - <> "You can also use" <> makeExample' IP.viewReflog <> "to see the" - <> "last few root namespace hashes on record." - , "" - , P.wrap $ "Once you find one you like, you can use the" - <> makeExample' IP.resetRoot <> "command to set it." - ] - LoadPullRequest baseNS headNS basePath headPath mergedPath squashedPath -> pure $ P.lines - [ P.wrap $ "I checked out" <> prettyRemoteNamespace baseNS <> "to" <> P.group (prettyPath' basePath <> ".") - , P.wrap $ "I checked out" <> prettyRemoteNamespace headNS <> "to" <> P.group (prettyPath' headPath <> ".") - , "" - , P.wrap $ "The merged result is in" <> P.group (prettyPath' mergedPath <> ".") - , P.wrap $ "The (squashed) merged result is in" <> P.group (prettyPath' squashedPath <> ".") - , P.wrap $ "Use" <> - IP.makeExample IP.diffNamespace - [prettyPath' basePath, prettyPath' mergedPath] - <> "or" <> - IP.makeExample IP.diffNamespace - [prettyPath' basePath, prettyPath' squashedPath] - <> "to see what's been updated." - , P.wrap $ "Use" <> - IP.makeExample IP.todo - [ prettyPath' (snoc mergedPath "patch") - , prettyPath' mergedPath ] - <> "to see what work is remaining for the merge." - , P.wrap $ "Use" <> - IP.makeExample IP.push - [prettyRemoteNamespace baseNS, prettyPath' mergedPath] <> - "or" <> - IP.makeExample IP.push - [prettyRemoteNamespace baseNS, prettyPath' squashedPath] - <> "to push the changes." - ] - - DisplayDefinitions outputLoc ppe types terms -> - displayDefinitions outputLoc ppe types terms - DisplayRendered outputLoc pp -> - displayRendered outputLoc pp - TestResults stats ppe _showSuccess _showFailures oks fails -> case stats of - CachedTests 0 _ -> pure . P.callout "😶" $ "No tests to run." - CachedTests n n' | n == n' -> pure $ - P.lines [ cache, "", displayTestResults True ppe oks fails ] - CachedTests _n m -> pure $ - if m == 0 then "✅ " - else P.indentN 2 $ - P.lines [ "", cache, "", displayTestResults False ppe oks fails, "", "✅ " ] - where - NewlyComputed -> do - clearCurrentLine - pure $ P.lines [ - " " <> P.bold "New test results:", - "", - displayTestResults True ppe oks fails ] - where - cache = P.bold "Cached test results " <> "(`help testcache` to learn more)" - - TestIncrementalOutputStart ppe (n,total) r _src -> do - putPretty' $ P.shown (total - n) <> " tests left to run, current test: " - <> (P.syntaxToColor $ prettyHashQualified (PPE.termName ppe $ Referent.Ref r)) - pure mempty - - TestIncrementalOutputEnd _ppe (_n, _total) _r result -> do - clearCurrentLine - if isTestOk result then putPretty' " ✅ " - else putPretty' " 🚫 " - pure mempty - - TermMissingType ref -> - pure . P.fatalCallout . P.lines $ [ - P.wrap $ "The type signature for reference " - <> P.blue (P.text (Reference.toText ref)) - <> " is missing from the codebase! This means something might be wrong " - <> " with the codebase, or the term was deleted just now " - <> " by someone else. Trying your command again might fix it." - ] - - MetadataMissingType ppe ref -> pure . P.fatalCallout . P.lines $ [ - P.wrap $ "The metadata value " <> P.red (prettyTermName ppe ref) - <> "is missing a type signature in the codebase.", - "", - P.wrap $ "This might be due to pulling an incomplete" - <> "or invalid codebase, or because files inside the codebase" - <> "are being deleted external to UCM." - ] - MetadataAmbiguous hq _ppe [] -> pure . P.warnCallout . - P.wrap $ "I couldn't find any metadata matching " - <> P.syntaxToColor (prettyHashQualified hq) - MetadataAmbiguous _ ppe refs -> pure . P.warnCallout . P.lines $ [ - P.wrap $ "I'm not sure which metadata value you're referring to" - <> "since there are multiple matches:", - "", - P.indentN 2 $ P.spaced (P.blue . prettyTermName ppe <$> refs), - "", - tip "Try again and supply one of the above definitions explicitly." - ] - - EvaluationFailure err -> pure err - TypeTermMismatch typeName termName -> - pure - $ P.warnCallout "I was expecting either two types or two terms but was given a type " - <> P.syntaxToColor (prettyHashQualified typeName) - <> " and a term " - <> P.syntaxToColor (prettyHashQualified termName) - <> "." - SearchTermsNotFound hqs | null hqs -> pure mempty - SearchTermsNotFound hqs -> - pure - $ P.warnCallout "The following names were not found in the codebase. Check your spelling." - <> P.newline - <> (P.syntaxToColor $ P.indent " " (P.lines (prettyHashQualified <$> hqs))) - PatchNotFound _ -> - pure . P.warnCallout $ "I don't know about that patch." - NameNotFound _ -> - pure . P.warnCallout $ "I don't know about that name." - TermNotFound _ -> - pure . P.warnCallout $ "I don't know about that term." - TypeNotFound _ -> - pure . P.warnCallout $ "I don't know about that type." - TermAlreadyExists _ _ -> - pure . P.warnCallout $ "A term by that name already exists." - TypeAlreadyExists _ _ -> - pure . P.warnCallout $ "A type by that name already exists." - PatchAlreadyExists _ -> - pure . P.warnCallout $ "A patch by that name already exists." - BranchEmpty b -> pure . P.warnCallout . P.wrap $ - P.group (either P.shown prettyPath' b) <> "is an empty namespace." - BranchNotEmpty path -> - pure . P.warnCallout $ "I was expecting the namespace " <> prettyPath' path - <> " to be empty for this operation, but it isn't." - CantDelete ppe failed failedDependents -> pure . P.warnCallout $ - P.lines [ - P.wrap "I couldn't delete ", - "", P.indentN 2 $ listOfDefinitions' ppe False failed, - "", - "because it's still being used by these definitions:", - "", P.indentN 2 $ listOfDefinitions' ppe False failedDependents - ] - CantUndo reason -> case reason of - CantUndoPastStart -> pure . P.warnCallout $ "Nothing more to undo." - CantUndoPastMerge -> pure . P.warnCallout $ "Sorry, I can't undo a merge (not implemented yet)." - NoMainFunction main ppe ts -> pure . P.callout "😶" $ P.lines [ - P.wrap $ "I looked for a function" <> P.backticked (P.string main) - <> "in the most recently typechecked file and codebase but couldn't find one. It has to have the type:", - "", - P.indentN 2 $ P.lines [ P.string main <> " : " <> TypePrinter.pretty ppe t | t <- ts ] - ] - BadMainFunction main ty ppe ts -> pure . P.callout "😶" $ P.lines [ - P.string "I found this function:", - "", - P.indentN 2 $ P.string main <> " : " <> TypePrinter.pretty ppe ty, - "", - P.wrap $ P.string "but in order for me to" <> P.backticked (P.string "run") <> "it it needs to have the type:", - "", - P.indentN 2 $ P.lines [ P.string main <> " : " <> TypePrinter.pretty ppe t | t <- ts ] - ] - NoUnisonFile -> do - dir' <- canonicalizePath dir - fileName <- renderFileName dir' - pure . P.callout "😶" $ P.lines - [ P.wrap "There's nothing for me to add right now." - , "" - , P.column2 [(P.bold "Hint:", msg fileName)] ] - where - msg dir = P.wrap - $ "I'm currently watching for definitions in .u files under the" - <> dir - <> "directory. Make sure you've updated something there before using the" - <> makeExample' IP.add <> "or" <> makeExample' IP.update - <> "commands, or use" <> makeExample' IP.load <> "to load a file explicitly." - InvalidSourceName name -> - pure . P.callout "😶" $ P.wrap $ "The file " - <> P.blue (P.shown name) - <> " does not exist or is not a valid source file." - SourceLoadFailed name -> - pure . P.callout "😶" $ P.wrap $ "The file " - <> P.blue (P.shown name) - <> " could not be loaded." - BranchNotFound b -> - pure . P.warnCallout $ "The namespace " <> P.blue (P.shown b) <> " doesn't exist." - CreatedNewBranch path -> pure $ - "☝️ The namespace " <> P.blue (P.shown path) <> " is empty." - -- RenameOutput rootPath oldName newName r -> do - -- nameChange "rename" "renamed" oldName newName r - -- AliasOutput rootPath existingName newName r -> do - -- nameChange "alias" "aliased" existingName newName r - DeletedEverything -> - pure . P.wrap . P.lines $ - ["Okay, I deleted everything except the history." - ,"Use " <> IP.makeExample' IP.undo <> " to undo, or " - <> IP.makeExample' IP.mergeBuiltins - <> " to restore the absolute " - <> "basics to the current path."] - DeleteEverythingConfirmation -> - pure . P.warnCallout . P.lines $ - ["Are you sure you want to clear away everything?" - ,"You could use " <> IP.makeExample' IP.cd - <> " to switch to a new namespace instead."] - DeleteBranchConfirmation _uniqueDeletions -> error "todo" - -- let - -- pretty (branchName, (ppe, results)) = - -- header $ listOfDefinitions' ppe False results - -- where - -- header = plural uniqueDeletions id ((P.text branchName <> ":") `P.hang`) - -- - -- in putPrettyLn . P.warnCallout - -- $ P.wrap ("The" - -- <> plural uniqueDeletions "namespace contains" "namespaces contain" - -- <> "definitions that don't exist in any other branches:") - -- <> P.border 2 (mconcat (fmap pretty uniqueDeletions)) - -- <> P.newline - -- <> P.wrap "Please repeat the same command to confirm the deletion." - ListOfDefinitions ppe detailed results -> - listOfDefinitions ppe detailed results - ListOfLinks ppe results -> - listOfLinks ppe [ (name,tm) | (name,_ref,tm) <- results ] - ListNames _len [] [] -> pure . P.callout "😶" $ - P.wrap "I couldn't find anything by that name." - ListNames len types terms -> pure . P.sepNonEmpty "\n\n" $ [ - formatTypes types, formatTerms terms ] - where - formatTerms tms = - P.lines . P.nonEmpty $ P.plural tms (P.blue "Term") : (go <$> tms) where - go (ref, hqs) = P.column2 - [ ("Hash:", P.syntaxToColor (prettyReferent len ref)) - , ("Names: ", P.group (P.spaced (P.bold . P.syntaxToColor . prettyHashQualified' <$> toList hqs))) - ] - formatTypes types = - P.lines . P.nonEmpty $ P.plural types (P.blue "Type") : (go <$> types) where - go (ref, hqs) = P.column2 - [ ("Hash:", P.syntaxToColor (prettyReference len ref)) - , ("Names:", P.group (P.spaced (P.bold . P.syntaxToColor . prettyHashQualified' <$> toList hqs))) - ] - -- > names foo - -- Terms: - -- Hash: #asdflkjasdflkjasdf - -- Names: .util.frobnicate foo blarg.mcgee - -- - -- Term (with hash #asldfkjsdlfkjsdf): .util.frobnicate, foo, blarg.mcgee - -- Types (with hash #hsdflkjsdfsldkfj): Optional, Maybe, foo - ListShallow ppe entries -> - -- todo: make a version of prettyNumberedResult to support 3-columns - pure $ if null entries then P.lit "nothing to show" - else numberedEntries entries - where - numberedEntries :: [ShallowListEntry v a] -> Pretty - numberedEntries entries = - (P.column3 . fmap f) ([(1::Integer)..] `zip` fmap formatEntry entries) - where - f (i, (p1, p2)) = (P.hiBlack . fromString $ show i <> ".", p1, p2) - formatEntry :: ShallowListEntry v a -> (Pretty, Pretty) - formatEntry = \case - ShallowTermEntry (TermEntry _r hq ot _) -> - (P.syntaxToColor . prettyHashQualified' . fmap Name.fromSegment $ hq - , P.lit "(" <> maybe "type missing" (TypePrinter.pretty ppe) ot <> P.lit ")" ) - ShallowTypeEntry (TypeEntry r hq _) -> - (P.syntaxToColor . prettyHashQualified' . fmap Name.fromSegment $ hq - ,isBuiltin r) - ShallowBranchEntry ns _ count -> - ((P.syntaxToColor . prettyName . Name.fromSegment) ns <> "/" - ,case count of - 1 -> P.lit "(1 definition)" - _n -> P.lit "(" <> P.shown count <> P.lit " definitions)") - ShallowPatchEntry ns -> - ((P.syntaxToColor . prettyName . Name.fromSegment) ns - ,P.lit "(patch)") - isBuiltin = \case - Reference.Builtin{} -> P.lit "(builtin type)" - Reference.DerivedId{} -> P.lit "(type)" - SlurpOutput input ppe s -> let - isPast = case input of Input.AddI{} -> True - Input.UpdateI{} -> True - _ -> False - in pure $ SlurpResult.pretty isPast ppe s - - NoExactTypeMatches -> - pure . P.callout "☝️" $ P.wrap "I couldn't find exact type matches, resorting to fuzzy matching..." - TypeParseError src e -> - pure . P.fatalCallout $ P.lines [ - P.wrap "I couldn't parse the type you supplied:", - "", - prettyParseError src e - ] - ParseResolutionFailures src es -> pure $ - prettyResolutionFailures src es - TypeHasFreeVars typ -> - pure . P.warnCallout $ P.lines [ - P.wrap "The type uses these names, but I'm not sure what they are:", - P.sep ", " (map (P.text . Var.name) . toList $ ABT.freeVars typ) - ] - ParseErrors src es -> - pure . P.sep "\n\n" $ prettyParseError (Text.unpack src) <$> es - TypeErrors src ppenv notes -> do - let showNote = - intercalateMap "\n\n" (printNoteWithSource ppenv (Text.unpack src)) - . map Result.TypeError - pure . showNote $ notes - CompilerBugs src env bugs -> pure $ intercalateMap "\n\n" bug bugs - where bug = renderCompilerBug env (Text.unpack src) - Evaluated fileContents ppe bindings watches -> - if null watches then pure "\n" - else - -- todo: hashqualify binding names if necessary to distinguish them from - -- defs in the codebase. In some cases it's fine for bindings to - -- shadow codebase names, but you don't want it to capture them in - -- the decompiled output. - let prettyBindings = P.bracket . P.lines $ - P.wrap "The watch expression(s) reference these definitions:" : "" : - [(P.syntaxToColor $ TermPrinter.prettyBinding ppe (HQ.unsafeFromVar v) b) - | (v, b) <- bindings] - prettyWatches = P.sep "\n\n" [ - watchPrinter fileContents ppe ann kind evald isCacheHit | - (ann,kind,evald,isCacheHit) <- - sortOn (\(a,_,_,_)->a) . toList $ watches ] - -- todo: use P.nonempty - in pure $ if null bindings then prettyWatches - else prettyBindings <> "\n" <> prettyWatches - - DisplayConflicts termNamespace typeNamespace -> - pure $ P.sepNonEmpty "\n\n" [ - showConflicts "terms" terms, - showConflicts "types" types - ] - where - terms = R.dom termNamespace - types = R.dom typeNamespace - showConflicts :: Foldable f => Pretty -> f Name -> Pretty - showConflicts thingsName things = - if (null things) then mempty - else P.lines [ - "These " <> thingsName <> " have conflicts: ", "", - P.lines [ (" " <> prettyName x) | x <- toList things ] - ] - -- TODO: Present conflicting TermEdits and TypeEdits - -- if we ever allow users to edit hashes directly. - Typechecked sourceName ppe slurpResult uf -> do - let fileStatusMsg = SlurpResult.pretty False ppe slurpResult - let containsWatchExpressions = notNull $ UF.watchComponents uf - if UF.nonEmpty uf then do - fileName <- renderFileName $ Text.unpack sourceName - pure $ P.linesNonEmpty ([ - if fileStatusMsg == mempty then - P.okCallout $ fileName <> " changed." - else if SlurpResult.isAllDuplicates slurpResult then - P.wrap $ "I found and" - <> P.bold "typechecked" <> "the definitions in " - <> P.group (fileName <> ".") - <> "This file " <> P.bold "has been previously added" <> "to the codebase." - else - P.linesSpaced $ [ - P.wrap $ "I found and" - <> P.bold "typechecked" <> "these definitions in " - <> P.group (fileName <> ".") - <> "If you do an " - <> IP.makeExample' IP.add - <> " or " - <> P.group (IP.makeExample' IP.update <> ",") - <> "here's how your codebase would" - <> "change:" - , P.indentN 2 $ SlurpResult.pretty False ppe slurpResult - ] - ] ++ if containsWatchExpressions then [ - "", - P.wrap $ "Now evaluating any watch expressions" - <> "(lines starting with `>`)... " - <> P.group (P.hiBlack "Ctrl+C cancels.") - ] else []) - else if (null $ UF.watchComponents uf) then pure . P.wrap $ - "I loaded " <> P.text sourceName <> " and didn't find anything." - else pure mempty - - TodoOutput names todo -> pure (todoOutput names todo) - GitError input e -> pure $ case e of - GitSqliteCodebaseError e -> case e of - UnrecognizedSchemaVersion repo localPath (SchemaVersion v) -> P.wrap - $ "I don't know how to interpret schema version " <> P.shown v - <> "in the repository at" <> prettyReadRepo repo - <> "in the cache directory at" <> P.backticked' (P.string localPath) "." - GitCouldntParseRootBranchHash repo s -> P.wrap $ "I couldn't parse the string" - <> P.red (P.string s) <> "into a namespace hash, when opening the repository at" - <> P.group (prettyReadRepo repo <> ".") - GitProtocolError e -> case e of - NoGit -> P.wrap $ - "I couldn't find git. Make sure it's installed and on your path." - CleanupError e -> P.wrap $ - "I encountered an exception while trying to clean up a git cache directory:" - <> P.group (P.shown e) - CloneException repo msg -> P.wrap $ - "I couldn't clone the repository at" <> prettyReadRepo repo <> ";" - <> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg - PushNoOp repo -> P.wrap $ - "The repository at" <> prettyWriteRepo repo <> "is already up-to-date." - PushException repo msg -> P.wrap $ - "I couldn't push to the repository at" <> prettyWriteRepo repo <> ";" - <> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg - UnrecognizableCacheDir uri localPath -> P.wrap $ "A cache directory for" - <> P.backticked (P.text $ RemoteRepo.printReadRepo uri) <> "already exists at" - <> P.backticked' (P.string localPath) "," <> "but it doesn't seem to" - <> "be a git repository, so I'm not sure what to do next. Delete it?" - UnrecognizableCheckoutDir uri localPath -> P.wrap $ "I tried to clone" - <> P.backticked (P.text $ RemoteRepo.printReadRepo uri) <> "into a cache directory at" - <> P.backticked' (P.string localPath) "," <> "but I can't recognize the" - <> "result as a git repository, so I'm not sure what to do next." - PushDestinationHasNewStuff repo -> - P.callout "⏸" . P.lines $ [ - P.wrap $ "The repository at" <> prettyWriteRepo repo - <> "has some changes I don't know about.", - "", - P.wrap $ "If you want to " <> push <> "you can do:", "", - P.indentN 2 pull, "", - P.wrap $ - "to merge these changes locally," <> - "then try your" <> push <> "again." - ] - where - push = P.group . P.backticked . P.string . IP1.patternName $ IP.patternFromInput input - pull = P.group . P.backticked $ IP.inputStringFromInput input - GitCodebaseError e -> case e of - CouldntLoadRootBranch repo hash -> P.wrap - $ "I couldn't load the designated root hash" - <> P.group ("(" <> fromString (Hash.showBase32Hex hash) <> ")") - <> "from the repository at" <> prettyReadRepo repo - CouldntLoadSyncedBranch ns h -> P.wrap - $ "I just finished importing the branch" <> P.red (P.shown h) - <> "from" <> P.red (prettyRemoteNamespace ns) - <> "but now I can't find it." - CouldntFindRemoteBranch repo path -> P.wrap - $ "I couldn't find the remote branch at" - <> P.shown path - <> "in the repository at" <> prettyReadRepo repo - NoRemoteNamespaceWithHash repo sbh -> P.wrap - $ "The repository at" <> prettyReadRepo repo - <> "doesn't contain a namespace with the hash prefix" - <> (P.blue . P.text . SBH.toText) sbh - RemoteNamespaceHashAmbiguous repo sbh hashes -> P.lines [ - P.wrap $ "The namespace hash" <> prettySBH sbh - <> "at" <> prettyReadRepo repo - <> "is ambiguous." - <> "Did you mean one of these hashes?", - "", - P.indentN 2 $ P.lines - (prettySBH . SBH.fromHash ((Text.length . SBH.toText) sbh * 2) - <$> Set.toList hashes), - "", - P.wrap "Try again with a few more hash characters to disambiguate." - ] - ListEdits patch ppe -> do - let - types = Patch._typeEdits patch - terms = Patch._termEdits patch - - prettyTermEdit (r, TermEdit.Deprecate) = - (P.syntaxToColor . prettyHashQualified . PPE.termName ppe . Referent.Ref $ r - , "-> (deprecated)") - prettyTermEdit (r, TermEdit.Replace r' _typing) = - (P.syntaxToColor . prettyHashQualified . PPE.termName ppe . Referent.Ref $ r - , "-> " <> (P.syntaxToColor . prettyHashQualified . PPE.termName ppe . Referent.Ref $ r')) - prettyTypeEdit (r, TypeEdit.Deprecate) = - (P.syntaxToColor . prettyHashQualified $ PPE.typeName ppe r - , "-> (deprecated)") - prettyTypeEdit (r, TypeEdit.Replace r') = - (P.syntaxToColor . prettyHashQualified $ PPE.typeName ppe r - , "-> " <> (P.syntaxToColor . prettyHashQualified . PPE.typeName ppe $ r')) - pure $ P.sepNonEmpty "\n\n" [ - if R.null types then mempty - else "Edited Types:" `P.hang` - P.column2 (prettyTypeEdit <$> R.toList types), - if R.null terms then mempty - else "Edited Terms:" `P.hang` - P.column2 (prettyTermEdit <$> R.toList terms), - if R.null types && R.null terms then "This patch is empty." - else tip . P.string $ "To remove entries from a patch, use " - <> IP.deleteTermReplacementCommand <> " or " - <> IP.deleteTypeReplacementCommand <> ", as appropriate." - ] - BustedBuiltins (Set.toList -> new) (Set.toList -> old) -> - -- todo: this could be prettier! Have a nice list like `find` gives, but - -- that requires querying the codebase to determine term types. Probably - -- the only built-in types will be primitive types like `Int`, so no need - -- to look up decl types. - -- When we add builtin terms, they may depend on new derived types, so - -- these derived types should be added to the branch too; but not - -- necessarily ever be automatically deprecated. (A library curator might - -- deprecate them; more work needs to go into the idea of sharing deprecations and stuff. - pure . P.warnCallout . P.lines $ - case (new, old) of - ([],[]) -> error "BustedBuiltins busted, as there were no busted builtins." - ([], old) -> - P.wrap ("This codebase includes some builtins that are considered deprecated. Use the " <> makeExample' IP.updateBuiltins <> " command when you're ready to work on eliminating them from your codebase:") - : "" - : fmap (P.text . Reference.toText) old - (new, []) -> P.wrap ("This version of Unison provides builtins that are not part of your codebase. Use " <> makeExample' IP.updateBuiltins <> " to add them:") - : "" : fmap (P.text . Reference.toText) new - (new@(_:_), old@(_:_)) -> - [ P.wrap - ("Sorry and/or good news! This version of Unison supports a different set of builtins than this codebase uses. You can use " - <> makeExample' IP.updateBuiltins - <> " to add the ones you're missing and deprecate the ones I'm missing. 😉" - ) - , "You're missing:" `P.hang` P.lines (fmap (P.text . Reference.toText) new) - , "I'm missing:" `P.hang` P.lines (fmap (P.text . Reference.toText) old) - ] - ListOfPatches patches -> pure $ - if null patches then P.lit "nothing to show" - else numberedPatches patches - where - numberedPatches :: Set Name -> Pretty - numberedPatches patches = - (P.column2 . fmap format) ([(1::Integer)..] `zip` (toList patches)) - where - format (i, p) = (P.hiBlack . fromString $ show i <> ".", prettyName p) - ConfiguredMetadataParseError p md err -> - pure . P.fatalCallout . P.lines $ - [ P.wrap $ "I couldn't understand the default metadata that's set for " - <> prettyPath' p <> " in .unisonConfig." - , P.wrap $ "The value I found was" - <> (P.backticked . P.blue . P.string) md - <> "but I encountered the following error when trying to parse it:" - , "" - , err - ] - NoConfiguredGitUrl pp p -> - pure . P.fatalCallout . P.wrap $ - "I don't know where to " <> - pushPull "push to!" "pull from!" pp <> - (if Path.isRoot' p then "" - else "Add a line like `GitUrl." <> P.shown p - <> " = ' to .unisonConfig. " - ) - <> "Type `help " <> pushPull "push" "pull" pp <> - "` for more information." - --- | ConfiguredGitUrlParseError PushPull Path' Text String - ConfiguredGitUrlParseError pp p url err -> - pure . P.fatalCallout . P.lines $ - [ P.wrap $ "I couldn't understand the GitUrl that's set for" - <> prettyPath' p <> "in .unisonConfig" - , P.wrap $ "The value I found was" <> (P.backticked . P.blue . P.text) url - <> "but I encountered the following error when trying to parse it:" - , "" - , P.string err - , "" - , P.wrap $ "Type" <> P.backticked ("help " <> pushPull "push" "pull" pp) - <> "for more information." - ] - NoBranchWithHash _h -> pure . P.callout "😶" $ - P.wrap $ "I don't know of a namespace with that hash." - NotImplemented -> pure $ P.wrap "That's not implemented yet. Sorry! 😬" - BranchAlreadyExists p -> pure . P.wrap $ - "The namespace" <> prettyPath' p <> "already exists." - LabeledReferenceNotFound hq -> - pure . P.callout "\129300" . P.wrap . P.syntaxToColor $ - "Sorry, I couldn't find anything named" <> prettyHashQualified hq <> "." - LabeledReferenceAmbiguous hashLen hq (LD.partition -> (tps, tms)) -> - pure . P.callout "\129300" . P.lines $ [ - P.wrap "That name is ambiguous. It could refer to any of the following definitions:" - , "" - , P.indentN 2 (P.lines (map qualifyTerm tms ++ map qualifyType tps)) - ] - where - qualifyTerm :: Referent -> Pretty - qualifyTerm = P.syntaxToColor . case hq of - HQ.NameOnly n -> prettyNamedReferent hashLen n - HQ.HashQualified n _ -> prettyNamedReferent hashLen n - HQ.HashOnly _ -> prettyReferent hashLen - qualifyType :: Reference -> Pretty - qualifyType = P.syntaxToColor . case hq of - HQ.NameOnly n -> prettyNamedReference hashLen n - HQ.HashQualified n _ -> prettyNamedReference hashLen n - HQ.HashOnly _ -> prettyReference hashLen - DeleteNameAmbiguous hashLen p tms tys -> - pure . P.callout "\129300" . P.lines $ [ - P.wrap "That name is ambiguous. It could refer to any of the following definitions:" - , "" - , P.indentN 2 (P.lines (map qualifyTerm (Set.toList tms) ++ map qualifyType (Set.toList tys))) - , "" - , P.wrap "You may:" - , "" - , P.indentN 2 . P.bulleted $ - [ P.wrap "Delete one by an unambiguous name, given above." - , P.wrap "Delete them all by re-issuing the previous command." - ] - ] - where - name :: Name - name = Path.toName' (HQ'.toName (Path.unsplitHQ' p)) - qualifyTerm :: Referent -> Pretty - qualifyTerm = P.syntaxToColor . prettyNamedReferent hashLen name - qualifyType :: Reference -> Pretty - qualifyType = P.syntaxToColor . prettyNamedReference hashLen name - TermAmbiguous _ _ -> pure "That term is ambiguous." - HashAmbiguous h rs -> pure . P.callout "\129300" . P.lines $ [ - P.wrap $ "The hash" <> prettyShortHash h <> "is ambiguous." - <> "Did you mean one of these hashes?", - "", - P.indentN 2 $ P.lines (P.shown <$> Set.toList rs), - "", - P.wrap "Try again with a few more hash characters to disambiguate." - ] - BranchHashAmbiguous h rs -> pure . P.callout "\129300" . P.lines $ [ - P.wrap $ "The namespace hash" <> prettySBH h <> "is ambiguous." - <> "Did you mean one of these hashes?", - "", - P.indentN 2 $ P.lines (prettySBH <$> Set.toList rs), - "", - P.wrap "Try again with a few more hash characters to disambiguate." - ] - BadName n -> - pure . P.wrap $ P.string n <> " is not a kind of name I understand." - TermNotFound' sh -> - pure $ "I could't find a term with hash " - <> (prettyShortHash sh) - TypeNotFound' sh -> - pure $ "I could't find a type with hash " - <> (prettyShortHash sh) - NothingToPatch _patchPath dest -> pure $ - P.callout "😶" . P.wrap - $ "This had no effect. Perhaps the patch has already been applied" - <> "or it doesn't intersect with the definitions in" - <> P.group (prettyPath' dest <> ".") - PatchNeedsToBeConflictFree -> - pure . P.wrap $ - "I tried to auto-apply the patch, but couldn't because it contained" - <> "contradictory entries." - PatchInvolvesExternalDependents _ _ -> - pure "That patch involves external dependents." - ShowReflog [] -> pure . P.warnCallout $ "The reflog appears to be empty!" - ShowReflog entries -> pure $ - P.lines [ - P.wrap $ "Here is a log of the root namespace hashes," - <> "starting with the most recent," - <> "along with the command that got us there." - <> "Try:", - "", - -- `head . tail` is safe: entries never has 1 entry, and [] is handled above - let e2 = head . tail $ entries in - P.indentN 2 . P.wrapColumn2 $ [ - (IP.makeExample IP.forkLocal ["2", ".old"], - ""), - (IP.makeExample IP.forkLocal [prettySBH . Output.hash $ e2, ".old"], - "to make an old namespace accessible again,"), - (mempty,mempty), - (IP.makeExample IP.resetRoot [prettySBH . Output.hash $ e2], - "to reset the root namespace and its history to that of the specified" - <> "namespace.") - ], - "", - P.numberedList . fmap renderEntry $ entries - ] - where - renderEntry :: Output.ReflogEntry -> Pretty - renderEntry (Output.ReflogEntry hash reason) = P.wrap $ - P.blue (prettySBH hash) <> " : " <> P.text reason - History _cap history tail -> pure $ - P.lines [ - note $ "The most recent namespace hash is immediately below this message.", "", - P.sep "\n\n" [ go h diff | (h,diff) <- reverse history ], "", - tailMsg - ] - where - tailMsg = case tail of - E.EndOfLog h -> P.lines [ - "□ " <> prettySBH h <> " (start of history)" - ] - E.MergeTail h hs -> P.lines [ - P.wrap $ "This segment of history starts with a merge." <> ex, - "", - "⊙ " <> prettySBH h, - "⑃", - P.lines (prettySBH <$> hs) - ] - E.PageEnd h _n -> P.lines [ - P.wrap $ "There's more history before the versions shown here." <> ex, "", - dots, "", - "⊙ " <> prettySBH h, - "" - ] - dots = "⠇" - go hash diff = P.lines [ - "⊙ " <> prettySBH hash, - "", - P.indentN 2 $ prettyDiff diff - ] - ex = "Use" <> IP.makeExample IP.history ["#som3n4m3space"] - <> "to view history starting from a given namespace hash." - StartOfCurrentPathHistory -> pure $ - P.wrap "You're already at the very beginning! 🙂" - PullAlreadyUpToDate ns dest -> pure . P.callout "😶" $ - P.wrap $ prettyPath' dest <> "was already up-to-date with" - <> P.group (prettyRemoteNamespace ns <> ".") - - MergeAlreadyUpToDate src dest -> pure . P.callout "😶" $ - P.wrap $ prettyPath' dest <> "was already up-to-date with" - <> P.group (prettyPath' src <> ".") - PreviewMergeAlreadyUpToDate src dest -> pure . P.callout "😶" $ - P.wrap $ prettyPath' dest <> "is already up-to-date with" - <> P.group (prettyPath' src <> ".") - DumpNumberedArgs args -> pure . P.numberedList $ fmap P.string args - NoConflictsOrEdits -> - pure (P.okCallout "No conflicts or edits in progress.") - NoOp -> pure $ P.string "I didn't make any changes." - DefaultMetadataNotification -> pure $ P.wrap "I added some default metadata." - DumpBitBooster head map -> let - go output [] = output - go output (head : queue) = case Map.lookup head map of - Nothing -> go (renderLine head [] : output) queue - Just tails -> go (renderLine head tails : output) (queue ++ tails) - where - renderHash = take 10 . Text.unpack . Hash.base32Hex . Causal.unRawHash - renderLine head tail = - (renderHash head) ++ "|" ++ intercalateMap " " renderHash tail ++ - case Map.lookup (Hash.base32Hex . Causal.unRawHash $ head) tags of - Just t -> "|tag: " ++ t - Nothing -> "" - -- some specific hashes that we want to label in the output - tags :: Map Text String - tags = Map.fromList . fmap swap $ - [ ("unisonbase 2019/8/6", "54s9qjhaonotuo4sp6ujanq7brngk32f30qt5uj61jb461h9fcca6vv5levnoo498bavne4p65lut6k6a7rekaruruh9fsl19agu8j8") - , ("unisonbase 2019/8/5", "focmbmg7ca7ht7opvjaqen58fobu3lijfa9adqp7a1l1rlkactd7okoimpfmd0ftfmlch8gucleh54t3rd1e7f13fgei86hnsr6dt1g") - , ("unisonbase 2019/7/31", "jm2ltsg8hh2b3c3re7aru6e71oepkqlc3skr2v7bqm4h1qgl3srucnmjcl1nb8c9ltdv56dpsgpdur1jhpfs6n5h43kig5bs4vs50co") - , ("unisonbase 2019/7/25", "an1kuqsa9ca8tqll92m20tvrmdfk0eksplgjbda13evdlngbcn5q72h8u6nb86ojr7cvnemjp70h8cq1n95osgid1koraq3uk377g7g") - , ("ucm m1b", "o6qocrqcqht2djicb1gcmm5ct4nr45f8g10m86bidjt8meqablp0070qae2tvutnvk4m9l7o1bkakg49c74gduo9eati20ojf0bendo") - , ("ucm m1, m1a", "auheev8io1fns2pdcnpf85edsddj27crpo9ajdujum78dsncvfdcdu5o7qt186bob417dgmbd26m8idod86080bfivng1edminu3hug") - ] - - in pure $ P.lines [ - P.lines (fmap fromString . reverse . nubOrd $ go [] [head]), - "", - "Paste that output into http://bit-booster.com/graph.html" - ] - ListDependents hqLength ld names missing -> pure $ - if names == mempty && missing == mempty - then c (prettyLabeledDependency hqLength ld) <> " doesn't have any dependents." - else - "Dependents of " <> c (prettyLabeledDependency hqLength ld) <> ":\n\n" <> - (P.indentN 2 (P.numberedColumn2Header num pairs)) - where - num n = P.hiBlack $ P.shown n <> "." - header = (P.hiBlack "Reference", P.hiBlack "Name") - pairs = header : (fmap (first c . second c) $ - [ (p $ Reference.toShortHash r, prettyName n) | (n, r) <- names ] ++ - [ (p $ Reference.toShortHash r, "(no name available)") | r <- toList missing ]) - p = prettyShortHash . SH.take hqLength - c = P.syntaxToColor - -- this definition is identical to the previous one, apart from the word - -- "Dependencies", but undecided about whether or how to refactor - ListDependencies hqLength ld names missing -> pure $ - if names == mempty && missing == mempty - then c (prettyLabeledDependency hqLength ld) <> " doesn't have any dependencies." - else - "Dependencies of " <> c (prettyLabeledDependency hqLength ld) <> ":\n\n" <> - (P.indentN 2 (P.numberedColumn2Header num pairs)) - where - num n = P.hiBlack $ P.shown n <> "." - header = (P.hiBlack "Reference", P.hiBlack "Name") - pairs = header : (fmap (first c . second c) $ - [ (p $ Reference.toShortHash r, prettyName n) | (n, r) <- names ] ++ - [ (p $ Reference.toShortHash r, "(no name available)") | r <- toList missing ]) - p = prettyShortHash . SH.take hqLength - c = P.syntaxToColor - DumpUnisonFileHashes hqLength datas effects terms -> - pure . P.syntaxToColor . P.lines $ - (effects <&> \(n,r) -> "ability " <> - prettyHashQualified' (HQ'.take hqLength . HQ'.fromNamedReference n $ Reference.DerivedId r)) <> - (datas <&> \(n,r) -> "type " <> - prettyHashQualified' (HQ'.take hqLength . HQ'.fromNamedReference n $ Reference.DerivedId r)) <> - (terms <&> \(n,r) -> - prettyHashQualified' (HQ'.take hqLength . HQ'.fromNamedReference n $ Reference.DerivedId r)) - - where - _nameChange _cmd _pastTenseCmd _oldName _newName _r = error "todo" - -- do - -- when (not . Set.null $ E.changedSuccessfully r) . putPrettyLn . P.okCallout $ - -- P.wrap $ "I" <> pastTenseCmd <> "the" - -- <> ns (E.changedSuccessfully r) - -- <> P.blue (prettyName oldName) - -- <> "to" <> P.group (P.green (prettyName newName) <> ".") - -- when (not . Set.null $ E.oldNameConflicted r) . putPrettyLn . P.warnCallout $ - -- (P.wrap $ "I couldn't" <> cmd <> "the" - -- <> ns (E.oldNameConflicted r) - -- <> P.blue (prettyName oldName) - -- <> "to" <> P.green (prettyName newName) - -- <> "because of conflicts.") - -- <> "\n\n" - -- <> tip ("Use " <> makeExample' IP.todo <> " to view more information on conflicts and remaining work.") - -- when (not . Set.null $ E.newNameAlreadyExists r) . putPrettyLn . P.warnCallout $ - -- (P.wrap $ "I couldn't" <> cmd <> P.blue (prettyName oldName) - -- <> "to" <> P.green (prettyName newName) - -- <> "because the " - -- <> ns (E.newNameAlreadyExists r) - -- <> "already exist(s).") - -- <> "\n\n" - -- <> tip - -- ("Use" <> makeExample IP.rename [prettyName newName, ""] <> "to make" <> prettyName newName <> "available.") --- where --- ns targets = P.oxfordCommas $ --- map (fromString . Names.renderNameTarget) (toList targets) - -prettyPath' :: Path.Path' -> Pretty -prettyPath' p' = - if Path.isCurrentPath p' - then "the current namespace" - else P.blue (P.shown p') - -prettyRelative :: Path.Relative -> Pretty -prettyRelative = P.blue . P.shown - -prettySBH :: IsString s => ShortBranchHash -> P.Pretty s -prettySBH hash = P.group $ "#" <> P.text (SBH.toText hash) - -formatMissingStuff :: (Show tm, Show typ) => - [(HQ.HashQualified Name, tm)] -> [(HQ.HashQualified Name, typ)] -> Pretty -formatMissingStuff terms types = - (unlessM (null terms) . P.fatalCallout $ - P.wrap "The following terms have a missing or corrupted type signature:" - <> "\n\n" - <> P.column2 [ (P.syntaxToColor $ prettyHashQualified name, fromString (show ref)) | (name, ref) <- terms ]) <> - (unlessM (null types) . P.fatalCallout $ - P.wrap "The following types weren't found in the codebase:" - <> "\n\n" - <> P.column2 [ (P.syntaxToColor $ prettyHashQualified name, fromString (show ref)) | (name, ref) <- types ]) - -displayDefinitions' :: Var v => Ord a1 - => PPE.PrettyPrintEnvDecl - -> Map Reference.Reference (DisplayObject () (DD.Decl v a1)) - -> Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) - -> Pretty -displayDefinitions' ppe0 types terms = P.syntaxToColor $ P.sep "\n\n" (prettyTypes <> prettyTerms) - where - ppeBody r = PPE.declarationPPE ppe0 r - ppeDecl = PPE.unsuffixifiedPPE ppe0 - prettyTerms = map go . Map.toList - -- sort by name - $ Map.mapKeys (first (PPE.termName ppeDecl . Referent.Ref) . dupe) terms - prettyTypes = map go2 . Map.toList - $ Map.mapKeys (first (PPE.typeName ppeDecl) . dupe) types - go ((n, r), dt) = - case dt of - MissingObject r -> missing n r - BuiltinObject typ -> - P.hang ("builtin " <> prettyHashQualified n <> " :") - (TypePrinter.prettySyntax (ppeBody r) typ) - UserObject tm -> TermPrinter.prettyBinding (ppeBody r) n tm - go2 ((n, r), dt) = - case dt of - MissingObject r -> missing n r - BuiltinObject _ -> builtin n - UserObject decl -> case decl of - Left d -> DeclPrinter.prettyEffectDecl (ppeBody r) r n d - Right d -> DeclPrinter.prettyDataDecl (PPE.declarationPPEDecl ppe0 r) r n d - builtin n = P.wrap $ "--" <> prettyHashQualified n <> " is built-in." - missing n r = P.wrap ( - "-- The name " <> prettyHashQualified n <> " is assigned to the " - <> "reference " <> fromString (show r ++ ",") - <> "which is missing from the codebase.") - <> P.newline - <> tip "You might need to repair the codebase manually." - -displayRendered :: Maybe FilePath -> Pretty -> IO Pretty -displayRendered outputLoc pp = - maybe (pure pp) scratchAndDisplay outputLoc - where - scratchAndDisplay path = do - path' <- canonicalizePath path - prependToFile pp path' - pure (message pp path') - where - prependToFile pp path = do - existingContents <- do - exists <- doesFileExist path - if exists then readFile path - else pure "" - writeFile path . Text.pack . P.toPlain 80 $ - P.lines [ pp, "", P.text existingContents ] - message pp path = - P.callout "☝️" $ P.lines [ - P.wrap $ "I added this to the top of " <> fromString path, - "", - P.indentN 2 pp - ] - -displayDefinitions :: - Var v => - Ord a1 => - Maybe FilePath -> - PPE.PrettyPrintEnvDecl -> - Map Reference.Reference (DisplayObject () (DD.Decl v a1)) -> - Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) -> - IO Pretty -displayDefinitions _outputLoc _ppe types terms - | Map.null types && Map.null terms = - pure $ P.callout "😶" "No results to display." -displayDefinitions outputLoc ppe types terms = - maybe displayOnly scratchAndDisplay outputLoc - where - displayOnly = pure code - scratchAndDisplay path = do - path' <- canonicalizePath path - prependToFile code path' - pure (message code path') - where - prependToFile code path = do - existingContents <- do - exists <- doesFileExist path - if exists - then readFile path - else pure "" - writeFile path . Text.pack . P.toPlain 80 $ - P.lines - [ code, - "", - "---- " <> "Anything below this line is ignored by Unison.", - "", - P.text existingContents - ] - message code path = - P.callout "☝️" $ - P.lines - [ P.wrap $ "I added these definitions to the top of " <> fromString path, - "", - P.indentN 2 code, - "", - P.wrap $ - "You can edit them there, then do" <> makeExample' IP.update - <> "to replace the definitions currently in this namespace." - ] - code = - P.syntaxToColor $ P.sep "\n\n" (prettyTypes <> prettyTerms) - where - ppeBody r = PPE.declarationPPE ppe r - ppeDecl = PPE.unsuffixifiedPPE ppe - prettyTerms = - map go . Map.toList $ - -- sort by name - Map.mapKeys (first (PPE.termName ppeDecl . Referent.Ref) . dupe) terms - prettyTypes = - map go2 . Map.toList $ - Map.mapKeys (first (PPE.typeName ppeDecl) . dupe) types - go ((n, r), dt) = - case dt of - MissingObject r -> missing n r - BuiltinObject typ -> - P.hang - ("builtin " <> prettyHashQualified n <> " :") - (TypePrinter.prettySyntax (ppeBody r) typ) - UserObject tm -> TermPrinter.prettyBinding (ppeBody r) n tm - go2 ((n, r), dt) = - case dt of - MissingObject r -> missing n r - BuiltinObject _ -> builtin n - UserObject decl -> case decl of - Left d -> DeclPrinter.prettyEffectDecl (ppeBody r) r n d - Right d -> DeclPrinter.prettyDataDecl (PPE.declarationPPEDecl ppe r) r n d - builtin n = P.wrap $ "--" <> prettyHashQualified n <> " is built-in." - missing n r = - P.wrap - ( "-- The name " <> prettyHashQualified n <> " is assigned to the " - <> "reference " - <> fromString (show r ++ ",") - <> "which is missing from the codebase." - ) - <> P.newline - <> tip "You might need to repair the codebase manually." - -displayTestResults :: Bool -- whether to show the tip - -> PPE.PrettyPrintEnv - -> [(Reference, Text)] - -> [(Reference, Text)] - -> Pretty -displayTestResults showTip ppe oksUnsorted failsUnsorted = let - oks = Name.sortByText fst [ (name r, msg) | (r, msg) <- oksUnsorted ] - fails = Name.sortByText fst [ (name r, msg) | (r, msg) <- failsUnsorted ] - name r = HQ.toText $ PPE.termName ppe (Referent.Ref r) - okMsg = - if null oks then mempty - else P.column2 [ (P.green "◉ " <> P.text r, " " <> P.green (P.text msg)) | (r, msg) <- oks ] - okSummary = - if null oks then mempty - else "✅ " <> P.bold (P.num (length oks)) <> P.green " test(s) passing" - failMsg = - if null fails then mempty - else P.column2 [ (P.red "✗ " <> P.text r, " " <> P.red (P.text msg)) | (r, msg) <- fails ] - failSummary = - if null fails then mempty - else "🚫 " <> P.bold (P.num (length fails)) <> P.red " test(s) failing" - tipMsg = - if not showTip || (null oks && null fails) then mempty - else tip $ "Use " <> P.blue ("view " <> P.text (fst $ head (fails ++ oks))) - <> "to view the source of a test." - in if null oks && null fails then "😶 No tests available." - else P.sep "\n\n" . P.nonEmpty $ [ - okMsg, failMsg, - P.sep ", " . P.nonEmpty $ [failSummary, okSummary], tipMsg] - -unsafePrettyTermResultSig' :: Var v => - PPE.PrettyPrintEnv -> SR'.TermResult' v a -> Pretty -unsafePrettyTermResultSig' ppe = \case - SR'.TermResult' name (Just typ) r _aliases -> - head (TypePrinter.prettySignatures' ppe [(r,name,typ)]) - _ -> error "Don't pass Nothing" - --- produces: --- -- #5v5UtREE1fTiyTsTK2zJ1YNqfiF25SkfUnnji86Lms#0 --- Optional.None, Maybe.Nothing : Maybe a -unsafePrettyTermResultSigFull' :: Var v => - PPE.PrettyPrintEnv -> SR'.TermResult' v a -> Pretty -unsafePrettyTermResultSigFull' ppe = \case - SR'.TermResult' hq (Just typ) r aliases -> - P.lines - [ P.hiBlack "-- " <> greyHash (HQ.fromReferent r) - , P.group $ - P.commas (fmap greyHash $ hq : map HQ'.toHQ (toList aliases)) <> " : " - <> (P.syntaxToColor $ TypePrinter.pretty0 ppe mempty (-1) typ) - , mempty - ] - _ -> error "Don't pass Nothing" - where greyHash = styleHashQualified' id P.hiBlack - -prettyTypeResultHeader' :: Var v => SR'.TypeResult' v a -> Pretty -prettyTypeResultHeader' (SR'.TypeResult' name dt r _aliases) = - prettyDeclTriple (name, r, dt) - --- produces: --- -- #5v5UtREE1fTiyTsTK2zJ1YNqfiF25SkfUnnji86Lms --- type Optional --- type Maybe -prettyTypeResultHeaderFull' :: Var v => SR'.TypeResult' v a -> Pretty -prettyTypeResultHeaderFull' (SR'.TypeResult' name dt r aliases) = - P.lines stuff <> P.newline - where - stuff = - (P.hiBlack "-- " <> greyHash (HQ.fromReference r)) : - fmap (\name -> prettyDeclTriple (name, r, dt)) - (name : map HQ'.toHQ (toList aliases)) - where greyHash = styleHashQualified' id P.hiBlack - -prettyDeclTriple :: Var v => - (HQ.HashQualified Name, Reference.Reference, DisplayObject () (DD.Decl v a)) - -> Pretty -prettyDeclTriple (name, _, displayDecl) = case displayDecl of - BuiltinObject _ -> P.hiBlack "builtin " <> P.hiBlue "type " <> P.blue (P.syntaxToColor $ prettyHashQualified name) - MissingObject _ -> mempty -- these need to be handled elsewhere - UserObject decl -> case decl of - Left ed -> P.syntaxToColor $ DeclPrinter.prettyEffectHeader name ed - Right dd -> P.syntaxToColor $ DeclPrinter.prettyDataHeader name dd - -prettyDeclPair :: Var v => - PPE.PrettyPrintEnv -> (Reference, DisplayObject () (DD.Decl v a)) - -> Pretty -prettyDeclPair ppe (r, dt) = prettyDeclTriple (PPE.typeName ppe r, r, dt) - -renderNameConflicts :: Set.Set Name -> Set.Set Name -> Pretty -renderNameConflicts conflictedTypeNames conflictedTermNames = - unlessM (null allNames) $ P.callout "❓" . P.sep "\n\n" . P.nonEmpty $ [ - showConflictedNames "types" conflictedTypeNames, - showConflictedNames "terms" conflictedTermNames, - tip $ "This occurs when merging branches that both independently introduce the same name. Use " - <> makeExample IP.view (prettyName <$> take 3 allNames) - <> "to see the conflicting definitions, then use " - <> makeExample' (if (not . null) conflictedTypeNames - then IP.renameType else IP.renameTerm) - <> "to resolve the conflicts." - ] - where - allNames = toList (conflictedTermNames <> conflictedTypeNames) - showConflictedNames things conflictedNames = - unlessM (Set.null conflictedNames) $ - P.wrap ("These" <> P.bold (things <> "have conflicting definitions:")) - `P.hang` P.commas (P.blue . prettyName <$> toList conflictedNames) - -renderEditConflicts :: - PPE.PrettyPrintEnv -> Patch -> Pretty -renderEditConflicts ppe Patch{..} = - unlessM (null editConflicts) . P.callout "❓" . P.sep "\n\n" $ [ - P.wrap $ "These" <> P.bold "definitions were edited differently" - <> "in namespaces that have been merged into this one." - <> "You'll have to tell me what to use as the new definition:", - P.indentN 2 (P.lines (formatConflict <$> editConflicts)) --- , tip $ "Use " <> makeExample IP.resolve [name (head editConflicts), " "] <> " to pick a replacement." -- todo: eventually something with `edit` - ] - where - -- todo: could possibly simplify all of this, but today is a copy/paste day. - editConflicts :: [Either (Reference, Set TypeEdit.TypeEdit) (Reference, Set TermEdit.TermEdit)] - editConflicts = - (fmap Left . Map.toList . R.toMultimap . R.filterManyDom $ _typeEdits) <> - (fmap Right . Map.toList . R.toMultimap . R.filterManyDom $ _termEdits) - typeName r = styleHashQualified P.bold (PPE.typeName ppe r) - termName r = styleHashQualified P.bold (PPE.termName ppe (Referent.Ref r)) - formatTypeEdits (r, toList -> es) = P.wrap $ - "The type" <> typeName r <> "was" <> - (if TypeEdit.Deprecate `elem` es - then "deprecated and also replaced with" - else "replaced with") <> - P.oxfordCommas [ typeName r | TypeEdit.Replace r <- es ] - formatTermEdits (r, toList -> es) = P.wrap $ - "The term" <> termName r <> "was" <> - (if TermEdit.Deprecate `elem` es - then "deprecated and also replaced with" - else "replaced with") <> - P.oxfordCommas [ termName r | TermEdit.Replace r _ <- es ] - formatConflict = either formatTypeEdits formatTermEdits - -type Numbered = State.State (Int, Seq.Seq String) - -todoOutput :: Var v => PPE.PrettyPrintEnvDecl -> TO.TodoOutput v a -> Pretty -todoOutput ppe todo = - todoConflicts <> todoEdits - where - ppeu = PPE.unsuffixifiedPPE ppe - ppes = PPE.suffixifiedPPE ppe - (frontierTerms, frontierTypes) = TO.todoFrontier todo - (dirtyTerms, dirtyTypes) = TO.todoFrontierDependents todo - corruptTerms = - [ (PPE.termName ppeu (Referent.Ref r), r) | (r, Nothing) <- frontierTerms ] - corruptTypes = - [ (PPE.typeName ppeu r, r) | (r, MissingObject _) <- frontierTypes ] - goodTerms ts = - [ (Referent.Ref r, PPE.termName ppeu (Referent.Ref r), typ) | (r, Just typ) <- ts ] - todoConflicts = if TO.noConflicts todo then mempty else P.lines . P.nonEmpty $ - [ renderEditConflicts ppeu (TO.editConflicts todo) - , renderNameConflicts conflictedTypeNames conflictedTermNames ] - where - -- If a conflict is both an edit and a name conflict, we show it in the edit - -- conflicts section - c :: Names - c = removeEditConflicts (TO.editConflicts todo) (TO.nameConflicts todo) - conflictedTypeNames = (R.dom . Names.types) c - conflictedTermNames = (R.dom . Names.terms) c - -- e.g. `foo#a` has been independently updated to `foo#b` and `foo#c`. - -- This means there will be a name conflict: - -- foo -> #b - -- foo -> #c - -- as well as an edit conflict: - -- #a -> #b - -- #a -> #c - -- We want to hide/ignore the name conflicts that are also targets of an - -- edit conflict, so that the edit conflict will be dealt with first. - -- For example, if hash `h` has multiple edit targets { #x, #y, #z, ...}, - -- we'll temporarily remove name conflicts pointing to { #x, #y, #z, ...}. - removeEditConflicts :: Patch -> Names -> Names - removeEditConflicts Patch{..} Names{..} = Names terms' types' where - terms' = R.filterRan (`Set.notMember` conflictedTermEditTargets) terms - types' = R.filterRan (`Set.notMember` conflictedTypeEditTargets) types - conflictedTypeEditTargets :: Set Reference - conflictedTypeEditTargets = - Set.fromList $ toList (R.ran typeEditConflicts) >>= TypeEdit.references - conflictedTermEditTargets :: Set Referent.Referent - conflictedTermEditTargets = - Set.fromList . fmap Referent.Ref - $ toList (R.ran termEditConflicts) >>= TermEdit.references - typeEditConflicts = R.filterDom (`R.manyDom` _typeEdits) _typeEdits - termEditConflicts = R.filterDom (`R.manyDom` _termEdits) _termEdits - - - todoEdits = unlessM (TO.noEdits todo) . P.callout "🚧" . P.sep "\n\n" . P.nonEmpty $ - [ P.wrap ("The namespace has" <> fromString (show (TO.todoScore todo)) - <> "transitive dependent(s) left to upgrade." - <> "Your edit frontier is the dependents of these definitions:") - , P.indentN 2 . P.lines $ ( - (prettyDeclPair ppeu <$> toList frontierTypes) ++ - TypePrinter.prettySignatures' ppes (goodTerms frontierTerms) - ) - , P.wrap "I recommend working on them in the following order:" - , P.numberedList $ - let unscore (_score,a,b) = (a,b) - in (prettyDeclPair ppeu . unscore <$> toList dirtyTypes) ++ - TypePrinter.prettySignatures' - ppes - (goodTerms $ unscore <$> dirtyTerms) - , formatMissingStuff corruptTerms corruptTypes - ] - -listOfDefinitions :: - Var v => PPE.PrettyPrintEnv -> E.ListDetailed -> [SR'.SearchResult' v a] -> IO Pretty -listOfDefinitions ppe detailed results = - pure $ listOfDefinitions' ppe detailed results - -listOfLinks :: - Var v => PPE.PrettyPrintEnv -> [(HQ.HashQualified Name, Maybe (Type v a))] -> IO Pretty -listOfLinks _ [] = pure . P.callout "😶" . P.wrap $ - "No results. Try using the " <> - IP.makeExample IP.link [] <> - "command to add metadata to a definition." -listOfLinks ppe results = pure $ P.lines [ - P.numberedColumn2 num [ - (P.syntaxToColor $ prettyHashQualified hq, ": " <> prettyType typ) | (hq,typ) <- results - ], "", - tip $ "Try using" <> IP.makeExample IP.display ["1"] - <> "to display the first result or" - <> IP.makeExample IP.view ["1"] <> "to view its source." - ] - where - num i = P.hiBlack $ P.shown i <> "." - prettyType Nothing = "❓ (missing a type for this definition)" - prettyType (Just t) = TypePrinter.pretty ppe t - -data ShowNumbers = ShowNumbers | HideNumbers --- | `ppe` is just for rendering type signatures --- `oldPath, newPath :: Path.Absolute` are just for producing fully-qualified --- numbered args -showDiffNamespace :: forall v . Var v - => ShowNumbers - -> PPE.PrettyPrintEnv - -> Path.Absolute - -> Path.Absolute - -> OBD.BranchDiffOutput v Ann - -> (Pretty, NumberedArgs) -showDiffNamespace _ _ _ _ diffOutput | OBD.isEmpty diffOutput = - ("The namespaces are identical.", mempty) -showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput{..} = - (P.sepNonEmpty "\n\n" p, toList args) - where - (p, (menuSize, args)) = (`State.runState` (0::Int, Seq.empty)) $ sequence [ - if (not . null) newTypeConflicts - || (not . null) newTermConflicts - then do - prettyUpdatedTypes :: [Pretty] <- traverse prettyUpdateType newTypeConflicts - prettyUpdatedTerms :: [Pretty] <- traverse prettyUpdateTerm newTermConflicts - pure $ P.sepNonEmpty "\n\n" - [ P.red "New name conflicts:" - , P.indentN 2 . P.sepNonEmpty "\n\n" $ prettyUpdatedTypes <> prettyUpdatedTerms - ] - else pure mempty - ,if (not . null) resolvedTypeConflicts - || (not . null) resolvedTermConflicts - then do - prettyUpdatedTypes :: [Pretty] <- traverse prettyUpdateType resolvedTypeConflicts - prettyUpdatedTerms :: [Pretty] <- traverse prettyUpdateTerm resolvedTermConflicts - pure $ P.sepNonEmpty "\n\n" - [ P.bold "Resolved name conflicts:" - , P.indentN 2 . P.sepNonEmpty "\n\n" $ prettyUpdatedTypes <> prettyUpdatedTerms - ] - else pure mempty - ,if (not . null) updatedTypes - || (not . null) updatedTerms - || propagatedUpdates > 0 - || (not . null) updatedPatches - then do - prettyUpdatedTypes :: [Pretty] <- traverse prettyUpdateType updatedTypes - prettyUpdatedTerms :: [Pretty] <- traverse prettyUpdateTerm updatedTerms - prettyUpdatedPatches :: [Pretty] <- traverse (prettySummarizePatch newPath) updatedPatches - pure $ P.sepNonEmpty "\n\n" - [ P.bold "Updates:" - , P.indentNonEmptyN 2 . P.sepNonEmpty "\n\n" $ prettyUpdatedTypes <> prettyUpdatedTerms - , if propagatedUpdates > 0 - then P.indentN 2 - $ P.wrap (P.hiBlack $ "There were " - <> P.shown propagatedUpdates - <> "auto-propagated updates.") - else mempty - , P.indentNonEmptyN 2 . P.linesNonEmpty $ prettyUpdatedPatches - ] - else pure mempty - ,if (not . null) addedTypes - || (not . null) addedTerms - || (not . null) addedPatches - then do - prettyAddedTypes :: Pretty <- prettyAddTypes addedTypes - prettyAddedTerms :: Pretty <- prettyAddTerms addedTerms - prettyAddedPatches :: [Pretty] <- traverse (prettySummarizePatch newPath) addedPatches - pure $ P.sepNonEmpty "\n\n" - [ P.bold "Added definitions:" - , P.indentNonEmptyN 2 $ P.linesNonEmpty [prettyAddedTypes, prettyAddedTerms] - , P.indentNonEmptyN 2 $ P.lines prettyAddedPatches - ] - else pure mempty - ,if (not . null) removedTypes - || (not . null) removedTerms - || (not . null) removedPatches - then do - prettyRemovedTypes :: Pretty <- prettyRemoveTypes removedTypes - prettyRemovedTerms :: Pretty <- prettyRemoveTerms removedTerms - prettyRemovedPatches :: [Pretty] <- traverse (prettyNamePatch oldPath) removedPatches - pure $ P.sepNonEmpty "\n\n" - [ P.bold "Removed definitions:" - , P.indentN 2 $ P.linesNonEmpty [ prettyRemovedTypes - , prettyRemovedTerms - , P.linesNonEmpty prettyRemovedPatches ] - ] - else pure mempty - ,if (not . null) renamedTypes - || (not . null) renamedTerms - then do - results <- prettyRenameGroups renamedTypes renamedTerms - pure $ P.sepNonEmpty "\n\n" - [ P.bold "Name changes:" - , P.indentN 2 . P.sepNonEmpty "\n\n" $ results - ] - -- todo: change separator to just '\n' here if all the results are 1 to 1 - else pure mempty - ] - - {- new implementation - 23. X ┐ => (added) 24. X' - 25. X2 ┘ (removed) 26. X2 - -} - prettyRenameGroups :: [OBD.RenameTypeDisplay v a] - -> [OBD.RenameTermDisplay v a] - -> Numbered [Pretty] - prettyRenameGroups types terms = - (<>) <$> traverse (prettyGroup . (over (_1._1) Referent.Ref)) - (types `zip` [0..]) - <*> traverse prettyGroup (terms `zip` [length types ..]) - where - leftNamePad :: P.Width = - foldl1' max - $ map (foldl1' max . map (P.Width . HQ'.nameLength) . toList . view _3) - terms - <> map (foldl1' max . map (P.Width . HQ'.nameLength) . toList . view _3) - types - prettyGroup - :: ( (Referent, b, Set (HQ'.HashQualified Name), Set (HQ'.HashQualified Name)) - , Int - ) - -> Numbered Pretty - prettyGroup ((r, _, olds, news),i) = let - -- [ "peach ┐" - -- , "peach' ┘"] - olds' :: [Numbered Pretty] = - map (\(oldhq, oldp) -> numHQ' oldPath oldhq r <&> (\n -> n <> " " <> oldp)) - . (zip (toList olds)) - . P.boxRight - . map (P.rightPad leftNamePad . phq') - $ toList olds - - added' = toList $ Set.difference news olds - removed' = toList $ Set.difference olds news - -- [ "(added) 24. X'" - -- , "(removed) 26. X2" - -- ] - - news' :: [Numbered Pretty] = - map (number addedLabel) added' ++ map (number removedLabel) removed' - where - addedLabel = "(added)" - removedLabel = "(removed)" - number label name = - numHQ' newPath name r <&> - (\num -> num <> " " <> phq' name <> " " <> label) - - buildTable :: [Numbered Pretty] -> [Numbered Pretty] -> Numbered Pretty - buildTable lefts rights = let - hlefts = if i == 0 then pure (P.bold "Original") : lefts - else lefts - hrights = if i == 0 then pure (P.bold "Changes") : rights else rights - in P.column2UnzippedM @Numbered mempty hlefts hrights - - in buildTable olds' news' - - prettyUpdateType :: OBD.UpdateTypeDisplay v a -> Numbered Pretty - {- - 1. ability Foo#pqr x y - 2. - AllRightsReserved : License - 3. + MIT : License - 4. ability Foo#abc - 5. - apiDocs : License - 6. + MIT : License - -} - prettyUpdateType (Nothing, mdUps) = - P.column2 <$> traverse (mdTypeLine newPath) mdUps - {- - 1. ┌ ability Foo#pqr x y - 2. └ ability Foo#xyz a b - ⧩ - 4. ┌ ability Foo#abc - │ 5. - apiDocs : Doc - │ 6. + MIT : License - 7. └ ability Foo#def - 8. - apiDocs : Doc - 9. + MIT : License - - 1. ┌ foo#abc : Nat -> Nat -> Poop - 2. └ foo#xyz : Nat - ↓ - 4. foo : Poop - 5. + foo.docs : Doc - -} - prettyUpdateType (Just olds, news) = - do - olds <- traverse (mdTypeLine oldPath) [ (name,r,decl,mempty) | (name,r,decl) <- olds ] - news <- traverse (mdTypeLine newPath) news - let (oldnums, olddatas) = unzip olds - let (newnums, newdatas) = unzip news - pure . P.column2 $ - zip (oldnums <> [""] <> newnums) - (P.boxLeft olddatas <> [downArrow] <> P.boxLeft newdatas) - - {- - 13. ┌ability Yyz (+1 metadata) - 14. └ability copies.Yyz (+2 metadata) - -} - prettyAddTypes :: forall a. [OBD.AddedTypeDisplay v a] -> Numbered Pretty - prettyAddTypes = fmap P.lines . traverse prettyGroup where - prettyGroup :: OBD.AddedTypeDisplay v a -> Numbered Pretty - prettyGroup (hqmds, r, odecl) = do - pairs <- traverse (prettyLine r odecl) hqmds - let (nums, decls) = unzip pairs - let boxLeft = case hqmds of _:_:_ -> P.boxLeft; _ -> id - pure . P.column2 $ zip nums (boxLeft decls) - prettyLine :: Reference -> Maybe (DD.DeclOrBuiltin v a) -> (HQ'.HashQualified Name, [OBD.MetadataDisplay v a]) -> Numbered (Pretty, Pretty) - prettyLine r odecl (hq, mds) = do - n <- numHQ' newPath hq (Referent.Ref r) - pure . (n,) $ prettyDecl hq odecl <> case length mds of - 0 -> mempty - c -> " (+" <> P.shown c <> " metadata)" - - prettyAddTerms :: forall a. [OBD.AddedTermDisplay v a] -> Numbered Pretty - prettyAddTerms = fmap (P.column3 . mconcat) . traverse prettyGroup . reorderTerms where - reorderTerms = sortOn (not . Referent.isConstructor . view _2) - prettyGroup :: OBD.AddedTermDisplay v a -> Numbered [(Pretty, Pretty, Pretty)] - prettyGroup (hqmds, r, otype) = do - pairs <- traverse (prettyLine r otype) hqmds - let (nums, names, decls) = unzip3 pairs - boxLeft = case hqmds of _:_:_ -> P.boxLeft; _ -> id - pure $ zip3 nums (boxLeft names) decls - prettyLine :: - Referent -> - Maybe (Type v a) -> - (HQ'.HashQualified Name, [OBD.MetadataDisplay v a]) -> - Numbered (Pretty, Pretty, Pretty) - prettyLine r otype (hq, mds) = do - n <- numHQ' newPath hq r - pure . (n, phq' hq, ) $ ": " <> prettyType otype <> case length mds of - 0 -> mempty - c -> " (+" <> P.shown c <> " metadata)" - - prettySummarizePatch, prettyNamePatch :: Path.Absolute -> OBD.PatchDisplay -> Numbered Pretty - -- 12. patch p (added 3 updates, deleted 1) - prettySummarizePatch prefix (name, patchDiff) = do - n <- numPatch prefix name - let addCount = (R.size . view Patch.addedTermEdits) patchDiff + - (R.size . view Patch.addedTypeEdits) patchDiff - delCount = (R.size . view Patch.removedTermEdits) patchDiff + - (R.size . view Patch.removedTypeEdits) patchDiff - messages = - (if addCount > 0 then ["added " <> P.shown addCount] else []) ++ - (if delCount > 0 then ["deleted " <> P.shown addCount] else []) - message = case messages of - [] -> mempty - x : ys -> " (" <> P.commas (x <> " updates" : ys) <> ")" - pure $ n <> P.bold " patch " <> prettyName name <> message - -- 18. patch q - prettyNamePatch prefix (name, _patchDiff) = do - n <- numPatch prefix name - pure $ n <> P.bold " patch " <> prettyName name - - {- - Removes: - - 10. ┌ oldn'busted : Nat -> Nat -> Poop - 11. └ oldn'busted' - 12. ability BadType - 13. patch defunctThingy - -} - prettyRemoveTypes :: forall a. [OBD.RemovedTypeDisplay v a] -> Numbered Pretty - prettyRemoveTypes = fmap P.lines . traverse prettyGroup where - prettyGroup :: OBD.RemovedTypeDisplay v a -> Numbered Pretty - prettyGroup (hqs, r, odecl) = do - lines <- traverse (prettyLine r odecl) hqs - let (nums, decls) = unzip lines - boxLeft = case hqs of _:_:_ -> P.boxLeft; _ -> id - pure . P.column2 $ zip nums (boxLeft decls) - prettyLine :: Reference -> Maybe (DD.DeclOrBuiltin v a) -> HQ'.HashQualified Name -> Numbered (Pretty, Pretty) - prettyLine r odecl hq = do - n <- numHQ' newPath hq (Referent.Ref r) - pure (n, prettyDecl hq odecl) - - prettyRemoveTerms :: forall a. [OBD.RemovedTermDisplay v a] -> Numbered Pretty - prettyRemoveTerms = fmap (P.column3 . mconcat) . traverse prettyGroup . reorderTerms where - reorderTerms = sortOn (not . Referent.isConstructor . view _2) - prettyGroup :: OBD.RemovedTermDisplay v a -> Numbered [(Pretty, Pretty, Pretty)] - prettyGroup ([], r, _) = - error $ "trying to remove " <> show r <> " without any names." - prettyGroup (hq1:hqs, r, otype) = do - line1 <- prettyLine1 r otype hq1 - lines <- traverse (prettyLine r) hqs - let (nums, names, decls) = unzip3 (line1:lines) - boxLeft = case hqs of _:_ -> P.boxLeft; _ -> id - pure $ zip3 nums (boxLeft names) decls - prettyLine1 r otype hq = do - n <- numHQ' newPath hq r - pure (n, phq' hq, ": " <> prettyType otype) - prettyLine r hq = do - n <- numHQ' newPath hq r - pure (n, phq' hq, mempty) - - downArrow = P.bold "↓" - mdTypeLine :: Path.Absolute -> OBD.TypeDisplay v a -> Numbered (Pretty, Pretty) - mdTypeLine p (hq, r, odecl, mddiff) = do - n <- numHQ' p hq (Referent.Ref r) - fmap ((n,) . P.linesNonEmpty) . sequence $ - [ pure $ prettyDecl hq odecl - , P.indentN leftNumsWidth <$> prettyMetadataDiff mddiff ] - - -- + 2. MIT : License - -- - 3. AllRightsReserved : License - mdTermLine - :: Path.Absolute - -> P.Width - -> OBD.TermDisplay v a - -> Numbered (Pretty, Pretty) - mdTermLine p namesWidth (hq, r, otype, mddiff) = do - n <- numHQ' p hq r - fmap ((n, ) . P.linesNonEmpty) - . sequence - $ [ pure $ P.rightPad namesWidth (phq' hq) <> " : " <> prettyType otype - , prettyMetadataDiff mddiff - ] - - prettyUpdateTerm :: OBD.UpdateTermDisplay v a -> Numbered Pretty - prettyUpdateTerm (Nothing, newTerms) = if null newTerms - then error "Super invalid UpdateTermDisplay" - else fmap P.column2 $ traverse (mdTermLine newPath namesWidth) newTerms - where - namesWidth = foldl1' max $ fmap (P.Width . HQ'.nameLength . view _1) newTerms - prettyUpdateTerm (Just olds, news) = fmap P.column2 $ do - olds <- traverse (mdTermLine oldPath namesWidth) - [ (name, r, typ, mempty) | (name, r, typ) <- olds ] - news <- traverse (mdTermLine newPath namesWidth) news - let (oldnums, olddatas) = unzip olds - let (newnums, newdatas) = unzip news - pure $ zip (oldnums <> [""] <> newnums) - (P.boxLeft olddatas <> [downArrow] <> P.boxLeft newdatas) - where - namesWidth = - foldl1' max - $ fmap (P.Width . HQ'.nameLength . view _1) news - <> fmap (P.Width . HQ'.nameLength . view _1) olds - - prettyMetadataDiff :: OBD.MetadataDiff (OBD.MetadataDisplay v a) -> Numbered Pretty - prettyMetadataDiff OBD.MetadataDiff{..} = P.column2M $ - map (elem oldPath "- ") removedMetadata <> - map (elem newPath "+ ") addedMetadata - where - elem p x (hq, r, otype) = do - num <- numHQ p hq r - pure (x <> num <> " " <> phq hq, ": " <> prettyType otype) - - prettyType :: Maybe (Type v a) -> Pretty - prettyType = maybe (P.red "type not found") (TypePrinter.pretty ppe) - prettyDecl hq = - maybe (P.red "type not found") - (P.syntaxToColor . DeclPrinter.prettyDeclOrBuiltinHeader (HQ'.toHQ hq)) - phq' :: _ -> Pretty = P.syntaxToColor . prettyHashQualified' - phq :: _ -> Pretty = P.syntaxToColor . prettyHashQualified - -- - -- DeclPrinter.prettyDeclHeader : HQ -> Either - numPatch :: Path.Absolute -> Name -> Numbered Pretty - numPatch prefix name = - addNumberedArg . Name.toString . Name.makeAbsolute $ Path.prefixName prefix name - - numHQ :: Path.Absolute -> HQ.HashQualified Name -> Referent -> Numbered Pretty - numHQ prefix hq r = addNumberedArg (HQ.toString hq') - where - hq' = HQ.requalify (fmap (Name.makeAbsolute . Path.prefixName prefix) hq) r - - numHQ' :: Path.Absolute -> HQ'.HashQualified Name -> Referent -> Numbered Pretty - numHQ' prefix hq r = addNumberedArg (HQ'.toString hq') - where - hq' = HQ'.requalify (fmap (Name.makeAbsolute . Path.prefixName prefix) hq) r - - addNumberedArg :: String -> Numbered Pretty - addNumberedArg s = case sn of - ShowNumbers -> do - (n, args) <- State.get - State.put (n+1, args Seq.|> s) - pure $ padNumber (n+1) - HideNumbers -> pure mempty - - padNumber :: Int -> Pretty - padNumber n = P.hiBlack . P.rightPad leftNumsWidth $ P.shown n <> "." - - leftNumsWidth = P.Width $ length (show menuSize) + length ("." :: String) - -noResults :: Pretty -noResults = P.callout "😶" $ - P.wrap $ "No results. Check your spelling, or try using tab completion " - <> "to supply command arguments." - -listOfDefinitions' - :: Var v - => PPE.PrettyPrintEnv -- for printing types of terms :-\ - -> E.ListDetailed - -> [SR'.SearchResult' v a] - -> Pretty -listOfDefinitions' ppe detailed results = if null results - then noResults - else - P.lines - . P.nonEmpty - $ prettyNumberedResults - : [ formatMissingStuff termsWithMissingTypes missingTypes - , unlessM (null missingBuiltins) - . bigproblem - $ P.wrap - "I encountered an inconsistency in the codebase; these definitions refer to built-ins that this version of unison doesn't know about:" - `P.hang` P.column2 - ( (P.bold "Name", P.bold "Built-in") - -- : ("-", "-") - : fmap - (bimap (P.syntaxToColor . prettyHashQualified) - (P.text . Referent.toText) - ) - missingBuiltins - ) - ] - where - prettyNumberedResults = P.numberedList prettyResults - -- todo: group this by namespace - prettyResults = map (SR'.foldResult' renderTerm renderType) - (filter (not . missingType) results) - where - (renderTerm, renderType) = if detailed - then (unsafePrettyTermResultSigFull' ppe, prettyTypeResultHeaderFull') - else (unsafePrettyTermResultSig' ppe, prettyTypeResultHeader') - missingType (SR'.Tm _ Nothing _ _) = True - missingType (SR'.Tp _ (MissingObject _) _ _) = True - missingType _ = False - -- termsWithTypes = [(name,t) | (name, Just t) <- sigs0 ] - -- where sigs0 = (\(name, _, typ) -> (name, typ)) <$> terms - termsWithMissingTypes = - [ (name, Reference.idToShortHash r) - | SR'.Tm name Nothing (Referent.Ref (Reference.DerivedId r)) _ <- results - ] - missingTypes = - nubOrdOn snd - $ [ (name, r) | SR'.Tp name (MissingObject r) _ _ <- results ] - <> [ (name, Reference.toShortHash r) - | SR'.Tm name Nothing (Referent.toTypeReference -> Just r) _ <- results - ] - missingBuiltins = results >>= \case - SR'.Tm name Nothing r@(Referent.Ref (Reference.Builtin _)) _ -> - [(name, r)] - _ -> [] - -watchPrinter - :: Var v - => Text - -> PPE.PrettyPrintEnv - -> Ann - -> WK.WatchKind - -> Term v () - -> Runtime.IsCacheHit - -> Pretty -watchPrinter src ppe ann kind term isHit = - P.bracket - $ let - lines = Text.lines src - lineNum = fromMaybe 1 $ startingLine ann - lineNumWidth = length (show lineNum) - extra = " " <> replicate (length kind) ' ' -- for the ` | > ` after the line number - line = lines !! (lineNum - 1) - addCache p = if isHit then p <> " (cached)" else p - renderTest (Term.App' (Term.Constructor' _ id) (Term.Text' msg)) = - "\n" <> if id == DD.okConstructorId - then addCache - (P.green "✅ " <> P.bold "Passed" <> P.green (P.text msg')) - else if id == DD.failConstructorId - then addCache - (P.red "🚫 " <> P.bold "FAILED" <> P.red (P.text msg')) - else P.red "❓ " <> TermPrinter.pretty ppe term - where - msg' = if Text.take 1 msg == " " then msg - else " " <> msg - - renderTest x = - fromString $ "\n Unison bug: " <> show x <> " is not a test." - in - P.lines - [ fromString (show lineNum) <> " | " <> P.text line - , case (kind, term) of - (WK.TestWatch, Term.List' tests) -> foldMap renderTest tests - _ -> P.lines - [ fromString (replicate lineNumWidth ' ') - <> fromString extra - <> (if isHit then id else P.purple) "⧩" - , P.indentN (P.Width (lineNumWidth + length extra)) - . (if isHit then id else P.bold) - $ TermPrinter.pretty ppe term - ] - ] - -filestatusTip :: Pretty -filestatusTip = tip "Use `help filestatus` to learn more." - -prettyDiff :: Names.Diff -> Pretty -prettyDiff diff = let - orig = Names.originalNames diff - adds = Names.addedNames diff - removes = Names.removedNames diff - - addedTerms = [ (n,r) | (n,r) <- R.toList (Names.terms adds) - , not $ R.memberRan r (Names.terms removes) ] - addedTypes = [ (n,r) | (n,r) <- R.toList (Names.types adds) - , not $ R.memberRan r (Names.types removes) ] - added = sort (hqTerms ++ hqTypes) - where - hqTerms = [ Names.hqName adds n (Right r) | (n, r) <- addedTerms ] - hqTypes = [ Names.hqName adds n (Left r) | (n, r) <- addedTypes ] - - removedTerms = [ (n,r) | (n,r) <- R.toList (Names.terms removes) - , not $ R.memberRan r (Names.terms adds) - , Set.notMember n addedTermsSet ] where - addedTermsSet = Set.fromList (map fst addedTerms) - removedTypes = [ (n,r) | (n,r) <- R.toList (Names.types removes) - , not $ R.memberRan r (Names.types adds) - , Set.notMember n addedTypesSet ] where - addedTypesSet = Set.fromList (map fst addedTypes) - removed = sort (hqTerms ++ hqTypes) - where - hqTerms = [ Names.hqName removes n (Right r) | (n, r) <- removedTerms ] - hqTypes = [ Names.hqName removes n (Left r) | (n, r) <- removedTypes ] - - movedTerms = [ (n,n2) | (n,r) <- R.toList (Names.terms removes) - , n2 <- toList (R.lookupRan r (Names.terms adds)) ] - movedTypes = [ (n,n2) | (n,r) <- R.toList (Names.types removes) - , n2 <- toList (R.lookupRan r (Names.types adds)) ] - moved = Name.sortNamed fst . nubOrd $ (movedTerms <> movedTypes) - - copiedTerms = List.multimap [ - (n,n2) | (n2,r) <- R.toList (Names.terms adds) - , not (R.memberRan r (Names.terms removes)) - , n <- toList (R.lookupRan r (Names.terms orig)) ] - copiedTypes = List.multimap [ - (n,n2) | (n2,r) <- R.toList (Names.types adds) - , not (R.memberRan r (Names.types removes)) - , n <- toList (R.lookupRan r (Names.types orig)) ] - copied = Name.sortNamed fst $ - Map.toList (Map.unionWith (<>) copiedTerms copiedTypes) - in - P.sepNonEmpty "\n\n" [ - if not $ null added then - P.lines [ - -- todo: split out updates - P.green "+ Adds / updates:", "", - P.indentN 2 . P.wrap $ - P.sep " " (P.syntaxToColor . prettyHashQualified' <$> added) - ] - else mempty, - if not $ null removed then - P.lines [ - P.hiBlack "- Deletes:", "", - P.indentN 2 . P.wrap $ - P.sep " " (P.syntaxToColor . prettyHashQualified' <$> removed) - ] - else mempty, - if not $ null moved then - P.lines [ - P.purple "> Moves:", "", - P.indentN 2 $ - P.column2 $ - (P.hiBlack "Original name", P.hiBlack "New name") : - [ (prettyName n,prettyName n2) | (n, n2) <- moved ] - ] - else mempty, - if not $ null copied then - P.lines [ - P.yellow "= Copies:", "", - P.indentN 2 $ - P.column2 $ - (P.hiBlack "Original name", P.hiBlack "New name(s)") : - [ (prettyName n, P.sep " " (prettyName <$> ns)) - | (n, ns) <- copied ] - ] - else mempty - ] - -prettyTermName :: PPE.PrettyPrintEnv -> Referent -> Pretty -prettyTermName ppe r = P.syntaxToColor $ - prettyHashQualified (PPE.termName ppe r) - -prettyReadRepo :: ReadRepo -> Pretty -prettyReadRepo (RemoteRepo.ReadGitRepo url) = P.blue (P.text url) - -prettyWriteRepo :: WriteRepo -> Pretty -prettyWriteRepo (RemoteRepo.WriteGitRepo url) = P.blue (P.text url) - -isTestOk :: Term v Ann -> Bool -isTestOk tm = case tm of - Term.List' ts -> all isSuccess ts where - isSuccess (Term.App' (Term.Constructor' ref cid) _) = - cid == DD.okConstructorId && - ref == DD.testResultRef - isSuccess _ = False - _ -> False diff --git a/parser-typechecker/src/Unison/DeclPrinter.hs b/parser-typechecker/src/Unison/DeclPrinter.hs index c256c9ccff..7daa2be426 100644 --- a/parser-typechecker/src/Unison/DeclPrinter.hs +++ b/parser-typechecker/src/Unison/DeclPrinter.hs @@ -1,7 +1,7 @@ {-# OPTIONS_GHC -Wno-orphans #-} {-# LANGUAGE OverloadedStrings #-} -module Unison.DeclPrinter where +module Unison.DeclPrinter (prettyDecl, prettyDeclHeader, prettyDeclOrBuiltinHeader) where import Unison.Prelude @@ -47,8 +47,8 @@ prettyDecl -> HashQualified Name -> DD.Decl v a -> Pretty SyntaxText -prettyDecl ppe@(PrettyPrintEnvDecl unsuffixifiedPPE _) r hq d = case d of - Left e -> prettyEffectDecl unsuffixifiedPPE r hq e +prettyDecl ppe r hq d = case d of + Left e -> prettyEffectDecl (suffixifiedPPE ppe) r hq e Right dd -> prettyDataDecl ppe r hq dd prettyEffectDecl diff --git a/parser-typechecker/src/Unison/Parser.hs b/parser-typechecker/src/Unison/Parser.hs index 5e48034ac2..d09408fbf8 100644 --- a/parser-typechecker/src/Unison/Parser.hs +++ b/parser-typechecker/src/Unison/Parser.hs @@ -28,7 +28,6 @@ import Data.Bytes.VarInt ( VarInt(..) ) import Data.Bifunctor (bimap) import qualified Data.Char as Char import Data.List.NonEmpty (NonEmpty (..)) --- import Data.Maybe import qualified Data.Set as Set import qualified Data.Text as Text import Data.Typeable (Proxy (..)) @@ -46,6 +45,7 @@ import Unison.Term (MatchCase (..)) import Unison.Var (Var) import qualified Unison.Var as Var import qualified Unison.UnisonFile.Error as UF +import qualified U.Util.Base32Hex as Base32Hex import Unison.Util.Bytes (Bytes) import Unison.Name as Name import Unison.NamesWithHistory (NamesWithHistory) @@ -100,7 +100,7 @@ uniqueName :: Var v => Int -> P v Text uniqueName lenInBase32Hex = do UniqueName mkName <- asks uniqueNames pos <- L.start <$> P.lookAhead anyToken - let none = Hash.base32Hex . Hash.fromBytes . encodeUtf8 . Text.pack $ show pos + let none = Base32Hex.toText . Base32Hex.fromByteString . encodeUtf8 . Text.pack $ show pos pure . fromMaybe none $ mkName pos lenInBase32Hex data Error v diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv.hs b/parser-typechecker/src/Unison/PrettyPrintEnv.hs index 7723dbf94e..b841ba3145 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv.hs @@ -15,6 +15,7 @@ import Unison.Prelude import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.HashQualified ( HashQualified ) +import qualified Unison.HashQualified' as HQ' import Unison.Name ( Name ) import Unison.Reference ( Reference ) import Unison.Referent ( Referent ) @@ -24,11 +25,11 @@ import qualified Unison.ConstructorType as CT data PrettyPrintEnv = PrettyPrintEnv { -- names for terms, constructors, and requests - terms :: Referent -> Maybe (HashQualified Name), + terms :: Referent -> Maybe (HQ'.HashQualified Name), -- names for types - types :: Reference -> Maybe (HashQualified Name) } + types :: Reference -> Maybe (HQ'.HashQualified Name) } -patterns :: PrettyPrintEnv -> Reference -> ConstructorId -> Maybe (HashQualified Name) +patterns :: PrettyPrintEnv -> Reference -> ConstructorId -> Maybe (HQ'.HashQualified Name) patterns ppe r cid = terms ppe (Referent.Con r cid CT.Data) <|>terms ppe (Referent.Con r cid CT.Effect) @@ -47,16 +48,20 @@ todoHashLength = 10 termName :: PrettyPrintEnv -> Referent -> HashQualified Name termName env r = - fromMaybe (HQ.take todoHashLength $ HQ.fromReferent r) (terms env r) + case terms env r of + Nothing -> HQ.take todoHashLength (HQ.fromReferent r) + Just name -> HQ'.toHQ name typeName :: PrettyPrintEnv -> Reference -> HashQualified Name typeName env r = - fromMaybe (HQ.take todoHashLength $ HQ.fromReference r) (types env r) + case types env r of + Nothing -> HQ.take todoHashLength (HQ.fromReference r) + Just name -> HQ'.toHQ name patternName :: PrettyPrintEnv -> Reference -> ConstructorId -> HashQualified Name patternName env r cid = case patterns env r cid of - Just name -> name + Just name -> HQ'.toHQ name Nothing -> HQ.take todoHashLength $ HQ.fromPattern r cid instance Monoid PrettyPrintEnv where diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs b/parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs index f7628b40fe..576a9e72d8 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs @@ -4,9 +4,7 @@ module Unison.PrettyPrintEnv.Names (fromNames, fromSuffixNames) where import Unison.Prelude -import qualified Data.Set as Set -import qualified Unison.HashQualified as HQ -import qualified Unison.Name as Name +import qualified Unison.HashQualified' as HQ' import Unison.NamesWithHistory (NamesWithHistory) import qualified Unison.NamesWithHistory as Names import Unison.PrettyPrintEnv (PrettyPrintEnv (PrettyPrintEnv)) @@ -14,9 +12,9 @@ import Unison.Util.List (safeHead) fromNames :: Int -> NamesWithHistory -> PrettyPrintEnv fromNames len names = PrettyPrintEnv terms' types' where - terms' r = shortestName . Set.map Name.convert $ Names.termName len r names - types' r = shortestName . Set.map Name.convert $ Names.typeName len r names - shortestName ns = safeHead $ HQ.sortByLength (toList ns) + terms' r = shortestName (Names.termName len r names) + types' r = shortestName (Names.typeName len r names) + shortestName ns = safeHead $ HQ'.sortByLength (toList ns) fromSuffixNames :: Int -> NamesWithHistory -> PrettyPrintEnv fromSuffixNames len names = PrettyPrintEnv terms' types' where diff --git a/parser-typechecker/src/Unison/Runtime/ANF.hs b/parser-typechecker/src/Unison/Runtime/ANF.hs index 0009258be1..3f12cc8047 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF.hs @@ -70,7 +70,7 @@ module Unison.Runtime.ANF import GHC.Stack (CallStack,callStack) -import Unison.Prelude +import Unison.Prelude hiding (Text) import Control.Exception (throw) import Control.Monad.Reader (ReaderT(..), ask, local) @@ -92,7 +92,8 @@ import Unison.Util.Bytes (Bytes) import qualified Unison.Util.Pretty as Pretty import qualified Data.Map as Map import qualified Data.Set as Set -import qualified Data.Text as Text +import qualified Data.Text as Data.Text +import qualified Unison.Util.Text as Util.Text import qualified Unison.ABT as ABT import qualified Unison.ABT.Normalized as ABTN import qualified Unison.Type as Ty @@ -242,7 +243,7 @@ freshFloat avoid (Var.freshIn avoid -> v0) -> freshFloat (Set.insert v0 avoid) v0 _ -> v0 where - w = Text.pack . show $ Var.freshId v0 + w = Data.Text.pack . show $ Var.freshId v0 letFloater :: (Var v, Monoid a) @@ -504,7 +505,7 @@ matchLit :: Term v a -> Maybe Lit matchLit (Int' i) = Just $ I i matchLit (Nat' n) = Just $ N n matchLit (Float' f) = Just $ F f -matchLit (Text' t) = Just $ T t +matchLit (Text' t) = Just $ T (Util.Text.fromText t) matchLit (Char' c) = Just $ C c matchLit _ = Nothing @@ -576,7 +577,7 @@ data SeqEnd = SLeft | SRight data Branched e = MatchIntegral (EnumMap Word64 e) (Maybe e) - | MatchText (Map.Map Text e) (Maybe e) + | MatchText (Map.Map Util.Text.Text e) (Maybe e) | MatchRequest (Map Reference (EnumMap CTag ([Mem], e))) e | MatchEmpty | MatchData Reference (EnumMap CTag ([Mem], e)) (Maybe e) @@ -594,7 +595,7 @@ data BranchAccum v (EnumMap Word64 (ANormal v)) | AccumText (Maybe (ANormal v)) - (Map.Map Text (ANormal v)) + (Map.Map Util.Text.Text (ANormal v)) | AccumDefault (ANormal v) | AccumPure (ANormal v) | AccumRequest @@ -697,7 +698,7 @@ data Lit = I Int64 | N Word64 | F Double - | T Text + | T Util.Text.Text | C Char | LM Referent | LY Reference @@ -823,7 +824,7 @@ data Cont deriving (Show) data BLit - = Text Text + = Text Util.Text.Text | List (Seq Value) | TmLink Referent | TyLink Reference @@ -930,7 +931,7 @@ anfBlock (If' c t f) = do (dt, ct) <- anfTerm t (cx, v) <- contextualize cc let cases = MatchData - (Builtin $ Text.pack "Boolean") + (Builtin $ Data.Text.pack "Boolean") (EC.mapSingleton 0 ([], cf)) (Just ct) pure (cctx <> cx, (Indirect () <> df <> dt, TMatch v cases)) @@ -1071,7 +1072,7 @@ anfBlock (Apps' (Blank' b) args) = do , pure $ TPrm EROR (nm : cas) ) where - msg = Text.pack . fromMaybe "blank expression" $ nameb b + msg = Util.Text.pack . fromMaybe "blank expression" $ nameb b anfBlock (Apps' f args) = do (fctx, (d, cf)) <- anfFunc f (actx, cas) <- anfArgs args @@ -1093,7 +1094,7 @@ anfBlock (Blank' b) = do nm <- fresh ev <- fresh pure ( pure [ ST1 Direct nm BX (TLit (T name)) - , ST1 Direct ev BX (TLit (T $ Text.pack msg))] + , ST1 Direct ev BX (TLit (T $ Util.Text.pack msg))] , pure $ TPrm EROR [nm, ev]) where name = "blank expression" @@ -1137,7 +1138,7 @@ anfInitCase u (MatchCase p guard (ABT.AbsN' vs bd)) . EC.mapSingleton t . ([],) <$> anfBody bd | P.Text _ t <- p , [] <- vs - = AccumText Nothing . Map.singleton t <$> anfBody bd + = AccumText Nothing . Map.singleton (Util.Text.fromText t) <$> anfBody bd | P.Constructor _ r t ps <- p = do (,) <$> expandBindings ps vs <*> anfBody bd <&> \(us,bd) -> AccumData r Nothing @@ -1332,7 +1333,7 @@ prettyGroup s (Rec grp ent) . prettySuperNormal 2 sn . showString "\n" . r pvar :: Var v => v -> ShowS -pvar v = showString . Text.unpack $ Var.name v +pvar v = showString . Data.Text.unpack $ Var.name v prettyVars :: Var v => [v] -> ShowS prettyVars diff --git a/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs b/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs index 49c6bb6116..92333c48e9 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs @@ -25,6 +25,7 @@ import qualified Data.ByteString.Lazy as L import GHC.Stack import Unison.Reference (Reference) +import qualified Unison.Util.Text as Util.Text import Unison.ABT.Normalized (Term(..)) import Unison.Runtime.Exception @@ -394,7 +395,7 @@ putLit :: MonadPut m => Lit -> m () putLit (I i) = putTag IT *> putInt i putLit (N n) = putTag NT *> putNat n putLit (F f) = putTag FT *> putFloat f -putLit (T t) = putTag TT *> putText t +putLit (T t) = putTag TT *> putText (Util.Text.toText t) putLit (C c) = putTag CT *> putChar c putLit (LM r) = putTag LMT *> putReferent r putLit (LY r) = putTag LYT *> putReference r @@ -404,13 +405,13 @@ getLit = getTag >>= \case IT -> I <$> getInt NT -> N <$> getNat FT -> F <$> getFloat - TT -> T <$> getText + TT -> T . Util.Text.fromText <$> getText CT -> C <$> getChar LMT -> LM <$> getReferent LYT -> LY <$> getReference putBLit :: MonadPut m => BLit -> m () -putBLit (Text t) = putTag TextT *> putText t +putBLit (Text t) = putTag TextT *> putText (Util.Text.toText t) putBLit (List s) = putTag ListT *> putFoldable putValue s putBLit (TmLink r) = putTag TmLinkT *> putReferent r putBLit (TyLink r) = putTag TyLinkT *> putReference r @@ -418,7 +419,7 @@ putBLit (Bytes b) = putTag BytesT *> putBytes b getBLit :: MonadGet m => m BLit getBLit = getTag >>= \case - TextT -> Text <$> getText + TextT -> Text . Util.Text.fromText <$> getText ListT -> List . Seq.fromList <$> getList getValue TmLinkT -> TmLink <$> getReferent TyLinkT -> TyLink <$> getReference @@ -439,7 +440,7 @@ putBranches ctx bs = case bs of putMaybe df $ putNormal ctx MatchText m df -> do putTag MTextT - putMap putText (putNormal ctx) m + putMap (putText . Util.Text.toText) (putNormal ctx) m putMaybe df $ putNormal ctx MatchRequest m (TAbs v df) -> do putTag MReqT @@ -466,7 +467,7 @@ getBranches ctx frsh0 = getTag >>= \case <*> getMaybe (getNormal ctx frsh0) MTextT -> MatchText - <$> getMap getText (getNormal ctx frsh0) + <$> getMap (Util.Text.fromText <$> getText) (getNormal ctx frsh0) <*> getMaybe (getNormal ctx frsh0) MReqT -> MatchRequest diff --git a/parser-typechecker/src/Unison/Runtime/Builtin.hs b/parser-typechecker/src/Unison/Runtime/Builtin.hs index c5f283945b..00896a902b 100644 --- a/parser-typechecker/src/Unison/Runtime/Builtin.hs +++ b/parser-typechecker/src/Unison/Runtime/Builtin.hs @@ -46,10 +46,6 @@ import Unison.Util.EnumContainers as EC import Data.Default (def) import Data.ByteString (hGet, hPut) -import Data.Text as Text (pack, unpack) -import qualified Data.Text as Text -import qualified Data.Text.IO as Text -import Data.Text.Encoding ( decodeUtf8', decodeUtf8' ) import qualified Data.ByteArray as BA import qualified Data.ByteString.Lazy as L import qualified System.X509 as X @@ -60,8 +56,11 @@ import Data.PEM (pemContent, pemParseLBS, PEM) import Data.Set (insert) import qualified Data.Map as Map -import Unison.Prelude hiding (some) +import Unison.Prelude hiding (some,Text) import qualified Unison.Util.Bytes as Bytes +import qualified Data.Text +import qualified Data.Text.IO as Text.IO +import qualified Unison.Util.Text as Util.Text import Network.Socket as SYS ( accept , socketPort @@ -124,6 +123,7 @@ import System.Directory as SYS ) import System.Environment as SYS ( getEnv + , getArgs ) import System.IO.Temp (createTempDirectory) @@ -707,7 +707,7 @@ fork'comp where (act,unit,lz) = fresh3 -bug :: Var v => Text -> SuperNormal v +bug :: Var v => Util.Text.Text -> SuperNormal v bug name = unop0 1 $ \[x, n] -> TLetD n BX (TLit $ T name) @@ -1509,10 +1509,10 @@ builtinLookup ] ++ foreignWrappers type FDecl v - = State (Word64, [(Text, SuperNormal v)], EnumMap Word64 ForeignFunc) + = State (Word64, [(Data.Text.Text, SuperNormal v)], EnumMap Word64 ForeignFunc) declareForeign - :: Var v => Text -> ForeignOp -> ForeignFunc -> FDecl v () + :: Var v => Data.Text.Text -> ForeignOp -> ForeignFunc -> FDecl v () declareForeign name op func = modify $ \(w, cs, fs) -> (w+1, (name, uncurry Lambda (op w)) : cs, mapInsert w func fs) @@ -1525,7 +1525,7 @@ mkForeignIOF f = mkForeign $ \a -> tryIOE (f a) tryIOE :: IO a -> IO (Either Failure a) tryIOE = fmap handleIOE . try handleIOE :: Either IOException a -> Either Failure a - handleIOE (Left e) = Left $ Failure Ty.ioFailureRef (pack (show e)) unitValue + handleIOE (Left e) = Left $ Failure Ty.ioFailureRef (Util.Text.pack (show e)) unitValue handleIOE (Right a) = Right a unitValue :: Closure @@ -1541,15 +1541,15 @@ mkForeignTls f = mkForeign $ \a -> fmap flatten (tryIO2 (tryIO1 (f a))) tryIO2 :: IO (Either TLS.TLSException r) -> IO (Either IOException (Either TLS.TLSException r)) tryIO2 = try flatten :: Either IOException (Either TLS.TLSException r) -> Either (Failure ) r - flatten (Left e) = Left (Failure Ty.ioFailureRef (pack (show e)) unitValue) - flatten (Right (Left e)) = Left (Failure Ty.tlsFailureRef (pack (show e)) (unitValue)) + flatten (Left e) = Left (Failure Ty.ioFailureRef (Util.Text.pack (show e)) unitValue) + flatten (Right (Left e)) = Left (Failure Ty.tlsFailureRef (Util.Text.pack (show e)) (unitValue)) flatten (Right (Right a)) = Right a declareForeigns :: Var v => FDecl v () declareForeigns = do declareForeign "IO.openFile.impl.v3" boxIomrToEFBox $ - mkForeignIOF $ \(fnameText :: Text, n :: Int) -> - let fname = (unpack fnameText) + mkForeignIOF $ \(fnameText :: Util.Text.Text, n :: Int) -> + let fname = Util.Text.toString fnameText mode = case n of 0 -> ReadMode 1 -> WriteMode @@ -1575,7 +1575,8 @@ declareForeigns = do declareForeign "IO.setBuffering.impl.v3" set'buffering . mkForeignIOF $ uncurry hSetBuffering - declareForeign "IO.getLine.impl.v1" boxToEFBox $ mkForeignIOF Text.hGetLine + declareForeign "IO.getLine.impl.v1" boxToEFBox $ mkForeignIOF $ + fmap Util.Text.fromText . Text.IO.hGetLine declareForeign "IO.getBytes.impl.v3" boxNatToEFBox . mkForeignIOF $ \(h,n) -> Bytes.fromArray <$> hGet h n @@ -1608,6 +1609,9 @@ declareForeigns = do declareForeign "IO.getEnv.impl.v1" boxToEFBox $ mkForeignIOF getEnv + declareForeign "IO.getArgs.impl.v1" unitToEFBox + $ mkForeignIOF $ \() -> fmap Util.Text.pack <$> SYS.getArgs + declareForeign "IO.isDirectory.impl.v3" boxToEFBool $ mkForeignIOF doesDirectoryExist @@ -1621,7 +1625,7 @@ declareForeigns = do $ mkForeignIOF $ uncurry renameDirectory declareForeign "IO.directoryContents.impl.v3" boxToEFBox - $ mkForeignIOF $ (fmap pack <$>) . getDirectoryContents + $ mkForeignIOF $ (fmap Util.Text.pack <$>) . getDirectoryContents declareForeign "IO.removeFile.impl.v3" boxToEF0 $ mkForeignIOF removeFile @@ -1637,7 +1641,7 @@ declareForeigns = do . mkForeignIOF $ \fp -> fromInteger @Word64 <$> getFileSize fp declareForeign "IO.serverSocket.impl.v3" maybeBoxToEFBox - . mkForeignIOF $ \(mhst :: Maybe Text + . mkForeignIOF $ \(mhst :: Maybe Util.Text.Text , port) -> fst <$> SYS.bindSock (hostPreference mhst) port @@ -1663,7 +1667,7 @@ declareForeigns = do declareForeign "IO.socketReceive.impl.v3" boxNatToEFBox . mkForeignIOF $ \(hs,n) -> - maybe Bytes.empty Bytes.fromArray <$> SYS.recv hs n + maybe mempty Bytes.fromArray <$> SYS.recv hs n declareForeign "IO.kill.impl.v3" boxTo0 $ mkForeignIOF killThread @@ -1709,21 +1713,21 @@ declareForeigns = do declareForeign "Char.toText" (wordDirect Ty.charRef) . mkForeign $ - \(ch :: Char) -> pure (Text.singleton ch) + \(ch :: Char) -> pure (Util.Text.singleton ch) declareForeign "Text.repeat" (wordBoxDirect Ty.natRef) . mkForeign $ - \(n :: Word64, txt :: Text) -> pure (Text.replicate (fromIntegral n) txt) + \(n :: Word64, txt :: Util.Text.Text) -> pure (Util.Text.replicate (fromIntegral n) txt) declareForeign "Text.toUtf8" boxDirect . mkForeign - $ pure . Bytes.fromArray . encodeUtf8 + $ pure . Util.Text.toUtf8 declareForeign "Text.fromUtf8.impl.v3" boxToEFBox . mkForeign - $ pure . mapLeft (\t -> Failure Ty.ioFailureRef (pack ( show t)) unitValue) . decodeUtf8' . Bytes.toArray + $ pure . mapLeft (\t -> Failure Ty.ioFailureRef (Util.Text.pack t) unitValue) . Util.Text.fromUtf8 declareForeign "Tls.ClientConfig.default" boxBoxDirect . mkForeign - $ \(hostName::Text, serverId:: Bytes.Bytes) -> + $ \(hostName :: Util.Text.Text, serverId:: Bytes.Bytes) -> fmap (\store -> - (defaultParamsClient (unpack hostName) (Bytes.toArray serverId)) { + (defaultParamsClient (Util.Text.unpack hostName) (Bytes.toArray serverId)) { TLS.clientSupported = def { TLS.supportedCiphers = Cipher.ciphersuite_strong }, TLS.clientShared = def { TLS.sharedCAStore = store } }) X.getSystemCertificateStore @@ -1785,11 +1789,11 @@ declareForeigns = do defaultSupported = def { TLS.supportedCiphers = Cipher.ciphersuite_strong } declareForeign "Tls.Config.defaultClient" boxBoxDirect - . mkForeign $ \(hostName::Text, serverId:: Bytes.Bytes) -> do + . mkForeign $ \(hostName :: Util.Text.Text, serverId:: Bytes.Bytes) -> do store <- X.getSystemCertificateStore let shared :: TLS.Shared shared = def { TLS.sharedCAStore = store } - defaultParams = (defaultParamsClient (unpack hostName) (Bytes.toArray serverId)) { TLS.clientSupported = defaultSupported, TLS.clientShared = shared } + defaultParams = (defaultParamsClient (Util.Text.unpack hostName) (Bytes.toArray serverId)) { TLS.clientSupported = defaultSupported, TLS.clientShared = shared } pure defaultParams declareForeign "Tls.Config.defaultServer" unitDirect . mkForeign $ \() -> do @@ -1810,7 +1814,7 @@ declareForeigns = do \(tls :: TLS.Context, bytes :: Bytes.Bytes) -> TLS.sendData tls (Bytes.toLazyByteString bytes) - let wrapFailure t = Failure Ty.tlsFailureRef (pack t) unitValue + let wrapFailure t = Failure Ty.tlsFailureRef (Util.Text.pack t) unitValue decoded :: Bytes.Bytes -> Either String PEM decoded bytes = fmap head $ pemParseLBS $ Bytes.toLazyByteString bytes asCert :: PEM -> Either String X.SignedCertificate @@ -1826,7 +1830,7 @@ declareForeigns = do \(bytes :: Bytes.Bytes) -> pure $ X.readKeyFileFromMemory $ L.toStrict $ Bytes.toLazyByteString bytes declareForeign "Tls.encodePrivateKey" boxDirect . mkForeign $ - \(privateKey :: X.PrivKey) -> pure $ pack $ show privateKey + \(privateKey :: X.PrivKey) -> pure $ Util.Text.pack $ show privateKey declareForeign "Tls.receive.impl.v3" boxToEFBox . mkForeignTls $ \(tls :: TLS.Context) -> do @@ -1845,7 +1849,7 @@ declareForeigns = do declareForeign "Code.deserialize" boxToEBoxBox . mkForeign $ pure . deserializeGroup @Symbol . Bytes.toArray declareForeign "Code.display" boxBoxDirect . mkForeign - $ \(nm,sg) -> pure $ prettyGroup @Symbol (Text.unpack nm) sg "" + $ \(nm,sg) -> pure $ prettyGroup @Symbol (Util.Text.unpack nm) sg "" declareForeign "Value.dependencies" boxDirect . mkForeign $ pure . fmap (Wrap Ty.termLinkRef . Ref) . valueTermLinks @@ -1854,7 +1858,7 @@ declareForeigns = do declareForeign "Value.deserialize" boxToEBoxBox . mkForeign $ pure . deserializeValue . Bytes.toArray -- Hashing functions - let declareHashAlgorithm :: forall v alg . Var v => Hash.HashAlgorithm alg => Text -> alg -> FDecl v () + let declareHashAlgorithm :: forall v alg . Var v => Hash.HashAlgorithm alg => Data.Text.Text -> alg -> FDecl v () declareHashAlgorithm txt alg = do let algoRef = Builtin ("crypto.HashAlgorithm." <> txt) declareForeign ("crypto.HashAlgorithm." <> txt) direct . mkForeign $ \() -> @@ -1868,13 +1872,10 @@ declareForeigns = do declareHashAlgorithm "Blake2b_256" Hash.Blake2b_256 declareHashAlgorithm "Blake2s_256" Hash.Blake2s_256 - -- declareForeign ("crypto.hash") boxBoxDirect . mkForeign $ \(HashAlgorithm _ref _alg, _a :: Closure) -> - -- pure $ Bytes.empty -- todo : implement me - declareForeign "crypto.hashBytes" boxBoxDirect . mkForeign $ \(HashAlgorithm _ alg, b :: Bytes.Bytes) -> let ctx = Hash.hashInitWith alg - in pure . Bytes.fromArray . Hash.hashFinalize $ Hash.hashUpdates ctx (Bytes.chunks b) + in pure . Bytes.fromArray . Hash.hashFinalize $ Hash.hashUpdates ctx (Bytes.byteStringChunks b) declareForeign "crypto.hmacBytes" boxBoxBoxDirect . mkForeign $ \(HashAlgorithm _ alg, key :: Bytes.Bytes, msg :: Bytes.Bytes) -> @@ -1904,11 +1905,11 @@ declareForeigns = do let - catchAll :: (MonadCatch m, MonadIO m, NFData a) => m a -> m (Either Text a) + catchAll :: (MonadCatch m, MonadIO m, NFData a) => m a -> m (Either Util.Text.Text a) catchAll e = do e <- Exception.tryAnyDeep e pure $ case e of - Left se -> Left (Text.pack (show se)) + Left se -> Left (Util.Text.pack (show se)) Right a -> Right a declareForeign "Bytes.zlib.compress" boxDirect . mkForeign $ pure . Bytes.zlibCompress @@ -1923,10 +1924,14 @@ declareForeigns = do declareForeign "Bytes.toBase64" boxDirect . mkForeign $ pure . Bytes.toBase64 declareForeign "Bytes.toBase64UrlUnpadded" boxDirect . mkForeign $ pure . Bytes.toBase64UrlUnpadded - declareForeign "Bytes.fromBase16" boxToEBoxBox . mkForeign $ pure . Bytes.fromBase16 - declareForeign "Bytes.fromBase32" boxToEBoxBox . mkForeign $ pure . Bytes.fromBase32 - declareForeign "Bytes.fromBase64" boxToEBoxBox . mkForeign $ pure . Bytes.fromBase64 - declareForeign "Bytes.fromBase64UrlUnpadded" boxDirect . mkForeign $ pure . Bytes.fromBase64UrlUnpadded + declareForeign "Bytes.fromBase16" boxToEBoxBox . mkForeign $ + pure . mapLeft Util.Text.fromText . Bytes.fromBase16 + declareForeign "Bytes.fromBase32" boxToEBoxBox . mkForeign $ + pure . mapLeft Util.Text.fromText . Bytes.fromBase32 + declareForeign "Bytes.fromBase64" boxToEBoxBox . mkForeign $ + pure . mapLeft Util.Text.fromText . Bytes.fromBase64 + declareForeign "Bytes.fromBase64UrlUnpadded" boxDirect . mkForeign $ + pure . mapLeft Util.Text.fromText . Bytes.fromBase64UrlUnpadded declareForeign "Bytes.decodeNat64be" boxToMaybeTup . mkForeign $ pure . Bytes.decodeNat64be declareForeign "Bytes.decodeNat64le" boxToMaybeTup . mkForeign $ pure . Bytes.decodeNat64le @@ -1942,9 +1947,9 @@ declareForeigns = do declareForeign "Bytes.encodeNat16be" (wordDirect Ty.natRef) . mkForeign $ pure . Bytes.encodeNat16be declareForeign "Bytes.encodeNat16le" (wordDirect Ty.natRef) . mkForeign $ pure . Bytes.encodeNat16le -hostPreference :: Maybe Text -> SYS.HostPreference +hostPreference :: Maybe Util.Text.Text -> SYS.HostPreference hostPreference Nothing = SYS.HostAny -hostPreference (Just host) = SYS.Host $ Text.unpack host +hostPreference (Just host) = SYS.Host $ Util.Text.unpack host typeReferences :: [(Reference, Word64)] typeReferences = zip rs [1..] @@ -1955,10 +1960,10 @@ typeReferences = zip rs [1..] foreignDeclResults :: Var v - => (Word64, [(Text, SuperNormal v)], EnumMap Word64 ForeignFunc) + => (Word64, [(Data.Text.Text, SuperNormal v)], EnumMap Word64 ForeignFunc) foreignDeclResults = execState declareForeigns (0, [], mempty) -foreignWrappers :: Var v => [(Text, SuperNormal v)] +foreignWrappers :: Var v => [(Data.Text.Text, SuperNormal v)] foreignWrappers | (_, l, _) <- foreignDeclResults = reverse l numberedTermLookup :: Var v => EnumMap Word64 (SuperNormal v) diff --git a/parser-typechecker/src/Unison/Runtime/Decompile.hs b/parser-typechecker/src/Unison/Runtime/Decompile.hs index f6a8215245..ea14b0fb8f 100644 --- a/parser-typechecker/src/Unison/Runtime/Decompile.hs +++ b/parser-typechecker/src/Unison/Runtime/Decompile.hs @@ -33,6 +33,7 @@ import Unison.Codebase.Runtime (Error) import Unison.Util.Pretty (lit) import qualified Unison.Util.Bytes as By +import qualified Unison.Util.Text as Text import qualified Unison.Term as Term import Unsafe.Coerce -- for Int -> Double @@ -103,7 +104,7 @@ decompileForeign -> Foreign -> Either Error (Term v ()) decompileForeign topTerms f - | Just t <- maybeUnwrapBuiltin f = Right $ text () t + | Just t <- maybeUnwrapBuiltin f = Right $ text () (Text.toText t) | Just b <- maybeUnwrapBuiltin f = Right $ decompileBytes b | Just h <- maybeUnwrapBuiltin f = Right $ decompileHashAlgorithm h | Just l <- maybeUnwrapForeign termLinkRef f diff --git a/parser-typechecker/src/Unison/Runtime/Foreign.hs b/parser-typechecker/src/Unison/Runtime/Foreign.hs index 0991b74e29..6347257abd 100644 --- a/parser-typechecker/src/Unison/Runtime/Foreign.hs +++ b/parser-typechecker/src/Unison/Runtime/Foreign.hs @@ -18,13 +18,13 @@ module Unison.Runtime.Foreign import Control.Concurrent (ThreadId, MVar) import Data.IORef (IORef) -import Data.Text (Text, unpack) import Data.Tagged (Tagged(..)) import Network.Socket (Socket) import qualified Network.TLS as TLS (ClientParams, Context, ServerParams) import qualified Data.X509 as X509 import System.IO (Handle) import Unison.Util.Bytes (Bytes) +import Unison.Util.Text (Text) import Unison.Reference (Reference) import Unison.Referent (Referent) import Unison.Runtime.ANF (SuperGroup, Value) @@ -78,7 +78,7 @@ instance Show Foreign where $ showString "Wrap " . showsPrec 10 r . showString " " . contents where contents - | r == Ty.textRef = shows (unpack (unsafeCoerce v)) + | r == Ty.textRef = shows @Text (unsafeCoerce v) | otherwise = showString "_" unwrapForeign :: Foreign -> a diff --git a/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs b/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs index f89f4c32b2..2f6769bf5c 100644 --- a/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs +++ b/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs @@ -20,13 +20,13 @@ import Control.Exception (evaluate) import qualified Data.Char as Char import Data.IORef (IORef) import Data.Foldable (toList) -import Data.Text (Text, pack, unpack) import Data.Time.Clock.POSIX (POSIXTime) import qualified Data.Sequence as Sq import Data.Word (Word64) import Network.Socket (Socket) import System.IO (BufferMode(..), SeekMode, Handle, IOMode) import Unison.Util.Bytes (Bytes) +import Unison.Util.Text (Text,unpack,pack) import Unison.Reference (Reference) import Unison.Type (mvarRef, tvarRef, typeLinkRef, refRef) diff --git a/parser-typechecker/src/Unison/Runtime/MCode.hs b/parser-typechecker/src/Unison/Runtime/MCode.hs index 2e3519f709..5502b4e7fb 100644 --- a/parser-typechecker/src/Unison/Runtime/MCode.hs +++ b/parser-typechecker/src/Unison/Runtime/MCode.hs @@ -4,6 +4,7 @@ {-# language PatternGuards #-} {-# language EmptyDataDecls #-} {-# language PatternSynonyms #-} +{-# language OverloadedStrings #-} module Unison.Runtime.MCode ( Args'(..) @@ -48,7 +49,7 @@ import Data.Primitive.PrimArray import qualified Data.Map.Strict as M import Unison.Util.EnumContainers as EC -import Data.Text (Text,pack) +import Unison.Util.Text (Text) import Unison.Var (Var) import Unison.ABT.Normalized (pattern TAbss) @@ -933,7 +934,7 @@ emitLet rns grp rec d vcs ctx bnd f s w = Let s (CIx contRef grp w) contRef :: Reference -contRef = Builtin (pack "Continuation") +contRef = Builtin "Continuation" -- Translate from ANF prim ops to machine code operations. The -- machine code operations are divided with respect to more detailed diff --git a/parser-typechecker/src/Unison/Runtime/MCode/Serialize.hs b/parser-typechecker/src/Unison/Runtime/MCode/Serialize.hs index f13887ec1e..175e9ae033 100644 --- a/parser-typechecker/src/Unison/Runtime/MCode/Serialize.hs +++ b/parser-typechecker/src/Unison/Runtime/MCode/Serialize.hs @@ -20,6 +20,7 @@ import Data.Word (Word64) import Unison.Runtime.MCode hiding (MatchT) import Unison.Runtime.Serialize +import qualified Unison.Util.Text as Util.Text putComb :: MonadPut m => Comb -> m () putComb (Lam ua ba uf bf body) = @@ -299,7 +300,7 @@ instance Tag MLitT where putLit :: MonadPut m => MLit -> m () putLit (MI i) = putTag MIT *> pInt i putLit (MD d) = putTag MDT *> putFloat d -putLit (MT t) = putTag MTT *> putText t +putLit (MT t) = putTag MTT *> putText (Util.Text.toText t) putLit (MM r) = putTag MMT *> putReferent r putLit (MY r) = putTag MYT *> putReference r @@ -307,7 +308,7 @@ getLit :: MonadGet m => m MLit getLit = getTag >>= \case MIT -> MI <$> gInt MDT -> MD <$> getFloat - MTT -> MT <$> getText + MTT -> MT . Util.Text.fromText <$> getText MMT -> MM <$> getReferent MYT -> MY <$> getReference @@ -336,7 +337,7 @@ putBranch (Test2 a sa b sb d) putBranch (TestW d m) = putTag TestWT *> putSection d *> putEnumMap pWord putSection m putBranch (TestT d m) - = putTag TestTT *> putSection d *> putMap putText putSection m + = putTag TestTT *> putSection d *> putMap (putText . Util.Text.toText) putSection m getBranch :: MonadGet m => m Branch getBranch = getTag >>= \case @@ -346,7 +347,7 @@ getBranch = getTag >>= \case <*> gWord <*> getSection <*> getSection TestWT -> TestW <$> getSection <*> getEnumMap gWord getSection - TestTT -> TestT <$> getSection <*> getMap getText getSection + TestTT -> TestT <$> getSection <*> getMap (Util.Text.fromText <$> getText) getSection gInt :: MonadGet m => m Int diff --git a/parser-typechecker/src/Unison/Runtime/Machine.hs b/parser-typechecker/src/Unison/Runtime/Machine.hs index 9995a9576d..be98c16db2 100644 --- a/parser-typechecker/src/Unison/Runtime/Machine.hs +++ b/parser-typechecker/src/Unison/Runtime/Machine.hs @@ -20,7 +20,8 @@ import Data.Ord (comparing) import Data.Traversable import Data.Word (Word64) -import qualified Data.Text as Tx +import qualified Data.Text as DTx +import qualified Unison.Util.Text as Util.Text import qualified Data.Text.IO as Tx import qualified Data.Sequence as Sq import qualified Data.Map.Strict as M @@ -141,7 +142,7 @@ topDEnv -> (DEnv, K -> K) topDEnv rfTy rfTm | Just n <- M.lookup exceptionRef rfTy - , rcrf <- Builtin (Tx.pack "raise") + , rcrf <- Builtin (DTx.pack "raise") , Just j <- M.lookup rcrf rfTm = ( EC.mapSingleton n (PAp (CIx rcrf j 0) unull bnull) , Mark (EC.setSingleton n) mempty @@ -260,7 +261,7 @@ exec !env !denv !ustk !bstk !k (BPrim1 LKUP i) = do exec !_ !denv !ustk !bstk !k (BPrim1 TLTT i) = do clink <- peekOff bstk i let Ref link = unwrapForeign $ marshalToForeign clink - let sh = SH.toText $ toShortHash link + let sh = Util.Text.fromText . SH.toText $ toShortHash link bstk <- bump bstk pokeBi bstk sh pure (denv, ustk, bstk, k) @@ -311,7 +312,7 @@ exec !_ !denv !ustk !bstk !k (Unpack r i) = do pure (denv, ustk, bstk, k) exec !_ !denv !ustk !bstk !k (Print i) = do t <- peekOffBi bstk i - Tx.putStrLn t + Tx.putStrLn (Util.Text.toText t) pure (denv, ustk, bstk, k) exec !_ !denv !ustk !bstk !k (Lit (MI n)) = do ustk <- bump ustk @@ -1061,7 +1062,7 @@ bprim1 bprim1 !ustk !bstk SIZT i = do t <- peekOffBi bstk i ustk <- bump ustk - poke ustk $ Tx.length t + poke ustk $ Util.Text.size t pure (ustk, bstk) bprim1 !ustk !bstk SIZS i = do s <- peekOffS bstk i @@ -1071,20 +1072,20 @@ bprim1 !ustk !bstk SIZS i = do bprim1 !ustk !bstk ITOT i = do n <- peekOff ustk i bstk <- bump bstk - pokeBi bstk . Tx.pack $ show n + pokeBi bstk . Util.Text.pack $ show n pure (ustk, bstk) bprim1 !ustk !bstk NTOT i = do n <- peekOffN ustk i bstk <- bump bstk - pokeBi bstk . Tx.pack $ show n + pokeBi bstk . Util.Text.pack $ show n pure (ustk, bstk) bprim1 !ustk !bstk FTOT i = do f <- peekOffD ustk i bstk <- bump bstk - pokeBi bstk . Tx.pack $ show f + pokeBi bstk . Util.Text.pack $ show f pure (ustk, bstk) bprim1 !ustk !bstk USNC i - = peekOffBi bstk i >>= \t -> case Tx.unsnoc t of + = peekOffBi bstk i >>= \t -> case Util.Text.unsnoc t of Nothing -> do ustk <- bump ustk poke ustk 0 @@ -1097,7 +1098,7 @@ bprim1 !ustk !bstk USNC i pokeBi bstk t pure (ustk, bstk) bprim1 !ustk !bstk UCNS i - = peekOffBi bstk i >>= \t -> case Tx.uncons t of + = peekOffBi bstk i >>= \t -> case Util.Text.uncons t of Nothing -> do ustk <- bump ustk poke ustk 0 @@ -1110,7 +1111,7 @@ bprim1 !ustk !bstk UCNS i pokeBi bstk t pure (ustk, bstk) bprim1 !ustk !bstk TTOI i - = peekOffBi bstk i >>= \t -> case readm $ Tx.unpack t of + = peekOffBi bstk i >>= \t -> case readm $ Util.Text.unpack t of Nothing -> do ustk <- bump ustk poke ustk 0 @@ -1124,7 +1125,7 @@ bprim1 !ustk !bstk TTOI i readm ('+':s) = readMaybe s readm s = readMaybe s bprim1 !ustk !bstk TTON i - = peekOffBi bstk i >>= \t -> case readMaybe $ Tx.unpack t of + = peekOffBi bstk i >>= \t -> case readMaybe $ Util.Text.unpack t of Nothing -> do ustk <- bump ustk poke ustk 0 @@ -1135,7 +1136,7 @@ bprim1 !ustk !bstk TTON i pokeOffN ustk 1 n pure (ustk, bstk) bprim1 !ustk !bstk TTOF i - = peekOffBi bstk i >>= \t -> case readMaybe $ Tx.unpack t of + = peekOffBi bstk i >>= \t -> case readMaybe $ Util.Text.unpack t of Nothing -> do ustk <- bump ustk poke ustk 0 @@ -1174,7 +1175,7 @@ bprim1 !ustk !bstk VWRS i bprim1 !ustk !bstk PAKT i = do s <- peekOffS bstk i bstk <- bump bstk - pokeBi bstk . Tx.pack . toList $ clo2char <$> s + pokeBi bstk . Util.Text.pack . toList $ clo2char <$> s pure (ustk, bstk) where clo2char (DataU1 _ t i) | t == charTag = toEnum i @@ -1183,7 +1184,7 @@ bprim1 !ustk !bstk UPKT i = do t <- peekOffBi bstk i bstk <- bump bstk pokeS bstk . Sq.fromList - . fmap (DataU1 Rf.charRef charTag . fromEnum) . Tx.unpack $ t + . fmap (DataU1 Rf.charRef charTag . fromEnum) . Util.Text.unpack $ t pure (ustk, bstk) bprim1 !ustk !bstk PAKB i = do s <- peekOffS bstk i @@ -1232,34 +1233,34 @@ bprim2 !ustk !bstk DRPT i j = do n <- peekOff ustk i t <- peekOffBi bstk j bstk <- bump bstk - pokeBi bstk $ Tx.drop n t + pokeBi bstk $ Util.Text.drop n t pure (ustk, bstk) bprim2 !ustk !bstk CATT i j = do x <- peekOffBi bstk i y <- peekOffBi bstk j bstk <- bump bstk - pokeBi bstk $ Tx.append x y + pokeBi bstk $ (x <> y :: Util.Text.Text) pure (ustk, bstk) bprim2 !ustk !bstk TAKT i j = do n <- peekOff ustk i t <- peekOffBi bstk j bstk <- bump bstk - pokeBi bstk $ Tx.take n t + pokeBi bstk $ Util.Text.take n t pure (ustk, bstk) bprim2 !ustk !bstk EQLT i j = do - x <- peekOffBi @Tx.Text bstk i + x <- peekOffBi @Util.Text.Text bstk i y <- peekOffBi bstk j ustk <- bump ustk poke ustk $ if x == y then 1 else 0 pure (ustk, bstk) bprim2 !ustk !bstk LEQT i j = do - x <- peekOffBi @Tx.Text bstk i + x <- peekOffBi @Util.Text.Text bstk i y <- peekOffBi bstk j ustk <- bump ustk poke ustk $ if x <= y then 1 else 0 pure (ustk, bstk) bprim2 !ustk !bstk LEST i j = do - x <- peekOffBi @Tx.Text bstk i + x <- peekOffBi @Util.Text.Text bstk i y <- peekOffBi bstk j ustk <- bump ustk poke ustk $ if x < y then 1 else 0 @@ -1368,9 +1369,9 @@ bprim2 !ustk !bstk CATB i j = do pokeBi bstk (l <> r :: By.Bytes) pure (ustk, bstk) bprim2 !_ !bstk THRO i j = do - name <- peekOffBi bstk i + name <- peekOffBi @Util.Text.Text bstk i x <- peekOff bstk j - throwIO (BU name x) + throwIO (BU (Util.Text.toText name) x) bprim2 !ustk !bstk CMPU _ _ = pure (ustk, bstk) -- impossible {-# inline bprim2 #-} @@ -1396,7 +1397,7 @@ yield !env !denv !ustk !bstk !k = leap denv k {-# inline yield #-} selectTextBranch - :: Tx.Text -> Section -> M.Map Tx.Text Section -> Section + :: Util.Text.Text -> Section -> M.Map Util.Text.Text Section -> Section selectTextBranch t df cs = M.findWithDefault df t cs {-# inline selectTextBranch #-} @@ -1466,7 +1467,7 @@ combSection env (CIx _ n i) = Nothing -> die $ "unknown combinator `" ++ show n ++ "`." dummyRef :: Reference -dummyRef = Builtin (Tx.pack "dummy") +dummyRef = Builtin (DTx.pack "dummy") reserveIds :: Word64 -> TVar Word64 -> IO Word64 reserveIds n free = atomically . stateTVar free $ \i -> (i, i+n) @@ -1528,8 +1529,8 @@ codeValidate tml cc = do combinate (n, g) = evaluate $ emitCombs rns n g (Nothing <$ traverse_ combinate (zip [ftm..] gs)) `catch` \(CE cs perr) -> let - msg = Tx.pack $ toPlainUnbroken perr - extra = Foreign . Wrap Rf.textRef . Tx.pack $ show cs in + msg = Util.Text.pack $ toPlainUnbroken perr + extra = Foreign . Wrap Rf.textRef . Util.Text.pack $ show cs in pure . Just $ Failure ioFailureRef msg extra cacheAdd0 diff --git a/parser-typechecker/src/Unison/Runtime/Serialize.hs b/parser-typechecker/src/Unison/Runtime/Serialize.hs index 3d5195d0ad..9f24fa1459 100644 --- a/parser-typechecker/src/Unison/Runtime/Serialize.hs +++ b/parser-typechecker/src/Unison/Runtime/Serialize.hs @@ -2,15 +2,12 @@ module Unison.Runtime.Serialize where -import Basement.Block (Block) - import Control.Applicative (liftA2) import Control.Monad (replicateM) import Data.Foldable (traverse_) -import qualified Data.ByteArray as BA +import qualified Data.Vector.Primitive as BA import qualified Data.ByteString as B -import qualified Data.ByteString.Short as SBS import Data.Bits (Bits) import Data.Bytes.Put import Data.Bytes.Get hiding (getBytes) @@ -30,7 +27,7 @@ import Unison.Referent (Referent, pattern Ref, pattern Con) import qualified Unison.Util.Bytes as Bytes import Unison.Util.EnumContainers as EC import Unison.Hash (Hash) -import qualified Unison.Hash as Hash +import qualified U.Util.Hash as Hash import qualified Unison.ConstructorType as CT import Unison.Runtime.Exception import Unison.Runtime.MCode @@ -142,15 +139,15 @@ getBytes = Bytes.fromChunks <$> getList getBlock putBytes :: MonadPut m => Bytes.Bytes -> m () putBytes = putFoldable putBlock . Bytes.chunks -getBlock :: MonadGet m => m (Bytes.View (Block Word8)) -getBlock = getLength >>= fmap (Bytes.view . BA.convert) . getByteString +getBlock :: MonadGet m => m Bytes.Chunk +getBlock = getLength >>= fmap Bytes.byteStringToChunk . getByteString -putBlock :: MonadPut m => Bytes.View (Block Word8) -> m () -putBlock b = putLength (BA.length b) *> putByteString (BA.convert b) +putBlock :: MonadPut m => Bytes.Chunk -> m () +putBlock b = putLength (BA.length b) *> putByteString (Bytes.chunkToByteString b) putHash :: MonadPut m => Hash -> m () putHash h = do - let bs = SBS.fromShort $ Hash.toBytes h + let bs = Hash.toByteString h putLength (B.length bs) putByteString bs @@ -158,7 +155,7 @@ getHash :: MonadGet m => m Hash getHash = do len <- getLength bs <- B.copy <$> Ser.getBytes len - pure $ Hash.fromBytes bs + pure $ Hash.fromByteString bs putReferent :: MonadPut m => Referent -> m () putReferent = \case diff --git a/parser-typechecker/src/Unison/Server/Doc/AsHtml.hs b/parser-typechecker/src/Unison/Server/Doc/AsHtml.hs index a315f0b096..6d870e7edc 100644 --- a/parser-typechecker/src/Unison/Server/Doc/AsHtml.hs +++ b/parser-typechecker/src/Unison/Server/Doc/AsHtml.hs @@ -41,9 +41,9 @@ embeddedSource ref = Term s -> embeddedSource' s Type s -> embeddedSource' s -inlineCode :: [Attribute] -> Html () -> Html () -inlineCode attrs = - pre_ (class_ "inline-code" : attrs) . code_ [] +inlineCode :: [Text] -> Html () -> Html () +inlineCode classNames = + code_ [classes_ ("inline-code" : classNames)] codeBlock :: [Attribute] -> Html () -> Html () codeBlock attrs = @@ -96,14 +96,16 @@ foldedToHtml attrs isFolded = if isFolded then open_ "open" : attrs else attrs - in details_ attrsWithOpen $ summary_ [] $ sequence_ $ summary ++ details + in details_ attrsWithOpen $ do + summary_ [class_ "folded-content"] $ sequence_ summary + div_ [class_ "folded-content"] $ sequence_ details foldedToHtmlSource :: Bool -> EmbeddedSource -> Html () foldedToHtmlSource isFolded source = case source of Builtin summary -> foldedToHtml - [class_ "rich source"] + [class_ "folded rich source"] ( Disabled ( div_ [class_ "builtin-summary"] @@ -115,7 +117,7 @@ foldedToHtmlSource isFolded source = ) ) EmbeddedSource summary details -> - foldedToHtml [class_ "rich source"] $ + foldedToHtml [class_ "folded rich source"] $ IsFolded isFolded [codeBlock [] $ Syntax.toHtml summary] @@ -228,9 +230,9 @@ toHtml docNamesByRef document = Callout icon content -> let (cls, ico) = case icon of - Just (Word emoji) -> - (class_ "callout callout-with-icon", div_ [class_ "callout-icon"] $ L.toHtml emoji) - _ -> + Just emoji -> + (class_ "callout callout-with-icon", div_ [class_ "callout-icon"] $ L.toHtml . toText "" $ emoji) + Nothing -> (class_ "callout", "") in div_ [cls] $ do ico @@ -243,14 +245,11 @@ toHtml docNamesByRef document = tr_ [] $ mapM_ cellToHtml $ mergeWords " " cells in table_ [] $ tbody_ [] $ mapM_ rowToHtml rows Folded isFolded summary details -> - let content = - if isFolded - then [currentSectionLevelToHtml summary] - else - [ currentSectionLevelToHtml summary, - currentSectionLevelToHtml details - ] - in foldedToHtml [] (IsFolded isFolded content []) + foldedToHtml [class_ "folded"] $ + IsFolded + isFolded + [currentSectionLevelToHtml summary] + [currentSectionLevelToHtml details] Paragraph docs -> case docs of [d] -> @@ -313,7 +312,7 @@ toHtml docNamesByRef document = ExampleBlock syntax -> div_ [class_ "source rich example"] $ codeBlock [] (Syntax.toHtml syntax) Link syntax -> - inlineCode [class_ "rich source"] (Syntax.toHtml syntax) + inlineCode ["rich", "source"] $ Syntax.toHtml syntax Signature signatures -> div_ [class_ "rich source signatures"] diff --git a/parser-typechecker/src/Unison/Server/Syntax.hs b/parser-typechecker/src/Unison/Server/Syntax.hs index 79c8ebe80f..cf43adb811 100644 --- a/parser-typechecker/src/Unison/Server/Syntax.hs +++ b/parser-typechecker/src/Unison/Server/Syntax.hs @@ -156,6 +156,7 @@ reference (Segment _ el) = case el' of TermReference r -> Just r TypeReference r -> Just r + HashQualifier r -> Just r _ -> Nothing in el >>= reference' diff --git a/parser-typechecker/src/Unison/Util/Bytes.hs b/parser-typechecker/src/Unison/Util/Bytes.hs index c4c5a2a211..f40a20a396 100644 --- a/parser-typechecker/src/Unison/Util/Bytes.hs +++ b/parser-typechecker/src/Unison/Util/Bytes.hs @@ -1,42 +1,118 @@ {-# Language ViewPatterns #-} +{-# Language GeneralizedNewtypeDeriving #-} +{-# Language BangPatterns #-} {-# OPTIONS_GHC -fno-warn-orphans #-} - -module Unison.Util.Bytes where +{-# OPTIONS_GHC -fno-warn-unused-top-binds #-} + +module Unison.Util.Bytes ( + Bytes(..), Chunk, + fromByteString, toByteString, + fromWord8s, toWord8s, + fromBase16, toBase16, + fromBase32, toBase32, + fromBase64, toBase64, + fromBase64UrlUnpadded, toBase64UrlUnpadded, + + chunkFromByteString, byteStringToChunk, chunkToByteString, + fromChunks, chunks, byteStringChunks, + toArray, fromArray, toLazyByteString, + flatten, + + at, take, drop, size, empty, + + encodeNat16be, decodeNat16be, + encodeNat32be, decodeNat32be, + encodeNat64be, decodeNat64be, + encodeNat16le, decodeNat16le, + encodeNat32le, decodeNat32le, + encodeNat64le, decodeNat64le, + decodeUtf8, encodeUtf8, + + zlibCompress, zlibDecompress, + gzipCompress, gzipDecompress +) where import Control.DeepSeq (NFData(..)) +import Control.Monad.Primitive (unsafeIOToPrim) import Data.Bits (shiftR, shiftL, (.|.)) import Data.Char -import Data.Memory.PtrMethods (memCompare, memEqual) -import Data.Monoid (Sum(..)) -import Foreign.Ptr (Ptr, plusPtr) -import Foreign.Storable (poke) -import System.IO.Unsafe (unsafeDupablePerformIO) import Unison.Prelude hiding (ByteString, empty) -import Basement.Block (Block) -import qualified Data.ByteString.Lazy as LB -import qualified Data.ByteArray as B +import Prelude hiding (take, drop) +import qualified Data.ByteString as B +import qualified Data.ByteArray as BA import qualified Data.ByteArray.Encoding as BE -import qualified Data.FingerTree as T +import qualified Data.ByteString.Lazy as LB import qualified Data.Text as Text +import qualified Unison.Util.Rope as R +import qualified Data.Vector.Primitive as V +import qualified Data.Vector.Primitive.Mutable as MV +import qualified Data.Vector.Storable as SV +import qualified Data.Vector.Storable.Mutable as MSV +import qualified Data.Vector.Storable.ByteString as BSV +import Data.Primitive.ByteArray (copyByteArrayToPtr) +import Data.Primitive.Ptr (copyPtrToMutableByteArray) +import Foreign.Storable (pokeByteOff) +import Foreign.ForeignPtr (withForeignPtr) import qualified Codec.Compression.Zlib as Zlib import qualified Codec.Compression.GZip as GZip +import Unsafe.Coerce (unsafeCoerce) + +type Chunk = V.Vector Word8 --- Block is just `newtype Block a = Block ByteArray#` -type ByteString = Block Word8 +-- Bytes type represented as a rope of ByteStrings +newtype Bytes = Bytes { underlying :: R.Rope Chunk } deriving (Semigroup,Monoid,Eq,Ord) --- Bytes type represented as a finger tree of ByteStrings. --- Can be efficiently sliced and indexed, using the byte count --- annotation at each subtree. -newtype Bytes = Bytes (T.FingerTree (Sum Int) (View ByteString)) +instance R.Sized Chunk where size = V.length +instance R.Drop Chunk where drop = V.drop +instance R.Take Chunk where take = V.take +instance R.Index Chunk Word8 where unsafeIndex n bs = bs `V.unsafeIndex` n +instance R.Reverse Chunk where reverse = V.reverse +instance NFData Bytes where rnf _ = () null :: Bytes -> Bool -null (Bytes bs) = T.null bs +null = R.null . underlying empty :: Bytes -empty = Bytes mempty +empty = mempty + +isAscii :: Bytes -> Bool +isAscii b = all (V.all (<= 0x7F)) (chunks b) + +fromByteString :: B.ByteString -> Bytes +fromByteString b = snoc empty (byteStringToChunk b) + +toByteString :: Bytes -> B.ByteString +toByteString b = B.concat (map chunkToByteString (chunks b)) -fromArray :: B.ByteArrayAccess ba => ba -> Bytes -fromArray = snoc empty +toArray :: BA.ByteArray b => Bytes -> b +toArray b = chunkToArray $ V.concat (chunks b) + +fromArray :: BA.ByteArrayAccess b => b -> Bytes +fromArray b = snoc empty (arrayToChunk b) + +byteStringToChunk, chunkFromByteString :: B.ByteString -> Chunk +byteStringToChunk = fromStorable . BSV.byteStringToVector +chunkFromByteString = byteStringToChunk + +chunkToByteString :: Chunk -> B.ByteString +chunkToByteString = BSV.vectorToByteString . toStorable + +fromStorable :: SV.Vector Word8 -> V.Vector Word8 +fromStorable sv + = V.create $ do + MSV.MVector l fp <- SV.unsafeThaw sv + v@(MV.MVector _ _ ba) <- MV.unsafeNew l + unsafeIOToPrim . withForeignPtr fp $ \p -> + -- Note: unsafeCoerce is for s -> RealWorld in byte array type + copyPtrToMutableByteArray (unsafeCoerce ba) 0 p l + pure v + +toStorable :: V.Vector Word8 -> SV.Vector Word8 +toStorable (V.Vector o l ba) = SV.create $ do + v@(MSV.MVector _ fp) <- MSV.unsafeNew l + unsafeIOToPrim . withForeignPtr fp $ \p -> + copyByteArrayToPtr p ba o l + pure v zlibCompress :: Bytes -> Bytes zlibCompress = fromLazyByteString . Zlib.compress . toLazyByteString @@ -50,89 +126,67 @@ gzipDecompress = fromLazyByteString . GZip.decompress . toLazyByteString zlibDecompress :: Bytes -> Bytes zlibDecompress = fromLazyByteString . Zlib.decompress . toLazyByteString -toArray :: forall bo . B.ByteArray bo => Bytes -> bo -toArray b = B.concat (map B.convert (chunks b) :: [bo]) - toLazyByteString :: Bytes -> LB.ByteString -toLazyByteString b = LB.fromChunks $ map B.convert $ chunks b +toLazyByteString b = LB.fromChunks $ map chunkToByteString $ chunks b fromLazyByteString :: LB.ByteString -> Bytes -fromLazyByteString b = fromChunks (map (view . B.convert) $ LB.toChunks b) +fromLazyByteString b = fromChunks (byteStringToChunk <$> LB.toChunks b) size :: Bytes -> Int -size (Bytes bs) = getSum (T.measure bs) +size = R.size . underlying -chunks :: Bytes -> [View ByteString] -chunks (Bytes b) = toList b +chunkSize :: Chunk -> Int +chunkSize = V.length -fromChunks :: [View ByteString] -> Bytes -fromChunks = foldl' snocView empty +chunks :: Bytes -> [Chunk] +chunks (Bytes bs) = toList bs -snocView :: Bytes -> View ByteString -> Bytes -snocView bs b | B.null b = bs -snocView (Bytes bs) b = Bytes (bs T.|> b) +byteStringChunks :: Bytes -> [B.ByteString] +byteStringChunks bs = chunkToByteString <$> chunks bs -cons :: B.ByteArrayAccess ba => ba -> Bytes -> Bytes -cons b bs | B.null b = bs -cons b (Bytes bs) = Bytes (view (B.convert b) T.<| bs) +fromChunks :: [Chunk] -> Bytes +fromChunks = foldl' snoc empty -snoc :: B.ByteArrayAccess ba => Bytes -> ba -> Bytes -snoc bs b | B.null b = bs -snoc (Bytes bs) b = Bytes (bs T.|> view (B.convert b)) +cons :: Chunk -> Bytes -> Bytes +cons b (Bytes bs) = Bytes (R.cons b bs) + +snoc :: Bytes -> Chunk -> Bytes +snoc (Bytes bs) b = Bytes (R.snoc bs b) flatten :: Bytes -> Bytes -flatten b = snoc mempty (B.concat (chunks b) :: ByteString) +flatten b = snoc mempty (V.concat (chunks b)) take :: Int -> Bytes -> Bytes -take n (Bytes bs) = go (T.split (> Sum n) bs) where - go (ok, s) = Bytes $ case T.viewl s of - last T.:< _ -> - if T.measure ok == Sum n then ok - else ok T.|> takeView (n - getSum (T.measure ok)) last - _ -> ok +take n (Bytes bs) = Bytes (R.take n bs) drop :: Int -> Bytes -> Bytes -drop n b0@(Bytes bs) = go (T.dropUntil (> Sum n) bs) where - go s = Bytes $ case T.viewl s of - head T.:< tail -> - if (size b0 - getSum (T.measure s)) == n then s - else dropView (n - (size b0 - getSum (T.measure s))) head T.<| tail - _ -> s - -at :: Int -> Bytes -> Maybe Word8 -at i bs = case Unison.Util.Bytes.drop i bs of - -- todo: there's a more efficient implementation that does no allocation - -- note: chunks guaranteed nonempty (see `snoc` and `cons` implementations) - Bytes (T.viewl -> hd T.:< _) -> Just (B.index hd 0) - _ -> Nothing - -dropBlock :: Int -> Bytes -> Maybe (View ByteString, Bytes) -dropBlock nBytes chunks = - go mempty chunks where - go acc (Bytes chunks) = - if B.length acc == nBytes then - Just (view acc, (Bytes chunks)) - else if B.length acc >= nBytes then - let v = view acc in - Just ((takeView nBytes v), Bytes ((dropView nBytes v) T.<| chunks)) - else - case chunks of - (T.viewl -> (head T.:< tail)) -> go (acc <> (B.convert head)) (Bytes tail) - _ -> Nothing +drop n (Bytes bs) = Bytes (R.drop n bs) + +at, index :: Int -> Bytes -> Maybe Word8 +at n (Bytes bs) = R.index n bs +index = at +dropBlock :: Int -> Bytes -> Maybe (Chunk, Bytes) +dropBlock nBytes (Bytes chunks) = go mempty chunks + where + go acc chunks + | V.length acc == nBytes = Just (acc, Bytes chunks) + | V.length acc >= nBytes, (hd,hd2) <- V.splitAt nBytes acc = Just (hd, Bytes (hd2 `R.cons` chunks)) + | Just (head, tail) <- R.uncons chunks = go (acc <> head) tail + | otherwise = Nothing decodeNat64be :: Bytes -> Maybe (Word64, Bytes) decodeNat64be bs = case dropBlock 8 bs of Just (head, rest) -> let - b8 = B.index head 0 - b7 = B.index head 1 - b6 = B.index head 2 - b5 = B.index head 3 - b4 = B.index head 4 - b3 = B.index head 5 - b2 = B.index head 6 - b1 = B.index head 7 + b8 = V.unsafeIndex head 0 + b7 = V.unsafeIndex head 1 + b6 = V.unsafeIndex head 2 + b5 = V.unsafeIndex head 3 + b4 = V.unsafeIndex head 4 + b3 = V.unsafeIndex head 5 + b2 = V.unsafeIndex head 6 + b1 = V.unsafeIndex head 7 b = shiftL (fromIntegral b8) 56 .|.shiftL (fromIntegral b7) 48 .|.shiftL (fromIntegral b6) 40 @@ -149,14 +203,14 @@ decodeNat64le :: Bytes -> Maybe (Word64, Bytes) decodeNat64le bs = case dropBlock 8 bs of Just (head, rest) -> let - b1 = B.index head 0 - b2 = B.index head 1 - b3 = B.index head 2 - b4 = B.index head 3 - b5 = B.index head 4 - b6 = B.index head 5 - b7 = B.index head 6 - b8 = B.index head 7 + b1 = V.unsafeIndex head 0 + b2 = V.unsafeIndex head 1 + b3 = V.unsafeIndex head 2 + b4 = V.unsafeIndex head 3 + b5 = V.unsafeIndex head 4 + b6 = V.unsafeIndex head 5 + b7 = V.unsafeIndex head 6 + b8 = V.unsafeIndex head 7 b = shiftL (fromIntegral b8) 56 .|. shiftL (fromIntegral b7) 48 .|. shiftL (fromIntegral b6) 40 @@ -173,10 +227,10 @@ decodeNat32be :: Bytes -> Maybe (Word64, Bytes) decodeNat32be bs = case dropBlock 4 bs of Just (head, rest) -> let - b4 = B.index head 0 - b3 = B.index head 1 - b2 = B.index head 2 - b1 = B.index head 3 + b4 = V.unsafeIndex head 0 + b3 = V.unsafeIndex head 1 + b2 = V.unsafeIndex head 2 + b1 = V.unsafeIndex head 3 b = shiftL (fromIntegral b4) 24 .|. shiftL (fromIntegral b3) 16 .|. shiftL (fromIntegral b2) 8 @@ -189,10 +243,10 @@ decodeNat32le :: Bytes -> Maybe (Word64, Bytes) decodeNat32le bs = case dropBlock 4 bs of Just (head, rest) -> let - b1 = B.index head 0 - b2 = B.index head 1 - b3 = B.index head 2 - b4 = B.index head 3 + b1 = V.unsafeIndex head 0 + b2 = V.unsafeIndex head 1 + b3 = V.unsafeIndex head 2 + b4 = V.unsafeIndex head 3 b = shiftL (fromIntegral b4) 24 .|. shiftL (fromIntegral b3) 16 .|. shiftL (fromIntegral b2) 8 @@ -205,8 +259,8 @@ decodeNat16be :: Bytes -> Maybe (Word64, Bytes) decodeNat16be bs = case dropBlock 2 bs of Just (head, rest) -> let - b2 = B.index head 0 - b1 = B.index head 1 + b2 = V.unsafeIndex head 0 + b1 = V.unsafeIndex head 1 b = shiftL (fromIntegral b2) 8 .|. fromIntegral b1 in @@ -217,8 +271,8 @@ decodeNat16le :: Bytes -> Maybe (Word64, Bytes) decodeNat16le bs = case dropBlock 2 bs of Just (head, rest) -> let - b1 = B.index head 0 - b2 = B.index head 1 + b1 = V.unsafeIndex head 0 + b2 = V.unsafeIndex head 1 b = shiftL (fromIntegral b2) 8 .|. fromIntegral b1 in @@ -226,46 +280,60 @@ decodeNat16le bs = case dropBlock 2 bs of Nothing -> Nothing -fillBE :: Word64 -> Int -> Ptr Word8 -> IO () -fillBE n 0 p = poke p (fromIntegral n) >> return () -fillBE n i p = poke p (fromIntegral (shiftR n (i * 8))) - >> fillBE n (i - 1) (p `plusPtr` 1) +fillBE :: Word64 -> Int -> Int -> Word8 +fillBE n k 0 = fromIntegral (shiftR n (k*8)) +fillBE n k i = fromIntegral (shiftR n ((k-i) * 8)) +{-# inline fillBE #-} encodeNat64be :: Word64 -> Bytes -encodeNat64be n = Bytes (T.singleton (view (B.unsafeCreate 8 (fillBE n 7)))) +encodeNat64be n = Bytes (R.one (V.generate 8 (fillBE n 7))) encodeNat32be :: Word64 -> Bytes -encodeNat32be n = Bytes (T.singleton (view (B.unsafeCreate 4 (fillBE n 3)))) +encodeNat32be n = Bytes (R.one (V.generate 4 (fillBE n 3))) encodeNat16be :: Word64 -> Bytes -encodeNat16be n = Bytes (T.singleton (view (B.unsafeCreate 2 (fillBE n 1)))) +encodeNat16be n = Bytes (R.one (V.generate 2 (fillBE n 1))) -fillLE :: Word64 -> Int -> Int -> Ptr Word8 -> IO () -fillLE n i j p = - if i == j then - return () - else - poke p (fromIntegral (shiftR n (i * 8))) >> fillLE n (i + 1) j (p `plusPtr` 1) +fillLE :: Word64 -> Int -> Word8 +fillLE n i = fromIntegral (shiftR n (i*8)) +{-# inline fillLE #-} encodeNat64le :: Word64 -> Bytes -encodeNat64le n = Bytes (T.singleton (view (B.unsafeCreate 8 (fillLE n 0 8)))) +encodeNat64le n = Bytes (R.one (V.generate 8 (fillLE n))) encodeNat32le :: Word64 -> Bytes -encodeNat32le n = Bytes (T.singleton (view (B.unsafeCreate 4 (fillLE n 0 4)))) +encodeNat32le n = Bytes (R.one (V.generate 4 (fillLE n))) encodeNat16le :: Word64 -> Bytes -encodeNat16le n = Bytes (T.singleton (view (B.unsafeCreate 2 (fillLE n 0 2)))) +encodeNat16le n = Bytes (R.one (V.generate 2 (fillLE n))) toBase16 :: Bytes -> Bytes toBase16 bs = foldl' step empty (chunks bs) where - step bs b = snoc bs (BE.convertToBase BE.Base16 b :: ByteString) + step bs b = snoc bs (arrayToChunk @BA.Bytes $ + BE.convertToBase BE.Base16 (chunkToArray @BA.Bytes b)) + +chunkToArray, arrayFromChunk :: BA.ByteArray b => Chunk -> b +chunkToArray bs = BA.allocAndFreeze (V.length bs) $ \ptr -> + let + go !ind = + if ind < V.length bs + then pokeByteOff ptr ind (V.unsafeIndex bs ind) >> go (ind+1) + else pure () + in go 0 + +arrayFromChunk = chunkToArray + +arrayToChunk, chunkFromArray :: BA.ByteArrayAccess b => b -> Chunk +arrayToChunk bs = V.generate (BA.length bs) (BA.index bs) +chunkFromArray = arrayToChunk fromBase16 :: Bytes -> Either Text.Text Bytes fromBase16 bs = case traverse convert (chunks bs) of Left e -> Left (Text.pack e) - Right bs -> Right (fromChunks (map view bs)) + Right bs -> Right (fromChunks bs) where - convert b = BE.convertFromBase BE.Base16 b :: Either String ByteString + convert b = BE.convertFromBase BE.Base16 (chunkToArray @BA.Bytes b) + <&> arrayToChunk @BA.Bytes toBase32, toBase64, toBase64UrlUnpadded :: Bytes -> Bytes toBase32 = toBase BE.Base32 @@ -278,110 +346,21 @@ fromBase64 = fromBase BE.Base64 fromBase64UrlUnpadded = fromBase BE.Base64URLUnpadded fromBase :: BE.Base -> Bytes -> Either Text.Text Bytes -fromBase e bs = case BE.convertFromBase e (toArray bs :: ByteString) of +fromBase e (Bytes bs) = case BE.convertFromBase e (chunkToArray @BA.Bytes $ R.flatten bs) of Left e -> Left (Text.pack e) - Right b -> Right $ snocView empty (view b) + Right b -> Right $ snoc empty (chunkFromArray (b :: BA.Bytes)) toBase :: BE.Base -> Bytes -> Bytes -toBase e bs = snoc empty (BE.convertToBase e (toArray bs :: ByteString) :: ByteString) +toBase e (Bytes bs) = snoc empty (arrayToChunk arr) + where + arr :: BA.Bytes + arr = BE.convertToBase e (chunkToArray @BA.Bytes $ R.flatten bs) toWord8s :: Bytes -> [Word8] -toWord8s bs = chunks bs >>= B.unpack +toWord8s bs = chunks bs >>= V.toList fromWord8s :: [Word8] -> Bytes -fromWord8s bs = fromArray (view $ B.pack bs :: View ByteString) - -instance Monoid Bytes where - mempty = Bytes mempty - mappend (Bytes b1) (Bytes b2) = Bytes (b1 `mappend` b2) - -instance Semigroup Bytes where (<>) = mappend - -instance T.Measured (Sum Int) (View ByteString) where - measure b = Sum (B.length b) +fromWord8s bs = snoc empty (V.fromList bs) instance Show Bytes where show bs = toWord8s (toBase16 bs) >>= \w -> [chr (fromIntegral w)] - --- Produces two lists where the chunks have the same length -alignChunks :: B.ByteArrayAccess ba => [View ba] -> [View ba] -> ([View ba], [View ba]) -alignChunks bs1 bs2 = (cs1, cs2) - where - cs1 = alignTo bs1 bs2 - cs2 = alignTo bs2 cs1 - alignTo :: B.ByteArrayAccess ba => [View ba] -> [View ba] -> [View ba] - alignTo bs1 [] = bs1 - alignTo [] _ = [] - alignTo (hd1:tl1) (hd2:tl2) - | len1 == len2 = hd1 : alignTo tl1 tl2 - | len1 < len2 = hd1 : alignTo tl1 (dropView len1 hd2 : tl2) - | otherwise = -- len1 > len2 - let (hd1',hd1rem) = (takeView len2 hd1, dropView len2 hd1) - in hd1' : alignTo (hd1rem : tl1) tl2 - where - len1 = B.length hd1 - len2 = B.length hd2 - -instance Eq Bytes where - b1 == b2 | size b1 == size b2 = - uncurry (==) (alignChunks (chunks b1) (chunks b2)) - _ == _ = False - --- Lexicographical ordering -instance Ord Bytes where - b1 `compare` b2 = uncurry compare (alignChunks (chunks b1) (chunks b2)) - --- --- Forked from: http://hackage.haskell.org/package/memory-0.15.0/docs/src/Data.ByteArray.View.html --- which is already one of our dependencies. Forked because the view --- type in the memory package doesn't expose its constructor which makes --- it impossible to implement take and drop. --- --- Module : Data.ByteArray.View --- License : BSD-style --- Maintainer : Nicolas DI PRIMA --- Stability : stable --- Portability : Good - -view :: B.ByteArrayAccess bs => bs -> View bs -view bs = View 0 (B.length bs) bs - -takeView, dropView :: B.ByteArrayAccess bs => Int -> View bs -> View bs -takeView k (View i n bs) = View i (min k n) bs -dropView k (View i n bs) = View (i + (k `min` n)) (n - (k `min` n)) bs - -data View bytes = View - { viewOffset :: !Int - , viewSize :: !Int - , unView :: !bytes - } - -instance B.ByteArrayAccess bytes => Eq (View bytes) where - v1 == v2 = viewSize v1 == viewSize v2 && unsafeDupablePerformIO ( - B.withByteArray v1 $ \ptr1 -> - B.withByteArray v2 $ \ptr2 -> memEqual ptr1 ptr2 (viewSize v1)) - -instance B.ByteArrayAccess bytes => Ord (View bytes) where - compare v1 v2 = unsafeDupablePerformIO $ - B.withByteArray v1 $ \ptr1 -> - B.withByteArray v2 $ \ptr2 -> do - ret <- memCompare ptr1 ptr2 (min (viewSize v1) (viewSize v2)) - return $ case ret of - EQ | B.length v1 > B.length v2 -> GT - | B.length v1 < B.length v2 -> LT - | B.length v1 == B.length v2 -> EQ - _ -> ret - -instance B.ByteArrayAccess bytes => Show (View bytes) where - show v = show (B.unpack v) - -instance B.ByteArrayAccess bytes => B.ByteArrayAccess (View bytes) where - length = viewSize - withByteArray v f = B.withByteArray (unView v) $ - \ptr -> f (ptr `plusPtr` (viewOffset v)) - -instance NFData (View bs) where - rnf bs = seq bs () - -instance NFData Bytes where - rnf bs = rnf (chunks bs) diff --git a/parser-typechecker/src/Unison/Util/Rope.hs b/parser-typechecker/src/Unison/Util/Rope.hs new file mode 100644 index 0000000000..d5891e625d --- /dev/null +++ b/parser-typechecker/src/Unison/Util/Rope.hs @@ -0,0 +1,236 @@ +{-# Language FunctionalDependencies #-} +{-# Language DeriveFoldable #-} + +module Unison.Util.Rope + (chunks, singleton, one, map, traverse, null, + flatten, two, cons, uncons, snoc, unsnoc, index, debugDepth, + Sized(..), Take(..), Drop(..), Reverse(..), Index(..), Rope, + ) +where + +import Prelude hiding (drop,take,reverse,map,traverse,null) +import Data.Foldable (toList) +import Control.DeepSeq (NFData(..)) + +-- | Roughly size-balanced binary tree of chunks. There are +-- a few operations that are sloppier about rebalancing as long +-- as that can't lead to trees of more than logarithmic depth. +-- +-- The `Int` in the `Two` constructor is a cached size of that subtree. +data Rope a + = Empty + | One !a + | Two {-# unpack #-} !Int !(Rope a) !(Rope a) + deriving Foldable + +chunks :: Rope a -> [a] +chunks = toList + +singleton, one :: Sized a => a -> Rope a +one a | size a == 0 = Empty +one a = One a +singleton = one + +-- Note: this function doesn't do rebalancing, so it shouldn't +-- be used unless the function is "roughly" size-preserving. +-- So converting from text to utf-8 encoded text chunks is okay, +-- wherease filtering out 95% of the chunks will lead to a size-unbalanced tree +map :: Sized b => (a -> b) -> Rope a -> Rope b +map f = \case + Empty -> Empty + One a -> one (f a) + Two _ l r -> two (map f l) (map f r) + +-- Like `map`, this doesn't do rebalancing +traverse :: (Applicative f, Sized b) => (a -> f b) -> Rope a -> f (Rope b) +traverse f = \case + Empty -> pure Empty + One a -> one <$> f a + Two _ l r -> two <$> traverse f l <*> traverse f r + +-- typeclasses used for abstracting over the chunk type +class Sized a where size :: a -> Int +class Take a where take :: Int -> a -> a +class Drop a where drop :: Int -> a -> a +class Index a elem where unsafeIndex :: Int -> a -> elem +class Reverse a where reverse :: a -> a + +instance Sized a => Sized (Rope a) where + size = \case + Empty -> 0 + One a -> size a + Two n _ _ -> n + +null :: Sized a => Rope a -> Bool +null r = size r == 0 + +flatten :: Monoid a => Rope a -> a +flatten = mconcat . toList + +instance (Sized a, Semigroup a) => Semigroup (Rope a) where (<>) = mappend +instance (Sized a, Semigroup a) => Monoid (Rope a) where + mempty = Empty + mappend r1 r2 = case (r1,r2) of + (Empty, k) -> k + (k, Empty) -> k + (One a0, k2) -> cons' (size a0) a0 k2 + (k1, One aN) -> snoc' k1 (size aN) aN + (k1@(Two sz1 l1 r1), k2@(Two sz2 l2 r2)) + | sz1 * 2 >= sz2 && sz2 * 2 >= sz1 -> Two (sz1 + sz2) k1 k2 + | sz1 > sz2 -> appendL (size l1) l1 (r1 <> k2) + | otherwise -> appendR (k1 <> l2) (size r2) r2 + +-- size-balanced append, leaving the left tree as is +appendL :: Sized a => Int -> Rope a -> Rope a -> Rope a +appendL 0 _ a = a +appendL _ l Empty = l +appendL szl l r@(One a) = Two (szl + size a) l r +appendL szl l r@(Two szr r1 r2) | szl >= szr = Two (szl+szr) l r + | otherwise = Two (szl+szr) (appendL szl l r1) r2 + +-- size-balanced append, leaving the right tree as is +appendR :: Sized a => Rope a -> Int -> Rope a -> Rope a +appendR a 0 _ = a +appendR Empty _ r = r +appendR l@(One a) szr r = Two (size a + szr) l r +appendR l@(Two szl l1 l2) szr r + | szr >= szl = Two (szl + szr) l r + | otherwise = Two (szl + szr) l1 (appendR l2 szr r) + +cons :: (Sized a, Semigroup a) => a -> Rope a -> Rope a +cons a r = cons' (size a) a r + +snoc :: (Sized a, Semigroup a) => Rope a -> a -> Rope a +snoc as a = snoc' as (size a) a + +cons' :: (Sized a, Semigroup a) => Int -> a -> Rope a -> Rope a +cons' 0 _ as = as +cons' sz0 a0 as = go as + where + go as = case as of + Empty -> One a0 + One a1 -> case sz0 + size a1 of + n | n <= threshold -> One (a0 <> a1) + | otherwise -> Two n (One a0) as + Two sz l r + | sz0 >= sz -> Two (sz0+sz) (One a0) as + | otherwise -> appendR (go l) (size r) r + +snoc' :: (Sized a, Semigroup a) => Rope a -> Int -> a -> Rope a +snoc' as 0 _ = as +snoc' as szN aN = go as + where + go as = case as of + Empty -> One aN + One a0 -> case size a0 + szN of + n | n <= threshold -> One (a0 <> aN) + | otherwise -> Two n as (One aN) + Two sz l r + | szN >= sz -> Two (sz+szN) as (One aN) + | otherwise -> appendL (size l) l (go r) + +instance Reverse a => Reverse (Rope a) where + reverse = \case + One a -> One (reverse a) + Two sz l r -> Two sz (reverse r) (reverse l) + Empty -> Empty + +two :: Sized a => Rope a -> Rope a -> Rope a +two r1 r2 = Two (size r1 + size r2) r1 r2 + +-- Cutoff for when `snoc` or `cons` will create a new subtree +-- rather than just snoc/cons-ing onto the underlying chunk. +-- +-- See https://github.com/unisonweb/unison/pull/1899#discussion_r742953469 +threshold :: Int +threshold = 32 + + +index :: (Sized a, Index a ch) => Int -> Rope a -> Maybe ch +index i r | i >= 0 && i < size r = Just (unsafeIndex i r) + | otherwise = Nothing +{-# inline index #-} + +instance (Sized a, Index a ch) => Index (Rope a) ch where + unsafeIndex i = \case + One a -> unsafeIndex i a + Two sz l r + | i < size l -> unsafeIndex i l + | i >= sz -> error "out of bounds" + | otherwise -> unsafeIndex (i - size l) r + Empty -> error "out of bounds" + +instance (Sized a, Semigroup a, Take a) => Take (Rope a) where + -- this avoids rebalancing the tree, which is more efficient + -- when walking a large rope from left to right via take/drop + take n as = case as of + One a -> if n <= 0 then Empty else one (take n a) + Two sz l r + | n < size l -> take n l + | n >= sz -> as + | otherwise -> two l (take (n - size l) r) -- don't rebalance + Empty -> Empty + +instance (Sized a, Semigroup a, Drop a) => Drop (Rope a) where + -- this avoids rebalancing the tree, which is more efficient + -- when walking a large rope from left to right via take/drop + drop n as = case as of + One a -> if n >= size a then Empty else one (drop n a) + Two sz l r + | n >= size l -> drop (n - size l) r + | n >= sz -> Empty + | otherwise -> two (drop n l) r -- don't rebalance + Empty -> Empty + +uncons :: Sized a => Rope a -> Maybe (a, Rope a) +uncons = \case + Empty -> Nothing + One a -> Just (a, Empty) + Two _ l r -> case uncons l of + Nothing -> uncons r + Just (hd,tl) -> Just (hd, two tl r) + +unsnoc :: Sized a => Rope a -> Maybe (Rope a, a) +unsnoc = \case + Empty -> Nothing + One a -> Just (Empty, a) + Two _ l r -> case unsnoc r of + Nothing -> unsnoc l + Just (init,last) -> Just (two l init, last) + +-- Produces two lists of chunks where the chunks have the same length +alignChunks :: (Sized a, Take a, Drop a) => [a] -> [a] -> ([a], [a]) +alignChunks bs1 bs2 = (cs1, cs2) + where + cs1 = alignTo bs1 bs2 + cs2 = alignTo bs2 cs1 + alignTo bs1 [] = bs1 + alignTo [] _ = [] + alignTo (hd1:tl1) (hd2:tl2) + | len1 == len2 = hd1 : alignTo tl1 tl2 + | len1 < len2 = hd1 : alignTo tl1 (drop len1 hd2 : tl2) + | otherwise = -- len1 > len2 + let (hd1',hd1rem) = (take len2 hd1, drop len2 hd1) + in hd1' : alignTo (hd1rem : tl1) tl2 + where + len1 = size hd1 + len2 = size hd2 + +instance (Sized a, Take a, Drop a, Eq a) => Eq (Rope a) where + b1 == b2 | size b1 == size b2 = + uncurry (==) (alignChunks (chunks b1) (chunks b2)) + _ == _ = False + +-- Lexicographical ordering +instance (Sized a, Take a, Drop a, Ord a) => Ord (Rope a) where + b1 `compare` b2 = uncurry compare (alignChunks (chunks b1) (chunks b2)) + +instance NFData a => NFData (Rope a) where + rnf Empty = () + rnf (One a) = rnf a + rnf (Two _ l r) = rnf l `seq` rnf r + +debugDepth :: Rope a -> Int +debugDepth Empty = 0 +debugDepth One{} = 0 +debugDepth (Two _ l r) = 1 + (debugDepth l `max` debugDepth r) diff --git a/parser-typechecker/src/Unison/Util/Text.hs b/parser-typechecker/src/Unison/Util/Text.hs new file mode 100644 index 0000000000..05c38b1e59 --- /dev/null +++ b/parser-typechecker/src/Unison/Util/Text.hs @@ -0,0 +1,130 @@ +{-# Language BangPatterns #-} +{-# Language GeneralizedNewtypeDeriving #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +module Unison.Util.Text where + +import Data.String (IsString(..)) +import Data.Foldable (toList) +import Data.List (unfoldr,foldl') +import Prelude hiding (take,drop,replicate) +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Unison.Util.Bytes as B +import qualified Unison.Util.Rope as R + +-- Text type represented as a `Rope` of chunks +newtype Text = Text (R.Rope Chunk) deriving (Eq,Ord,Semigroup,Monoid) + +data Chunk = Chunk {-# unpack #-} !Int {-# unpack #-} !T.Text + +empty :: Text +empty = Text mempty + +one, singleton :: Char -> Text +one ch = Text (R.one (chunk (T.singleton ch))) +singleton = one + +threshold :: Int +threshold = 512 + +replicate :: Int -> Text -> Text +replicate n t | size t * n < threshold = Text (R.one (chunk (T.replicate n (toText t)))) +replicate 0 _ = mempty +replicate 1 t = t +replicate n t = + replicate (n `div` 2) t <> replicate (n - (n `div` 2)) t + +chunkToText :: Chunk -> T.Text +chunkToText (Chunk _ t) = t + +chunk :: T.Text -> Chunk +chunk t = Chunk (T.length t) t + +take :: Int -> Text -> Text +take n (Text t) = Text (R.take n t) + +drop :: Int -> Text -> Text +drop n (Text t) = Text (R.drop n t) + +uncons :: Text -> Maybe (Char, Text) +uncons t | size t == 0 = Nothing +uncons t = (,drop 1 t) <$> at 0 t + +unsnoc :: Text -> Maybe (Text, Char) +unsnoc t | size t == 0 = Nothing +unsnoc t = (take (size t - 1) t,) <$> at (size t - 1) t + +at :: Int -> Text -> Maybe Char +at n (Text t) = R.index n t + +size :: Text -> Int +size (Text t) = R.size t + +reverse :: Text -> Text +reverse (Text t) = Text (R.reverse t) + +fromUtf8 :: B.Bytes -> Either String Text +fromUtf8 bs = + case T.decodeUtf8' (B.toByteString bs) of + Right t -> Right (fromText t) + Left e -> Left (show e) + +toUtf8 :: Text -> B.Bytes +toUtf8 (Text t) = B.Bytes (R.map (B.chunkFromByteString . T.encodeUtf8 . chunkToText) t) + +fromText :: T.Text -> Text +fromText s | T.null s = mempty +fromText s = Text (go (chunk <$> T.chunksOf threshold s)) + where + go = foldl' R.snoc mempty + +pack :: String -> Text +pack = fromText . T.pack +{-# inline pack #-} + +toString, unpack :: Text -> String +toString (Text bs) = toList bs >>= (T.unpack . chunkToText) +{-# inline toString #-} +{-# inline unpack #-} + +unpack = toString + +toText :: Text -> T.Text +toText (Text t) = T.concat (chunkToText <$> unfoldr R.uncons t) +{-# inline toText #-} + +instance Eq Chunk where (Chunk n a) == (Chunk n2 a2) = n == n2 && a == a2 +instance Ord Chunk where (Chunk _ a) `compare` (Chunk _ a2) = compare a a2 +instance Semigroup Chunk where (<>) = mappend +instance Monoid Chunk where + mempty = Chunk 0 mempty + mappend l r = Chunk (R.size l + R.size r) (chunkToText l <> chunkToText r) + +instance R.Sized Chunk where size (Chunk n _) = n + +instance R.Drop Chunk where + drop k c@(Chunk n t) + | k >= n = mempty + | k <= 0 = c + | otherwise = Chunk (n-k) (T.drop k t) + +instance R.Take Chunk where + take k c@(Chunk n t) + | k >= n = c + | k <= 0 = mempty + | otherwise = Chunk k (T.take k t) + +instance R.Index Chunk Char where + unsafeIndex i (Chunk _ t) = T.index t i + +instance R.Reverse Chunk where + reverse (Chunk n t) = Chunk n (T.reverse t) + +instance R.Sized Text where size (Text t) = R.size t + +instance Show Text where + show t = show (toText t) + +instance IsString Text where + fromString = pack \ No newline at end of file diff --git a/parser-typechecker/tests/Suite.hs b/parser-typechecker/tests/Suite.hs index cd445a5a41..16e1a8d839 100644 --- a/parser-typechecker/tests/Suite.hs +++ b/parser-typechecker/tests/Suite.hs @@ -9,7 +9,6 @@ import System.IO import qualified Unison.Core.Test.Name as Name import qualified Unison.Test.ABT as ABT import qualified Unison.Test.Cache as Cache -import qualified Unison.Test.ClearCache as ClearCache import qualified Unison.Test.Codebase.Branch as Branch import qualified Unison.Test.Codebase.Causal as Causal import qualified Unison.Test.Codebase.Path as Path @@ -28,18 +27,15 @@ import qualified Unison.Test.Typechecker as Typechecker import qualified Unison.Test.Typechecker.Context as Context import qualified Unison.Test.Typechecker.TypeError as TypeError import qualified Unison.Test.UnisonSources as UnisonSources -import qualified Unison.Test.UriParser as UriParser import qualified Unison.Test.Util.Bytes as Bytes +import qualified Unison.Test.Util.Text as Text import qualified Unison.Test.Util.PinBoard as PinBoard import qualified Unison.Test.Util.Pretty as Pretty import qualified Unison.Test.Util.Relation as Relation import qualified Unison.Test.Var as Var import qualified Unison.Test.ANF as ANF import qualified Unison.Test.MCode as MCode -import qualified Unison.Test.VersionParser as VersionParser -import qualified Unison.Test.GitSync as GitSync import qualified Unison.Test.CodebaseInit as CodebaseInit -import qualified Unison.Test.CommandLine as CommandLine -- import qualified Unison.Test.BaseUpgradePushPullTest as BaseUpgradePushPullTest test :: Test () @@ -58,6 +54,7 @@ test = tests , Range.test , ColorText.test , Bytes.test + , Text.test , Relation.test , Path.test , Causal.test @@ -66,17 +63,12 @@ test = tests , ANF.test , MCode.test , Var.test - , ClearCache.test , Typechecker.test - , UriParser.test , Context.test - , GitSync.test , Name.test - , VersionParser.test , Pretty.test , PinBoard.test , CodebaseInit.test - , CommandLine.test , Branch.test ] diff --git a/parser-typechecker/tests/Unison/Test/ANF.hs b/parser-typechecker/tests/Unison/Test/ANF.hs index d2a32abc97..8c2177a792 100644 --- a/parser-typechecker/tests/Unison/Test/ANF.hs +++ b/parser-typechecker/tests/Unison/Test/ANF.hs @@ -26,6 +26,7 @@ import Unison.Test.Common (tm) import Control.Monad.Reader (ReaderT(..)) import Control.Monad.State (evalState) +import qualified Unison.Util.Text as Util.Text -- testSNF s = ok -- where @@ -68,7 +69,7 @@ denormalize (TLit l) = case l of I i -> Term.int () i N n -> Term.nat () n F f -> Term.float () f - T t -> Term.text () t + T t -> Term.text () (Util.Text.toText t) C c -> Term.char () c LM r -> Term.termLink () r LY r -> Term.typeLink () r @@ -125,7 +126,7 @@ denormalizeMatch b | MatchIntegral m df <- b = (dcase (ipat Ty.intRef) <$> mapToList m) ++ dfcase df | MatchText m df <- b - = (dcase (const $ P.Text ()) <$> Map.toList m) ++ dfcase df + = (dcase (const $ P.Text () . Util.Text.toText) <$> Map.toList m) ++ dfcase df | MatchData r cs Nothing <- b , [(0, ([UN], zb))] <- mapToList cs , TAbs i (TMatch j (MatchIntegral m df)) <- zb diff --git a/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs b/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs index 9681a2bea4..005e520848 100644 --- a/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs +++ b/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs @@ -24,7 +24,6 @@ import Unison.Hashable (Hashable) import Data.Set (Set) import Data.Functor.Identity import Unison.Hash (Hash) -import Unison.CommandLine (beforeHash) c :: M (Causal M Int64 [Int64]) c = merge (foldr cons (one [1]) t1) @@ -141,7 +140,7 @@ beforeHashTests = do expect' . not =<< before c1 (longCausal (1000 :: Int64)) ok where - before h c = beforeHash 10 (Causal.currentHash h) c + before h c = Causal.beforeHash 10 (Causal.currentHash h) c sillyMerge _lca l _r = pure l longCausal 0 = Causal.one 0 longCausal n = Causal.cons n (longCausal (n - 1)) diff --git a/parser-typechecker/tests/Unison/Test/DataDeclaration.hs b/parser-typechecker/tests/Unison/Test/DataDeclaration.hs index 639d3c5f40..c314d0fbdf 100644 --- a/parser-typechecker/tests/Unison/Test/DataDeclaration.hs +++ b/parser-typechecker/tests/Unison/Test/DataDeclaration.hs @@ -4,11 +4,12 @@ module Unison.Test.DataDeclaration where import Data.Map (Map, (!)) import qualified Data.Map as Map +import Data.Text.Encoding (encodeUtf8) import EasyTest import Text.RawString.QQ +import qualified U.Util.Hash as Hash import Unison.DataDeclaration (DataDeclaration (..), Decl) import qualified Unison.DataDeclaration as DD -import qualified Unison.Hash as Hash import qualified Unison.Hashing.V2.Convert as Hashing import Unison.Parser.Ann (Ann) import Unison.Parsers (unsafeParseFile) @@ -87,7 +88,7 @@ unhashComponentTest = tests app = Type.app () forall = Type.forall () (-->) = Type.arrow () - h = Hash.unsafeFromBase32Hex "abcd" + h = Hash.fromByteString (encodeUtf8 "abcd") ref = R.Id h 0 a = Var.refIdNamed ref b = Var.named "b" diff --git a/parser-typechecker/tests/Unison/Test/Term.hs b/parser-typechecker/tests/Unison/Test/Term.hs index fbc92b3f88..abe8a3e841 100644 --- a/parser-typechecker/tests/Unison/Test/Term.hs +++ b/parser-typechecker/tests/Unison/Test/Term.hs @@ -5,7 +5,8 @@ module Unison.Test.Term where import EasyTest import qualified Data.Map as Map import Data.Map ( (!) ) -import qualified Unison.Hash as Hash +import Data.Text.Encoding (encodeUtf8) +import qualified U.Util.Hash as Hash import qualified Unison.Reference as R import Unison.Symbol ( Symbol ) import qualified Unison.Term as Term @@ -40,7 +41,7 @@ test = scope "term" $ tests expect $ tm' == expected ok , scope "Term.unhashComponent" $ - let h = Hash.unsafeFromBase32Hex "abcd" + let h = Hash.fromByteString (encodeUtf8 "abcd") ref = R.Id h 0 v1 = Var.refIdNamed @Symbol ref -- input component: `ref = \v1 -> ref` diff --git a/parser-typechecker/tests/Unison/Test/Util/Text.hs b/parser-typechecker/tests/Unison/Test/Util/Text.hs new file mode 100644 index 0000000000..4132916656 --- /dev/null +++ b/parser-typechecker/tests/Unison/Test/Util/Text.hs @@ -0,0 +1,107 @@ +{-# Language OverloadedStrings #-} +module Unison.Test.Util.Text where + +import EasyTest +import Control.Monad +import Data.List (foldl', unfoldr) +import qualified Unison.Util.Text as Text +import qualified Unison.Util.Rope as R +import qualified Data.Text as T + +test :: Test () +test = scope "util.text" . tests $ [ + scope "empty ==" . expect $ Text.empty == Text.empty, + + scope "empty `compare`" . expect $ Text.empty `compare` Text.empty == EQ, + + scope "==" . expect $ let + a = join (replicate 100 ['a'..'z']) + b = join (replicate 45 ['A'..'Z']) + in (Text.pack a <> Text.pack b) == Text.pack (a ++ b), + + scope "at" $ do + expect' (Text.at 0 (Text.pack "abc") == Just 'a') + expect' (Text.at 0 mempty == Nothing) + ok, + + scope "at.cornerCases" $ do + let b = Text.drop 3 $ "abc" <> "def" + expect' (Text.at 0 b == Just 'd') + expect' (Text.at 0 (mempty <> "abc") == Just 'a') + ok, + + scope "consistency with Text" $ do + forM_ [(1::Int)..100] $ \_ -> do + n <- int' 0 50 + m <- int' 0 50 + k <- int' 0 (n + m) + o <- int' 0 50 + let ch = pick (['a'..'z'] <> ['A'..'Z'] <> ['0'..'9']) + t1 <- T.pack <$> replicateM n ch + t2 <- T.pack <$> replicateM m ch + t3 <- T.pack <$> replicateM o ch + let [t1s, t2s, t3s] = Text.fromText <$> [t1, t2, t3] + scope "associativity" $ do + -- note $ show (t1s, t2s, t3s) + expect' $ t1s <> (t2s <> t3s) == (t1s <> t2s) <> t3s + scope "<>" . expect' $ + Text.toText (t1s <> t2s <> t3s) == t1 <> t2 <> t3 + scope "Ord" . expect' $ + (t1 <> t2 <> t3) `compare` t3 == + (t1s <> t2s <> t3s) `compare` t3s + scope "take" . expect' $ + Text.toText (Text.take k (t1s <> t2s)) == T.take k (t1 <> t2) + scope "drop" . expect' $ + Text.toText (Text.drop k (t1s <> t2s)) == T.drop k (t1 <> t2) + scope "uncons" . expect' $ + let ts = t1s <> t2s <> t3s + in unfoldr Text.uncons ts == Text.unpack ts + scope "unsnoc" . expect' $ + let ts = t1s <> t2s <> t3s + in unfoldr (\t -> (\(ts,ch) -> (ch,ts)) <$> Text.unsnoc t) ts == reverse (Text.unpack ts) + scope "at" $ + let bs = t1s <> t2s <> t3s + b = t1 <> t2 <> t3 + in forM_ [0 .. (T.length b - 1)] $ \ind -> + expect' $ Just (T.index b ind) == Text.at ind bs + ok, + + scope "lots of chunks" $ do + forM_ [(0::Int)..25] $ \_ -> do + n <- int' 0 50 + k <- int' 200 600 + chunks <- replicateM n (replicateM k char) + let b1 = foldMap Text.pack chunks + b2 = foldr (<>) mempty (Text.pack <$> chunks) + b3 = foldl' (<>) mempty (Text.pack <$> chunks) + b = T.concat (T.pack <$> chunks) + expect' $ b1 == b2 && b2 == b3 + expect' $ Text.toText b1 == b + expect' $ Text.toText b2 == b + expect' $ Text.toText b3 == b + ok, + + scope "depth checks" $ do + chunk <- Text.pack <$> replicateM 1000 char + forM_ [100,200,400] $ \i0 -> do + n <- int' 200 400 + i <- (i0+) <$> int' (-10) 10 + let chunks = replicate i chunk + t1 = foldMap id chunks + t2 = foldr (<>) mempty chunks + t3 = foldl' (<>) mempty chunks + moarChunks = join (replicate n chunks) + ts = [t1, t2, t3, foldMap id (replicate n t3), + foldr (<>) mempty moarChunks, + foldl' (<>) mempty moarChunks ] + maxDepth = maximum depths + depths = map depth ts + note ("maximum depth for tree with " <> show (i*n) <> " chunks was " <> show maxDepth) + expect' (maxDepth < log2 (i*n) * 2) + ok + ] + where + log2 :: Int -> Int + log2 n | n <= 1 = 0 + | otherwise = 1 + log2 (div n 2) + depth (Text.Text t) = R.debugDepth t \ No newline at end of file diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index adc0dab0fa..ef17d0a928 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -40,23 +40,9 @@ library Unison.Codebase.Causal.FoldHistory Unison.Codebase.CodeLookup Unison.Codebase.CodeLookup.Util - Unison.Codebase.Editor.AuthorInfo - Unison.Codebase.Editor.Command Unison.Codebase.Editor.DisplayObject Unison.Codebase.Editor.Git - Unison.Codebase.Editor.HandleCommand - Unison.Codebase.Editor.HandleInput - Unison.Codebase.Editor.Input - Unison.Codebase.Editor.Output - Unison.Codebase.Editor.Output.BranchDiff - Unison.Codebase.Editor.Output.DumpNamespace - Unison.Codebase.Editor.Propagate Unison.Codebase.Editor.RemoteRepo - Unison.Codebase.Editor.SlurpComponent - Unison.Codebase.Editor.SlurpResult - Unison.Codebase.Editor.TodoOutput - Unison.Codebase.Editor.UriParser - Unison.Codebase.Editor.VersionParser Unison.Codebase.Execute Unison.Codebase.FileCodebase Unison.Codebase.GitError @@ -68,6 +54,7 @@ library Unison.Codebase.Patch Unison.Codebase.Path Unison.Codebase.Path.Parse + Unison.Codebase.PushBehavior Unison.Codebase.Reflog Unison.Codebase.Runtime Unison.Codebase.Serialization @@ -82,21 +69,11 @@ library Unison.Codebase.SyncMode Unison.Codebase.TermEdit Unison.Codebase.TermEdit.Typing - Unison.Codebase.TranscriptParser Unison.Codebase.Type Unison.Codebase.TypeEdit Unison.Codebase.Verbosity Unison.Codebase.Watch Unison.CodebasePath - Unison.CommandLine - Unison.CommandLine.DisplayValues - Unison.CommandLine.FuzzySelect - Unison.CommandLine.Globbing - Unison.CommandLine.InputPattern - Unison.CommandLine.InputPatterns - Unison.CommandLine.Main - Unison.CommandLine.OutputMessages - Unison.CommandLine.Welcome Unison.DeclPrinter Unison.FileParser Unison.FileParsers @@ -195,8 +172,10 @@ library Unison.Util.PinBoard Unison.Util.Pretty Unison.Util.Range + Unison.Util.Rope Unison.Util.Star3 Unison.Util.SyntaxText + Unison.Util.Text Unison.Util.TQueue Unison.Util.TransitiveClosure other-modules: @@ -238,6 +217,7 @@ library , binary , bytes , bytestring + , bytestring-to-vector , cereal , configurator , containers >=0.6.3 @@ -300,6 +280,7 @@ library , temporary , terminal-size , text + , text-short , time , tls , transformers @@ -370,17 +351,14 @@ executable tests Unison.Test.ABT Unison.Test.ANF Unison.Test.Cache - Unison.Test.ClearCache Unison.Test.Codebase.Branch Unison.Test.Codebase.Causal Unison.Test.Codebase.Path Unison.Test.CodebaseInit Unison.Test.ColorText - Unison.Test.CommandLine Unison.Test.Common Unison.Test.DataDeclaration Unison.Test.FileParser - Unison.Test.GitSync Unison.Test.Lexer Unison.Test.MCode Unison.Test.Range @@ -394,15 +372,13 @@ executable tests Unison.Test.Typechecker.Context Unison.Test.Typechecker.TypeError Unison.Test.TypePrinter - Unison.Test.Ucm Unison.Test.UnisonSources - Unison.Test.UriParser Unison.Test.Util.Bytes Unison.Test.Util.PinBoard Unison.Test.Util.Pretty Unison.Test.Util.Relation + Unison.Test.Util.Text Unison.Test.Var - Unison.Test.VersionParser Paths_unison_parser_typechecker hs-source-dirs: tests diff --git a/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs b/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs index 3b5cb5fddf..363404a544 100644 --- a/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs +++ b/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs @@ -5,10 +5,12 @@ module IntegrationTests.ArgumentParsing where import Data.List (intercalate) import Data.Text (pack) +import Data.Time (getCurrentTime, diffUTCTime) import EasyTest import Shellmet (($|)) import System.Exit (ExitCode (ExitSuccess)) import System.Process (readProcessWithExitCode) +import Text.Printf uFile :: String uFile = "unison-cli/integration-tests/IntegrationTests/print.u" @@ -52,17 +54,19 @@ test = , expectExitCode ExitSuccess "stack" defaultArgs ["exec", "--", "unison", "--token", "MY_TOKEN"] "" -- ? , expectExitCode ExitSuccess "stack" defaultArgs ["exec", "--", "unison"] "" , expectExitCode ExitSuccess "stack" defaultArgs ["exec", "--", "unison", "--ui", tempCodebase] "" - -- run.compiled appears to be broken at the moment, these should be added back once it's - -- fixed to ensure it keeps working. - -- , scope "can compile, then run compiled artifact" $ tests - -- [ expectExitCode ExitSuccess "stack" defaultArgs [] ["exec", "--", "unison", "transcript", transcriptFile] "" - -- , expectExitCode ExitSuccess "stack" defaultArgs [] ["exec", "--", "unison", "run.compiled", "./unison-cli/integration-tests/IntegrationTests/main"] "" - -- ] + , scope "can compile, then run compiled artifact" $ tests + [ expectExitCode ExitSuccess "stack" defaultArgs ["exec", "--", "unison", "transcript", transcriptFile] "" + , expectExitCode ExitSuccess "stack" defaultArgs ["exec", "--", "unison", "run.compiled", "./unison-cli/integration-tests/IntegrationTests/main.uc"] "" + ] ] expectExitCode :: ExitCode -> FilePath -> [String] -> [String] -> String -> Test () expectExitCode expected cmd defArgs args stdin = scope (intercalate " " (cmd : args <> defArgs)) do + start <- io $ getCurrentTime (code, _, _) <- io $ readProcessWithExitCode cmd args stdin + end <- io $ getCurrentTime + let diff = diffUTCTime end start + note $ printf "\n[Time: %s sec]" $ show diff expectEqual code expected defaultArgs :: [String] diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index af1889f79e..53c5abc90d 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -2,49 +2,67 @@ name: unison-cli github: unisonweb/unison copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors -default-extensions: - - ApplicativeDo - - BlockArguments - - DeriveFunctor - - DerivingStrategies - - DoAndIfThenElse - - FlexibleContexts - - FlexibleInstances - - LambdaCase - - MultiParamTypeClasses - - ScopedTypeVariables - - TupleSections - - TypeApplications - flags: optimized: manual: true default: false +ghc-options: -Wall + +dependencies: + - ListLike + - async + - base + - bytestring + - configurator + - containers >= 0.6.3 + - cryptonite + - directory + - errors + - extra + - filepath + - haskeline + - lens + - megaparsec >= 5.0.0 && < 7.0.0 + - mtl + - open-browser + - random >= 1.2.0 + - regex-tdfa + - stm + - text + - unison-codebase-sqlite + - unison-core1 + - unison-parser-typechecker + - unison-prelude + - unison-util + - unison-util-relation + - unliftio + +library: + source-dirs: src + +tests: + tests: + dependencies: + - easytest + - here + - shellmet + - temporary + - unison-cli + main: Main.hs + source-dirs: tests + executables: unison: source-dirs: unison main: Main.hs ghc-options: -threaded -rtsopts -with-rtsopts=-I0 -optP-Wno-nonportable-include-path dependencies: - - base - - bytestring - - configurator - - directory - - directory - - errors - - filepath - - megaparsec - - mtl - optparse-applicative >= 0.16.1.0 - shellmet - template-haskell - temporary - - text - - unison-core1 - - unison-parser-typechecker - - unison-prelude - - unliftio + - unison-cli when: - condition: '!os(windows)' dependencies: unix @@ -54,16 +72,9 @@ executables: main: Transcripts.hs ghc-options: -threaded -rtsopts -with-rtsopts=-N -v0 dependencies: - - base - - directory - easytest - - filepath - - shellmet - process - - text - - unison-core1 - - unison-parser-typechecker - - unison-prelude + - shellmet build-tools: - unison-cli:unison @@ -72,15 +83,33 @@ executables: main: Suite.hs ghc-options: -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0 dependencies: - - base - easytest - process - shellmet - - text - - unison-core1 - - unison-parser-typechecker - - unison-prelude + - time when: - condition: flag(optimized) ghc-options: -O2 -funbox-strict-fields + +default-extensions: + - ApplicativeDo + - BangPatterns + - BlockArguments + - DeriveFunctor + - DeriveGeneric + - DerivingStrategies + - DoAndIfThenElse + - FlexibleContexts + - FlexibleInstances + - GeneralizedNewtypeDeriving + - LambdaCase + - MultiParamTypeClasses + - NamedFieldPuns + - OverloadedStrings + - PatternSynonyms + - RankNTypes + - ScopedTypeVariables + - TupleSections + - TypeApplications + - ViewPatterns diff --git a/parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs b/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/AuthorInfo.hs rename to unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs b/unison-cli/src/Unison/Codebase/Editor/Command.hs similarity index 99% rename from parser-typechecker/src/Unison/Codebase/Editor/Command.hs rename to unison-cli/src/Unison/Codebase/Editor/Command.hs index 1e8e903889..360f9e42d3 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Command.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Command.hs @@ -250,7 +250,7 @@ data Command -- Execute a UnisonFile for its IO effects -- todo: Execute should do some evaluation? - Execute :: PPE.PrettyPrintEnv -> UF.TypecheckedUnisonFile v Ann -> Command m i v (Runtime.WatchResults v Ann) + Execute :: PPE.PrettyPrintEnv -> UF.TypecheckedUnisonFile v Ann -> [String] -> Command m i v (Runtime.WatchResults v Ann) CreateAuthorInfo :: Text -> Command m i v (AuthorInfo v Ann) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs b/unison-cli/src/Unison/Codebase/Editor/HandleCommand.hs similarity index 98% rename from parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs rename to unison-cli/src/Unison/Codebase/Editor/HandleCommand.hs index 0e84d62da2..0de356cf98 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/HandleCommand.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleCommand.hs @@ -47,6 +47,7 @@ import qualified Unison.Util.Free as Free import Unison.Var (Var) import qualified Unison.WatchKind as WK import Web.Browser (openBrowser) +import System.Environment (withArgs) import qualified Unison.CommandLine.FuzzySelect as Fuzzy import qualified Unison.Codebase.Path as Path @@ -124,7 +125,7 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour env = Parser.ParsingEnv namegen names lift $ typecheck ambient codebase env sourceName source TypecheckFile file ambient -> lift $ typecheck' ambient codebase file - Evaluate ppe unisonFile -> lift $ evalUnisonFile ppe unisonFile + Evaluate ppe unisonFile -> lift $ evalUnisonFile ppe unisonFile [] Evaluate1 ppe useCache term -> lift $ eval1 ppe useCache term LoadLocalRootBranch -> lift $ either (const Branch.empty) id <$> Codebase.getRootBranch codebase LoadLocalBranch h -> lift $ fromMaybe Branch.empty <$> Codebase.getBranchForHash codebase h @@ -182,8 +183,8 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour -- b0 <- Codebase.propagate codebase (Branch.head b) -- pure $ Branch.append b0 b - Execute ppe uf -> - lift $ evalUnisonFile ppe uf + Execute ppe uf args -> + lift $ evalUnisonFile ppe uf args AppendToReflog reason old new -> lift $ Codebase.appendReflog codebase reason old new LoadReflog -> lift $ Codebase.getReflog codebase CreateAuthorInfo t -> AuthorInfo.createAuthorInfo Ann.External t @@ -218,8 +219,8 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour Left _ -> pure () pure $ r <&> Term.amap (const Ann.External) - evalUnisonFile :: PPE.PrettyPrintEnv -> UF.TypecheckedUnisonFile v Ann -> _ - evalUnisonFile ppe unisonFile = do + evalUnisonFile :: PPE.PrettyPrintEnv -> UF.TypecheckedUnisonFile v Ann -> [String] -> _ + evalUnisonFile ppe unisonFile args = withArgs args do let codeLookup = Codebase.toCodeLookup codebase r <- Runtime.evaluateWatches codeLookup ppe watchCache rt unisonFile case r of diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs new file mode 100644 index 0000000000..55356f973b --- /dev/null +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -0,0 +1,3355 @@ +{-# LANGUAGE TemplateHaskell #-} + +module Unison.Codebase.Editor.HandleInput + ( loop, + loopState0, + LoopState (..), + currentPath, + parseSearchType, + ) +where + +-- TODO: Don't import backend + +import qualified Control.Error.Util as ErrorUtil +import Control.Lens +import Control.Monad.Except (ExceptT (..), runExceptT, withExceptT) +import Control.Monad.State (StateT) +import qualified Control.Monad.State as State +import Data.Bifunctor (first, second) +import Data.Configurator () +import Data.Either.Extra (eitherToMaybe) +import qualified Data.Foldable as Foldable +import qualified Data.List as List +import Data.List.Extra (nubOrd) +import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as Nel +import qualified Data.Map as Map +import Data.Sequence (Seq (..)) +import qualified Data.Set as Set +import qualified Data.Text as Text +import Data.Tuple.Extra (uncurry3) +import qualified Text.Megaparsec as P +import U.Util.Timing (unsafeTime) +import qualified Unison.ABT as ABT +import qualified Unison.Builtin as Builtin +import qualified Unison.Builtin.Decls as DD +import qualified Unison.Builtin.Terms as Builtin +import Unison.Codebase.Branch (Branch (..), Branch0 (..)) +import qualified Unison.Codebase.Branch as Branch +import qualified Unison.Codebase.Branch.Merge as Branch +import qualified Unison.Codebase.Branch.Names as Branch +import qualified Unison.Codebase.BranchDiff as BranchDiff +import qualified Unison.Codebase.BranchUtil as BranchUtil +import qualified Unison.Codebase.Causal as Causal +import Unison.Codebase.Editor.AuthorInfo (AuthorInfo (..)) +import Unison.Codebase.Editor.Command as Command +import Unison.Codebase.Editor.DisplayObject +import Unison.Codebase.Editor.Input +import Unison.Codebase.Editor.Output +import qualified Unison.Codebase.Editor.Output as Output +import qualified Unison.Codebase.Editor.Output.BranchDiff as OBranchDiff +import qualified Unison.Codebase.Editor.Output.DumpNamespace as Output.DN +import qualified Unison.Codebase.Editor.Propagate as Propagate +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRemotePath, WriteRepo, printNamespace, writePathToRead, writeToRead) +import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) +import qualified Unison.Codebase.Editor.SlurpComponent as SC +import Unison.Codebase.Editor.SlurpResult (SlurpResult (..)) +import qualified Unison.Codebase.Editor.SlurpResult as Slurp +import qualified Unison.Codebase.Editor.TodoOutput as TO +import qualified Unison.Codebase.Editor.UriParser as UriParser +import qualified Unison.Codebase.MainTerm as MainTerm +import qualified Unison.Codebase.Metadata as Metadata +import Unison.Codebase.Patch (Patch (..)) +import qualified Unison.Codebase.Patch as Patch +import Unison.Codebase.Path (Path, Path' (..)) +import qualified Unison.Codebase.Path as Path +import qualified Unison.Codebase.Path.Parse as Path +import Unison.Codebase.PushBehavior (PushBehavior) +import qualified Unison.Codebase.PushBehavior as PushBehavior +import qualified Unison.Codebase.Reflog as Reflog +import Unison.Codebase.ShortBranchHash (ShortBranchHash) +import qualified Unison.Codebase.ShortBranchHash as SBH +import qualified Unison.Codebase.SyncMode as SyncMode +import Unison.Codebase.TermEdit (TermEdit (..)) +import qualified Unison.Codebase.TermEdit as TermEdit +import qualified Unison.Codebase.TermEdit.Typing as TermEdit +import Unison.Codebase.Type (GitError) +import qualified Unison.Codebase.TypeEdit as TypeEdit +import qualified Unison.Codebase.Verbosity as Verbosity +import qualified Unison.CommandLine.DisplayValues as DisplayValues +import qualified Unison.CommandLine.FuzzySelect as Fuzzy +import qualified Unison.CommandLine.InputPattern as InputPattern +import qualified Unison.CommandLine.InputPatterns as InputPatterns +import qualified Unison.DataDeclaration as DD +import qualified Unison.HashQualified as HQ +import qualified Unison.HashQualified' as HQ' +import qualified Unison.Hashing.V2.Convert as Hashing +import Unison.LabeledDependency (LabeledDependency) +import qualified Unison.LabeledDependency as LD +import qualified Unison.Lexer as L +import Unison.Name (Name) +import qualified Unison.Name as Name +import Unison.NameSegment (NameSegment (..)) +import qualified Unison.NameSegment as NameSegment +import Unison.Names (Names (Names)) +import qualified Unison.Names as Names +import Unison.NamesWithHistory (NamesWithHistory (..)) +import qualified Unison.NamesWithHistory as NamesWithHistory +import Unison.Parser.Ann (Ann (..)) +import Unison.Prelude +import qualified Unison.PrettyPrintEnv as PPE +import qualified Unison.PrettyPrintEnv.Names as PPE +import qualified Unison.PrettyPrintEnvDecl as PPE +import qualified Unison.PrettyPrintEnvDecl.Names as PPE +import Unison.Reference (Reference (..)) +import qualified Unison.Reference as Reference +import Unison.Referent (Referent) +import qualified Unison.Referent as Referent +import Unison.Result (pattern Result) +import qualified Unison.Result as Result +import Unison.Runtime.IOSource (isTest) +import qualified Unison.Runtime.IOSource as DD +import qualified Unison.Runtime.IOSource as IOSource +import Unison.Server.Backend (ShallowListEntry (..), TermEntry (..), TypeEntry (..)) +import qualified Unison.Server.Backend as Backend +import Unison.Server.QueryResult +import Unison.Server.SearchResult (SearchResult) +import qualified Unison.Server.SearchResult as SR +import qualified Unison.Server.SearchResult' as SR' +import qualified Unison.ShortHash as SH +import Unison.Term (Term) +import qualified Unison.Term as Term +import Unison.Type (Type) +import qualified Unison.Type as Type +import qualified Unison.Type.Names as Type +import qualified Unison.Typechecker as Typechecker +import Unison.UnisonFile (TypecheckedUnisonFile) +import qualified Unison.UnisonFile as UF +import qualified Unison.UnisonFile.Names as UF +import qualified Unison.Util.Find as Find +import Unison.Util.Free (Free) +import qualified Unison.Util.Free as Free +import Unison.Util.List (uniqueBy) +import Unison.Util.Monoid (intercalateMap) +import qualified Unison.Util.Monoid as Monoid +import qualified Unison.Util.Pretty as P +import qualified Unison.Util.Relation as R +import qualified Unison.Util.Relation as Relation +import qualified Unison.Util.Relation4 as R4 +import qualified Unison.Util.Star3 as Star3 +import Unison.Util.TransitiveClosure (transitiveClosure) +import Unison.Var (Var) +import qualified Unison.Var as Var +import qualified Unison.WatchKind as WK + +type F m i v = Free (Command m i v) + +-- type (Action m i v) a +type Action m i v = MaybeT (StateT (LoopState m v) (F m i v)) + +data LoopState m v = LoopState + { _root :: Branch m, + _lastSavedRoot :: Branch m, + -- the current position in the namespace + _currentPathStack :: NonEmpty Path.Absolute, + -- TBD + -- , _activeEdits :: Set Branch.EditGuid + + -- The file name last modified, and whether to skip the next file + -- change event for that path (we skip file changes if the file has + -- just been modified programmatically) + _latestFile :: Maybe (FilePath, SkipNextUpdate), + _latestTypecheckedFile :: Maybe (UF.TypecheckedUnisonFile v Ann), + -- The previous user input. Used to request confirmation of + -- questionable user commands. + _lastInput :: Maybe Input, + -- A 1-indexed list of strings that can be referenced by index at the + -- CLI prompt. e.g. Given ["Foo.bat", "Foo.cat"], + -- `rename 2 Foo.foo` will rename `Foo.cat` to `Foo.foo`. + _numberedArgs :: NumberedArgs + } + +type SkipNextUpdate = Bool + +type InputDescription = Text + +makeLenses ''LoopState + +-- replacing the old read/write scalar Lens with "peek" Getter for the NonEmpty +currentPath :: Getter (LoopState m v) Path.Absolute +currentPath = currentPathStack . to Nel.head + +loopState0 :: Branch m -> Path.Absolute -> LoopState m v +loopState0 b p = LoopState b b (pure p) Nothing Nothing Nothing [] + +type Action' m v = Action m (Either Event Input) v + +defaultPatchNameSegment :: NameSegment +defaultPatchNameSegment = "patch" + +prettyPrintEnvDecl :: NamesWithHistory -> Action' m v PPE.PrettyPrintEnvDecl +prettyPrintEnvDecl ns = eval CodebaseHashLength <&> (`PPE.fromNamesDecl` ns) + +loop :: forall m v. (Monad m, Var v) => Action m (Either Event Input) v () +loop = do + uf <- use latestTypecheckedFile + root' <- use root + currentPath' <- use currentPath + latestFile' <- use latestFile + currentBranch' <- getAt currentPath' + e <- eval Input + hqLength <- eval CodebaseHashLength + sbhLength <- eval BranchHashLength + let currentPath'' = Path.unabsolute currentPath' + hqNameQuery q = eval $ HQNameQuery (Just currentPath'') root' q + sbh = SBH.fromHash sbhLength + root0 = Branch.head root' + currentBranch0 = Branch.head currentBranch' + defaultPatchPath :: PatchPath + defaultPatchPath = (Path' $ Left currentPath', defaultPatchNameSegment) + resolveSplit' :: (Path', a) -> (Path, a) + resolveSplit' = Path.fromAbsoluteSplit . Path.toAbsoluteSplit currentPath' + resolveToAbsolute :: Path' -> Path.Absolute + resolveToAbsolute = Path.resolve currentPath' + getAtSplit :: Path.Split -> Maybe (Branch m) + getAtSplit p = BranchUtil.getBranch p root0 + getAtSplit' :: Path.Split' -> Maybe (Branch m) + getAtSplit' = getAtSplit . resolveSplit' + getPatchAtSplit' :: Path.Split' -> Action' m v (Maybe Patch) + getPatchAtSplit' s = do + let (p, seg) = Path.toAbsoluteSplit currentPath' s + b <- getAt p + eval . Eval $ Branch.getMaybePatch seg (Branch.head b) + getHQ'TermsIncludingHistorical p = + getTermsIncludingHistorical (resolveSplit' p) root0 + + getHQ'Terms :: Path.HQSplit' -> Set Referent + getHQ'Terms p = BranchUtil.getTerm (resolveSplit' p) root0 + getHQ'Types :: Path.HQSplit' -> Set Reference + getHQ'Types p = BranchUtil.getType (resolveSplit' p) root0 + + basicPrettyPrintNames :: Names + basicPrettyPrintNames = + Backend.basicPrettyPrintNames root' (Backend.AllNames $ Path.unabsolute currentPath') + + resolveHHQS'Types :: HashOrHQSplit' -> Action' m v (Set Reference) + resolveHHQS'Types = + either + (eval . TypeReferencesByShortHash) + (pure . getHQ'Types) + -- Term Refs and Cons + resolveHHQS'Referents = + either + (eval . TermReferentsByShortHash) + (pure . getHQ'Terms) + getTypes :: Path.Split' -> Set Reference + getTypes = getHQ'Types . fmap HQ'.NameOnly + getTerms :: Path.Split' -> Set Referent + getTerms = getHQ'Terms . fmap HQ'.NameOnly + getPatchAt :: Path.Split' -> Action' m v Patch + getPatchAt patchPath' = do + let (p, seg) = Path.toAbsoluteSplit currentPath' patchPath' + b <- getAt p + eval . Eval $ Branch.getPatch seg (Branch.head b) + withFile ambient sourceName lexed@(text, tokens) k = do + let getHQ = \case + L.Backticks s (Just sh) -> + Just (HQ.HashQualified (Name.unsafeFromString s) sh) + L.WordyId s (Just sh) -> + Just (HQ.HashQualified (Name.unsafeFromString s) sh) + L.SymbolyId s (Just sh) -> + Just (HQ.HashQualified (Name.unsafeFromString s) sh) + L.Hash sh -> Just (HQ.HashOnly sh) + _ -> Nothing + hqs = Set.fromList . mapMaybe (getHQ . L.payload) $ tokens + let parseNames = Backend.getCurrentParseNames (Backend.AllNames currentPath'') root' + latestFile .= Just (Text.unpack sourceName, False) + latestTypecheckedFile .= Nothing + Result notes r <- eval $ Typecheck ambient parseNames sourceName lexed + case r of + -- Parsing failed + Nothing -> + respond $ + ParseErrors text [err | Result.Parsing err <- toList notes] + Just (Left errNames) -> do + ns <- makeShadowedPrintNamesFromHQ hqs errNames + ppe <- suffixifiedPPE ns + let tes = [err | Result.TypeError err <- toList notes] + cbs = + [ bug + | Result.CompilerBug (Result.TypecheckerBug bug) <- + toList notes + ] + when (not $ null tes) . respond $ TypeErrors text ppe tes + when (not $ null cbs) . respond $ CompilerBugs text ppe cbs + Just (Right uf) -> k uf + loadUnisonFile sourceName text = do + let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) + withFile [] sourceName (text, lexed) $ \unisonFile -> do + sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames + names <- displayNames unisonFile + pped <- prettyPrintEnvDecl names + let ppe = PPE.suffixifiedPPE pped + eval . Notify $ Typechecked sourceName ppe sr unisonFile + unlessError' EvaluationFailure do + (bindings, e) <- ExceptT . eval . Evaluate ppe $ unisonFile + lift do + let e' = Map.map go e + go (ann, kind, _hash, _uneval, eval, isHit) = (ann, kind, eval, isHit) + unless (null e') $ + eval . Notify $ Evaluated text ppe bindings e' + latestTypecheckedFile .= Just unisonFile + + case e of + Left (IncomingRootBranch hashes) -> + eval . Notify $ + WarnIncomingRootBranch + (SBH.fromHash sbhLength $ Branch.headHash root') + (Set.map (SBH.fromHash sbhLength) hashes) + Left (UnisonFileChanged sourceName text) -> + -- We skip this update if it was programmatically generated + if maybe False snd latestFile' + then modifying latestFile (fmap (const False) <$>) + else loadUnisonFile sourceName text + Right input -> + let ifConfirmed = ifM (confirmedCommand input) + branchNotFound = respond . BranchNotFound + branchNotFound' = respond . BranchNotFound . Path.unsplit' + patchNotFound :: Path.Split' -> Action' m v () + patchNotFound s = respond $ PatchNotFound s + patchExists :: Path.Split' -> Action' m v () + patchExists s = respond $ PatchAlreadyExists s + typeNotFound = respond . TypeNotFound + typeNotFound' = respond . TypeNotFound' + termNotFound = respond . TermNotFound + termNotFound' = respond . TermNotFound' + nameConflicted src tms tys = respond (DeleteNameAmbiguous hqLength src tms tys) + typeConflicted src = nameConflicted src Set.empty + termConflicted src tms = nameConflicted src tms Set.empty + hashConflicted src = respond . HashAmbiguous src + typeReferences :: [SearchResult] -> [Reference] + typeReferences rs = + [r | SR.Tp (SR.TypeResult _ r _) <- rs] + termReferences :: [SearchResult] -> [Reference] + termReferences rs = + [r | SR.Tm (SR.TermResult _ (Referent.Ref r) _) <- rs] + termResults rs = [r | SR.Tm r <- rs] + typeResults rs = [r | SR.Tp r <- rs] + doRemoveReplacement from patchPath isTerm = do + let patchPath' = fromMaybe defaultPatchPath patchPath + patch <- getPatchAt patchPath' + QueryResult misses' hits <- hqNameQuery [from] + let tpRefs = Set.fromList $ typeReferences hits + tmRefs = Set.fromList $ termReferences hits + misses = + Set.difference + (Set.fromList misses') + if isTerm + then Set.fromList $ SR.termName <$> termResults hits + else Set.fromList $ SR.typeName <$> typeResults hits + go :: Reference -> Action m (Either Event Input) v () + go fr = do + let termPatch = + over Patch.termEdits (R.deleteDom fr) patch + typePatch = + over Patch.typeEdits (R.deleteDom fr) patch + (patchPath'', patchName) = resolveSplit' patchPath' + -- Save the modified patch + stepAtM + inputDescription + ( patchPath'', + Branch.modifyPatches + patchName + (const (if isTerm then termPatch else typePatch)) + ) + -- Say something + success + unless (Set.null misses) $ + respond $ SearchTermsNotFound (Set.toList misses) + traverse_ go (if isTerm then tmRefs else tpRefs) + branchExists dest _x = respond $ BranchAlreadyExists dest + branchExistsSplit = branchExists . Path.unsplit' + typeExists dest = respond . TypeAlreadyExists dest + termExists dest = respond . TermAlreadyExists dest + inputDescription :: InputDescription + inputDescription = case input of + ForkLocalBranchI src dest -> "fork " <> hp' src <> " " <> p' dest + MergeLocalBranchI src dest mode -> case mode of + Branch.RegularMerge -> "merge " <> p' src <> " " <> p' dest + Branch.SquashMerge -> "merge.squash " <> p' src <> " " <> p' dest + ResetRootI src -> "reset-root " <> hp' src + AliasTermI src dest -> "alias.term " <> hhqs' src <> " " <> ps' dest + AliasTypeI src dest -> "alias.type " <> hhqs' src <> " " <> ps' dest + AliasManyI srcs dest -> + "alias.many " <> intercalateMap " " hqs srcs <> " " <> p' dest + MoveTermI src dest -> "move.term " <> hqs' src <> " " <> ps' dest + MoveTypeI src dest -> "move.type " <> hqs' src <> " " <> ps' dest + MoveBranchI src dest -> "move.namespace " <> ops' src <> " " <> ps' dest + MovePatchI src dest -> "move.patch " <> ps' src <> " " <> ps' dest + CopyPatchI src dest -> "copy.patch " <> ps' src <> " " <> ps' dest + DeleteI thing -> "delete " <> hqs' thing + DeleteTermI def -> "delete.term " <> hqs' def + DeleteTypeI def -> "delete.type " <> hqs' def + DeleteBranchI opath -> "delete.namespace " <> ops' opath + DeletePatchI path -> "delete.patch " <> ps' path + ReplaceI src target p -> + "replace " <> HQ.toText src <> " " + <> HQ.toText target + <> " " + <> opatch p + ResolveTermNameI path -> "resolve.termName " <> hqs' path + ResolveTypeNameI path -> "resolve.typeName " <> hqs' path + AddI _selection -> "add" + UpdateI p _selection -> "update " <> opatch p + PropagatePatchI p scope -> "patch " <> ps' p <> " " <> p' scope + UndoI {} -> "undo" + UiI -> "ui" + DocsToHtmlI path dir -> "docs.to-html " <> Path.toText' path <> " " <> Text.pack dir + ExecuteI s args -> "execute " <> (Text.unwords . fmap Text.pack $ (s : args)) + IOTestI hq -> "io.test " <> HQ.toText hq + LinkI md defs -> + "link " <> HQ.toText md <> " " <> intercalateMap " " hqs' defs + UnlinkI md defs -> + "unlink " <> HQ.toText md <> " " <> intercalateMap " " hqs' defs + UpdateBuiltinsI -> "builtins.update" + MergeBuiltinsI -> "builtins.merge" + MergeIOBuiltinsI -> "builtins.mergeio" + MakeStandaloneI out nm -> + "compile.output " <> Text.pack out <> " " <> HQ.toText nm + PullRemoteBranchI orepo dest _syncMode _ -> + (Text.pack . InputPattern.patternName $ InputPatterns.pull) + <> " " + -- todo: show the actual config-loaded namespace + <> maybe + "(remote namespace from .unisonConfig)" + (uncurry3 printNamespace) + orepo + <> " " + <> p' dest + CreateMessage {} -> wat + LoadI {} -> wat + PreviewAddI {} -> wat + PreviewUpdateI {} -> wat + CreateAuthorI (NameSegment id) name -> "create.author " <> id <> " " <> name + CreatePullRequestI {} -> wat + LoadPullRequestI base head dest -> + "pr.load " + <> uncurry3 printNamespace base + <> " " + <> uncurry3 printNamespace head + <> " " + <> p' dest + PushRemoteBranchI {} -> wat + PreviewMergeLocalBranchI {} -> wat + DiffNamespaceI {} -> wat + SwitchBranchI {} -> wat + UpI {} -> wat + PopBranchI {} -> wat + NamesI {} -> wat + TodoI {} -> wat + ListEditsI {} -> wat + ListDependenciesI {} -> wat + ListDependentsI {} -> wat + HistoryI {} -> wat + TestI {} -> wat + LinksI {} -> wat + SearchByNameI {} -> wat + FindShallowI {} -> wat + FindPatchI {} -> wat + ShowDefinitionI {} -> wat + DisplayI {} -> wat + DocsI {} -> wat + ShowDefinitionByPrefixI {} -> wat + ShowReflogI {} -> wat + DebugNumberedArgsI {} -> wat + DebugTypecheckedUnisonFileI {} -> wat + DebugDumpNamespacesI {} -> wat + DebugDumpNamespaceSimpleI {} -> wat + DebugClearWatchI {} -> wat + QuitI {} -> wat + DeprecateTermI {} -> undefined + DeprecateTypeI {} -> undefined + RemoveTermReplacementI src p -> + "delete.term-replacement" <> HQ.toText src <> " " <> opatch p + RemoveTypeReplacementI src p -> + "delete.type-replacement" <> HQ.toText src <> " " <> opatch p + where + hp' = either (Text.pack . show) p' + p' = Text.pack . show . resolveToAbsolute + ops' = maybe "." ps' + opatch = ps' . fromMaybe defaultPatchPath + wat = error $ show input ++ " is not expected to alter the branch" + hhqs' (Left sh) = SH.toText sh + hhqs' (Right x) = hqs' x + hqs' (p, hq) = + Monoid.unlessM (Path.isRoot' p) (p' p) <> "." <> Text.pack (show hq) + hqs (p, hq) = hqs' (Path' . Right . Path.Relative $ p, hq) + ps' = p' . Path.unsplit' + stepAt = Unison.Codebase.Editor.HandleInput.stepAt inputDescription + stepManyAt = Unison.Codebase.Editor.HandleInput.stepManyAt inputDescription + stepManyAtNoSync = + Unison.Codebase.Editor.HandleInput.stepManyAtNoSync + updateRoot = flip Unison.Codebase.Editor.HandleInput.updateRoot inputDescription + syncRoot = use root >>= updateRoot + updateAtM = Unison.Codebase.Editor.HandleInput.updateAtM inputDescription + unlessGitError = unlessError' Output.GitError + importRemoteBranch ns mode = ExceptT . eval $ ImportRemoteBranch ns mode + loadSearchResults = eval . LoadSearchResults + handleFailedDelete failed failedDependents = do + failed <- loadSearchResults $ SR.fromNames failed + failedDependents <- loadSearchResults $ SR.fromNames failedDependents + ppe <- + fqnPPE + =<< makePrintNamesFromLabeled' + (foldMap SR'.labeledDependencies $ failed <> failedDependents) + respond $ CantDelete ppe failed failedDependents + saveAndApplyPatch patchPath'' patchName patch' = do + stepAtM + (inputDescription <> " (1/2)") + ( patchPath'', + Branch.modifyPatches patchName (const patch') + ) + -- Apply the modified patch to the current path + -- since we might be able to propagate further. + void $ propagatePatch inputDescription patch' currentPath' + -- Say something + success + previewResponse sourceName sr uf = do + names <- displayNames uf + ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl names + respond $ Typechecked (Text.pack sourceName) ppe sr uf + + -- Add default metadata to all added types and terms in a slurp component. + -- + -- No-op if the slurp component is empty. + addDefaultMetadata :: + SlurpComponent v -> + Action m (Either Event Input) v () + addDefaultMetadata adds = + when (not (SC.isEmpty adds)) do + let addedVs = Set.toList $ SC.types adds <> SC.terms adds + addedNs = traverse (Path.hqSplitFromName' . Name.unsafeFromVar) addedVs + case addedNs of + Nothing -> + error $ + "I couldn't parse a name I just added to the codebase! " + <> "-- Added names: " + <> show addedVs + Just addedNames -> do + dm <- resolveDefaultMetadata currentPath' + case toList dm of + [] -> pure () + dm' -> do + let hqs = traverse InputPatterns.parseHashQualifiedName dm' + case hqs of + Left e -> + respond $ + ConfiguredMetadataParseError + (Path.absoluteToPath' currentPath') + (show dm') + e + Right defaultMeta -> + manageLinks True addedNames defaultMeta Metadata.insert + + -- Add/remove links between definitions and metadata. + -- `silent` controls whether this produces any output to the user. + -- `srcs` is (names of the) definitions to pass to `op` + -- `mdValues` is (names of the) metadata to pass to `op` + -- `op` is the operation to add/remove/alter metadata mappings. + -- e.g. `Metadata.insert` is passed to add metadata links. + manageLinks :: + Bool -> + [(Path', HQ'.HQSegment)] -> + [HQ.HashQualified Name] -> + ( forall r. + Ord r => + (r, Metadata.Type, Metadata.Value) -> + Branch.Star r NameSegment -> + Branch.Star r NameSegment + ) -> + Action m (Either Event Input) v () + manageLinks silent srcs mdValues op = do + runExceptT (for mdValues \val -> ExceptT (getMetadataFromName val)) >>= \case + Left output -> respond output + Right metadata -> do + before <- Branch.head <$> use root + traverse_ go metadata + if silent + then respond DefaultMetadataNotification + else do + after <- Branch.head <$> use root + (ppe, outputDiff) <- diffHelper before after + if OBranchDiff.isEmpty outputDiff + then respond NoOp + else + respondNumbered $ + ShowDiffNamespace + Path.absoluteEmpty + Path.absoluteEmpty + ppe + outputDiff + where + go :: (Metadata.Type, Metadata.Value) -> Action m (Either Event Input) v () + go (mdType, mdValue) = do + newRoot <- use root + let r0 = Branch.head newRoot + getTerms p = BranchUtil.getTerm (resolveSplit' p) r0 + getTypes p = BranchUtil.getType (resolveSplit' p) r0 + !srcle = toList . getTerms =<< srcs + !srclt = toList . getTypes =<< srcs + let step b0 = + let tmUpdates terms = foldl' go terms srcle + where + go terms src = op (src, mdType, mdValue) terms + tyUpdates types = foldl' go types srclt + where + go types src = op (src, mdType, mdValue) types + in over Branch.terms tmUpdates . over Branch.types tyUpdates $ b0 + steps = srcs <&> \(path, _hq) -> (Path.unabsolute (resolveToAbsolute path), step) + stepManyAtNoSync steps + + delete :: + (Path.HQSplit' -> Set Referent) -> -- compute matching terms + (Path.HQSplit' -> Set Reference) -> -- compute matching types + Path.HQSplit' -> + Action' m v () + delete getHQ'Terms getHQ'Types hq = do + let matchingTerms = toList (getHQ'Terms hq) + let matchingTypes = toList (getHQ'Types hq) + case (matchingTerms, matchingTypes) of + ([], []) -> respond (NameNotFound hq) + (Set.fromList -> tms, Set.fromList -> tys) -> goMany tms tys + where + resolvedPath = resolveSplit' (HQ'.toName <$> hq) + goMany tms tys = do + let rootNames = Branch.toNames root0 + name = Path.toName (Path.unsplit resolvedPath) + toRel :: Ord ref => Set ref -> R.Relation Name ref + toRel = R.fromList . fmap (name,) . toList + -- these names are relative to the root + toDelete = Names (toRel tms) (toRel tys) + (failed, failedDependents) <- + getEndangeredDependents (eval . GetDependents) toDelete rootNames + if failed == mempty + then do + let makeDeleteTermNames = fmap (BranchUtil.makeDeleteTermName resolvedPath) . toList $ tms + let makeDeleteTypeNames = fmap (BranchUtil.makeDeleteTypeName resolvedPath) . toList $ tys + stepManyAt (makeDeleteTermNames ++ makeDeleteTypeNames) + root'' <- use root + diffHelper (Branch.head root') (Branch.head root'') + >>= respondNumbered . uncurry ShowDiffAfterDeleteDefinitions + else handleFailedDelete failed failedDependents + in case input of + CreateMessage pretty -> + respond $ PrintMessage pretty + ShowReflogI -> do + entries <- convertEntries Nothing [] <$> eval LoadReflog + numberedArgs .= fmap (('#' :) . SBH.toString . Output.hash) entries + respond $ ShowReflog entries + where + -- reverses & formats entries, adds synthetic entries when there is a + -- discontinuity in the reflog. + convertEntries :: + Maybe Branch.Hash -> + [Output.ReflogEntry] -> + [Reflog.Entry Branch.Hash] -> + [Output.ReflogEntry] + convertEntries _ acc [] = acc + convertEntries Nothing acc entries@(Reflog.Entry old _ _ : _) = + convertEntries + (Just old) + (Output.ReflogEntry (SBH.fromHash sbhLength old) "(initial reflogged namespace)" : acc) + entries + convertEntries (Just lastHash) acc entries@(Reflog.Entry old new reason : rest) = + if lastHash /= old + then + convertEntries + (Just old) + (Output.ReflogEntry (SBH.fromHash sbhLength old) "(external change)" : acc) + entries + else + convertEntries + (Just new) + (Output.ReflogEntry (SBH.fromHash sbhLength new) reason : acc) + rest + ResetRootI src0 -> + case src0 of + Left hash -> unlessError do + newRoot <- resolveShortBranchHash hash + lift do + updateRoot newRoot + success + Right path' -> do + newRoot <- getAt $ resolveToAbsolute path' + if Branch.isEmpty newRoot + then respond $ BranchNotFound path' + else do + updateRoot newRoot + success + ForkLocalBranchI src0 dest0 -> do + let tryUpdateDest srcb dest0 = do + let dest = resolveToAbsolute dest0 + -- if dest isn't empty: leave dest unchanged, and complain. + destb <- getAt dest + if Branch.isEmpty destb + then do + ok <- updateAtM dest (const $ pure srcb) + if ok then success else respond $ BranchEmpty src0 + else respond $ BranchAlreadyExists dest0 + case src0 of + Left hash -> unlessError do + srcb <- resolveShortBranchHash hash + lift $ tryUpdateDest srcb dest0 + Right path' -> do + srcb <- getAt $ resolveToAbsolute path' + if Branch.isEmpty srcb + then respond $ BranchNotFound path' + else tryUpdateDest srcb dest0 + MergeLocalBranchI src0 dest0 mergeMode -> do + let [src, dest] = resolveToAbsolute <$> [src0, dest0] + srcb <- getAt src + if Branch.isEmpty srcb + then branchNotFound src0 + else do + let err = Just $ MergeAlreadyUpToDate src0 dest0 + mergeBranchAndPropagateDefaultPatch mergeMode inputDescription err srcb (Just dest0) dest + PreviewMergeLocalBranchI src0 dest0 -> do + let [src, dest] = resolveToAbsolute <$> [src0, dest0] + srcb <- getAt src + if Branch.isEmpty srcb + then branchNotFound src0 + else do + destb <- getAt dest + merged <- eval $ Merge Branch.RegularMerge srcb destb + if merged == destb + then respond (PreviewMergeAlreadyUpToDate src0 dest0) + else + diffHelper (Branch.head destb) (Branch.head merged) + >>= respondNumbered . uncurry (ShowDiffAfterMergePreview dest0 dest) + DiffNamespaceI before0 after0 -> do + let [beforep, afterp] = + resolveToAbsolute <$> [before0, after0] + before <- Branch.head <$> getAt beforep + after <- Branch.head <$> getAt afterp + case (Branch.isEmpty0 before, Branch.isEmpty0 after) of + (True, True) -> respond . NamespaceEmpty $ Right (beforep, afterp) + (True, False) -> respond . NamespaceEmpty $ Left beforep + (False, True) -> respond . NamespaceEmpty $ Left afterp + _ -> do + (ppe, outputDiff) <- diffHelper before after + respondNumbered $ ShowDiffNamespace beforep afterp ppe outputDiff + CreatePullRequestI baseRepo headRepo -> unlessGitError do + (cleanupBase, baseBranch) <- viewRemoteBranch baseRepo + (cleanupHead, headBranch) <- viewRemoteBranch headRepo + lift do + merged <- eval $ Merge Branch.RegularMerge baseBranch headBranch + (ppe, diff) <- diffHelper (Branch.head baseBranch) (Branch.head merged) + respondNumbered $ ShowDiffAfterCreatePR baseRepo headRepo ppe diff + eval . Eval $ do + cleanupBase + cleanupHead + LoadPullRequestI baseRepo headRepo dest0 -> do + let desta = resolveToAbsolute dest0 + let dest = Path.unabsolute desta + destb <- getAt desta + if Branch.isEmpty0 (Branch.head destb) + then unlessGitError do + baseb <- importRemoteBranch baseRepo SyncMode.ShortCircuit + headb <- importRemoteBranch headRepo SyncMode.ShortCircuit + lift $ do + mergedb <- eval $ Merge Branch.RegularMerge baseb headb + squashedb <- eval $ Merge Branch.SquashMerge headb baseb + stepManyAt + [ BranchUtil.makeSetBranch (dest, "base") baseb, + BranchUtil.makeSetBranch (dest, "head") headb, + BranchUtil.makeSetBranch (dest, "merged") mergedb, + BranchUtil.makeSetBranch (dest, "squashed") squashedb + ] + let base = snoc dest0 "base" + head = snoc dest0 "head" + merged = snoc dest0 "merged" + squashed = snoc dest0 "squashed" + respond $ LoadPullRequest baseRepo headRepo base head merged squashed + loadPropagateDiffDefaultPatch + inputDescription + (Just merged) + (snoc desta "merged") + else respond . BranchNotEmpty . Path.Path' . Left $ currentPath' + + -- move the root to a sub-branch + MoveBranchI Nothing dest -> do + b <- use root + stepManyAt + [ (Path.empty, const Branch.empty0), + BranchUtil.makeSetBranch (resolveSplit' dest) b + ] + success + MoveBranchI (Just src) dest -> + maybe (branchNotFound' src) srcOk (getAtSplit' src) + where + srcOk b = maybe (destOk b) (branchExistsSplit dest) (getAtSplit' dest) + destOk b = do + stepManyAt + [ BranchUtil.makeSetBranch (resolveSplit' src) Branch.empty, + BranchUtil.makeSetBranch (resolveSplit' dest) b + ] + success -- could give rando stats about new defns + MovePatchI src dest -> do + psrc <- getPatchAtSplit' src + pdest <- getPatchAtSplit' dest + case (psrc, pdest) of + (Nothing, _) -> patchNotFound src + (_, Just _) -> patchExists dest + (Just p, Nothing) -> do + stepManyAt + [ BranchUtil.makeDeletePatch (resolveSplit' src), + BranchUtil.makeReplacePatch (resolveSplit' dest) p + ] + success + CopyPatchI src dest -> do + psrc <- getPatchAtSplit' src + pdest <- getPatchAtSplit' dest + case (psrc, pdest) of + (Nothing, _) -> patchNotFound src + (_, Just _) -> patchExists dest + (Just p, Nothing) -> do + stepAt (BranchUtil.makeReplacePatch (resolveSplit' dest) p) + success + DeletePatchI src -> do + psrc <- getPatchAtSplit' src + case psrc of + Nothing -> patchNotFound src + Just _ -> do + stepAt (BranchUtil.makeDeletePatch (resolveSplit' src)) + success + DeleteBranchI Nothing -> + ifConfirmed + ( do + stepAt (Path.empty, const Branch.empty0) + respond DeletedEverything + ) + (respond DeleteEverythingConfirmation) + DeleteBranchI (Just p) -> + maybe (branchNotFound' p) go $ getAtSplit' p + where + go (Branch.head -> b) = do + (failed, failedDependents) <- + let rootNames = Branch.toNames root0 + toDelete = + Names.prefix0 + (Path.toName . Path.unsplit . resolveSplit' $ p) -- resolveSplit' incorporates currentPath + (Branch.toNames b) + in getEndangeredDependents (eval . GetDependents) toDelete rootNames + if failed == mempty + then do + stepAt $ BranchUtil.makeSetBranch (resolveSplit' p) Branch.empty + -- Looks similar to the 'toDelete' above... investigate me! ;) + diffHelper b Branch.empty0 + >>= respondNumbered + . uncurry + ( ShowDiffAfterDeleteBranch $ + resolveToAbsolute (Path.unsplit' p) + ) + else handleFailedDelete failed failedDependents + SwitchBranchI maybePath' -> do + mpath' <- case maybePath' of + Nothing -> + fuzzySelectNamespace root0 <&> \case + [] -> Nothing + -- Shouldn't be possible to get multiple paths here, we can just take + -- the first. + (p : _) -> Just p + Just p -> pure $ Just p + case mpath' of + Nothing -> pure () + Just path' -> do + let path = resolveToAbsolute path' + currentPathStack %= Nel.cons path + branch' <- getAt path + when (Branch.isEmpty branch') (respond $ CreatedNewBranch path) + UpI -> + use currentPath >>= \p -> case Path.unsnoc (Path.unabsolute p) of + Nothing -> pure () + Just (path, _) -> currentPathStack %= Nel.cons (Path.Absolute path) + PopBranchI -> + use (currentPathStack . to Nel.uncons) >>= \case + (_, Nothing) -> respond StartOfCurrentPathHistory + (_, Just t) -> currentPathStack .= t + HistoryI resultsCap diffCap from -> case from of + Left hash -> unlessError do + b <- resolveShortBranchHash hash + lift $ doHistory 0 b [] + Right path' -> do + let path = resolveToAbsolute path' + branch' <- getAt path + if Branch.isEmpty branch' + then respond $ CreatedNewBranch path + else doHistory 0 branch' [] + where + doHistory !n b acc = + if maybe False (n >=) resultsCap + then respond $ History diffCap acc (PageEnd (sbh $ Branch.headHash b) n) + else case Branch._history b of + Causal.One {} -> + respond $ History diffCap acc (EndOfLog . sbh $ Branch.headHash b) + Causal.Merge {Causal.tails} -> + respond $ History diffCap acc (MergeTail (sbh $ Branch.headHash b) . map sbh $ Map.keys tails) + Causal.Cons {Causal.tail} -> do + b' <- fmap Branch.Branch . eval . Eval $ snd tail + let elem = (sbh $ Branch.headHash b, Branch.namesDiff b' b) + doHistory (n + 1) b' (elem : acc) + UndoI -> do + prev <- eval . Eval $ Branch.uncons root' + case prev of + Nothing -> + respond . CantUndo $ + if Branch.isOne root' + then CantUndoPastStart + else CantUndoPastMerge + Just (_, prev) -> do + updateRoot prev + diffHelper (Branch.head prev) (Branch.head root') + >>= respondNumbered . uncurry Output.ShowDiffAfterUndo + UiI -> eval UI + DocsToHtmlI namespacePath' sourceDirectory -> do + let absPath = Path.unabsolute $ resolveToAbsolute namespacePath' + eval (DocsToHtml root' absPath sourceDirectory) + AliasTermI src dest -> do + referents <- resolveHHQS'Referents src + case (toList referents, toList (getTerms dest)) of + ([r], []) -> do + stepAt (BranchUtil.makeAddTermName (resolveSplit' dest) r (oldMD r)) + success + ([_], rs@(_ : _)) -> termExists dest (Set.fromList rs) + ([], _) -> either termNotFound' termNotFound src + (rs, _) -> + either hashConflicted termConflicted src (Set.fromList rs) + where + oldMD r = + either + (const mempty) + ( \src -> + let p = resolveSplit' src + in BranchUtil.getTermMetadataAt p r root0 + ) + src + AliasTypeI src dest -> do + refs <- resolveHHQS'Types src + case (toList refs, toList (getTypes dest)) of + ([r], []) -> do + stepAt (BranchUtil.makeAddTypeName (resolveSplit' dest) r (oldMD r)) + success + ([_], rs@(_ : _)) -> typeExists dest (Set.fromList rs) + ([], _) -> either typeNotFound' typeNotFound src + (rs, _) -> + either + (\src -> hashConflicted src . Set.map Referent.Ref) + typeConflicted + src + (Set.fromList rs) + where + oldMD r = + either + (const mempty) + ( \src -> + let p = resolveSplit' src + in BranchUtil.getTypeMetadataAt p r root0 + ) + src + + -- this implementation will happily produce name conflicts, + -- but will surface them in a normal diff at the end of the operation. + AliasManyI srcs dest' -> do + let destAbs = resolveToAbsolute dest' + old <- getAt destAbs + let (unknown, actions) = foldl' go mempty srcs + stepManyAt actions + new <- getAt destAbs + diffHelper (Branch.head old) (Branch.head new) + >>= respondNumbered . uncurry (ShowDiffAfterModifyBranch dest' destAbs) + unless (null unknown) $ + respond . SearchTermsNotFound . fmap fixupOutput $ unknown + where + -- a list of missing sources (if any) and the actions that do the work + go :: + ([Path.HQSplit], [(Path, Branch0 m -> Branch0 m)]) -> + Path.HQSplit -> + ([Path.HQSplit], [(Path, Branch0 m -> Branch0 m)]) + go (missingSrcs, actions) hqsrc = + let src :: Path.Split + src = second HQ'.toName hqsrc + proposedDest :: Path.Split + proposedDest = second HQ'.toName hqProposedDest + hqProposedDest :: Path.HQSplit + hqProposedDest = + first Path.unabsolute $ + Path.resolve (resolveToAbsolute dest') hqsrc + -- `Nothing` if src doesn't exist + doType :: Maybe [(Path, Branch0 m -> Branch0 m)] + doType = case ( BranchUtil.getType hqsrc currentBranch0, + BranchUtil.getType hqProposedDest root0 + ) of + (null -> True, _) -> Nothing -- missing src + (rsrcs, existing) -> + -- happy path + Just . map addAlias . toList $ Set.difference rsrcs existing + where + addAlias r = BranchUtil.makeAddTypeName proposedDest r (oldMD r) + oldMD r = BranchUtil.getTypeMetadataAt src r currentBranch0 + doTerm :: Maybe [(Path, Branch0 m -> Branch0 m)] + doTerm = case ( BranchUtil.getTerm hqsrc currentBranch0, + BranchUtil.getTerm hqProposedDest root0 + ) of + (null -> True, _) -> Nothing -- missing src + (rsrcs, existing) -> + Just . map addAlias . toList $ Set.difference rsrcs existing + where + addAlias r = BranchUtil.makeAddTermName proposedDest r (oldMD r) + oldMD r = BranchUtil.getTermMetadataAt src r currentBranch0 + in case (doType, doTerm) of + (Nothing, Nothing) -> (missingSrcs :> hqsrc, actions) + (Just as, Nothing) -> (missingSrcs, actions ++ as) + (Nothing, Just as) -> (missingSrcs, actions ++ as) + (Just as1, Just as2) -> (missingSrcs, actions ++ as1 ++ as2) + + fixupOutput :: Path.HQSplit -> HQ.HashQualified Name + fixupOutput = fmap Path.toName . HQ'.toHQ . Path.unsplitHQ + NamesI thing -> do + ns0 <- basicParseNames + let ns = NamesWithHistory ns0 mempty + terms = NamesWithHistory.lookupHQTerm thing ns + types = NamesWithHistory.lookupHQType thing ns + printNames = NamesWithHistory basicPrettyPrintNames mempty + terms' :: Set (Referent, Set (HQ'.HashQualified Name)) + terms' = Set.map go terms + where + go r = (r, NamesWithHistory.termName hqLength r printNames) + types' :: Set (Reference, Set (HQ'.HashQualified Name)) + types' = Set.map go types + where + go r = (r, NamesWithHistory.typeName hqLength r printNames) + respond $ ListNames hqLength (toList types') (toList terms') + LinkI mdValue srcs -> do + manageLinks False srcs [mdValue] Metadata.insert + syncRoot + UnlinkI mdValue srcs -> do + manageLinks False srcs [mdValue] Metadata.delete + syncRoot + + -- > links List.map (.Docs .English) + -- > links List.map -- give me all the + -- > links Optional License + LinksI src mdTypeStr -> unlessError do + (ppe, out) <- getLinks (show input) src (Right mdTypeStr) + lift do + numberedArgs .= fmap (HQ.toString . view _1) out + respond $ ListOfLinks ppe out + DocsI srcs -> do + srcs' <- case srcs of + [] -> + fuzzySelectTermsAndTypes root0 + -- HQ names should always parse as a valid split, so we just discard any + -- that don't to satisfy the type-checker. + <&> mapMaybe (eitherToMaybe . Path.parseHQSplit' . HQ.toString) + xs -> pure xs + for_ srcs' (docsI (show input) basicPrettyPrintNames) + CreateAuthorI authorNameSegment authorFullName -> do + initialBranch <- getAt currentPath' + AuthorInfo + guid@(guidRef, _, _) + author@(authorRef, _, _) + copyrightHolder@(copyrightHolderRef, _, _) <- + eval $ CreateAuthorInfo authorFullName + -- add the new definitions to the codebase and to the namespace + traverse_ (eval . uncurry3 PutTerm) [guid, author, copyrightHolder] + stepManyAt + [ BranchUtil.makeAddTermName (resolveSplit' authorPath) (d authorRef) mempty, + BranchUtil.makeAddTermName (resolveSplit' copyrightHolderPath) (d copyrightHolderRef) mempty, + BranchUtil.makeAddTermName (resolveSplit' guidPath) (d guidRef) mempty + ] + finalBranch <- getAt currentPath' + -- print some output + diffHelper (Branch.head initialBranch) (Branch.head finalBranch) + >>= respondNumbered + . uncurry + ( ShowDiffAfterCreateAuthor + authorNameSegment + (Path.unsplit' base) + currentPath' + ) + where + d :: Reference.Id -> Referent + d = Referent.Ref . Reference.DerivedId + base :: Path.Split' = (Path.relativeEmpty', "metadata") + authorPath = base |> "authors" |> authorNameSegment + copyrightHolderPath = base |> "copyrightHolders" |> authorNameSegment + guidPath = authorPath |> "guid" + MoveTermI src dest -> + case (toList (getHQ'Terms src), toList (getTerms dest)) of + ([r], []) -> do + stepManyAt + [ BranchUtil.makeDeleteTermName p r, + BranchUtil.makeAddTermName (resolveSplit' dest) r (mdSrc r) + ] + success + ([_], rs) -> termExists dest (Set.fromList rs) + ([], _) -> termNotFound src + (rs, _) -> termConflicted src (Set.fromList rs) + where + p = resolveSplit' (HQ'.toName <$> src) + mdSrc r = BranchUtil.getTermMetadataAt p r root0 + MoveTypeI src dest -> + case (toList (getHQ'Types src), toList (getTypes dest)) of + ([r], []) -> do + stepManyAt + [ BranchUtil.makeDeleteTypeName p r, + BranchUtil.makeAddTypeName (resolveSplit' dest) r (mdSrc r) + ] + success + ([_], rs) -> typeExists dest (Set.fromList rs) + ([], _) -> typeNotFound src + (rs, _) -> typeConflicted src (Set.fromList rs) + where + p = resolveSplit' (HQ'.toName <$> src) + mdSrc r = BranchUtil.getTypeMetadataAt p r root0 + DeleteI hq -> delete getHQ'Terms getHQ'Types hq + DeleteTypeI hq -> delete (const Set.empty) getHQ'Types hq + DeleteTermI hq -> delete getHQ'Terms (const Set.empty) hq + DisplayI outputLoc names' -> do + names <- case names' of + [] -> fuzzySelectTermsAndTypes root0 + ns -> pure ns + traverse_ (displayI basicPrettyPrintNames outputLoc) names + ShowDefinitionI outputLoc query -> handleShowDefinition outputLoc query + FindPatchI -> do + let patches = + [ Path.toName $ Path.snoc p seg + | (p, b) <- Branch.toList0 currentBranch0, + (seg, _) <- Map.toList (Branch._edits b) + ] + respond $ ListOfPatches $ Set.fromList patches + numberedArgs .= fmap Name.toString patches + FindShallowI pathArg -> do + let pathArgAbs = resolveToAbsolute pathArg + ppe = + Backend.basicSuffixifiedNames + sbhLength + root' + (Backend.AllNames $ Path.fromPath' pathArg) + res <- eval $ FindShallow pathArgAbs + case res of + Left e -> handleBackendError e + Right entries -> do + -- caching the result as an absolute path, for easier jumping around + numberedArgs .= fmap entryToHQString entries + respond $ ListShallow ppe entries + where + entryToHQString :: ShallowListEntry v Ann -> String + entryToHQString e = + fixup $ case e of + ShallowTypeEntry (TypeEntry _ hq _) -> HQ'.toString hq + ShallowTermEntry (TermEntry _ hq _ _) -> HQ'.toString hq + ShallowBranchEntry ns _ _ -> NameSegment.toString ns + ShallowPatchEntry ns -> NameSegment.toString ns + where + fixup s = case pathArgStr of + "" -> s + p | last p == '.' -> p ++ s + p -> p ++ "." ++ s + pathArgStr = show pathArg + SearchByNameI isVerbose _showAll ws -> do + let prettyPrintNames = basicPrettyPrintNames + unlessError do + results <- case ws of + -- no query, list everything + [] -> pure . listBranch $ Branch.head currentBranch' + -- type query + ":" : ws -> + ExceptT (parseSearchType (show input) (unwords ws)) >>= \typ -> + ExceptT $ do + let named = Branch.deepReferents root0 + matches <- + fmap (filter (`Set.member` named) . toList) $ + eval $ GetTermsOfType typ + matches <- + if null matches + then do + respond NoExactTypeMatches + fmap (filter (`Set.member` named) . toList) $ + eval $ GetTermsMentioningType typ + else pure matches + let results = + -- in verbose mode, aliases are shown, so we collapse all + -- aliases to a single search result; in non-verbose mode, + -- a separate result may be shown for each alias + (if isVerbose then uniqueBy SR.toReferent else id) $ + searchResultsFor prettyPrintNames matches [] + pure . pure $ results + + -- name query + (map HQ.unsafeFromString -> qs) -> do + let ns = basicPrettyPrintNames + let srs = searchBranchScored ns fuzzyNameDistance qs + pure $ uniqueBy SR.toReferent srs + lift do + numberedArgs .= fmap searchResultToHQString results + results' <- loadSearchResults results + ppe <- + suffixifiedPPE + =<< makePrintNamesFromLabeled' + (foldMap SR'.labeledDependencies results') + respond $ ListOfDefinitions ppe isVerbose results' + ResolveTypeNameI hq -> + zeroOneOrMore (getHQ'Types hq) (typeNotFound hq) go (typeConflicted hq) + where + conflicted = getHQ'Types (fmap HQ'.toNameOnly hq) + makeDelete = + BranchUtil.makeDeleteTypeName (resolveSplit' (HQ'.toName <$> hq)) + go r = stepManyAt . fmap makeDelete . toList . Set.delete r $ conflicted + ResolveTermNameI hq -> do + refs <- getHQ'TermsIncludingHistorical hq + zeroOneOrMore refs (termNotFound hq) go (termConflicted hq) + where + conflicted = getHQ'Terms (fmap HQ'.toNameOnly hq) + makeDelete = + BranchUtil.makeDeleteTermName (resolveSplit' (HQ'.toName <$> hq)) + go r = stepManyAt . fmap makeDelete . toList . Set.delete r $ conflicted + ReplaceI from to patchPath -> do + let patchPath' = fromMaybe defaultPatchPath patchPath + patch <- getPatchAt patchPath' + QueryResult fromMisses' fromHits <- hqNameQuery [from] + QueryResult toMisses' toHits <- hqNameQuery [to] + let termsFromRefs = termReferences fromHits + termsToRefs = termReferences toHits + typesFromRefs = typeReferences fromHits + typesToRefs = typeReferences toHits + --- Here are all the kinds of misses + --- [X] [X] + --- [Type] [Term] + --- [Term] [Type] + --- [Type] [X] + --- [Term] [X] + --- [X] [Type] + --- [X] [Term] + -- Type hits are term misses + termFromMisses = + fromMisses' + <> (SR.typeName <$> typeResults fromHits) + termToMisses = + toMisses' + <> (SR.typeName <$> typeResults toHits) + -- Term hits are type misses + typeFromMisses = + fromMisses' + <> (SR.termName <$> termResults fromHits) + typeToMisses = + toMisses' + <> (SR.termName <$> termResults toHits) + + termMisses = termFromMisses <> termToMisses + typeMisses = typeFromMisses <> typeToMisses + + replaceTerms :: + Reference -> + Reference -> + Action m (Either Event Input) v () + replaceTerms fr tr = do + mft <- eval $ LoadTypeOfTerm fr + mtt <- eval $ LoadTypeOfTerm tr + let termNotFound = + respond . TermNotFound' + . SH.take hqLength + . Reference.toShortHash + case (mft, mtt) of + (Nothing, _) -> termNotFound fr + (_, Nothing) -> termNotFound tr + (Just ft, Just tt) -> do + let patch' = + -- The modified patch + over + Patch.termEdits + ( R.insert fr (Replace tr (TermEdit.typing tt ft)) + . R.deleteDom fr + ) + patch + (patchPath'', patchName) = resolveSplit' patchPath' + saveAndApplyPatch patchPath'' patchName patch' + + replaceTypes :: + Reference -> + Reference -> + Action m (Either Event Input) v () + replaceTypes fr tr = do + let patch' = + -- The modified patch + over + Patch.typeEdits + (R.insert fr (TypeEdit.Replace tr) . R.deleteDom fr) + patch + (patchPath'', patchName) = resolveSplit' patchPath' + saveAndApplyPatch patchPath'' patchName patch' + + ambiguous t rs = + let rs' = Set.map Referent.Ref $ Set.fromList rs + in case t of + HQ.HashOnly h -> + hashConflicted h rs' + (Path.parseHQSplit' . HQ.toString -> Right n) -> + termConflicted n rs' + _ -> respond . BadName $ HQ.toString t + + mismatch typeName termName = respond $ TypeTermMismatch typeName termName + + case (termsFromRefs, termsToRefs, typesFromRefs, typesToRefs) of + ([], [], [], []) -> respond $ SearchTermsNotFound termMisses + ([_], [], [], [_]) -> mismatch to from + ([], [_], [_], []) -> mismatch from to + ([_], [], _, _) -> respond $ SearchTermsNotFound termMisses + ([], [_], _, _) -> respond $ SearchTermsNotFound termMisses + (_, _, [_], []) -> respond $ SearchTermsNotFound typeMisses + (_, _, [], [_]) -> respond $ SearchTermsNotFound typeMisses + ([fr], [tr], [], []) -> replaceTerms fr tr + ([], [], [fr], [tr]) -> replaceTypes fr tr + (froms, [_], [], []) -> ambiguous from froms + ([], [], froms, [_]) -> ambiguous from froms + ([_], tos, [], []) -> ambiguous to tos + ([], [], [_], tos) -> ambiguous to tos + (_, _, _, _) -> error "unpossible" + LoadI maybePath -> + case maybePath <|> (fst <$> latestFile') of + Nothing -> respond NoUnisonFile + Just path -> do + res <- eval . LoadSource . Text.pack $ path + case res of + InvalidSourceNameError -> respond $ InvalidSourceName path + LoadError -> respond $ SourceLoadFailed path + LoadSuccess contents -> loadUnisonFile (Text.pack path) contents + AddI hqs -> + case uf of + Nothing -> respond NoUnisonFile + Just uf -> do + sr <- + Slurp.disallowUpdates + . applySelection hqs uf + . toSlurpResult currentPath' uf + <$> slurpResultNames + let adds = Slurp.adds sr + stepAtNoSync (Path.unabsolute currentPath', doSlurpAdds adds uf) + eval . AddDefsToCodebase . filterBySlurpResult sr $ uf + ppe <- prettyPrintEnvDecl =<< displayNames uf + respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr + addDefaultMetadata adds + syncRoot + PreviewAddI hqs -> case (latestFile', uf) of + (Just (sourceName, _), Just uf) -> do + sr <- + Slurp.disallowUpdates + . applySelection hqs uf + . toSlurpResult currentPath' uf + <$> slurpResultNames + previewResponse sourceName sr uf + _ -> respond NoUnisonFile + UpdateI maybePatchPath hqs -> case uf of + Nothing -> respond NoUnisonFile + Just uf -> do + let patchPath = fromMaybe defaultPatchPath maybePatchPath + slurpCheckNames <- slurpResultNames + currentPathNames <- currentPathNames + let sr = + applySelection hqs uf + . toSlurpResult currentPath' uf + $ slurpCheckNames + addsAndUpdates = Slurp.updates sr <> Slurp.adds sr + fileNames = UF.typecheckedToNames uf + -- todo: display some error if typeEdits or termEdits itself contains a loop + typeEdits :: Map Name (Reference, Reference) + typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) + where + f v = case ( toList (Names.typesNamed slurpCheckNames n), + toList (Names.typesNamed fileNames n) + ) of + ([old], [new]) -> (n, (old, new)) + _ -> + error $ + "Expected unique matches for " + ++ Var.nameStr v + ++ " but got: " + ++ show otherwise + where + n = Name.unsafeFromVar v + hashTerms :: Map Reference (Type v Ann) + hashTerms = Map.fromList (toList hashTerms0) + where + hashTerms0 = (\(r, _wk, _tm, typ) -> (r, typ)) <$> UF.hashTerms uf + termEdits :: Map Name (Reference, Reference) + termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) + where + g v = case ( toList (Names.refTermsNamed slurpCheckNames n), + toList (Names.refTermsNamed fileNames n) + ) of + ([old], [new]) -> (n, (old, new)) + _ -> + error $ + "Expected unique matches for " + ++ Var.nameStr v + ++ " but got: " + ++ show otherwise + where + n = Name.unsafeFromVar v + termDeprecations :: [(Name, Referent)] + termDeprecations = + [ (n, r) | (oldTypeRef, _) <- Map.elems typeEdits, (n, r) <- Names.constructorsForType oldTypeRef currentPathNames + ] + + ye'ol'Patch <- getPatchAt patchPath + -- If `uf` updates a -> a', we want to replace all (a0 -> a) in patch + -- with (a0 -> a') in patch'. + -- So for all (a0 -> a) in patch, for all (a -> a') in `uf`, + -- we must know the type of a0, a, a'. + let -- we need: + -- all of the `old` references from the `new` edits, + -- plus all of the `old` references for edits from patch we're replacing + collectOldForTyping :: [(Reference, Reference)] -> Patch -> Set Reference + collectOldForTyping new old = foldl' f mempty (new ++ fromOld) + where + f acc (r, _r') = Set.insert r acc + newLHS = Set.fromList . fmap fst $ new + fromOld :: [(Reference, Reference)] + fromOld = + [ (r, r') | (r, TermEdit.Replace r' _) <- R.toList . Patch._termEdits $ old, Set.member r' newLHS + ] + neededTypes = collectOldForTyping (toList termEdits) ye'ol'Patch + + allTypes :: Map Reference (Type v Ann) <- + fmap Map.fromList . for (toList neededTypes) $ \r -> + (r,) . fromMaybe (Type.builtin External "unknown type") + <$> (eval . LoadTypeOfTerm) r + + let typing r1 r2 = case (Map.lookup r1 allTypes, Map.lookup r2 hashTerms) of + (Just t1, Just t2) + | Typechecker.isEqual t1 t2 -> TermEdit.Same + | Typechecker.isSubtype t1 t2 -> TermEdit.Subtype + | otherwise -> TermEdit.Different + e -> + error $ + "compiler bug: typing map not constructed properly\n" + <> "typing " + <> show r1 + <> " " + <> show r2 + <> " : " + <> show e + + let updatePatch :: Patch -> Patch + updatePatch p = foldl' step2 p' termEdits + where + p' = foldl' step1 p typeEdits + step1 p (r, r') = Patch.updateType r (TypeEdit.Replace r') p + step2 p (r, r') = Patch.updateTerm typing r (TermEdit.Replace r' (typing r r')) p + (p, seg) = Path.toAbsoluteSplit currentPath' patchPath + updatePatches :: Branch0 m -> m (Branch0 m) + updatePatches = Branch.modifyPatches seg updatePatch + + when (Slurp.isNonempty sr) $ do + -- take a look at the `updates` from the SlurpResult + -- and make a patch diff to record a replacement from the old to new references + stepManyAtMNoSync + [ ( Path.unabsolute currentPath', + pure . doSlurpUpdates typeEdits termEdits termDeprecations + ), + ( Path.unabsolute currentPath', + pure . doSlurpAdds addsAndUpdates uf + ), + (Path.unabsolute p, updatePatches) + ] + eval . AddDefsToCodebase . filterBySlurpResult sr $ uf + ppe <- prettyPrintEnvDecl =<< displayNames uf + respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr + -- propagatePatch prints TodoOutput + void $ propagatePatchNoSync (updatePatch ye'ol'Patch) currentPath' + addDefaultMetadata addsAndUpdates + syncRoot + PreviewUpdateI hqs -> case (latestFile', uf) of + (Just (sourceName, _), Just uf) -> do + sr <- + applySelection hqs uf + . toSlurpResult currentPath' uf + <$> slurpResultNames + previewResponse sourceName sr uf + _ -> respond NoUnisonFile + TodoI patchPath branchPath' -> do + patch <- getPatchAt (fromMaybe defaultPatchPath patchPath) + doShowTodoOutput patch $ resolveToAbsolute branchPath' + TestI showOk showFail -> do + let testTerms = + Map.keys . R4.d1 . uncurry R4.selectD34 isTest + . Branch.deepTermMetadata + $ currentBranch0 + testRefs = Set.fromList [r | Referent.Ref r <- toList testTerms] + oks results = + [ (r, msg) + | (r, Term.List' ts) <- Map.toList results, + Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts, + cid == DD.okConstructorId && ref == DD.testResultRef + ] + fails results = + [ (r, msg) + | (r, Term.List' ts) <- Map.toList results, + Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts, + cid == DD.failConstructorId && ref == DD.testResultRef + ] + cachedTests <- fmap Map.fromList . eval $ LoadWatches WK.TestWatch testRefs + let stats = Output.CachedTests (Set.size testRefs) (Map.size cachedTests) + names <- + makePrintNamesFromLabeled' $ + LD.referents testTerms + <> LD.referents [DD.okConstructorReferent, DD.failConstructorReferent] + ppe <- fqnPPE names + respond $ + TestResults + stats + ppe + showOk + showFail + (oks cachedTests) + (fails cachedTests) + let toCompute = Set.difference testRefs (Map.keysSet cachedTests) + unless (Set.null toCompute) $ do + let total = Set.size toCompute + computedTests <- fmap join . for (toList toCompute `zip` [1 ..]) $ \(r, n) -> + case r of + Reference.DerivedId rid -> do + tm <- eval $ LoadTerm rid + case tm of + Nothing -> [] <$ respond (TermNotFound' . SH.take hqLength . Reference.toShortHash $ Reference.DerivedId rid) + Just tm -> do + respond $ TestIncrementalOutputStart ppe (n, total) r tm + -- v don't cache; test cache populated below + tm' <- eval $ Evaluate1 ppe False tm + case tm' of + Left e -> respond (EvaluationFailure e) $> [] + Right tm' -> do + -- After evaluation, cache the result of the test + eval $ PutWatch WK.TestWatch rid tm' + respond $ TestIncrementalOutputEnd ppe (n, total) r tm' + pure [(r, tm')] + r -> error $ "unpossible, tests can't be builtins: " <> show r + + let m = Map.fromList computedTests + respond $ TestResults Output.NewlyComputed ppe showOk showFail (oks m) (fails m) + + -- ListBranchesI -> + -- eval ListBranches >>= respond . ListOfBranches currentBranchName' + -- DeleteBranchI branchNames -> withBranches branchNames $ \bnbs -> do + -- uniqueToDelete <- prettyUniqueDefinitions bnbs + -- let deleteBranches b = + -- traverse (eval . DeleteBranch) b >> respond (Success input) + -- if (currentBranchName' `elem` branchNames) + -- then respond DeletingCurrentBranch + -- else if null uniqueToDelete + -- then deleteBranches branchNames + -- else ifM (confirmedCommand input) + -- (deleteBranches branchNames) + -- (respond . DeleteBranchConfirmation $ uniqueToDelete) + + PropagatePatchI patchPath scopePath -> do + patch <- getPatchAt patchPath + updated <- propagatePatch inputDescription patch (resolveToAbsolute scopePath) + unless updated (respond $ NothingToPatch patchPath scopePath) + ExecuteI main args -> + addRunMain main uf >>= \case + NoTermWithThatName -> do + ppe <- suffixifiedPPE (NamesWithHistory.NamesWithHistory basicPrettyPrintNames mempty) + mainType <- eval RuntimeMain + respond $ NoMainFunction main ppe [mainType] + TermHasBadType ty -> do + ppe <- suffixifiedPPE (NamesWithHistory.NamesWithHistory basicPrettyPrintNames mempty) + mainType <- eval RuntimeMain + respond $ BadMainFunction main ty ppe [mainType] + RunMainSuccess unisonFile -> do + ppe <- executePPE unisonFile + e <- eval $ Execute ppe unisonFile args + + case e of + Left e -> respond $ EvaluationFailure e + Right _ -> pure () -- TODO + MakeStandaloneI output main -> do + mainType <- eval RuntimeMain + parseNames <- + flip NamesWithHistory.NamesWithHistory mempty <$> basicPrettyPrintNamesA + ppe <- suffixifiedPPE parseNames + let resolved = toList $ NamesWithHistory.lookupHQTerm main parseNames + smain = HQ.toString main + filtered <- + catMaybes + <$> traverse (\r -> fmap (r,) <$> loadTypeOfTerm r) resolved + case filtered of + [(Referent.Ref ref, ty)] + | Typechecker.isSubtype ty mainType -> + eval (MakeStandalone ppe ref output) >>= \case + Just err -> respond $ EvaluationFailure err + Nothing -> pure () + | otherwise -> + respond $ BadMainFunction smain ty ppe [mainType] + _ -> respond $ NoMainFunction smain ppe [mainType] + IOTestI main -> do + -- todo - allow this to run tests from scratch file, using addRunMain + testType <- eval RuntimeTest + parseNames <- (`NamesWithHistory.NamesWithHistory` mempty) <$> basicPrettyPrintNamesA + ppe <- suffixifiedPPE parseNames + -- use suffixed names for resolving the argument to display + let oks results = + [ (r, msg) + | (r, Term.List' ts) <- results, + Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts, + cid == DD.okConstructorId && ref == DD.testResultRef + ] + fails results = + [ (r, msg) + | (r, Term.List' ts) <- results, + Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts, + cid == DD.failConstructorId && ref == DD.testResultRef + ] + + results = NamesWithHistory.lookupHQTerm main parseNames + in case toList results of + [Referent.Ref ref] -> do + typ <- loadTypeOfTerm (Referent.Ref ref) + case typ of + Just typ | Typechecker.isSubtype typ testType -> do + let a = ABT.annotation tm + tm = DD.forceTerm a a (Term.ref a ref) + in do + -- v Don't cache IO tests + tm' <- eval $ Evaluate1 ppe False tm + case tm' of + Left e -> respond (EvaluationFailure e) + Right tm' -> + respond $ TestResults Output.NewlyComputed ppe True True (oks [(ref, tm')]) (fails [(ref, tm')]) + _ -> respond $ NoMainFunction (HQ.toString main) ppe [testType] + _ -> respond $ NoMainFunction (HQ.toString main) ppe [testType] + + -- UpdateBuiltinsI -> do + -- stepAt updateBuiltins + -- checkTodo + + MergeBuiltinsI -> do + -- these were added once, but maybe they've changed and need to be + -- added again. + let uf = + UF.typecheckedUnisonFile + (Map.fromList Builtin.builtinDataDecls) + (Map.fromList Builtin.builtinEffectDecls) + [Builtin.builtinTermsSrc Intrinsic] + mempty + eval $ AddDefsToCodebase uf + -- add the names; note, there are more names than definitions + -- due to builtin terms; so we don't just reuse `uf` above. + let srcb = BranchUtil.fromNames Builtin.names0 + _ <- updateAtM (currentPath' `snoc` "builtin") $ \destb -> + eval $ Merge Branch.RegularMerge srcb destb + success + MergeIOBuiltinsI -> do + -- these were added once, but maybe they've changed and need to be + -- added again. + let uf = + UF.typecheckedUnisonFile + (Map.fromList Builtin.builtinDataDecls) + (Map.fromList Builtin.builtinEffectDecls) + [Builtin.builtinTermsSrc Intrinsic] + mempty + eval $ AddDefsToCodebase uf + -- these have not necessarily been added yet + eval $ AddDefsToCodebase IOSource.typecheckedFile' + + -- add the names; note, there are more names than definitions + -- due to builtin terms; so we don't just reuse `uf` above. + let names0 = + Builtin.names0 + <> UF.typecheckedToNames @v IOSource.typecheckedFile' + let srcb = BranchUtil.fromNames names0 + _ <- updateAtM (currentPath' `snoc` "builtin") $ \destb -> + eval $ Merge Branch.RegularMerge srcb destb + + success + ListEditsI maybePath -> do + let (p, seg) = + maybe + (Path.toAbsoluteSplit currentPath' defaultPatchPath) + (Path.toAbsoluteSplit currentPath') + maybePath + patch <- eval . Eval . Branch.getPatch seg . Branch.head =<< getAt p + ppe <- + suffixifiedPPE + =<< makePrintNamesFromLabeled' (Patch.labeledDependencies patch) + respond $ ListEdits patch ppe + PullRemoteBranchI mayRepo path syncMode verbosity -> unlessError do + ns <- maybe (writePathToRead <$> resolveConfiguredGitUrl Pull path) pure mayRepo + lift $ unlessGitError do + b <- importRemoteBranch ns syncMode + let msg = Just $ PullAlreadyUpToDate ns path + let destAbs = resolveToAbsolute path + let printDiffPath = if Verbosity.isSilent verbosity then Nothing else Just path + lift $ mergeBranchAndPropagateDefaultPatch Branch.RegularMerge inputDescription msg b printDiffPath destAbs + PushRemoteBranchI mayRepo path pushBehavior syncMode -> handlePushRemoteBranch mayRepo path pushBehavior syncMode + ListDependentsI hq -> + -- todo: add flag to handle transitive efficiently + resolveHQToLabeledDependencies hq >>= \lds -> + if null lds + then respond $ LabeledReferenceNotFound hq + else for_ lds $ \ld -> do + dependents <- + let tp r = eval $ GetDependents r + tm (Referent.Ref r) = eval $ GetDependents r + tm (Referent.Con r _i _ct) = eval $ GetDependents r + in LD.fold tp tm ld + (missing, names0) <- eval . Eval $ Branch.findHistoricalRefs' dependents root' + let types = R.toList $ Names.types names0 + let terms = fmap (second Referent.toReference) $ R.toList $ Names.terms names0 + let names = types <> terms + numberedArgs .= fmap (Text.unpack . Reference.toText) ((fmap snd names) <> toList missing) + respond $ ListDependents hqLength ld names missing + ListDependenciesI hq -> + -- todo: add flag to handle transitive efficiently + resolveHQToLabeledDependencies hq >>= \lds -> + if null lds + then respond $ LabeledReferenceNotFound hq + else for_ lds $ \ld -> do + dependencies :: Set Reference <- + let tp r@(Reference.DerivedId i) = + eval (LoadType i) <&> \case + Nothing -> error $ "What happened to " ++ show i ++ "?" + Just decl -> Set.delete r . DD.dependencies $ DD.asDataDecl decl + tp _ = pure mempty + tm (Referent.Ref r@(Reference.DerivedId i)) = + eval (LoadTerm i) <&> \case + Nothing -> error $ "What happened to " ++ show i ++ "?" + Just tm -> Set.delete r $ Term.dependencies tm + tm con@(Referent.Con (Reference.DerivedId i) cid _ct) = + eval (LoadType i) <&> \case + Nothing -> error $ "What happened to " ++ show i ++ "?" + Just decl -> case DD.typeOfConstructor (DD.asDataDecl decl) cid of + Nothing -> error $ "What happened to " ++ show con ++ "?" + Just tp -> Type.dependencies tp + tm _ = pure mempty + in LD.fold tp tm ld + (missing, names0) <- eval . Eval $ Branch.findHistoricalRefs' dependencies root' + let types = R.toList $ Names.types names0 + let terms = fmap (second Referent.toReference) $ R.toList $ Names.terms names0 + let names = types <> terms + numberedArgs .= fmap (Text.unpack . Reference.toText) ((fmap snd names) <> toList missing) + respond $ ListDependencies hqLength ld names missing + DebugNumberedArgsI -> use numberedArgs >>= respond . DumpNumberedArgs + DebugTypecheckedUnisonFileI -> case uf of + Nothing -> respond NoUnisonFile + Just uf -> + let datas, effects, terms :: [(Name, Reference.Id)] + datas = [(Name.unsafeFromVar v, r) | (v, (r, _d)) <- Map.toList $ UF.dataDeclarationsId' uf] + effects = [(Name.unsafeFromVar v, r) | (v, (r, _e)) <- Map.toList $ UF.effectDeclarationsId' uf] + terms = [(Name.unsafeFromVar v, r) | (v, (r, _wk, _tm, _tp)) <- Map.toList $ UF.hashTermsId uf] + in eval . Notify $ DumpUnisonFileHashes hqLength datas effects terms + DebugDumpNamespacesI -> do + let seen h = State.gets (Set.member h) + set h = State.modify (Set.insert h) + getCausal b = (Branch.headHash b, pure $ Branch._history b) + goCausal :: forall m. Monad m => [(Branch.Hash, m (Branch.UnwrappedBranch m))] -> StateT (Set Branch.Hash) m () + goCausal [] = pure () + goCausal ((h, mc) : queue) = do + ifM (seen h) (goCausal queue) do + lift mc >>= \case + Causal.One h b -> goBranch h b mempty queue + Causal.Cons h b tail -> goBranch h b [fst tail] (tail : queue) + Causal.Merge h b (Map.toList -> tails) -> goBranch h b (map fst tails) (tails ++ queue) + goBranch :: forall m. Monad m => Branch.Hash -> Branch0 m -> [Branch.Hash] -> [(Branch.Hash, m (Branch.UnwrappedBranch m))] -> StateT (Set Branch.Hash) m () + goBranch h b (Set.fromList -> causalParents) queue = case b of + Branch0 terms0 types0 children0 patches0 _ _ _ _ _ _ -> + let wrangleMetadata :: (Ord r, Ord n) => Metadata.Star r n -> r -> (r, (Set n, Set Metadata.Value)) + wrangleMetadata s r = + (r, (R.lookupDom r $ Star3.d1 s, Set.map snd . R.lookupDom r $ Star3.d3 s)) + terms = Map.fromList . map (wrangleMetadata terms0) . Foldable.toList $ Star3.fact terms0 + types = Map.fromList . map (wrangleMetadata types0) . Foldable.toList $ Star3.fact types0 + patches = fmap fst patches0 + children = fmap Branch.headHash children0 + in do + let d = Output.DN.DumpNamespace terms types patches children causalParents + -- the alternate implementation that doesn't rely on `traceM` blows up + traceM $ P.toPlain 200 (prettyDump (h, d)) + set h + goCausal (map getCausal (Foldable.toList children0) ++ queue) + prettyDump (h, Output.DN.DumpNamespace terms types patches children causalParents) = + P.lit "Namespace " <> P.shown h <> P.newline + <> ( P.indentN 2 $ + P.linesNonEmpty + [ Monoid.unlessM (null causalParents) $ P.lit "Causal Parents:" <> P.newline <> P.indentN 2 (P.lines (map P.shown $ Set.toList causalParents)), + Monoid.unlessM (null terms) $ P.lit "Terms:" <> P.newline <> P.indentN 2 (P.lines (map (prettyDefn Referent.toText) $ Map.toList terms)), + Monoid.unlessM (null types) $ P.lit "Types:" <> P.newline <> P.indentN 2 (P.lines (map (prettyDefn Reference.toText) $ Map.toList types)), + Monoid.unlessM (null patches) $ P.lit "Patches:" <> P.newline <> P.indentN 2 (P.column2 (map (bimap P.shown P.shown) $ Map.toList patches)), + Monoid.unlessM (null children) $ P.lit "Children:" <> P.newline <> P.indentN 2 (P.column2 (map (bimap P.shown P.shown) $ Map.toList children)) + ] + ) + where + prettyLinks renderR r [] = P.indentN 2 $ P.text (renderR r) + prettyLinks renderR r links = P.indentN 2 (P.lines (P.text (renderR r) : (links <&> \r -> "+ " <> P.text (Reference.toText r)))) + prettyDefn renderR (r, (Foldable.toList -> names, Foldable.toList -> links)) = + P.lines (P.shown <$> if null names then [NameSegment ""] else names) <> P.newline <> prettyLinks renderR r links + void . eval . Eval . flip State.execStateT mempty $ goCausal [getCausal root'] + DebugDumpNamespaceSimpleI -> do + for_ (Relation.toList . Branch.deepTypes . Branch.head $ root') \(r, name) -> + traceM $ show name ++ ",Type," ++ Text.unpack (Reference.toText r) + for_ (Relation.toList . Branch.deepTerms . Branch.head $ root') \(r, name) -> + traceM $ show name ++ ",Term," ++ Text.unpack (Referent.toText r) + DebugClearWatchI {} -> eval ClearWatchCache + DeprecateTermI {} -> notImplemented + DeprecateTypeI {} -> notImplemented + RemoveTermReplacementI from patchPath -> + doRemoveReplacement from patchPath True + RemoveTypeReplacementI from patchPath -> + doRemoveReplacement from patchPath False + ShowDefinitionByPrefixI {} -> notImplemented + UpdateBuiltinsI -> notImplemented + QuitI -> MaybeT $ pure Nothing + where + notImplemented = eval $ Notify NotImplemented + success = respond Success + + resolveDefaultMetadata :: Path.Absolute -> Action' m v [String] + resolveDefaultMetadata path = do + let superpaths = Path.ancestors path + xs <- + for + superpaths + ( \path -> do + mayNames <- + eval . ConfigLookup @[String] $ configKey "DefaultMetadata" path + pure . join $ toList mayNames + ) + pure . join $ toList xs + + case e of + Right input -> lastInput .= Just input + _ -> pure () + +handlePushRemoteBranch :: + forall m v. + Applicative m => + -- | The URL to push to. If missing, it is looked up in `.unisonConfig`. + Maybe WriteRemotePath -> + -- | The local path to push. If relative, it's resolved relative to the current path (`cd`). + Path' -> + -- | The push behavior (whether the remote branch is required to be empty or non-empty). + PushBehavior -> + -- | The sync mode. Unused as of 21-11-04, but do look for yourself. + SyncMode.SyncMode -> + Action' m v () +handlePushRemoteBranch mayRepo path pushBehavior syncMode = do + srcb <- do + currentPath' <- use currentPath + getAt (Path.resolve currentPath' path) + unlessError do + (repo, remotePath) <- maybe (resolveConfiguredGitUrl Push path) pure mayRepo + (cleanup, remoteRoot) <- + unsafeTime "Push viewRemoteBranch" do + withExceptT Output.GitError do + viewRemoteBranch (writeToRead repo, Nothing, Path.empty) + -- We don't merge `srcb` with the remote branch, we just replace it. This push will be rejected if this rewinds time or misses any new + -- updates in the remote branch that aren't in `srcb` already. + case Branch.modifyAtM remotePath (\remoteBranch -> if shouldPushTo remoteBranch then Just srcb else Nothing) remoteRoot of + Nothing -> lift do + eval (Eval cleanup) + respond (RefusedToPush pushBehavior) + Just newRemoteRoot -> do + unsafeTime "Push syncRemoteRootBranch" do + withExceptT Output.GitError do + syncRemoteRootBranch repo newRemoteRoot syncMode + lift do + eval (Eval cleanup) + respond Success + where + -- Per `pushBehavior`, we are either: + -- + -- (1) updating an empty branch, which fails if the branch isn't empty (`push.create`) + -- (2) updating a non-empty branch, which fails if the branch is empty (`push`) + shouldPushTo :: Branch m -> Bool + shouldPushTo remoteBranch = do + case pushBehavior of + PushBehavior.RequireEmpty -> Branch.isEmpty remoteBranch + PushBehavior.RequireNonEmpty -> not (Branch.isEmpty remoteBranch) + +-- | Handle a @ShowDefinitionI@ input command, i.e. `view` or `edit`. +handleShowDefinition :: forall m v. Functor m => OutputLocation -> [HQ.HashQualified Name] -> Action' m v () +handleShowDefinition outputLoc inputQuery = do + -- If the query is empty, run a fuzzy search. + query <- + if null inputQuery + then do + branch <- fuzzyBranch + fuzzySelectTermsAndTypes branch + else pure inputQuery + currentPath' <- Path.unabsolute <$> use currentPath + root' <- use root + hqLength <- eval CodebaseHashLength + Backend.DefinitionResults terms types misses <- + eval (GetDefinitionsBySuffixes (Just currentPath') root' includeCycles query) + outputPath <- getOutputPath + when (not (null types && null terms)) do + let printNames = Backend.getCurrentPrettyNames (Backend.AllNames currentPath') root' + let ppe = PPE.fromNamesDecl hqLength printNames + respond (DisplayDefinitions outputPath ppe types terms) + when (not (null misses)) (respond (SearchTermsNotFound misses)) + -- We set latestFile to be programmatically generated, if we + -- are viewing these definitions to a file - this will skip the + -- next update for that file (which will happen immediately) + latestFile .= ((,True) <$> outputPath) + where + -- `view`: fuzzy find globally; `edit`: fuzzy find local to current branch + fuzzyBranch :: Action' m v (Branch0 m) + fuzzyBranch = + case outputLoc of + ConsoleLocation {} -> Branch.head <$> use root + -- fuzzy finding for 'edit's are local to the current branch + LatestFileLocation {} -> currentBranch0 + FileLocation {} -> currentBranch0 + where + currentBranch0 = do + currentPath' <- use currentPath + currentBranch <- getAt currentPath' + pure (Branch.head currentBranch) + -- `view`: don't include cycles; `edit`: include cycles + includeCycles = + case outputLoc of + ConsoleLocation -> Backend.DontIncludeCycles + FileLocation _ -> Backend.IncludeCycles + LatestFileLocation -> Backend.IncludeCycles + + -- Get the file path to send the definition(s) to. `Nothing` means the terminal. + getOutputPath :: Action' m v (Maybe FilePath) + getOutputPath = + case outputLoc of + ConsoleLocation -> pure Nothing + FileLocation path -> pure (Just path) + LatestFileLocation -> + use latestFile <&> \case + Nothing -> Just "scratch.u" + Just (path, _) -> Just path + +-- Takes a maybe (namespace address triple); returns it as-is if `Just`; +-- otherwise, tries to load a value from .unisonConfig, and complains +-- if needed. +resolveConfiguredGitUrl :: + PushPull -> + Path' -> + ExceptT (Output v) (Action' m v) WriteRemotePath +resolveConfiguredGitUrl pushPull destPath' = ExceptT do + currentPath' <- use currentPath + let destPath = Path.resolve currentPath' destPath' + let configKey = gitUrlKey destPath + (eval . ConfigLookup) configKey >>= \case + Just url -> + case P.parse UriParser.writeRepoPath (Text.unpack configKey) url of + Left e -> + pure . Left $ + ConfiguredGitUrlParseError pushPull destPath' url (show e) + Right ns -> + pure . Right $ ns + Nothing -> + pure . Left $ NoConfiguredGitUrl pushPull destPath' + +gitUrlKey :: Path.Absolute -> Text +gitUrlKey = configKey "GitUrl" + +configKey :: Text -> Path.Absolute -> Text +configKey k p = + Text.intercalate "." . toList $ + k + :<| fmap + NameSegment.toText + (Path.toSeq $ Path.unabsolute p) + +viewRemoteBranch :: ReadRemoteNamespace -> ExceptT GitError (Action' m v) (m (), Branch m) +viewRemoteBranch ns = ExceptT . eval $ ViewRemoteBranch ns + +syncRemoteRootBranch :: WriteRepo -> Branch m -> SyncMode.SyncMode -> ExceptT GitError (Action' m v) () +syncRemoteRootBranch repo b mode = + ExceptT . eval $ SyncRemoteRootBranch repo b mode + +-- todo: compare to `getHQTerms` / `getHQTypes`. Is one universally better? +resolveHQToLabeledDependencies :: Functor m => HQ.HashQualified Name -> Action' m v (Set LabeledDependency) +resolveHQToLabeledDependencies = \case + HQ.NameOnly n -> do + parseNames <- basicParseNames + let terms, types :: Set LabeledDependency + terms = Set.map LD.referent . Name.searchBySuffix n $ Names.terms parseNames + types = Set.map LD.typeRef . Name.searchBySuffix n $ Names.types parseNames + pure $ terms <> types + -- rationale: the hash should be unique enough that the name never helps + HQ.HashQualified _n sh -> resolveHashOnly sh + HQ.HashOnly sh -> resolveHashOnly sh + where + resolveHashOnly sh = do + terms <- eval $ TermReferentsByShortHash sh + types <- eval $ TypeReferencesByShortHash sh + pure $ Set.map LD.referent terms <> Set.map LD.typeRef types + +doDisplay :: Var v => OutputLocation -> NamesWithHistory -> Term v () -> Action' m v () +doDisplay outputLoc names tm = do + ppe <- prettyPrintEnvDecl names + tf <- use latestTypecheckedFile + let (tms, typs) = maybe mempty UF.indexByReference tf + latestFile' <- use latestFile + let loc = case outputLoc of + ConsoleLocation -> Nothing + FileLocation path -> Just path + LatestFileLocation -> fmap fst latestFile' <|> Just "scratch.u" + useCache = True + evalTerm tm = + fmap ErrorUtil.hush . fmap (fmap Term.unannotate) . eval $ + Evaluate1 (PPE.suffixifiedPPE ppe) useCache (Term.amap (const External) tm) + loadTerm (Reference.DerivedId r) = case Map.lookup r tms of + Nothing -> fmap (fmap Term.unannotate) . eval $ LoadTerm r + Just (tm, _) -> pure (Just $ Term.unannotate tm) + loadTerm _ = pure Nothing + loadDecl (Reference.DerivedId r) = case Map.lookup r typs of + Nothing -> fmap (fmap $ DD.amap (const ())) . eval $ LoadType r + Just decl -> pure (Just $ DD.amap (const ()) decl) + loadDecl _ = pure Nothing + loadTypeOfTerm' (Referent.Ref (Reference.DerivedId r)) + | Just (_, ty) <- Map.lookup r tms = pure $ Just (void ty) + loadTypeOfTerm' r = fmap (fmap void) . loadTypeOfTerm $ r + rendered <- DisplayValues.displayTerm ppe loadTerm loadTypeOfTerm' evalTerm loadDecl tm + respond $ DisplayRendered loc rendered + +getLinks :: + (Var v, Monad m) => + SrcLoc -> + Path.HQSplit' -> + Either (Set Reference) (Maybe String) -> + ExceptT + (Output v) + (Action' m v) + ( PPE.PrettyPrintEnv, + -- e.g. ("Foo.doc", #foodoc, Just (#builtin.Doc) + [(HQ.HashQualified Name, Reference, Maybe (Type v Ann))] + ) +getLinks srcLoc src mdTypeStr = ExceptT $ do + let go = fmap Right . getLinks' src + case mdTypeStr of + Left s -> go (Just s) + Right Nothing -> go Nothing + Right (Just mdTypeStr) -> + parseType srcLoc mdTypeStr >>= \case + Left e -> pure $ Left e + Right typ -> go . Just . Set.singleton $ Hashing.typeToReference typ + +getLinks' :: + (Var v, Monad m) => + Path.HQSplit' -> -- definition to print metadata of + Maybe (Set Reference) -> -- return all metadata if empty + Action' + m + v + ( PPE.PrettyPrintEnv, + -- e.g. ("Foo.doc", #foodoc, Just (#builtin.Doc) + [(HQ.HashQualified Name, Reference, Maybe (Type v Ann))] + ) +getLinks' src selection0 = do + root0 <- Branch.head <$> use root + currentPath' <- use currentPath + let resolveSplit' = Path.fromAbsoluteSplit . Path.toAbsoluteSplit currentPath' + p = resolveSplit' src -- ex: the (parent,hqsegment) of `List.map` - `List` + -- all metadata (type+value) associated with name `src` + allMd = + R4.d34 (BranchUtil.getTermMetadataHQNamed p root0) + <> R4.d34 (BranchUtil.getTypeMetadataHQNamed p root0) + allMd' = maybe allMd (`R.restrictDom` allMd) selection0 + -- then list the values after filtering by type + allRefs :: Set Reference = R.ran allMd' + sigs <- for (toList allRefs) (loadTypeOfTerm . Referent.Ref) + let deps = + Set.map LD.termRef allRefs + <> Set.unions [Set.map LD.typeRef . Type.dependencies $ t | Just t <- sigs] + ppe <- prettyPrintEnvDecl =<< makePrintNamesFromLabeled' deps + let ppeDecl = PPE.unsuffixifiedPPE ppe + let sortedSigs = sortOn snd (toList allRefs `zip` sigs) + let out = [(PPE.termName ppeDecl (Referent.Ref r), r, t) | (r, t) <- sortedSigs] + pure (PPE.suffixifiedPPE ppe, out) + +resolveShortBranchHash :: + ShortBranchHash -> ExceptT (Output v) (Action' m v) (Branch m) +resolveShortBranchHash hash = ExceptT do + hashSet <- eval $ BranchHashesByPrefix hash + len <- eval BranchHashLength + case Set.toList hashSet of + [] -> pure . Left $ NoBranchWithHash hash + [h] -> fmap Right . eval $ LoadLocalBranch h + _ -> pure . Left $ BranchHashAmbiguous hash (Set.map (SBH.fromHash len) hashSet) + +-- Returns True if the operation changed the namespace, False otherwise. +propagatePatchNoSync :: + (Monad m, Var v) => + Patch -> + Path.Absolute -> + Action' m v Bool +propagatePatchNoSync patch scopePath = do + r <- use root + let nroot = Branch.toNames (Branch.head r) + stepAtMNoSync' + ( Path.unabsolute scopePath, + lift . lift . Propagate.propagateAndApply nroot patch + ) + +-- Returns True if the operation changed the namespace, False otherwise. +propagatePatch :: + (Monad m, Var v) => + InputDescription -> + Patch -> + Path.Absolute -> + Action' m v Bool +propagatePatch inputDescription patch scopePath = do + r <- use root + let nroot = Branch.toNames (Branch.head r) + stepAtM' + (inputDescription <> " (applying patch)") + ( Path.unabsolute scopePath, + lift . lift . Propagate.propagateAndApply nroot patch + ) + +-- | Create the args needed for showTodoOutput and call it +doShowTodoOutput :: Monad m => Patch -> Path.Absolute -> Action' m v () +doShowTodoOutput patch scopePath = do + scope <- getAt scopePath + let names0 = Branch.toNames (Branch.head scope) + -- only needs the local references to check for obsolete defs + let getPpe = do + names <- makePrintNamesFromLabeled' (Patch.labeledDependencies patch) + prettyPrintEnvDecl names + showTodoOutput getPpe patch names0 + +-- | Show todo output if there are any conflicts or edits. +showTodoOutput :: + -- | Action that fetches the pretty print env. It's expensive because it + -- involves looking up historical names, so only call it if necessary. + Action' m v PPE.PrettyPrintEnvDecl -> + Patch -> + Names -> + Action' m v () +showTodoOutput getPpe patch names0 = do + todo <- checkTodo patch names0 + if TO.noConflicts todo && TO.noEdits todo + then respond NoConflictsOrEdits + else do + numberedArgs + .= ( Text.unpack . Reference.toText . view _2 + <$> fst (TO.todoFrontierDependents todo) + ) + ppe <- getPpe + respond $ TodoOutput ppe todo + +checkTodo :: Patch -> Names -> Action m i v (TO.TodoOutput v Ann) +checkTodo patch names0 = do + f <- computeFrontier (eval . GetDependents) patch names0 + let dirty = R.dom f + frontier = R.ran f + (frontierTerms, frontierTypes) <- loadDisplayInfo frontier + (dirtyTerms, dirtyTypes) <- loadDisplayInfo dirty + -- todo: something more intelligent here? + let scoreFn = const 1 + remainingTransitive <- + frontierTransitiveDependents (eval . GetDependents) names0 frontier + let scoredDirtyTerms = + List.sortOn (view _1) [(scoreFn r, r, t) | (r, t) <- dirtyTerms] + scoredDirtyTypes = + List.sortOn (view _1) [(scoreFn r, r, t) | (r, t) <- dirtyTypes] + pure $ + TO.TodoOutput + (Set.size remainingTransitive) + (frontierTerms, frontierTypes) + (scoredDirtyTerms, scoredDirtyTypes) + (Names.conflicts names0) + (Patch.conflicts patch) + where + frontierTransitiveDependents :: + Monad m => (Reference -> m (Set Reference)) -> Names -> Set Reference -> m (Set Reference) + frontierTransitiveDependents dependents names0 rs = do + let branchDependents r = Set.filter (Names.contains names0) <$> dependents r + tdeps <- transitiveClosure branchDependents rs + -- we don't want the frontier in the result + pure $ tdeps `Set.difference` rs + +-- (d, f) when d is "dirty" (needs update), +-- f is in the frontier (an edited dependency of d), +-- and d depends on f +-- a ⋖ b = a depends directly on b +-- dirty(d) ∧ frontier(f) <=> not(edited(d)) ∧ edited(f) ∧ d ⋖ f +-- +-- The range of this relation is the frontier, and the domain is +-- the set of dirty references. +computeFrontier :: + forall m. + Monad m => + (Reference -> m (Set Reference)) -> -- eg Codebase.dependents codebase + Patch -> + Names -> + m (R.Relation Reference Reference) +computeFrontier getDependents patch names = + let edited :: Set Reference + edited = R.dom (Patch._termEdits patch) <> R.dom (Patch._typeEdits patch) + addDependents :: R.Relation Reference Reference -> Reference -> m (R.Relation Reference Reference) + addDependents dependents ref = + (\ds -> R.insertManyDom ds ref dependents) . Set.filter (Names.contains names) + <$> getDependents ref + in do + -- (r,r2) ∈ dependsOn if r depends on r2 + dependsOn <- foldM addDependents R.empty edited + -- Dirty is everything that `dependsOn` Frontier, minus already edited defns + pure $ R.filterDom (not . flip Set.member edited) dependsOn + +eval :: Command m i v a -> Action m i v a +eval = lift . lift . Free.eval + +confirmedCommand :: Input -> Action m i v Bool +confirmedCommand i = do + i0 <- use lastInput + pure $ Just i == i0 + +listBranch :: Branch0 m -> [SearchResult] +listBranch (Branch.toNames -> b) = + List.sortOn (\s -> (SR.name s, s)) (SR.fromNames b) + +-- | restores the full hash to these search results, for _numberedArgs purposes +searchResultToHQString :: SearchResult -> String +searchResultToHQString = \case + SR.Tm' n r _ -> HQ.toString $ HQ.requalify n r + SR.Tp' n r _ -> HQ.toString $ HQ.requalify n (Referent.Ref r) + _ -> error "impossible match failure" + +-- Return a list of definitions whose names fuzzy match the given queries. +fuzzyNameDistance :: Name -> Name -> Maybe Int +fuzzyNameDistance (Name.toString -> q) (Name.toString -> n) = + Find.simpleFuzzyScore q n + +-- return `name` and `name....` +_searchBranchPrefix :: Branch m -> Name -> [SearchResult] +_searchBranchPrefix b n = case Path.unsnoc (Path.fromName n) of + Nothing -> [] + Just (init, last) -> case Branch.getAt init b of + Nothing -> [] + Just b -> SR.fromNames . Names.prefix0 n $ names0 + where + lastName = Path.toName (Path.singleton last) + subnames = + Branch.toNames . Branch.head $ + Branch.getAt' (Path.singleton last) b + rootnames = + Names.filter (== lastName) + . Branch.toNames + . set Branch.children mempty + $ Branch.head b + names0 = rootnames <> Names.prefix0 lastName subnames + +searchResultsFor :: Names -> [Referent] -> [Reference] -> [SearchResult] +searchResultsFor ns terms types = + [ SR.termSearchResult ns name ref + | ref <- terms, + name <- toList (Names.namesForReferent ns ref) + ] + <> [ SR.typeSearchResult ns name ref + | ref <- types, + name <- toList (Names.namesForReference ns ref) + ] + +searchBranchScored :: + forall score. + (Ord score) => + Names -> + (Name -> Name -> Maybe score) -> + [HQ.HashQualified Name] -> + [SearchResult] +searchBranchScored names0 score queries = + nubOrd . fmap snd . toList $ searchTermNamespace <> searchTypeNamespace + where + searchTermNamespace = foldMap do1query queries + where + do1query :: HQ.HashQualified Name -> Set (Maybe score, SearchResult) + do1query q = foldMap (score1hq q) (R.toList . Names.terms $ names0) + score1hq :: HQ.HashQualified Name -> (Name, Referent) -> Set (Maybe score, SearchResult) + score1hq query (name, ref) = case query of + HQ.NameOnly qn -> + pair qn + HQ.HashQualified qn h + | h `SH.isPrefixOf` Referent.toShortHash ref -> + pair qn + HQ.HashOnly h + | h `SH.isPrefixOf` Referent.toShortHash ref -> + Set.singleton (Nothing, result) + _ -> mempty + where + result = SR.termSearchResult names0 name ref + pair qn = case score qn name of + Just score -> Set.singleton (Just score, result) + Nothing -> mempty + searchTypeNamespace = foldMap do1query queries + where + do1query :: HQ.HashQualified Name -> Set (Maybe score, SearchResult) + do1query q = foldMap (score1hq q) (R.toList . Names.types $ names0) + score1hq :: HQ.HashQualified Name -> (Name, Reference) -> Set (Maybe score, SearchResult) + score1hq query (name, ref) = case query of + HQ.NameOnly qn -> + pair qn + HQ.HashQualified qn h + | h `SH.isPrefixOf` Reference.toShortHash ref -> + pair qn + HQ.HashOnly h + | h `SH.isPrefixOf` Reference.toShortHash ref -> + Set.singleton (Nothing, result) + _ -> mempty + where + result = SR.typeSearchResult names0 name ref + pair qn = case score qn name of + Just score -> Set.singleton (Just score, result) + Nothing -> mempty + +handleBackendError :: Backend.BackendError -> Action m i v () +handleBackendError = \case + Backend.NoSuchNamespace path -> + respond . BranchNotFound $ Path.absoluteToPath' path + Backend.BadRootBranch e -> respond $ BadRootBranch e + Backend.NoBranchForHash h -> do + sbhLength <- eval BranchHashLength + respond . NoBranchWithHash $ SBH.fromHash sbhLength h + Backend.CouldntLoadBranch h -> do + respond . CouldntLoadBranch $ h + Backend.CouldntExpandBranchHash sbh -> respond $ NoBranchWithHash sbh + Backend.AmbiguousBranchHash h hashes -> + respond $ BranchHashAmbiguous h hashes + Backend.MissingSignatureForTerm r -> + respond $ TermMissingType r + +respond :: Output v -> Action m i v () +respond output = eval $ Notify output + +respondNumbered :: NumberedOutput v -> Action m i v () +respondNumbered output = do + args <- eval $ NotifyNumbered output + unless (null args) $ + numberedArgs .= toList args + +unlessError :: ExceptT (Output v) (Action' m v) () -> Action' m v () +unlessError ma = runExceptT ma >>= either respond pure + +unlessError' :: (e -> Output v) -> ExceptT e (Action' m v) () -> Action' m v () +unlessError' f ma = unlessError $ withExceptT f ma + +-- | supply `dest0` if you want to print diff messages +-- supply unchangedMessage if you want to display it if merge had no effect +mergeBranchAndPropagateDefaultPatch :: + (Monad m, Var v) => + Branch.MergeMode -> + InputDescription -> + Maybe (Output v) -> + Branch m -> + Maybe Path.Path' -> + Path.Absolute -> + Action' m v () +mergeBranchAndPropagateDefaultPatch mode inputDescription unchangedMessage srcb dest0 dest = + ifM + (mergeBranch mode inputDescription srcb dest0 dest) + (loadPropagateDiffDefaultPatch inputDescription dest0 dest) + (for_ unchangedMessage respond) + where + mergeBranch :: + (Monad m, Var v) => + Branch.MergeMode -> + InputDescription -> + Branch m -> + Maybe Path.Path' -> + Path.Absolute -> + Action' m v Bool + mergeBranch mode inputDescription srcb dest0 dest = unsafeTime "Merge Branch" $ do + destb <- getAt dest + merged <- eval $ Merge mode srcb destb + b <- updateAtM inputDescription dest (const $ pure merged) + for_ dest0 $ \dest0 -> + diffHelper (Branch.head destb) (Branch.head merged) + >>= respondNumbered . uncurry (ShowDiffAfterMerge dest0 dest) + pure b + +loadPropagateDiffDefaultPatch :: + (Monad m, Var v) => + InputDescription -> + Maybe Path.Path' -> + Path.Absolute -> + Action' m v () +loadPropagateDiffDefaultPatch inputDescription dest0 dest = unsafeTime "Propagate Default Patch" $ do + original <- getAt dest + patch <- eval . Eval $ Branch.getPatch defaultPatchNameSegment (Branch.head original) + patchDidChange <- propagatePatch inputDescription patch dest + when patchDidChange . for_ dest0 $ \dest0 -> do + patched <- getAt dest + let patchPath = snoc dest0 defaultPatchNameSegment + diffHelper (Branch.head original) (Branch.head patched) + >>= respondNumbered . uncurry (ShowDiffAfterMergePropagate dest0 dest patchPath) + +-- | Get metadata type/value from a name. +-- +-- May fail with either: +-- +-- * 'MetadataMissingType', if the given name is associated with a single reference, but that reference doesn't have a +-- type. +-- * 'MetadataAmbiguous', if the given name is associated with more than one reference. +getMetadataFromName :: + Var v => + HQ.HashQualified Name -> + Action m (Either Event Input) v (Either (Output v) (Metadata.Type, Metadata.Value)) +getMetadataFromName name = do + (Set.toList <$> getHQTerms name) >>= \case + [ref@(Referent.Ref val)] -> + eval (LoadTypeOfTerm val) >>= \case + Nothing -> do + ppe <- getPPE + pure (Left (MetadataMissingType ppe ref)) + Just ty -> pure (Right (Hashing.typeToReference ty, val)) + -- FIXME: we want a different error message if the given name is associated with a data constructor (`Con`). + refs -> do + ppe <- getPPE + pure (Left (MetadataAmbiguous name ppe refs)) + where + getPPE :: Action m (Either Event Input) v PPE.PrettyPrintEnv + getPPE = do + currentPath' <- use currentPath + sbhLength <- eval BranchHashLength + Backend.basicSuffixifiedNames sbhLength <$> use root <*> pure (Backend.AllNames $ Path.unabsolute currentPath') + +-- | Get the set of terms related to a hash-qualified name. +getHQTerms :: HQ.HashQualified Name -> Action' m v (Set Referent) +getHQTerms = \case + HQ.NameOnly n -> do + root0 <- Branch.head <$> use root + currentPath' <- use currentPath + -- absolute-ify the name, then lookup in deepTerms of root + let path = + n + & Path.fromName' + & Path.resolve currentPath' + & Path.unabsolute + & Path.toName + pure $ R.lookupRan path (Branch.deepTerms root0) + HQ.HashOnly sh -> hashOnly sh + HQ.HashQualified _ sh -> hashOnly sh + where + hashOnly sh = eval $ TermReferentsByShortHash sh + +getAt :: Functor m => Path.Absolute -> Action m i v (Branch m) +getAt (Path.Absolute p) = + use root <&> fromMaybe Branch.empty . Branch.getAt p + +-- Update a branch at the given path, returning `True` if +-- an update occurred and false otherwise +updateAtM :: + Applicative m => + InputDescription -> + Path.Absolute -> + (Branch m -> Action m i v (Branch m)) -> + Action m i v Bool +updateAtM reason (Path.Absolute p) f = do + b <- use lastSavedRoot + b' <- Branch.modifyAtM p f b + updateRoot b' reason + pure $ b /= b' + +stepAt :: + forall m i v. + Monad m => + InputDescription -> + (Path, Branch0 m -> Branch0 m) -> + Action m i v () +stepAt cause = stepManyAt @m @[] cause . pure + +stepAtNoSync :: + forall m i v. + Monad m => + (Path, Branch0 m -> Branch0 m) -> + Action m i v () +stepAtNoSync = stepManyAtNoSync @m @[] . pure + +stepAtM :: + forall m i v. + Monad m => + InputDescription -> + (Path, Branch0 m -> m (Branch0 m)) -> + Action m i v () +stepAtM cause = stepManyAtM @m @[] cause . pure + +stepAtM' :: + forall m i v. + Monad m => + InputDescription -> + (Path, Branch0 m -> Action m i v (Branch0 m)) -> + Action m i v Bool +stepAtM' cause = stepManyAtM' @m @[] cause . pure + +stepAtMNoSync' :: + forall m i v. + Monad m => + (Path, Branch0 m -> Action m i v (Branch0 m)) -> + Action m i v Bool +stepAtMNoSync' = stepManyAtMNoSync' @m @[] . pure + +stepManyAt :: + (Monad m, Foldable f) => + InputDescription -> + f (Path, Branch0 m -> Branch0 m) -> + Action m i v () +stepManyAt reason actions = do + stepManyAtNoSync actions + b <- use root + updateRoot b reason + +-- Like stepManyAt, but doesn't update the root +stepManyAtNoSync :: + (Monad m, Foldable f) => + f (Path, Branch0 m -> Branch0 m) -> + Action m i v () +stepManyAtNoSync actions = do + b <- use root + let new = Branch.stepManyAt actions b + root .= new + +stepManyAtM :: + (Monad m, Foldable f) => + InputDescription -> + f (Path, Branch0 m -> m (Branch0 m)) -> + Action m i v () +stepManyAtM reason actions = do + stepManyAtMNoSync actions + b <- use root + updateRoot b reason + +stepManyAtMNoSync :: + (Monad m, Foldable f) => + f (Path, Branch0 m -> m (Branch0 m)) -> + Action m i v () +stepManyAtMNoSync actions = do + b <- use root + b' <- eval . Eval $ Branch.stepManyAtM actions b + root .= b' + +stepManyAtM' :: + (Monad m, Foldable f) => + InputDescription -> + f (Path, Branch0 m -> Action m i v (Branch0 m)) -> + Action m i v Bool +stepManyAtM' reason actions = do + b <- use root + b' <- Branch.stepManyAtM actions b + updateRoot b' reason + pure (b /= b') + +stepManyAtMNoSync' :: + (Monad m, Foldable f) => + f (Path, Branch0 m -> Action m i v (Branch0 m)) -> + Action m i v Bool +stepManyAtMNoSync' actions = do + b <- use root + b' <- Branch.stepManyAtM actions b + root .= b' + pure (b /= b') + +updateRoot :: Branch m -> InputDescription -> Action m i v () +updateRoot new reason = do + old <- use lastSavedRoot + when (old /= new) $ do + root .= new + eval $ SyncLocalRootBranch new + eval $ AppendToReflog reason old new + lastSavedRoot .= new + +-- cata for 0, 1, or more elements of a Foldable +-- tries to match as lazily as possible +zeroOneOrMore :: Foldable f => f a -> b -> (a -> b) -> (f a -> b) -> b +zeroOneOrMore f zero one more = case toList f of + _ : _ : _ -> more f + a : _ -> one a + _ -> zero + +-- Goal: If `remaining = root - toBeDeleted` contains definitions X which +-- depend on definitions Y not in `remaining` (which should also be in +-- `toBeDeleted`), then complain by returning (Y, X). +getEndangeredDependents :: + forall m. + Monad m => + (Reference -> m (Set Reference)) -> + Names -> + Names -> + m (Names, Names) +getEndangeredDependents getDependents toDelete root = do + let remaining = root `Names.difference` toDelete + toDelete', remaining', extinct :: Set Reference + toDelete' = Names.allReferences toDelete + remaining' = Names.allReferences remaining -- left over after delete + extinct = toDelete' `Set.difference` remaining' -- deleting and not left over + accumulateDependents m r = getDependents r <&> \ds -> Map.insert r ds m + dependentsOfExtinct :: Map Reference (Set Reference) <- + foldM accumulateDependents mempty extinct + let orphaned, endangered, failed :: Set Reference + orphaned = fold dependentsOfExtinct + endangered = orphaned `Set.intersection` remaining' + failed = Set.filter hasEndangeredDependent extinct + hasEndangeredDependent r = + any + (`Set.member` endangered) + (dependentsOfExtinct Map.! r) + pure + ( Names.restrictReferences failed toDelete, + Names.restrictReferences endangered root `Names.difference` toDelete + ) + +-- Applies the selection filter to the adds/updates of a slurp result, +-- meaning that adds/updates should only contain the selection or its transitive +-- dependencies, any unselected transitive dependencies of the selection will +-- be added to `extraDefinitions`. +applySelection :: + forall v a. + Var v => + [HQ'.HashQualified Name] -> + UF.TypecheckedUnisonFile v a -> + SlurpResult v -> + SlurpResult v +applySelection [] _ = id +applySelection hqs file = \sr@SlurpResult {adds, updates} -> + sr + { adds = adds `SC.intersection` closed, + updates = updates `SC.intersection` closed, + extraDefinitions = closed `SC.difference` selection + } + where + selectedNames = + Names.filterByHQs (Set.fromList hqs) (UF.typecheckedToNames file) + selection, closed :: SlurpComponent v + selection = SlurpComponent selectedTypes selectedTerms + closed = SC.closeWithDependencies file selection + selectedTypes, selectedTerms :: Set v + selectedTypes = Set.map var $ R.dom (Names.types selectedNames) + selectedTerms = Set.map var $ R.dom (Names.terms selectedNames) + +var :: Var v => Name -> v +var name = Var.named (Name.toText name) + +toSlurpResult :: + forall v. + Var v => + Path.Absolute -> + UF.TypecheckedUnisonFile v Ann -> + Names -> + SlurpResult v +toSlurpResult currentPath uf existingNames = + Slurp.subtractComponent (conflicts <> ctorCollisions) $ + SlurpResult + uf + mempty + adds + dups + mempty + conflicts + updates + termCtorCollisions + ctorTermCollisions + termAliases + typeAliases + mempty + where + fileNames = UF.typecheckedToNames uf + + sc :: R.Relation Name Referent -> R.Relation Name Reference -> SlurpComponent v + sc terms types = + SlurpComponent + { terms = Set.map var (R.dom terms), + types = Set.map var (R.dom types) + } + + -- conflict (n,r) if n is conflicted in names0 + conflicts :: SlurpComponent v + conflicts = sc terms types + where + terms = + R.filterDom + (conflicted . Names.termsNamed existingNames) + (Names.terms fileNames) + types = + R.filterDom + (conflicted . Names.typesNamed existingNames) + (Names.types fileNames) + conflicted s = Set.size s > 1 + + ctorCollisions :: SlurpComponent v + ctorCollisions = + mempty {SC.terms = termCtorCollisions <> ctorTermCollisions} + + -- termCtorCollision (n,r) if (n, r' /= r) exists in existingNames and + -- r is Ref and r' is Con + termCtorCollisions :: Set v + termCtorCollisions = + Set.fromList + [ var n + | (n, Referent.Ref {}) <- R.toList (Names.terms fileNames), + [r@Referent.Con {}] <- [toList $ Names.termsNamed existingNames n], + -- ignore collisions w/ ctors of types being updated + Set.notMember (Referent.toReference r) typesToUpdate + ] + + -- the set of typerefs that are being updated by this file + typesToUpdate :: Set Reference + typesToUpdate = + Set.fromList + [ r + | (n, r') <- R.toList (Names.types fileNames), + r <- toList (Names.typesNamed existingNames n), + r /= r' + ] + + -- ctorTermCollisions (n,r) if (n, r' /= r) exists in names0 and r is Con + -- and r' is Ref except we relaxed it to where r' can be Con or Ref + -- what if (n,r) and (n,r' /= r) exists in names and r, r' are Con + ctorTermCollisions :: Set v + ctorTermCollisions = + Set.fromList + [ var n + | (n, Referent.Con {}) <- R.toList (Names.terms fileNames), + r <- toList $ Names.termsNamed existingNames n, + -- ignore collisions w/ ctors of types being updated + Set.notMember (Referent.toReference r) typesToUpdate, + Set.notMember (var n) (terms dups) + ] + + -- duplicate (n,r) if (n,r) exists in names0 + dups :: SlurpComponent v + dups = sc terms types + where + terms = R.intersection (Names.terms existingNames) (Names.terms fileNames) + types = R.intersection (Names.types existingNames) (Names.types fileNames) + + -- update (n,r) if (n,r' /= r) exists in existingNames and r, r' are Ref + updates :: SlurpComponent v + updates = SlurpComponent (Set.fromList types) (Set.fromList terms) + where + terms = + [ var n + | (n, r'@Referent.Ref {}) <- R.toList (Names.terms fileNames), + [r@Referent.Ref {}] <- [toList $ Names.termsNamed existingNames n], + r' /= r + ] + types = + [ var n + | (n, r') <- R.toList (Names.types fileNames), + [r] <- [toList $ Names.typesNamed existingNames n], + r' /= r + ] + + buildAliases :: + R.Relation Name Referent -> + R.Relation Name Referent -> + Set v -> + Map v Slurp.Aliases + buildAliases existingNames namesFromFile duplicates = + Map.fromList + [ ( var n, + if null aliasesOfOld + then Slurp.AddAliases aliasesOfNew + else Slurp.UpdateAliases aliasesOfOld aliasesOfNew + ) + | (n, r@Referent.Ref {}) <- R.toList namesFromFile, + -- All the refs whose names include `n`, and are not `r` + let refs = Set.delete r $ R.lookupDom n existingNames + aliasesOfNew = + Set.map (Path.unprefixName currentPath) . Set.delete n $ + R.lookupRan r existingNames + aliasesOfOld = + Set.map (Path.unprefixName currentPath) . Set.delete n . R.dom $ + R.restrictRan existingNames refs, + not (null aliasesOfNew && null aliasesOfOld), + Set.notMember (var n) duplicates + ] + + termAliases :: Map v Slurp.Aliases + termAliases = + buildAliases + (Names.terms existingNames) + (Names.terms fileNames) + (SC.terms dups) + + typeAliases :: Map v Slurp.Aliases + typeAliases = + buildAliases + (R.mapRan Referent.Ref $ Names.types existingNames) + (R.mapRan Referent.Ref $ Names.types fileNames) + (SC.types dups) + + -- (n,r) is in `adds` if n isn't in existingNames + adds = sc terms types + where + terms = addTerms (Names.terms existingNames) (Names.terms fileNames) + types = addTypes (Names.types existingNames) (Names.types fileNames) + addTerms existingNames = R.filter go + where + go (n, Referent.Ref {}) = (not . R.memberDom n) existingNames + go _ = False + addTypes existingNames = R.filter go + where + go (n, _) = (not . R.memberDom n) existingNames + +displayI :: + (Monad m, Var v) => + Names -> + OutputLocation -> + HQ.HashQualified Name -> + Action m (Either Event Input) v () +displayI prettyPrintNames outputLoc hq = do + uf <- use latestTypecheckedFile >>= addWatch (HQ.toString hq) + case uf of + Nothing -> do + let parseNames = (`NamesWithHistory.NamesWithHistory` mempty) prettyPrintNames + results = NamesWithHistory.lookupHQTerm hq parseNames + if Set.null results + then respond $ SearchTermsNotFound [hq] + else + if Set.size results > 1 + then respond $ TermAmbiguous hq results + else -- ... but use the unsuffixed names for display + do + let tm = Term.fromReferent External $ Set.findMin results + pped <- prettyPrintEnvDecl parseNames + tm <- eval $ Evaluate1 (PPE.suffixifiedPPE pped) True tm + case tm of + Left e -> respond (EvaluationFailure e) + Right tm -> doDisplay outputLoc parseNames (Term.unannotate tm) + Just (toDisplay, unisonFile) -> do + ppe <- executePPE unisonFile + unlessError' EvaluationFailure do + evalResult <- ExceptT . eval . Evaluate ppe $ unisonFile + case Command.lookupEvalResult toDisplay evalResult of + Nothing -> error $ "Evaluation dropped a watch expression: " <> HQ.toString hq + Just tm -> lift do + ns <- displayNames unisonFile + doDisplay outputLoc ns tm + +docsI :: + (Ord v, Monad m, Var v) => + SrcLoc -> + Names -> + Path.HQSplit' -> + Action m (Either Event Input) v () +docsI srcLoc prettyPrintNames src = do + fileByName + where + {- Given `docs foo`, we look for docs in 3 places, in this order: + (fileByName) First check the file for `foo.doc`, and if found do `display foo.doc` + (codebaseByMetadata) Next check for doc metadata linked to `foo` in the codebase + (codebaseByName) Lastly check for `foo.doc` in the codebase and if found do `display foo.doc` + -} + hq :: HQ.HashQualified Name + hq = + let hq' :: HQ'.HashQualified Name + hq' = Name.convert @Path.Path' @Name <$> Name.convert src + in Name.convert hq' + + dotDoc :: HQ.HashQualified Name + dotDoc = hq <&> \n -> Name.joinDot n "doc" + + fileByName = do + ns <- maybe mempty UF.typecheckedToNames <$> use latestTypecheckedFile + fnames <- pure $ NamesWithHistory.NamesWithHistory ns mempty + case NamesWithHistory.lookupHQTerm dotDoc fnames of + s | Set.size s == 1 -> do + -- the displayI command expects full term names, so we resolve + -- the hash back to its full name in the file + fname' <- pure $ NamesWithHistory.longestTermName 10 (Set.findMin s) fnames + displayI prettyPrintNames ConsoleLocation fname' + _ -> codebaseByMetadata + + codebaseByMetadata = unlessError do + (ppe, out) <- getLinks srcLoc src (Left $ Set.fromList [DD.docRef, DD.doc2Ref]) + lift case out of + [] -> codebaseByName + [(_name, ref, _tm)] -> do + len <- eval BranchHashLength + let names = NamesWithHistory.NamesWithHistory prettyPrintNames mempty + let tm = Term.ref External ref + tm <- eval $ Evaluate1 (PPE.fromNames len names) True tm + case tm of + Left e -> respond (EvaluationFailure e) + Right tm -> doDisplay ConsoleLocation names (Term.unannotate tm) + out -> do + numberedArgs .= fmap (HQ.toString . view _1) out + respond $ ListOfLinks ppe out + + codebaseByName = do + parseNames <- basicParseNames + case NamesWithHistory.lookupHQTerm dotDoc (NamesWithHistory.NamesWithHistory parseNames mempty) of + s + | Set.size s == 1 -> displayI prettyPrintNames ConsoleLocation dotDoc + | Set.size s == 0 -> respond $ ListOfLinks mempty [] + | otherwise -> -- todo: return a list of links here too + respond $ ListOfLinks mempty [] + +filterBySlurpResult :: + Ord v => + SlurpResult v -> + UF.TypecheckedUnisonFile v Ann -> + UF.TypecheckedUnisonFile v Ann +filterBySlurpResult + SlurpResult {adds, updates} + ( UF.TypecheckedUnisonFileId + dataDeclarations' + effectDeclarations' + topLevelComponents' + watchComponents + hashTerms + ) = + UF.TypecheckedUnisonFileId datas effects tlcs watches hashTerms' + where + keep = updates <> adds + keepTerms = SC.terms keep + keepTypes = SC.types keep + hashTerms' = Map.restrictKeys hashTerms keepTerms + datas = Map.restrictKeys dataDeclarations' keepTypes + effects = Map.restrictKeys effectDeclarations' keepTypes + tlcs = filter (not . null) $ fmap (List.filter filterTLC) topLevelComponents' + watches = filter (not . null . snd) $ fmap (second (List.filter filterTLC)) watchComponents + filterTLC (v, _, _) = Set.member v keepTerms + +-- updates the namespace for adding `slurp` +doSlurpAdds :: + forall m v. + (Monad m, Var v) => + SlurpComponent v -> + UF.TypecheckedUnisonFile v Ann -> + (Branch0 m -> Branch0 m) +doSlurpAdds slurp uf = Branch.stepManyAt0 (typeActions <> termActions) + where + typeActions = map doType . toList $ SC.types slurp + termActions = + map doTerm . toList $ + SC.terms slurp <> Slurp.constructorsFor (SC.types slurp) uf + names = UF.typecheckedToNames uf + tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf) + (isTestType, isTestValue) = isTest + md v = + if Set.member v tests + then Metadata.singleton isTestType isTestValue + else Metadata.empty + doTerm :: v -> (Path, Branch0 m -> Branch0 m) + doTerm v = case toList (Names.termsNamed names (Name.unsafeFromVar v)) of + [] -> errorMissingVar v + [r] -> case Path.splitFromName (Name.unsafeFromVar v) of + Nothing -> errorEmptyVar + Just split -> BranchUtil.makeAddTermName split r (md v) + wha -> + error $ + "Unison bug, typechecked file w/ multiple terms named " + <> Var.nameStr v + <> ": " + <> show wha + doType :: v -> (Path, Branch0 m -> Branch0 m) + doType v = case toList (Names.typesNamed names (Name.unsafeFromVar v)) of + [] -> errorMissingVar v + [r] -> case Path.splitFromName (Name.unsafeFromVar v) of + Nothing -> errorEmptyVar + Just split -> BranchUtil.makeAddTypeName split r Metadata.empty + wha -> + error $ + "Unison bug, typechecked file w/ multiple types named " + <> Var.nameStr v + <> ": " + <> show wha + errorEmptyVar = error "encountered an empty var name" + errorMissingVar v = error $ "expected to find " ++ show v ++ " in " ++ show uf + +doSlurpUpdates :: + Monad m => + Map Name (Reference, Reference) -> + Map Name (Reference, Reference) -> + [(Name, Referent)] -> + (Branch0 m -> Branch0 m) +doSlurpUpdates typeEdits termEdits deprecated b0 = + Branch.stepManyAt0 (typeActions <> termActions <> deprecateActions) b0 + where + typeActions = join . map doType . Map.toList $ typeEdits + termActions = join . map doTerm . Map.toList $ termEdits + deprecateActions = join . map doDeprecate $ deprecated + where + doDeprecate (n, r) = case Path.splitFromName n of + Nothing -> errorEmptyVar + Just split -> [BranchUtil.makeDeleteTermName split r] + + -- we copy over the metadata on the old thing + -- todo: if the thing being updated, m, is metadata for something x in b0 + -- update x's md to reference `m` + doType, + doTerm :: + (Name, (Reference, Reference)) -> [(Path, Branch0 m -> Branch0 m)] + doType (n, (old, new)) = case Path.splitFromName n of + Nothing -> errorEmptyVar + Just split -> + [ BranchUtil.makeDeleteTypeName split old, + BranchUtil.makeAddTypeName split new oldMd + ] + where + oldMd = BranchUtil.getTypeMetadataAt split old b0 + doTerm (n, (old, new)) = case Path.splitFromName n of + Nothing -> errorEmptyVar + Just split -> + [ BranchUtil.makeDeleteTermName split (Referent.Ref old), + BranchUtil.makeAddTermName split (Referent.Ref new) oldMd + ] + where + -- oldMd is the metadata linked to the old definition + -- we relink it to the new definition + oldMd = BranchUtil.getTermMetadataAt split (Referent.Ref old) b0 + errorEmptyVar = error "encountered an empty var name" + +loadDisplayInfo :: + Set Reference -> + Action + m + i + v + ( [(Reference, Maybe (Type v Ann))], + [(Reference, DisplayObject () (DD.Decl v Ann))] + ) +loadDisplayInfo refs = do + termRefs <- filterM (eval . IsTerm) (toList refs) + typeRefs <- filterM (eval . IsType) (toList refs) + terms <- forM termRefs $ \r -> (r,) <$> eval (LoadTypeOfTerm r) + types <- forM typeRefs $ \r -> (r,) <$> loadTypeDisplayObject r + pure (terms, types) + +-- Any absolute names in the input which have `currentPath` as a prefix +-- are converted to names relative to current path. all other names are +-- converted to absolute names. For example: +-- +-- e.g. if currentPath = .foo.bar +-- then name foo.bar.baz becomes baz +-- name cat.dog becomes .cat.dog +fixupNamesRelative :: Path.Absolute -> Names -> Names +fixupNamesRelative currentPath' = Names.map fixName + where + prefix = Path.toName (Path.unabsolute currentPath') + fixName n = + if currentPath' == Path.absoluteEmpty + then n + else fromMaybe (Name.makeAbsolute n) (Name.stripNamePrefix prefix n) + +makeHistoricalParsingNames :: + Monad m => Set (HQ.HashQualified Name) -> Action' m v NamesWithHistory +makeHistoricalParsingNames lexedHQs = do + rawHistoricalNames <- findHistoricalHQs lexedHQs + basicNames <- basicParseNames + currentPath <- use currentPath + pure $ + NamesWithHistory + basicNames + ( Names.makeAbsolute rawHistoricalNames + <> fixupNamesRelative currentPath rawHistoricalNames + ) + +loadTypeDisplayObject :: + Reference -> Action m i v (DisplayObject () (DD.Decl v Ann)) +loadTypeDisplayObject = \case + Reference.Builtin _ -> pure (BuiltinObject ()) + Reference.DerivedId id -> + maybe (MissingObject $ Reference.idToShortHash id) UserObject + <$> eval (LoadType id) + +lexedSource :: Monad m => SourceName -> Source -> Action' m v (NamesWithHistory, LexedSource) +lexedSource name src = do + let tokens = L.lexer (Text.unpack name) (Text.unpack src) + getHQ = \case + L.Backticks s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) + L.WordyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) + L.SymbolyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) + L.Hash sh -> Just (HQ.HashOnly sh) + _ -> Nothing + hqs = Set.fromList . mapMaybe (getHQ . L.payload) $ tokens + parseNames <- makeHistoricalParsingNames hqs + pure (parseNames, (src, tokens)) + +suffixifiedPPE :: NamesWithHistory -> Action' m v PPE.PrettyPrintEnv +suffixifiedPPE ns = eval CodebaseHashLength <&> (`PPE.fromSuffixNames` ns) + +fqnPPE :: NamesWithHistory -> Action' m v PPE.PrettyPrintEnv +fqnPPE ns = eval CodebaseHashLength <&> (`PPE.fromNames` ns) + +parseSearchType :: + (Monad m, Var v) => + SrcLoc -> + String -> + Action' m v (Either (Output v) (Type v Ann)) +parseSearchType srcLoc typ = fmap Type.removeAllEffectVars <$> parseType srcLoc typ + +-- | A description of where the given parse was triggered from, for error messaging purposes. +type SrcLoc = String + +parseType :: + (Monad m, Var v) => + SrcLoc -> + String -> + Action' m v (Either (Output v) (Type v Ann)) +parseType input src = do + -- `show Input` is the name of the "file" being lexed + (names0, lexed) <- lexedSource (Text.pack input) (Text.pack src) + parseNames <- basicParseNames + let names = + NamesWithHistory.push + (NamesWithHistory.currentNames names0) + (NamesWithHistory.NamesWithHistory parseNames (NamesWithHistory.oldNames names0)) + e <- eval $ ParseType names lexed + pure $ case e of + Left err -> Left $ TypeParseError src err + Right typ -> case Type.bindNames mempty (NamesWithHistory.currentNames names) $ + Type.generalizeLowercase mempty typ of + Left es -> Left $ ParseResolutionFailures src (toList es) + Right typ -> Right typ + +makeShadowedPrintNamesFromLabeled :: + Monad m => Set LabeledDependency -> Names -> Action' m v NamesWithHistory +makeShadowedPrintNamesFromLabeled deps shadowing = + NamesWithHistory.shadowing shadowing <$> makePrintNamesFromLabeled' deps + +makePrintNamesFromLabeled' :: + Monad m => Set LabeledDependency -> Action' m v NamesWithHistory +makePrintNamesFromLabeled' deps = do + root <- use root + currentPath <- use currentPath + (_missing, rawHistoricalNames) <- + eval . Eval $ + Branch.findHistoricalRefs + deps + root + basicNames <- basicPrettyPrintNamesA + pure $ NamesWithHistory basicNames (fixupNamesRelative currentPath rawHistoricalNames) + +getTermsIncludingHistorical :: + Monad m => Path.HQSplit -> Branch0 m -> Action' m v (Set Referent) +getTermsIncludingHistorical (p, hq) b = case Set.toList refs of + [] -> case hq of + HQ'.HashQualified n hs -> do + names <- + findHistoricalHQs $ + Set.fromList [HQ.HashQualified (Name.unsafeFromText (NameSegment.toText n)) hs] + pure . R.ran $ Names.terms names + _ -> pure Set.empty + _ -> pure refs + where + refs = BranchUtil.getTerm (p, hq) b + +-- discards inputs that aren't hashqualified; +-- I'd enforce it with finer-grained types if we had them. +findHistoricalHQs :: Monad m => Set (HQ.HashQualified Name) -> Action' m v Names +findHistoricalHQs lexedHQs0 = do + root <- use root + currentPath <- use currentPath + let -- omg this nightmare name-to-path parsing code is littered everywhere. + -- We need to refactor so that the absolute-ness of a name isn't represented + -- by magical text combinations. + -- Anyway, this function takes a name, tries to determine whether it is + -- relative or absolute, and tries to return the corresponding name that is + -- /relative/ to the root. + preprocess n = case Name.toString n of + -- some absolute name that isn't just "." + '.' : t@(_ : _) -> Name.unsafeFromString t + -- something in current path + _ -> + if Path.isRoot currentPath + then n + else Name.joinDot (Path.toName . Path.unabsolute $ currentPath) n + + lexedHQs = Set.map (fmap preprocess) . Set.filter HQ.hasHash $ lexedHQs0 + (_missing, rawHistoricalNames) <- eval . Eval $ Branch.findHistoricalHQs lexedHQs root + pure rawHistoricalNames + +basicPrettyPrintNamesA :: Functor m => Action' m v Names +basicPrettyPrintNamesA = snd <$> basicNames' + +makeShadowedPrintNamesFromHQ :: Monad m => Set (HQ.HashQualified Name) -> Names -> Action' m v NamesWithHistory +makeShadowedPrintNamesFromHQ lexedHQs shadowing = do + rawHistoricalNames <- findHistoricalHQs lexedHQs + basicNames <- basicPrettyPrintNamesA + currentPath <- use currentPath + -- The basic names go into "current", but are shadowed by "shadowing". + -- They go again into "historical" as a hack that makes them available HQ-ed. + pure $ + NamesWithHistory.shadowing + shadowing + (NamesWithHistory basicNames (fixupNamesRelative currentPath rawHistoricalNames)) + +basicParseNames, slurpResultNames :: Functor m => Action' m v Names +basicParseNames = fst <$> basicNames' +-- we check the file against everything in the current path +slurpResultNames = currentPathNames + +currentPathNames :: Functor m => Action' m v Names +currentPathNames = do + currentPath' <- use currentPath + currentBranch' <- getAt currentPath' + pure $ Branch.toNames (Branch.head currentBranch') + +-- implementation detail of basicParseNames and basicPrettyPrintNames +basicNames' :: Functor m => Action' m v (Names, Names) +basicNames' = do + root' <- use root + currentPath' <- use currentPath + pure $ Backend.basicNames' root' (Backend.AllNames $ Path.unabsolute currentPath') + +data AddRunMainResult v + = NoTermWithThatName + | TermHasBadType (Type v Ann) + | RunMainSuccess (TypecheckedUnisonFile v Ann) + +-- Adds a watch expression of the given name to the file, if +-- it would resolve to a TLD in the file. Returns the freshened +-- variable name and the new typechecked file. +-- +-- Otherwise, returns `Nothing`. +addWatch :: + (Monad m, Var v) => + String -> + Maybe (TypecheckedUnisonFile v Ann) -> + Action' m v (Maybe (v, TypecheckedUnisonFile v Ann)) +addWatch _watchName Nothing = pure Nothing +addWatch watchName (Just uf) = do + let components = join $ UF.topLevelComponents uf + let mainComponent = filter ((\v -> Var.nameStr v == watchName) . view _1) components + case mainComponent of + [(v, tm, ty)] -> + pure . pure $ + let v2 = Var.freshIn (Set.fromList [v]) v + a = ABT.annotation tm + in ( v2, + UF.typecheckedUnisonFile + (UF.dataDeclarationsId' uf) + (UF.effectDeclarationsId' uf) + (UF.topLevelComponents' uf) + (UF.watchComponents uf <> [(WK.RegularWatch, [(v2, Term.var a v, ty)])]) + ) + _ -> addWatch watchName Nothing + +-- Given a typechecked file with a main function called `mainName` +-- of the type `'{IO} ()`, adds an extra binding which +-- forces the `main` function. +-- +-- If that function doesn't exist in the typechecked file, the +-- codebase is consulted. +addRunMain :: + (Monad m, Var v) => + String -> + Maybe (TypecheckedUnisonFile v Ann) -> + Action' m v (AddRunMainResult v) +addRunMain mainName Nothing = do + parseNames <- basicParseNames + let loadTypeOfTerm ref = eval $ LoadTypeOfTerm ref + mainType <- eval RuntimeMain + mainToFile + <$> MainTerm.getMainTerm loadTypeOfTerm parseNames mainName mainType + where + mainToFile (MainTerm.NotAFunctionName _) = NoTermWithThatName + mainToFile (MainTerm.NotFound _) = NoTermWithThatName + mainToFile (MainTerm.BadType _ ty) = maybe NoTermWithThatName TermHasBadType ty + mainToFile (MainTerm.Success hq tm typ) = + RunMainSuccess $ + let v = Var.named (HQ.toText hq) + in UF.typecheckedUnisonFile mempty mempty mempty [("main", [(v, tm, typ)])] -- mempty +addRunMain mainName (Just uf) = do + let components = join $ UF.topLevelComponents uf + let mainComponent = filter ((\v -> Var.nameStr v == mainName) . view _1) components + mainType <- eval RuntimeMain + case mainComponent of + [(v, tm, ty)] -> + pure $ + let v2 = Var.freshIn (Set.fromList [v]) v + a = ABT.annotation tm + in if Typechecker.isSubtype ty mainType + then + RunMainSuccess $ + let runMain = DD.forceTerm a a (Term.var a v) + in UF.typecheckedUnisonFile + (UF.dataDeclarationsId' uf) + (UF.effectDeclarationsId' uf) + (UF.topLevelComponents' uf) + (UF.watchComponents uf <> [("main", [(v2, runMain, mainType)])]) + else TermHasBadType ty + _ -> addRunMain mainName Nothing + +executePPE :: + (Var v, Monad m) => + TypecheckedUnisonFile v a -> + Action' m v PPE.PrettyPrintEnv +executePPE unisonFile = + suffixifiedPPE =<< displayNames unisonFile + +-- Produce a `Names` needed to display all the hashes used in the given file. +displayNames :: + (Var v, Monad m) => + TypecheckedUnisonFile v a -> + Action' m v NamesWithHistory +displayNames unisonFile = + -- voodoo + makeShadowedPrintNamesFromLabeled + (UF.termSignatureExternalLabeledDependencies unisonFile) + (UF.typecheckedToNames unisonFile) + +diffHelper :: + Monad m => + Branch0 m -> + Branch0 m -> + Action' m v (PPE.PrettyPrintEnv, OBranchDiff.BranchDiffOutput v Ann) +diffHelper before after = do + hqLength <- eval CodebaseHashLength + diff <- eval . Eval $ BranchDiff.diff0 before after + names0 <- basicPrettyPrintNamesA + ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl (NamesWithHistory names0 mempty) + (ppe,) + <$> OBranchDiff.toOutput + loadTypeOfTerm + declOrBuiltin + hqLength + (Branch.toNames before) + (Branch.toNames after) + ppe + diff + +loadTypeOfTerm :: Referent -> Action m i v (Maybe (Type v Ann)) +loadTypeOfTerm (Referent.Ref r) = eval $ LoadTypeOfTerm r +loadTypeOfTerm (Referent.Con (Reference.DerivedId r) cid _) = do + decl <- eval $ LoadType r + case decl of + Just (either DD.toDataDecl id -> dd) -> pure $ DD.typeOfConstructor dd cid + Nothing -> pure Nothing +loadTypeOfTerm Referent.Con {} = + error $ + reportBug "924628772" "Attempt to load a type declaration which is a builtin!" + +declOrBuiltin :: Reference -> Action m i v (Maybe (DD.DeclOrBuiltin v Ann)) +declOrBuiltin r = case r of + Reference.Builtin {} -> + pure . fmap DD.Builtin $ Map.lookup r Builtin.builtinConstructorType + Reference.DerivedId id -> + fmap DD.Decl <$> eval (LoadType id) + +fuzzySelectTermsAndTypes :: Branch0 m -> Action m (Either Event Input) v [HQ.HashQualified Name] +fuzzySelectTermsAndTypes searchBranch0 = do + let termsAndTypes = + Relation.dom (Names.hashQualifyTermsRelation (Relation.swap $ Branch.deepTerms searchBranch0)) + <> Relation.dom (Names.hashQualifyTypesRelation (Relation.swap $ Branch.deepTypes searchBranch0)) + fromMaybe [] <$> eval (FuzzySelect Fuzzy.defaultOptions HQ.toText (Set.toList termsAndTypes)) + +fuzzySelectNamespace :: Branch0 m -> Action m (Either Event Input) v [Path'] +fuzzySelectNamespace searchBranch0 = + do + fmap Path.toPath' + . fromMaybe [] + <$> eval + ( FuzzySelect + Fuzzy.defaultOptions {Fuzzy.allowMultiSelect = False} + Path.toText + (Set.toList $ Branch.deepPaths searchBranch0) + ) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Input.hs b/unison-cli/src/Unison/Codebase/Editor/Input.hs similarity index 95% rename from parser-typechecker/src/Unison/Codebase/Editor/Input.hs rename to unison-cli/src/Unison/Codebase/Editor/Input.hs index 60f068d0b3..d62e290dbb 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Input.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Input.hs @@ -16,6 +16,7 @@ import qualified Unison.HashQualified' as HQ' import Unison.Codebase.Path ( Path' ) import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.Path.Parse as Path +import Unison.Codebase.PushBehavior (PushBehavior) import Unison.Codebase.Editor.RemoteRepo import Unison.ShortHash (ShortHash) import Unison.Codebase.ShortBranchHash (ShortBranchHash) @@ -54,15 +55,15 @@ data Input | MergeLocalBranchI Path' Path' Branch.MergeMode | PreviewMergeLocalBranchI Path' Path' | DiffNamespaceI Path' Path' -- old new - | PullRemoteBranchI (Maybe ReadRemoteNamespace) Path' SyncMode Verbosity - | PushRemoteBranchI (Maybe WriteRemotePath) Path' SyncMode + | PullRemoteBranchI (Maybe ReadRemoteNamespace) Path' SyncMode Verbosity + | PushRemoteBranchI (Maybe WriteRemotePath) Path' PushBehavior SyncMode | CreatePullRequestI ReadRemoteNamespace ReadRemoteNamespace | LoadPullRequestI ReadRemoteNamespace ReadRemoteNamespace Path' | ResetRootI (Either ShortBranchHash Path') -- todo: Q: Does it make sense to publish to not-the-root of a Github repo? -- Does it make sense to fork from not-the-root of a Github repo? - -- used in Welcome module to give directions to user - | CreateMessage (P.Pretty P.ColorText) + -- used in Welcome module to give directions to user + | CreateMessage (P.Pretty P.ColorText) -- Change directory. If Nothing is provided, prompt an interactive fuzzy search. | SwitchBranchI (Maybe Path') | UpI @@ -112,8 +113,8 @@ data Input -- First `Maybe Int` is cap on number of results, if any -- Second `Maybe Int` is cap on diff elements shown, if any | HistoryI (Maybe Int) (Maybe Int) BranchId - -- execute an IO thunk - | ExecuteI String + -- execute an IO thunk with args + | ExecuteI String [String] -- execute an IO [Result] | IOTestI (HQ.HashQualified Name) -- make a standalone binary file diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Output.hs b/unison-cli/src/Unison/Codebase/Editor/Output.hs similarity index 53% rename from parser-typechecker/src/Unison/Codebase/Editor/Output.hs rename to unison-cli/src/Unison/Codebase/Editor/Output.hs index 1ab3a73497..536d9e342e 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Output.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Output.hs @@ -1,67 +1,70 @@ {-# LANGUAGE PatternSynonyms #-} module Unison.Codebase.Editor.Output - ( Output(..) - , NumberedOutput(..) - , NumberedArgs - , ListDetailed - , HistoryTail(..) - , TestReportStats(..) - , UndoFailureReason(..) - , PushPull(..) - , ReflogEntry(..) - , pushPull - , isFailure - , isNumberedFailure - ) where + ( Output (..), + NumberedOutput (..), + NumberedArgs, + ListDetailed, + HistoryTail (..), + TestReportStats (..), + UndoFailureReason (..), + PushPull (..), + ReflogEntry (..), + pushPull, + isFailure, + isNumberedFailure, + ) +where -import Unison.Prelude - -import Unison.Server.Backend (ShallowListEntry(..)) -import Unison.Codebase.Editor.Input +import qualified Data.Set as Set import Unison.Codebase (GetRootBranchError) -import Unison.Codebase.Editor.SlurpResult (SlurpResult(..)) -import Unison.Codebase.Path (Path') -import Unison.Codebase.Patch (Patch) -import Unison.Codebase.Type (GitError) -import Unison.Name ( Name ) -import Unison.Names ( Names ) -import Unison.Parser.Ann (Ann) -import qualified Unison.Reference as Reference -import Unison.Reference ( Reference ) -import Unison.Referent ( Referent ) -import Unison.DataDeclaration ( Decl ) -import Unison.Util.Relation (Relation) import qualified Unison.Codebase.Branch as Branch +import Unison.Codebase.Editor.DisplayObject (DisplayObject) +import Unison.Codebase.Editor.Input +import Unison.Codebase.Editor.Output.BranchDiff (BranchDiffOutput) +import Unison.Codebase.Editor.RemoteRepo +import Unison.Codebase.Editor.SlurpResult (SlurpResult (..)) import qualified Unison.Codebase.Editor.SlurpResult as SR +import qualified Unison.Codebase.Editor.TodoOutput as TO +import Unison.Codebase.Patch (Patch) +import Unison.Codebase.Path (Path') import qualified Unison.Codebase.Path as Path +import Unison.Codebase.PushBehavior (PushBehavior) import qualified Unison.Codebase.Runtime as Runtime +import Unison.Codebase.ShortBranchHash (ShortBranchHash) +import Unison.Codebase.Type (GitError) +import Unison.DataDeclaration (Decl) import qualified Unison.HashQualified as HQ import qualified Unison.HashQualified' as HQ' +import Unison.LabeledDependency (LabeledDependency) +import Unison.Name (Name) +import Unison.NameSegment (NameSegment) +import Unison.Names (Names) +import qualified Unison.Names.ResolutionResult as Names +import qualified Unison.NamesWithHistory as Names import qualified Unison.Parser as Parser +import Unison.Parser.Ann (Ann) +import Unison.Prelude import qualified Unison.PrettyPrintEnv as PPE import qualified Unison.PrettyPrintEnvDecl as PPE -import qualified Unison.Typechecker.Context as Context -import qualified Unison.UnisonFile as UF -import qualified Unison.Util.Pretty as P -import Unison.Codebase.Editor.DisplayObject (DisplayObject) -import qualified Unison.Codebase.Editor.TodoOutput as TO +import Unison.Reference (Reference) +import qualified Unison.Reference as Reference +import Unison.Referent (Referent) +import Unison.Server.Backend (ShallowListEntry (..)) import Unison.Server.SearchResult' (SearchResult') +import Unison.ShortHash (ShortHash) import Unison.Term (Term) import Unison.Type (Type) -import qualified Unison.Names.ResolutionResult as Names -import qualified Unison.NamesWithHistory as Names -import qualified Data.Set as Set -import Unison.NameSegment (NameSegment) -import Unison.ShortHash (ShortHash) -import Unison.Codebase.ShortBranchHash (ShortBranchHash) -import Unison.Codebase.Editor.RemoteRepo -import Unison.Codebase.Editor.Output.BranchDiff (BranchDiffOutput) -import Unison.LabeledDependency (LabeledDependency) +import qualified Unison.Typechecker.Context as Context +import qualified Unison.UnisonFile as UF +import qualified Unison.Util.Pretty as P +import Unison.Util.Relation (Relation) import qualified Unison.WatchKind as WK type ListDetailed = Bool + type SourceName = Text + type NumberedArgs = [String] data PushPull = Push | Pull deriving (Eq, Ord, Show) @@ -82,24 +85,24 @@ data NumberedOutput v | ShowDiffAfterMergePreview Path.Path' Path.Absolute PPE.PrettyPrintEnv (BranchDiffOutput v Ann) | ShowDiffAfterPull Path.Path' Path.Absolute PPE.PrettyPrintEnv (BranchDiffOutput v Ann) | ShowDiffAfterCreatePR ReadRemoteNamespace ReadRemoteNamespace PPE.PrettyPrintEnv (BranchDiffOutput v Ann) - -- - | ShowDiffAfterCreateAuthor NameSegment Path.Path' Path.Absolute PPE.PrettyPrintEnv (BranchDiffOutput v Ann) + | -- + ShowDiffAfterCreateAuthor NameSegment Path.Path' Path.Absolute PPE.PrettyPrintEnv (BranchDiffOutput v Ann) -- | ShowDiff data Output v - -- Generic Success response; we might consider deleting this. - = Success - -- User did `add` or `update` before typechecking a file? - | NoUnisonFile - -- Used in Welcome module to instruct user - | PrintMessage (P.Pretty P.ColorText) + = -- Generic Success response; we might consider deleting this. + Success + | -- User did `add` or `update` before typechecking a file? + NoUnisonFile + | -- Used in Welcome module to instruct user + PrintMessage (P.Pretty P.ColorText) | InvalidSourceName String | SourceLoadFailed String - -- No main function, the [Type v Ann] are the allowed types - | NoMainFunction String PPE.PrettyPrintEnv [Type v Ann] - -- Main function found, but has improper type - | BadMainFunction String (Type v Ann) PPE.PrettyPrintEnv [Type v Ann] + | -- No main function, the [Type v Ann] are the allowed types + NoMainFunction String PPE.PrettyPrintEnv [Type v Ann] + | -- Main function found, but has improper type + BadMainFunction String (Type v Ann) PPE.PrettyPrintEnv [Type v Ann] | BranchEmpty (Either ShortBranchHash Path') | BranchNotEmpty Path' | LoadPullRequest ReadRemoteNamespace ReadRemoteNamespace Path' Path' Path' Path' @@ -127,67 +130,72 @@ data Output v | TermNotFound' ShortHash | TypeTermMismatch (HQ.HashQualified Name) (HQ.HashQualified Name) | SearchTermsNotFound [HQ.HashQualified Name] - -- ask confirmation before deleting the last branch that contains some defns - -- `Path` is one of the paths the user has requested to delete, and is paired - -- with whatever named definitions would not have any remaining names if - -- the path is deleted. - | DeleteBranchConfirmation + | -- ask confirmation before deleting the last branch that contains some defns + -- `Path` is one of the paths the user has requested to delete, and is paired + -- with whatever named definitions would not have any remaining names if + -- the path is deleted. + DeleteBranchConfirmation [(Path', (Names, [SearchResult' v Ann]))] - -- CantDelete input couldntDelete becauseTheseStillReferenceThem - | CantDelete PPE.PrettyPrintEnv [SearchResult' v Ann] [SearchResult' v Ann] + | -- CantDelete input couldntDelete becauseTheseStillReferenceThem + CantDelete PPE.PrettyPrintEnv [SearchResult' v Ann] [SearchResult' v Ann] | DeleteEverythingConfirmation | DeletedEverything - | ListNames Int -- hq length to print References - [(Reference, Set (HQ'.HashQualified Name))] -- type match, type names - [(Referent, Set (HQ'.HashQualified Name))] -- term match, term names - -- list of all the definitions within this branch + | ListNames + Int -- hq length to print References + [(Reference, Set (HQ'.HashQualified Name))] -- type match, type names + [(Referent, Set (HQ'.HashQualified Name))] -- term match, term names + -- list of all the definitions within this branch | ListOfDefinitions PPE.PrettyPrintEnv ListDetailed [SearchResult' v Ann] | ListOfLinks PPE.PrettyPrintEnv [(HQ.HashQualified Name, Reference, Maybe (Type v Ann))] | ListShallow PPE.PrettyPrintEnv [ShallowListEntry v Ann] | ListOfPatches (Set Name) - -- show the result of add/update - | SlurpOutput Input PPE.PrettyPrintEnv (SlurpResult v) - -- Original source, followed by the errors: - | ParseErrors Text [Parser.Err v] + | -- show the result of add/update + SlurpOutput Input PPE.PrettyPrintEnv (SlurpResult v) + | -- Original source, followed by the errors: + ParseErrors Text [Parser.Err v] | TypeErrors Text PPE.PrettyPrintEnv [Context.ErrorNote v Ann] | CompilerBugs Text PPE.PrettyPrintEnv [Context.CompilerBug v Ann] | DisplayConflicts (Relation Name Referent) (Relation Name Reference) | EvaluationFailure Runtime.Error - | Evaluated SourceFileContents - PPE.PrettyPrintEnv - [(v, Term v ())] - (Map v (Ann, WK.WatchKind, Term v (), Runtime.IsCacheHit)) + | Evaluated + SourceFileContents + PPE.PrettyPrintEnv + [(v, Term v ())] + (Map v (Ann, WK.WatchKind, Term v (), Runtime.IsCacheHit)) | Typechecked SourceName PPE.PrettyPrintEnv (SlurpResult v) (UF.TypecheckedUnisonFile v Ann) | DisplayRendered (Maybe FilePath) (P.Pretty P.ColorText) - -- "display" definitions, possibly to a FilePath on disk (e.g. editing) - | DisplayDefinitions (Maybe FilePath) - PPE.PrettyPrintEnvDecl - (Map Reference (DisplayObject () (Decl v Ann))) - (Map Reference (DisplayObject (Type v Ann) (Term v Ann))) - -- | Invariant: there's at least one conflict or edit in the TodoOutput. - | TodoOutput PPE.PrettyPrintEnvDecl (TO.TodoOutput v Ann) - | TestIncrementalOutputStart PPE.PrettyPrintEnv (Int,Int) Reference (Term v Ann) - | TestIncrementalOutputEnd PPE.PrettyPrintEnv (Int,Int) Reference (Term v Ann) - | TestResults TestReportStats - PPE.PrettyPrintEnv ShowSuccesses ShowFailures - [(Reference, Text)] -- oks - [(Reference, Text)] -- fails + | -- "display" definitions, possibly to a FilePath on disk (e.g. editing) + DisplayDefinitions + (Maybe FilePath) + PPE.PrettyPrintEnvDecl + (Map Reference (DisplayObject () (Decl v Ann))) + (Map Reference (DisplayObject (Type v Ann) (Term v Ann))) + | -- | Invariant: there's at least one conflict or edit in the TodoOutput. + TodoOutput PPE.PrettyPrintEnvDecl (TO.TodoOutput v Ann) + | TestIncrementalOutputStart PPE.PrettyPrintEnv (Int, Int) Reference (Term v Ann) + | TestIncrementalOutputEnd PPE.PrettyPrintEnv (Int, Int) Reference (Term v Ann) + | TestResults + TestReportStats + PPE.PrettyPrintEnv + ShowSuccesses + ShowFailures + [(Reference, Text)] -- oks + [(Reference, Text)] -- fails | CantUndo UndoFailureReason | ListEdits Patch PPE.PrettyPrintEnv - - -- new/unrepresented references followed by old/removed - -- todo: eventually replace these sets with [SearchResult' v Ann] - -- and a nicer render. - | BustedBuiltins (Set Reference) (Set Reference) - | GitError Input GitError + | -- new/unrepresented references followed by old/removed + -- todo: eventually replace these sets with [SearchResult' v Ann] + -- and a nicer render. + BustedBuiltins (Set Reference) (Set Reference) + | GitError GitError | ConfiguredMetadataParseError Path' String (P.Pretty P.ColorText) | NoConfiguredGitUrl PushPull Path' | ConfiguredGitUrlParseError PushPull Path' Text String | MetadataMissingType PPE.PrettyPrintEnv Referent | TermMissingType Reference | MetadataAmbiguous (HQ.HashQualified Name) PPE.PrettyPrintEnv [Referent] - -- todo: tell the user to run `todo` on the same patch they just used - | NothingToPatch PatchPath Path' + | -- todo: tell the user to run `todo` on the same patch they just used + NothingToPatch PatchPath Path' | PatchNeedsToBeConflictFree | PatchInvolvesExternalDependents PPE.PrettyPrintEnv (Set Reference) | WarnIncomingRootBranch ShortBranchHash (Set ShortBranchHash) @@ -197,8 +205,8 @@ data Output v | PullAlreadyUpToDate ReadRemoteNamespace Path' | MergeAlreadyUpToDate Path' Path' | PreviewMergeAlreadyUpToDate Path' Path' - -- | No conflicts or edits remain for the current patch. - | NoConflictsOrEdits + | -- | No conflicts or edits remain for the current patch. + NoConflictsOrEdits | NotImplemented | NoBranchWithHash ShortBranchHash | ListDependencies Int LabeledDependency [(Name, Reference)] (Set Reference) @@ -210,71 +218,77 @@ data Output v | DefaultMetadataNotification | BadRootBranch GetRootBranchError | CouldntLoadBranch Branch.Hash + | NamespaceEmpty (Either Path.Absolute (Path.Absolute, Path.Absolute)) | NoOp + -- Refused to push, either because a `push` targeted an empty namespace, or a `push.create` targeted a non-empty namespace. + | RefusedToPush PushBehavior deriving (Show) -data ReflogEntry = - ReflogEntry { hash :: ShortBranchHash, reason :: Text } +data ReflogEntry = ReflogEntry {hash :: ShortBranchHash, reason :: Text} deriving (Show) -data HistoryTail = - EndOfLog ShortBranchHash | - MergeTail ShortBranchHash [ShortBranchHash] | - PageEnd ShortBranchHash Int -- PageEnd nextHash nextIndex +data HistoryTail + = EndOfLog ShortBranchHash + | MergeTail ShortBranchHash [ShortBranchHash] + | PageEnd ShortBranchHash Int -- PageEnd nextHash nextIndex deriving (Show) data TestReportStats = CachedTests TotalCount CachedCount - | NewlyComputed deriving Show + | NewlyComputed + deriving (Show) type TotalCount = Int -- total number of tests + type CachedCount = Int -- number of tests found in the cache + type ShowSuccesses = Bool -- whether to list results or just summarize -type ShowFailures = Bool -- whether to list results or just summarize -data UndoFailureReason = CantUndoPastStart | CantUndoPastMerge deriving Show +type ShowFailures = Bool -- whether to list results or just summarize + +data UndoFailureReason = CantUndoPastStart | CantUndoPastMerge deriving (Show) type SourceFileContents = Text isFailure :: Ord v => Output v -> Bool isFailure o = case o of - Success{} -> False - PrintMessage{} -> False - BadRootBranch{} -> True - CouldntLoadBranch{} -> True - NoUnisonFile{} -> True - InvalidSourceName{} -> True - SourceLoadFailed{} -> True - NoMainFunction{} -> True - BadMainFunction{} -> True - CreatedNewBranch{} -> False - BranchAlreadyExists{} -> True - PatchAlreadyExists{} -> True + Success {} -> False + PrintMessage {} -> False + BadRootBranch {} -> True + CouldntLoadBranch {} -> True + NoUnisonFile {} -> True + InvalidSourceName {} -> True + SourceLoadFailed {} -> True + NoMainFunction {} -> True + BadMainFunction {} -> True + CreatedNewBranch {} -> False + BranchAlreadyExists {} -> True + PatchAlreadyExists {} -> True NoExactTypeMatches -> True - BranchEmpty{} -> True - BranchNotEmpty{} -> True - TypeAlreadyExists{} -> True - TypeParseError{} -> True - ParseResolutionFailures{} -> True - TypeHasFreeVars{} -> True - TermAlreadyExists{} -> True - LabeledReferenceAmbiguous{} -> True - LabeledReferenceNotFound{} -> True - DeleteNameAmbiguous{} -> True - TermAmbiguous{} -> True - BranchHashAmbiguous{} -> True - BadName{} -> True - BranchNotFound{} -> True - NameNotFound{} -> True - PatchNotFound{} -> True - TypeNotFound{} -> True - TypeNotFound'{} -> True - TermNotFound{} -> True - TermNotFound'{} -> True - TypeTermMismatch{} -> True + BranchEmpty {} -> True + BranchNotEmpty {} -> True + TypeAlreadyExists {} -> True + TypeParseError {} -> True + ParseResolutionFailures {} -> True + TypeHasFreeVars {} -> True + TermAlreadyExists {} -> True + LabeledReferenceAmbiguous {} -> True + LabeledReferenceNotFound {} -> True + DeleteNameAmbiguous {} -> True + TermAmbiguous {} -> True + BranchHashAmbiguous {} -> True + BadName {} -> True + BranchNotFound {} -> True + NameNotFound {} -> True + PatchNotFound {} -> True + TypeNotFound {} -> True + TypeNotFound' {} -> True + TermNotFound {} -> True + TermNotFound' {} -> True + TypeTermMismatch {} -> True SearchTermsNotFound ts -> not (null ts) - DeleteBranchConfirmation{} -> False - CantDelete{} -> True + DeleteBranchConfirmation {} -> False + CantDelete {} -> True DeleteEverythingConfirmation -> False DeletedEverything -> False ListNames _ tys tms -> null tms && null tys @@ -282,64 +296,65 @@ isFailure o = case o of ListOfDefinitions _ _ ds -> null ds ListOfPatches s -> Set.null s SlurpOutput _ _ sr -> not $ SR.isOk sr - ParseErrors{} -> True - TypeErrors{} -> True - CompilerBugs{} -> True - DisplayConflicts{} -> False - EvaluationFailure{} -> True - Evaluated{} -> False - Typechecked{} -> False + ParseErrors {} -> True + TypeErrors {} -> True + CompilerBugs {} -> True + DisplayConflicts {} -> False + EvaluationFailure {} -> True + Evaluated {} -> False + Typechecked {} -> False DisplayDefinitions _ _ m1 m2 -> null m1 && null m2 - DisplayRendered{} -> False + DisplayRendered {} -> False TodoOutput _ todo -> TO.todoScore todo > 0 || not (TO.noConflicts todo) - TestIncrementalOutputStart{} -> False - TestIncrementalOutputEnd{} -> False + TestIncrementalOutputStart {} -> False + TestIncrementalOutputEnd {} -> False TestResults _ _ _ _ _ fails -> not (null fails) - CantUndo{} -> True - ListEdits{} -> False - GitError{} -> True - BustedBuiltins{} -> True - ConfiguredMetadataParseError{} -> True - NoConfiguredGitUrl{} -> True - ConfiguredGitUrlParseError{} -> True - MetadataMissingType{} -> True - MetadataAmbiguous{} -> True - PatchNeedsToBeConflictFree{} -> True - PatchInvolvesExternalDependents{} -> True - NothingToPatch{} -> False - WarnIncomingRootBranch{} -> False - History{} -> False + CantUndo {} -> True + ListEdits {} -> False + GitError {} -> True + BustedBuiltins {} -> True + ConfiguredMetadataParseError {} -> True + NoConfiguredGitUrl {} -> True + ConfiguredGitUrlParseError {} -> True + MetadataMissingType {} -> True + MetadataAmbiguous {} -> True + PatchNeedsToBeConflictFree {} -> True + PatchInvolvesExternalDependents {} -> True + NothingToPatch {} -> False + WarnIncomingRootBranch {} -> False + History {} -> False StartOfCurrentPathHistory -> True NotImplemented -> True - DumpNumberedArgs{} -> False - DumpBitBooster{} -> False - NoBranchWithHash{} -> True - PullAlreadyUpToDate{} -> False - MergeAlreadyUpToDate{} -> False - PreviewMergeAlreadyUpToDate{} -> False - NoConflictsOrEdits{} -> False + DumpNumberedArgs {} -> False + DumpBitBooster {} -> False + NoBranchWithHash {} -> True + PullAlreadyUpToDate {} -> False + MergeAlreadyUpToDate {} -> False + PreviewMergeAlreadyUpToDate {} -> False + NoConflictsOrEdits {} -> False ListShallow _ es -> null es - HashAmbiguous{} -> True - ShowReflog{} -> False - LoadPullRequest{} -> False + HashAmbiguous {} -> True + ShowReflog {} -> False + LoadPullRequest {} -> False DefaultMetadataNotification -> False NoOp -> False - ListDependencies{} -> False - ListDependents{} -> False - TermMissingType{} -> True + ListDependencies {} -> False + ListDependents {} -> False + TermMissingType {} -> True DumpUnisonFileHashes _ x y z -> x == mempty && y == mempty && z == mempty + NamespaceEmpty _ -> False + RefusedToPush{} -> True isNumberedFailure :: NumberedOutput v -> Bool isNumberedFailure = \case - ShowDiffNamespace{} -> False - ShowDiffAfterDeleteDefinitions{} -> False - ShowDiffAfterDeleteBranch{} -> False - ShowDiffAfterModifyBranch{} -> False - ShowDiffAfterMerge{} -> False - ShowDiffAfterMergePropagate{} -> False - ShowDiffAfterMergePreview{} -> False - ShowDiffAfterUndo{} -> False - ShowDiffAfterPull{} -> False - ShowDiffAfterCreatePR{} -> False - ShowDiffAfterCreateAuthor{} -> False - + ShowDiffNamespace {} -> False + ShowDiffAfterDeleteDefinitions {} -> False + ShowDiffAfterDeleteBranch {} -> False + ShowDiffAfterModifyBranch {} -> False + ShowDiffAfterMerge {} -> False + ShowDiffAfterMergePropagate {} -> False + ShowDiffAfterMergePreview {} -> False + ShowDiffAfterUndo {} -> False + ShowDiffAfterPull {} -> False + ShowDiffAfterCreatePR {} -> False + ShowDiffAfterCreateAuthor {} -> False diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Output/BranchDiff.hs b/unison-cli/src/Unison/Codebase/Editor/Output/BranchDiff.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/Output/BranchDiff.hs rename to unison-cli/src/Unison/Codebase/Editor/Output/BranchDiff.hs diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Output/DumpNamespace.hs b/unison-cli/src/Unison/Codebase/Editor/Output/DumpNamespace.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/Output/DumpNamespace.hs rename to unison-cli/src/Unison/Codebase/Editor/Output/DumpNamespace.hs diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs b/unison-cli/src/Unison/Codebase/Editor/Propagate.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/Propagate.hs rename to unison-cli/src/Unison/Codebase/Editor/Propagate.hs diff --git a/parser-typechecker/src/Unison/Codebase/Editor/SlurpComponent.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/SlurpComponent.hs rename to unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs diff --git a/parser-typechecker/src/Unison/Codebase/Editor/SlurpResult.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/SlurpResult.hs rename to unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs diff --git a/parser-typechecker/src/Unison/Codebase/Editor/TodoOutput.hs b/unison-cli/src/Unison/Codebase/Editor/TodoOutput.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/TodoOutput.hs rename to unison-cli/src/Unison/Codebase/Editor/TodoOutput.hs diff --git a/parser-typechecker/src/Unison/Codebase/Editor/UriParser.hs b/unison-cli/src/Unison/Codebase/Editor/UriParser.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/UriParser.hs rename to unison-cli/src/Unison/Codebase/Editor/UriParser.hs diff --git a/parser-typechecker/src/Unison/Codebase/Editor/VersionParser.hs b/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs similarity index 100% rename from parser-typechecker/src/Unison/Codebase/Editor/VersionParser.hs rename to unison-cli/src/Unison/Codebase/Editor/VersionParser.hs diff --git a/parser-typechecker/src/Unison/Codebase/TranscriptParser.hs b/unison-cli/src/Unison/Codebase/TranscriptParser.hs similarity index 98% rename from parser-typechecker/src/Unison/Codebase/TranscriptParser.hs rename to unison-cli/src/Unison/Codebase/TranscriptParser.hs index 3624789d4a..d4b2fb02c2 100644 --- a/parser-typechecker/src/Unison/Codebase/TranscriptParser.hs +++ b/unison-cli/src/Unison/Codebase/TranscriptParser.hs @@ -120,8 +120,8 @@ parse srcName txt = case P.parse (stanzas <* P.eof) srcName txt of Right a -> Right a Left e -> Left (show e) -run :: FilePath -> FilePath -> [Stanza] -> Codebase IO Symbol Ann -> IO Text -run dir configFile stanzas codebase = do +run :: String -> FilePath -> FilePath -> [Stanza] -> Codebase IO Symbol Ann -> IO Text +run version dir configFile stanzas codebase = do let initialPath = Path.absoluteEmpty putPrettyLn $ P.lines [ asciiartUnison, "", @@ -144,7 +144,7 @@ run dir configFile stanzas codebase = do (config, cancelConfig) <- catchIOError (watchConfig configFile) $ \_ -> die "Your .unisonConfig could not be loaded. Check that it's correct!" - runtime <- RTI.startRuntime "" + runtime <- RTI.startRuntime version traverse_ (atomically . Q.enqueue inputQueue) (stanzas `zip` [1..]) let patternMap = Map.fromList diff --git a/parser-typechecker/src/Unison/CommandLine.hs b/unison-cli/src/Unison/CommandLine.hs similarity index 85% rename from parser-typechecker/src/Unison/CommandLine.hs rename to unison-cli/src/Unison/CommandLine.hs index 2e6ab43e99..3c3609dd61 100644 --- a/parser-typechecker/src/Unison/CommandLine.hs +++ b/unison-cli/src/Unison/CommandLine.hs @@ -33,17 +33,12 @@ module Unison.CommandLine , watchBranchUpdates , watchConfig , watchFileSystem - -- * Exported for testing - , beforeHash ) where import Unison.Prelude import Control.Concurrent (forkIO, killThread) import Control.Concurrent.STM (atomically) -import qualified Control.Monad.Extra as Monad -import qualified Control.Monad.Reader as Reader -import qualified Control.Monad.State as State import Data.Configurator (autoReload, autoConfig) import Data.Configurator.Types (Config, Worth (..)) import Data.List (isSuffixOf, isPrefixOf) @@ -57,7 +52,6 @@ import System.FilePath ( takeFileName ) import Unison.Codebase (Codebase) import qualified Unison.Codebase as Codebase import qualified Unison.Codebase.Branch as Branch -import Unison.Codebase.Causal ( Causal ) import qualified Unison.Codebase.Causal as Causal import Unison.Codebase.Editor.Input (Event(..), Input(..)) import qualified Unison.Server.SearchResult as SR @@ -72,7 +66,7 @@ import qualified Unison.Util.Pretty as P import Unison.Util.TQueue (TQueue) import qualified Unison.Util.TQueue as Q import qualified Data.Configurator as Config -import Control.Lens (ifoldMap) +import Control.Lens (ifor) import qualified Unison.CommandLine.Globbing as Globbing import qualified Unison.CommandLine.InputPattern as InputPattern import Unison.Codebase.Branch (Branch0) @@ -119,31 +113,13 @@ watchBranchUpdates currentRoot q codebase = do -- heuristic. If a fairly recent head gets deposited at just the right -- time, it would get ignored by this logic. This seems unavoidable. let maxDepth = 20 -- if it's further back than this, consider it new - let isNew b = not <$> beforeHash maxDepth b (Branch._history currentRoot) + let isNew b = not <$> Causal.beforeHash maxDepth b (Branch._history currentRoot) notBefore <- filterM isNew (toList updatedBranches) when (length notBefore > 0) $ atomically . Q.enqueue q . IncomingRootBranch $ Set.fromList notBefore pure (cancelExternalBranchUpdates >> killThread thread) --- `True` if `h` is found in the history of `c` within `maxDepth` path length --- from the tip of `c` -beforeHash :: forall m h e . Monad m => Word -> Causal.RawHash h -> Causal m h e -> m Bool -beforeHash maxDepth h c = - Reader.runReaderT (State.evalStateT (go c) Set.empty) (0 :: Word) - where - go c | h == Causal.currentHash c = pure True - go c = do - currentDepth :: Word <- Reader.ask - if currentDepth >= maxDepth - then pure False - else do - seen <- State.get - cs <- lift . lift $ toList <$> sequence (Causal.children c) - let unseens = filter (\c -> c `Set.notMember` seen) cs - State.modify' (<> Set.fromList cs) - Monad.anyM (Reader.local (1+) . go) unseens - warnNote :: String -> String warnNote s = "⚠️ " <> s @@ -218,7 +194,7 @@ exactComplete q ss = go <$> filter (isPrefixOf q) ss where go s = prettyCompletionWithQueryPrefix (s == q) q s --- | Completes a list of options, limiting options to the same namespace as the query, +-- | Completes a list of options, limiting options to the same namespace as the query, -- or the namespace's children if the query is itself a namespace. -- -- E.g. @@ -257,26 +233,35 @@ fixupCompletion q cs@(h:t) = let then [ c { Line.replacement = q } | c <- cs ] else cs -parseInput - :: Branch0 m -- ^ Root branch, used to expand globs - -> Path.Absolute -- ^ Current path from root, used to expand globs - -> [String] -- ^ Numbered arguments - -> Map String InputPattern -- ^ Input Pattern Map - -> [String] -- ^ command:arguments - -> Either (P.Pretty CT.ColorText) Input +parseInput :: + -- | Root branch, used to expand globs + Branch0 m -> + -- | Current path from root, used to expand globs + Path.Absolute -> + -- | Numbered arguments + [String] -> + -- | Input Pattern Map + Map String InputPattern -> + -- | command:arguments + [String] -> + Either (P.Pretty CT.ColorText) Input parseInput rootBranch currentPath numberedArgs patterns segments = do case segments of [] -> Left "" command : args -> case Map.lookup command patterns of Just pat@(InputPattern {parse}) -> do - let expandedArgs :: [String] - expandedArgs = foldMap (expandNumber numberedArgs) args - parse $ - flip ifoldMap expandedArgs $ \i arg -> do - let targets = case InputPattern.argType pat i of - Just argT -> InputPattern.globTargets argT - Nothing -> mempty - Globbing.expandGlobs targets rootBranch currentPath arg + let expandedNumbers :: [String] + expandedNumbers = foldMap (expandNumber numberedArgs) args + expandedGlobs <- ifor expandedNumbers $ \i arg -> do + let targets = case InputPattern.argType pat i of + Just argT -> InputPattern.globTargets argT + Nothing -> mempty + case Globbing.expandGlobs targets rootBranch currentPath arg of + -- No globs encountered + Nothing -> pure [arg] + Just [] -> Left $ "No matches for: " <> fromString arg + Just matches -> pure matches + parse (concat expandedGlobs) Nothing -> Left . warn diff --git a/parser-typechecker/src/Unison/CommandLine/DisplayValues.hs b/unison-cli/src/Unison/CommandLine/DisplayValues.hs similarity index 100% rename from parser-typechecker/src/Unison/CommandLine/DisplayValues.hs rename to unison-cli/src/Unison/CommandLine/DisplayValues.hs diff --git a/parser-typechecker/src/Unison/CommandLine/FuzzySelect.hs b/unison-cli/src/Unison/CommandLine/FuzzySelect.hs similarity index 100% rename from parser-typechecker/src/Unison/CommandLine/FuzzySelect.hs rename to unison-cli/src/Unison/CommandLine/FuzzySelect.hs diff --git a/parser-typechecker/src/Unison/CommandLine/Globbing.hs b/unison-cli/src/Unison/CommandLine/Globbing.hs similarity index 87% rename from parser-typechecker/src/Unison/CommandLine/Globbing.hs rename to unison-cli/src/Unison/CommandLine/Globbing.hs index 0016ef0b60..c51f3de131 100644 --- a/parser-typechecker/src/Unison/CommandLine/Globbing.hs +++ b/unison-cli/src/Unison/CommandLine/Globbing.hs @@ -15,14 +15,13 @@ import Control.Lens as Lens hiding (noneOf) import qualified Unison.Codebase.Branch as Branch import qualified Unison.NameSegment as NameSegment import qualified Data.Text as Text -import qualified Data.Maybe as Maybe import qualified Unison.Util.Star3 as Star3 import qualified Unison.Util.Relation as Relation import qualified Data.Set as Set import Data.Set (Set) import qualified Unison.Util.Monoid as Monoid import qualified Data.Either as Either -import Control.Monad (guard) +import Control.Monad (when) -- | Possible targets which a glob may select. data TargetType @@ -92,26 +91,39 @@ expandGlobToNameSegments targets branch globPath = matchingChildBranches :: (NameSegment -> Bool) -> IndexedTraversal' NameSegment (Branch0 m) (Branch0 m) matchingChildBranches keyPredicate = Branch.children0 . indices keyPredicate +data GlobFailure = NoGlobs | NoTargets + -- | Expand a single glob pattern into all matching targets of the specified types. -expandGlobs :: forall m. Set TargetType - -> Branch0 m -- ^ Root branch - -> Path.Absolute -- ^ UCM's current path - -> String -- ^ The glob string, e.g. .base.List.?.doc - -> [String] -- ^ Fully expanded, absolute paths. E.g. [".base.List.map"] -expandGlobs targets rootBranch currentPath s = Maybe.fromMaybe [s] $ do - guard (not . null $ targets) +expandGlobs :: + forall m. + Set TargetType -> + -- | Root branch + Branch0 m -> + -- | UCM's current path + Path.Absolute -> + -- | The glob string, e.g. .base.List.?.doc + String -> + -- | Nothing if arg was not a glob. + -- otherwise, fully expanded, absolute paths. E.g. [".base.List.map"] + Maybe [String] +expandGlobs targets rootBranch currentPath s = either recover Just $ do let (isAbsolute, globPath) = globbedPathParser (Text.pack s) -- If we don't have any actual globs, we can fail to fall back to the original argument. - guard (any Either.isRight globPath) + when (not . any Either.isRight $ globPath) (Left NoGlobs) + when (null targets) (Left NoTargets) let currentBranch :: Branch0 m currentBranch | isAbsolute = rootBranch | otherwise = Branch.getAt0 (Path.unabsolute currentPath) rootBranch let paths = expandGlobToPaths targets globPath currentBranch - let relocatedPaths | isAbsolute = (Path.Absolute . Path.unrelative) <$> paths - | otherwise = Path.resolve currentPath <$> paths + let relocatedPaths + | isAbsolute = (Path.Absolute . Path.unrelative) <$> paths + | otherwise = Path.resolve currentPath <$> paths pure (Path.convert <$> relocatedPaths) - + where + recover = \case + NoGlobs -> Nothing + NoTargets -> Just [] -- | Parses a single name segment into a GlobArg or a bare segment according to whether -- there's a glob. diff --git a/parser-typechecker/src/Unison/CommandLine/InputPattern.hs b/unison-cli/src/Unison/CommandLine/InputPattern.hs similarity index 100% rename from parser-typechecker/src/Unison/CommandLine/InputPattern.hs rename to unison-cli/src/Unison/CommandLine/InputPattern.hs diff --git a/parser-typechecker/src/Unison/CommandLine/InputPatterns.hs b/unison-cli/src/Unison/CommandLine/InputPatterns.hs similarity index 96% rename from parser-typechecker/src/Unison/CommandLine/InputPatterns.hs rename to unison-cli/src/Unison/CommandLine/InputPatterns.hs index 9fe90d4e84..c2e9ab2df8 100644 --- a/parser-typechecker/src/Unison/CommandLine/InputPatterns.hs +++ b/unison-cli/src/Unison/CommandLine/InputPatterns.hs @@ -10,7 +10,6 @@ import Data.List.Extra (nubOrdOn) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as Text -import Data.Tuple.Extra (uncurry3) import System.Console.Haskeline.Completion (Completion (Completion)) import qualified System.Console.Haskeline.Completion as Completion import qualified Text.Megaparsec as P @@ -21,11 +20,11 @@ import qualified Unison.Codebase.Branch.Names as Branch import Unison.Codebase.Editor.Input (Input) import qualified Unison.Codebase.Editor.Input as Input import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRemotePath, WriteRepo) -import qualified Unison.Codebase.Editor.RemoteRepo as RemoteRepo import qualified Unison.Codebase.Editor.SlurpResult as SR import qualified Unison.Codebase.Editor.UriParser as UriParser import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.Path.Parse as Path +import qualified Unison.Codebase.PushBehavior as PushBehavior import qualified Unison.Codebase.SyncMode as SyncMode import Unison.Codebase.Verbosity (Verbosity) import qualified Unison.Codebase.Verbosity as Verbosity @@ -977,14 +976,59 @@ push = ) ( \case [] -> - Right $ Input.PushRemoteBranchI Nothing Path.relativeEmpty' SyncMode.ShortCircuit + Right $ Input.PushRemoteBranchI Nothing Path.relativeEmpty' PushBehavior.RequireNonEmpty SyncMode.ShortCircuit url : rest -> do (repo, path) <- parsePushPath "url" url p <- case rest of [] -> Right Path.relativeEmpty' [path] -> first fromString $ Path.parsePath' path _ -> Left (I.help push) - Right $ Input.PushRemoteBranchI (Just (repo, path)) p SyncMode.ShortCircuit + Right $ Input.PushRemoteBranchI (Just (repo, path)) p PushBehavior.RequireNonEmpty SyncMode.ShortCircuit + ) + +pushCreate :: InputPattern +pushCreate = + InputPattern + "push.create" + [] + [(Required, gitUrlArg), (Optional, namespaceArg)] + ( P.lines + [ P.wrap + "The `push.create` command pushes a local namespace to an empty remote namespace.", + "", + P.wrapColumn2 + [ ( "`push.create remote local`", + "pushes the contents of the local namespace `local`" + <> "into the empty remote namespace `remote`." + ), + ( "`push remote`", + "publishes the current namespace into the empty remote namespace `remote`" + ), + ( "`push`", + "publishes the current namespace" + <> "into the empty remote namespace configured in `.unisonConfig`" + <> "with the key `GitUrl.ns` where `ns` is the current namespace" + ) + ], + "", + P.wrap "where `remote` is a git repository, optionally followed by `:`" + <> "and an absolute remote path, such as:", + P.indentN 2 . P.lines $ + [ P.backticked "https://github.com/org/repo", + P.backticked "https://github.com/org/repo:.some.remote.path" + ] + ] + ) + ( \case + [] -> + Right $ Input.PushRemoteBranchI Nothing Path.relativeEmpty' PushBehavior.RequireEmpty SyncMode.ShortCircuit + url : rest -> do + (repo, path) <- parsePushPath "url" url + p <- case rest of + [] -> Right Path.relativeEmpty' + [path] -> first fromString $ Path.parsePath' path + _ -> Left (I.help push) + Right $ Input.PushRemoteBranchI (Just (repo, path)) p PushBehavior.RequireEmpty SyncMode.ShortCircuit ) pushExhaustive :: InputPattern @@ -1004,14 +1048,14 @@ pushExhaustive = ) ( \case [] -> - Right $ Input.PushRemoteBranchI Nothing Path.relativeEmpty' SyncMode.Complete + Right $ Input.PushRemoteBranchI Nothing Path.relativeEmpty' PushBehavior.RequireNonEmpty SyncMode.Complete url : rest -> do (repo, path) <- parsePushPath "url" url p <- case rest of [] -> Right Path.relativeEmpty' [path] -> first fromString $ Path.parsePath' path _ -> Left (I.help push) - Right $ Input.PushRemoteBranchI (Just (repo, path)) p SyncMode.Complete + Right $ Input.PushRemoteBranchI (Just (repo, path)) p PushBehavior.RequireNonEmpty SyncMode.Complete ) createPullRequest :: InputPattern @@ -1661,16 +1705,19 @@ execute = InputPattern "run" [] - [] + [(Required, exactDefinitionTermQueryArg), (ZeroPlus, noCompletions)] ( P.wrapColumn2 - [ ( "`run mymain`", + [ ( "`run mymain args...`", "Runs `!mymain`, where `mymain` is searched for in the most recent" <> "typechecked file, or in the codebase." + <> "Any provided arguments will be passed as program arguments as though they were" + <> "provided at the command line when running mymain as an executable." ) ] ) ( \case - [w] -> pure . Input.ExecuteI $ w + [w] -> pure $ Input.ExecuteI w [] + (w : ws) -> pure $ Input.ExecuteI w ws _ -> Left $ showPatternHelp execute ) @@ -1756,6 +1803,7 @@ validInputs = diffNamespace, names, push, + pushCreate, pull, pullSilent, pushExhaustive, @@ -1949,8 +1997,8 @@ pathCompletor :: pathCompletor filterQuery getNames query _code b p = let b0root = Branch.head b b0local = Branch.getAt0 (Path.unabsolute p) b0root - -- todo: if these sets are huge, maybe trim results - in pure . filterQuery query . map Text.unpack $ + in -- todo: if these sets are huge, maybe trim results + pure . filterQuery query . map Text.unpack $ toList (getNames b0local) ++ if "." `isPrefixOf` query then map ("." <>) (toList (getNames b0root)) @@ -2010,26 +2058,3 @@ gitUrlArg = collectNothings :: (a -> Maybe b) -> [a] -> [a] collectNothings f as = [a | (Nothing, a) <- map f as `zip` as] - -patternFromInput :: Input -> InputPattern -patternFromInput = \case - Input.PushRemoteBranchI _ _ SyncMode.ShortCircuit -> push - Input.PushRemoteBranchI _ _ SyncMode.Complete -> pushExhaustive - Input.PullRemoteBranchI _ _ SyncMode.ShortCircuit Verbosity.Default -> pull - Input.PullRemoteBranchI _ _ SyncMode.ShortCircuit Verbosity.Silent -> pullSilent - Input.PullRemoteBranchI _ _ SyncMode.Complete _ -> pushExhaustive - _ -> error "todo: finish this function" - -inputStringFromInput :: IsString s => Input -> P.Pretty s -inputStringFromInput = \case - i@(Input.PushRemoteBranchI rh p' _) -> - (P.string . I.patternName $ patternFromInput i) - <> (" " <> maybe mempty (P.text . uncurry RemoteRepo.printHead) rh) - <> " " - <> P.shown p' - i@(Input.PullRemoteBranchI ns p' _ _) -> - (P.string . I.patternName $ patternFromInput i) - <> (" " <> maybe mempty (P.text . uncurry3 RemoteRepo.printNamespace) ns) - <> " " - <> P.shown p' - _ -> error "todo: finish this function" diff --git a/parser-typechecker/src/Unison/CommandLine/Main.hs b/unison-cli/src/Unison/CommandLine/Main.hs similarity index 100% rename from parser-typechecker/src/Unison/CommandLine/Main.hs rename to unison-cli/src/Unison/CommandLine/Main.hs diff --git a/unison-cli/src/Unison/CommandLine/OutputMessages.hs b/unison-cli/src/Unison/CommandLine/OutputMessages.hs new file mode 100644 index 0000000000..cbc0bd17c6 --- /dev/null +++ b/unison-cli/src/Unison/CommandLine/OutputMessages.hs @@ -0,0 +1,2525 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} +{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-} + +module Unison.CommandLine.OutputMessages where + +import Control.Lens +import qualified Control.Monad.State.Strict as State +import Data.Bifunctor (first, second) +import Data.List (sort, stripPrefix) +import Data.List.Extra (notNull, nubOrd, nubOrdOn) +import qualified Data.Map as Map +import qualified Data.Sequence as Seq +import qualified Data.Set as Set +import qualified Data.Text as Text +import Data.Text.IO (readFile, writeFile) +import Data.Tuple (swap) +import Data.Tuple.Extra (dupe, uncurry3) +import System.Directory + ( canonicalizePath, + doesFileExist, + getHomeDirectory, + ) +import U.Codebase.Sqlite.DbId (SchemaVersion (SchemaVersion)) +import qualified Unison.ABT as ABT +import qualified Unison.Builtin.Decls as DD +import qualified Unison.Codebase as Codebase +import qualified Unison.Codebase.Causal as Causal +import Unison.Codebase.Editor.DisplayObject (DisplayObject (BuiltinObject, MissingObject, UserObject)) +import qualified Unison.Codebase.Editor.Input as Input +import Unison.Codebase.Editor.Output +import qualified Unison.Codebase.Editor.Output as E +import qualified Unison.Codebase.Editor.Output as Output +import qualified Unison.Codebase.Editor.Output.BranchDiff as OBD +import Unison.Codebase.Editor.RemoteRepo (ReadRepo, WriteRepo) +import qualified Unison.Codebase.Editor.RemoteRepo as RemoteRepo +import qualified Unison.Codebase.Editor.SlurpResult as SlurpResult +import qualified Unison.Codebase.Editor.TodoOutput as TO +import Unison.Codebase.GitError +import Unison.Codebase.Patch (Patch (..)) +import qualified Unison.Codebase.Patch as Patch +import qualified Unison.Codebase.Path as Path +import qualified Unison.Codebase.PushBehavior as PushBehavior +import qualified Unison.Codebase.Runtime as Runtime +import Unison.Codebase.ShortBranchHash (ShortBranchHash) +import qualified Unison.Codebase.ShortBranchHash as SBH +import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError (GitCouldntParseRootBranchHash, UnrecognizedSchemaVersion)) +import qualified Unison.Codebase.TermEdit as TermEdit +import Unison.Codebase.Type (GitError (GitCodebaseError, GitProtocolError, GitSqliteCodebaseError)) +import qualified Unison.Codebase.TypeEdit as TypeEdit +import Unison.CommandLine + ( bigproblem, + note, + tip, + ) +import Unison.CommandLine.InputPatterns (makeExample, makeExample') +import qualified Unison.CommandLine.InputPatterns as IP +import qualified Unison.DataDeclaration as DD +import qualified Unison.DeclPrinter as DeclPrinter +import qualified Unison.Hash as Hash +import qualified Unison.HashQualified as HQ +import qualified Unison.HashQualified' as HQ' +import Unison.LabeledDependency as LD +import Unison.Name (Name) +import qualified Unison.Name as Name +import Unison.NamePrinter + ( prettyHashQualified, + prettyHashQualified', + prettyLabeledDependency, + prettyName, + prettyNamedReference, + prettyNamedReferent, + prettyReference, + prettyReferent, + prettyShortHash, + styleHashQualified, + styleHashQualified', + ) +import Unison.Names (Names (..)) +import qualified Unison.Names as Names +import qualified Unison.NamesWithHistory as Names +import Unison.Parser.Ann (Ann, startingLine) +import Unison.Prelude hiding (unlessM) +import qualified Unison.PrettyPrintEnv as PPE +import qualified Unison.PrettyPrintEnv.Util as PPE +import qualified Unison.PrettyPrintEnvDecl as PPE +import Unison.PrettyTerminal + ( clearCurrentLine, + putPretty', + ) +import Unison.PrintError + ( prettyParseError, + prettyResolutionFailures, + printNoteWithSource, + renderCompilerBug, + ) +import Unison.Reference (Reference) +import qualified Unison.Reference as Reference +import Unison.Referent (Referent) +import qualified Unison.Referent as Referent +import qualified Unison.Referent' as Referent +import qualified Unison.Result as Result +import Unison.Server.Backend (ShallowListEntry (..), TermEntry (..), TypeEntry (..)) +import qualified Unison.Server.SearchResult' as SR' +import qualified Unison.ShortHash as SH +import Unison.Term (Term) +import qualified Unison.Term as Term +import qualified Unison.TermPrinter as TermPrinter +import Unison.Type (Type) +import qualified Unison.TypePrinter as TypePrinter +import qualified Unison.UnisonFile as UF +import qualified Unison.Util.ColorText as CT +import qualified Unison.Util.List as List +import Unison.Util.Monoid + ( intercalateMap, + unlessM, + ) +import qualified Unison.Util.Pretty as P +import qualified Unison.Util.Relation as R +import Unison.Var (Var) +import qualified Unison.Var as Var +import qualified Unison.WatchKind as WK +import Prelude hiding (readFile, writeFile) + +type Pretty = P.Pretty P.ColorText + +shortenDirectory :: FilePath -> IO FilePath +shortenDirectory dir = do + home <- getHomeDirectory + pure $ case stripPrefix home dir of + Just d -> "~" <> d + Nothing -> dir + +renderFileName :: FilePath -> IO Pretty +renderFileName dir = P.group . P.blue . fromString <$> shortenDirectory dir + +notifyNumbered :: Var v => NumberedOutput v -> (Pretty, NumberedArgs) +notifyNumbered o = case o of + ShowDiffNamespace oldPrefix newPrefix ppe diffOutput -> + showDiffNamespace ShowNumbers ppe oldPrefix newPrefix diffOutput + ShowDiffAfterDeleteDefinitions ppe diff -> + first + ( \p -> + P.lines + [ p, + "", + undoTip + ] + ) + (showDiffNamespace ShowNumbers ppe e e diff) + ShowDiffAfterDeleteBranch bAbs ppe diff -> + first + ( \p -> + P.lines + [ p, + "", + undoTip + ] + ) + (showDiffNamespace ShowNumbers ppe bAbs bAbs diff) + ShowDiffAfterModifyBranch b' _ _ (OBD.isEmpty -> True) -> + (P.wrap $ "Nothing changed in" <> prettyPath' b' <> ".", mempty) + ShowDiffAfterModifyBranch b' bAbs ppe diff -> + first + ( \p -> + P.lines + [ P.wrap $ "Here's what changed in" <> prettyPath' b' <> ":", + "", + p, + "", + undoTip + ] + ) + (showDiffNamespace ShowNumbers ppe bAbs bAbs diff) + ShowDiffAfterMerge _ _ _ (OBD.isEmpty -> True) -> + (P.wrap $ "Nothing changed as a result of the merge.", mempty) + ShowDiffAfterMerge dest' destAbs ppe diffOutput -> + first + ( \p -> + P.lines + [ P.wrap $ "Here's what's changed in " <> prettyPath' dest' <> "after the merge:", + "", + p, + "", + tip $ + "You can use " <> IP.makeExample' IP.todo + <> "to see if this generated any work to do in this namespace" + <> "and " + <> IP.makeExample' IP.test + <> "to run the tests." + <> "Or you can use" + <> IP.makeExample' IP.undo + <> " or" + <> IP.makeExample' IP.viewReflog + <> " to undo the results of this merge." + ] + ) + (showDiffNamespace ShowNumbers ppe destAbs destAbs diffOutput) + ShowDiffAfterMergePropagate dest' destAbs patchPath' ppe diffOutput -> + first + ( \p -> + P.lines + [ P.wrap $ + "Here's what's changed in " <> prettyPath' dest' + <> "after applying the patch at " + <> P.group (prettyPath' patchPath' <> ":"), + "", + p, + "", + tip $ + "You can use " + <> IP.makeExample IP.todo [prettyPath' patchPath', prettyPath' dest'] + <> "to see if this generated any work to do in this namespace" + <> "and " + <> IP.makeExample' IP.test + <> "to run the tests." + <> "Or you can use" + <> IP.makeExample' IP.undo + <> " or" + <> IP.makeExample' IP.viewReflog + <> " to undo the results of this merge." + ] + ) + (showDiffNamespace ShowNumbers ppe destAbs destAbs diffOutput) + ShowDiffAfterMergePreview dest' destAbs ppe diffOutput -> + first + ( \p -> + P.lines + [ P.wrap $ "Here's what would change in " <> prettyPath' dest' <> "after the merge:", + "", + p + ] + ) + (showDiffNamespace ShowNumbers ppe destAbs destAbs diffOutput) + ShowDiffAfterUndo ppe diffOutput -> + first + (\p -> P.lines ["Here are the changes I undid", "", p]) + (showDiffNamespace ShowNumbers ppe e e diffOutput) + ShowDiffAfterPull dest' destAbs ppe diff -> + if OBD.isEmpty diff + then ("✅ Looks like " <> prettyPath' dest' <> " is up to date.", mempty) + else + first + ( \p -> + P.lines + [ P.wrap $ "Here's what's changed in " <> prettyPath' dest' <> "after the pull:", + "", + p, + "", + undoTip + ] + ) + (showDiffNamespace ShowNumbers ppe destAbs destAbs diff) + ShowDiffAfterCreatePR baseRepo headRepo ppe diff -> + if OBD.isEmpty diff + then + ( P.wrap $ + "Looks like there's no difference between " + <> prettyRemoteNamespace baseRepo + <> "and" + <> prettyRemoteNamespace headRepo + <> ".", + mempty + ) + else + first + ( \p -> + ( P.lines + [ P.wrap $ + "The changes summarized below are available for you to review," + <> "using the following command:", + "", + P.indentN 2 $ + IP.makeExampleNoBackticks + IP.loadPullRequest + [ (prettyRemoteNamespace baseRepo), + (prettyRemoteNamespace headRepo) + ], + "", + p + ] + ) + ) + (showDiffNamespace HideNumbers ppe e e diff) + -- todo: these numbers aren't going to work, + -- since the content isn't necessarily here. + -- Should we have a mode with no numbers? :P + + ShowDiffAfterCreateAuthor authorNS authorPath' bAbs ppe diff -> + first + ( \p -> + P.lines + [ p, + "", + tip $ + "Add" <> prettyName "License" <> "values for" + <> prettyName (Name.fromSegment authorNS) + <> "under" + <> P.group (prettyPath' authorPath' <> ".") + ] + ) + (showDiffNamespace ShowNumbers ppe bAbs bAbs diff) + where + e = Path.absoluteEmpty + undoTip = + tip $ + "You can use" <> IP.makeExample' IP.undo + <> "or" + <> IP.makeExample' IP.viewReflog + <> "to undo this change." + +prettyRemoteNamespace :: + ( RemoteRepo.ReadRepo, + Maybe ShortBranchHash, + Path.Path + ) -> + P.Pretty P.ColorText +prettyRemoteNamespace = + P.group . P.text . uncurry3 RemoteRepo.printNamespace + +notifyUser :: forall v. Var v => FilePath -> Output v -> IO Pretty +notifyUser dir o = case o of + Success -> pure $ P.bold "Done." + PrintMessage pretty -> do + pure pretty + BadRootBranch e -> case e of + Codebase.NoRootBranch -> + pure . P.fatalCallout $ "I couldn't find the codebase root!" + Codebase.CouldntParseRootBranch s -> + pure + . P.warnCallout + $ "I coulnd't parse a valid namespace from " + <> P.string (show s) + <> "." + Codebase.CouldntLoadRootBranch h -> + pure + . P.warnCallout + $ "I couldn't find a root namespace with the hash " + <> prettySBH (SBH.fullFromHash h) + <> "." + CouldntLoadBranch h -> + pure . P.fatalCallout . P.wrap $ + "I have reason to believe that" + <> P.shown h + <> "exists in the codebase, but there was a failure" + <> "when I tried to load it." + NamespaceEmpty p -> + case p of + Right (p0, p1) -> + pure + . P.warnCallout + $ "The namespaces " + <> P.string (show p0) + <> " and " + <> P.string (show p1) + <> " are empty. Was there a typo?" + Left p0 -> + pure + . P.warnCallout + $ "The namespace " + <> P.string (show p0) + <> " is empty. Was there a typo?" + WarnIncomingRootBranch current hashes -> + pure $ + if null hashes + then + P.wrap $ + "Please let someone know I generated an empty IncomingRootBranch" + <> " event, which shouldn't be possible!" + else + P.lines + [ P.wrap $ + (if length hashes == 1 then "A" else "Some") + <> "codebase" + <> P.plural hashes "root" + <> "appeared unexpectedly" + <> "with" + <> P.group (P.plural hashes "hash" <> ":"), + "", + (P.indentN 2 . P.oxfordCommas) + (map prettySBH $ toList hashes), + "", + P.wrap $ + "and I'm not sure what to do about it." + <> "The last root namespace hash that I knew about was:", + "", + P.indentN 2 $ prettySBH current, + "", + P.wrap $ "Now might be a good time to make a backup of your codebase. 😬", + "", + P.wrap $ + "After that, you might try using the" <> makeExample' IP.forkLocal + <> "command to inspect the namespaces listed above, and decide which" + <> "one you want as your root." + <> "You can also use" + <> makeExample' IP.viewReflog + <> "to see the" + <> "last few root namespace hashes on record.", + "", + P.wrap $ + "Once you find one you like, you can use the" + <> makeExample' IP.resetRoot + <> "command to set it." + ] + LoadPullRequest baseNS headNS basePath headPath mergedPath squashedPath -> + pure $ + P.lines + [ P.wrap $ "I checked out" <> prettyRemoteNamespace baseNS <> "to" <> P.group (prettyPath' basePath <> "."), + P.wrap $ "I checked out" <> prettyRemoteNamespace headNS <> "to" <> P.group (prettyPath' headPath <> "."), + "", + P.wrap $ "The merged result is in" <> P.group (prettyPath' mergedPath <> "."), + P.wrap $ "The (squashed) merged result is in" <> P.group (prettyPath' squashedPath <> "."), + P.wrap $ + "Use" + <> IP.makeExample + IP.diffNamespace + [prettyPath' basePath, prettyPath' mergedPath] + <> "or" + <> IP.makeExample + IP.diffNamespace + [prettyPath' basePath, prettyPath' squashedPath] + <> "to see what's been updated.", + P.wrap $ + "Use" + <> IP.makeExample + IP.todo + [ prettyPath' (snoc mergedPath "patch"), + prettyPath' mergedPath + ] + <> "to see what work is remaining for the merge.", + P.wrap $ + "Use" + <> IP.makeExample + IP.push + [prettyRemoteNamespace baseNS, prettyPath' mergedPath] + <> "or" + <> IP.makeExample + IP.push + [prettyRemoteNamespace baseNS, prettyPath' squashedPath] + <> "to push the changes." + ] + DisplayDefinitions outputLoc ppe types terms -> + displayDefinitions outputLoc ppe types terms + DisplayRendered outputLoc pp -> + displayRendered outputLoc pp + TestResults stats ppe _showSuccess _showFailures oks fails -> case stats of + CachedTests 0 _ -> pure . P.callout "😶" $ "No tests to run." + CachedTests n n' + | n == n' -> + pure $ + P.lines [cache, "", displayTestResults True ppe oks fails] + CachedTests _n m -> + pure $ + if m == 0 + then "✅ " + else + P.indentN 2 $ + P.lines ["", cache, "", displayTestResults False ppe oks fails, "", "✅ "] + where + NewlyComputed -> do + clearCurrentLine + pure $ + P.lines + [ " " <> P.bold "New test results:", + "", + displayTestResults True ppe oks fails + ] + where + cache = P.bold "Cached test results " <> "(`help testcache` to learn more)" + TestIncrementalOutputStart ppe (n, total) r _src -> do + putPretty' $ + P.shown (total - n) <> " tests left to run, current test: " + <> (P.syntaxToColor $ prettyHashQualified (PPE.termName ppe $ Referent.Ref r)) + pure mempty + TestIncrementalOutputEnd _ppe (_n, _total) _r result -> do + clearCurrentLine + if isTestOk result + then putPretty' " ✅ " + else putPretty' " 🚫 " + pure mempty + TermMissingType ref -> + pure . P.fatalCallout . P.lines $ + [ P.wrap $ + "The type signature for reference " + <> P.blue (P.text (Reference.toText ref)) + <> " is missing from the codebase! This means something might be wrong " + <> " with the codebase, or the term was deleted just now " + <> " by someone else. Trying your command again might fix it." + ] + MetadataMissingType ppe ref -> + pure . P.fatalCallout . P.lines $ + [ P.wrap $ + "The metadata value " <> P.red (prettyTermName ppe ref) + <> "is missing a type signature in the codebase.", + "", + P.wrap $ + "This might be due to pulling an incomplete" + <> "or invalid codebase, or because files inside the codebase" + <> "are being deleted external to UCM." + ] + MetadataAmbiguous hq _ppe [] -> + pure . P.warnCallout + . P.wrap + $ "I couldn't find any metadata matching " + <> P.syntaxToColor (prettyHashQualified hq) + MetadataAmbiguous _ ppe refs -> + pure . P.warnCallout . P.lines $ + [ P.wrap $ + "I'm not sure which metadata value you're referring to" + <> "since there are multiple matches:", + "", + P.indentN 2 $ P.spaced (P.blue . prettyTermName ppe <$> refs), + "", + tip "Try again and supply one of the above definitions explicitly." + ] + EvaluationFailure err -> pure err + TypeTermMismatch typeName termName -> + pure $ + P.warnCallout "I was expecting either two types or two terms but was given a type " + <> P.syntaxToColor (prettyHashQualified typeName) + <> " and a term " + <> P.syntaxToColor (prettyHashQualified termName) + <> "." + SearchTermsNotFound hqs | null hqs -> pure mempty + SearchTermsNotFound hqs -> + pure $ + P.warnCallout "The following names were not found in the codebase. Check your spelling." + <> P.newline + <> (P.syntaxToColor $ P.indent " " (P.lines (prettyHashQualified <$> hqs))) + PatchNotFound _ -> + pure . P.warnCallout $ "I don't know about that patch." + NameNotFound _ -> + pure . P.warnCallout $ "I don't know about that name." + TermNotFound _ -> + pure . P.warnCallout $ "I don't know about that term." + TypeNotFound _ -> + pure . P.warnCallout $ "I don't know about that type." + TermAlreadyExists _ _ -> + pure . P.warnCallout $ "A term by that name already exists." + TypeAlreadyExists _ _ -> + pure . P.warnCallout $ "A type by that name already exists." + PatchAlreadyExists _ -> + pure . P.warnCallout $ "A patch by that name already exists." + BranchEmpty b -> + pure . P.warnCallout . P.wrap $ + P.group (either P.shown prettyPath' b) <> "is an empty namespace." + BranchNotEmpty path -> + pure . P.warnCallout $ + "I was expecting the namespace " <> prettyPath' path + <> " to be empty for this operation, but it isn't." + CantDelete ppe failed failedDependents -> + pure . P.warnCallout $ + P.lines + [ P.wrap "I couldn't delete ", + "", + P.indentN 2 $ listOfDefinitions' ppe False failed, + "", + "because it's still being used by these definitions:", + "", + P.indentN 2 $ listOfDefinitions' ppe False failedDependents + ] + CantUndo reason -> case reason of + CantUndoPastStart -> pure . P.warnCallout $ "Nothing more to undo." + CantUndoPastMerge -> pure . P.warnCallout $ "Sorry, I can't undo a merge (not implemented yet)." + NoMainFunction main ppe ts -> + pure . P.callout "😶" $ + P.lines + [ P.wrap $ + "I looked for a function" <> P.backticked (P.string main) + <> "in the most recently typechecked file and codebase but couldn't find one. It has to have the type:", + "", + P.indentN 2 $ P.lines [P.string main <> " : " <> TypePrinter.pretty ppe t | t <- ts] + ] + BadMainFunction main ty ppe ts -> + pure . P.callout "😶" $ + P.lines + [ P.string "I found this function:", + "", + P.indentN 2 $ P.string main <> " : " <> TypePrinter.pretty ppe ty, + "", + P.wrap $ P.string "but in order for me to" <> P.backticked (P.string "run") <> "it it needs to have the type:", + "", + P.indentN 2 $ P.lines [P.string main <> " : " <> TypePrinter.pretty ppe t | t <- ts] + ] + NoUnisonFile -> do + dir' <- canonicalizePath dir + fileName <- renderFileName dir' + pure . P.callout "😶" $ + P.lines + [ P.wrap "There's nothing for me to add right now.", + "", + P.column2 [(P.bold "Hint:", msg fileName)] + ] + where + msg dir = + P.wrap $ + "I'm currently watching for definitions in .u files under the" + <> dir + <> "directory. Make sure you've updated something there before using the" + <> makeExample' IP.add + <> "or" + <> makeExample' IP.update + <> "commands, or use" + <> makeExample' IP.load + <> "to load a file explicitly." + InvalidSourceName name -> + pure . P.callout "😶" $ + P.wrap $ + "The file " + <> P.blue (P.shown name) + <> " does not exist or is not a valid source file." + SourceLoadFailed name -> + pure . P.callout "😶" $ + P.wrap $ + "The file " + <> P.blue (P.shown name) + <> " could not be loaded." + BranchNotFound b -> + pure . P.warnCallout $ "The namespace " <> P.blue (P.shown b) <> " doesn't exist." + CreatedNewBranch path -> + pure $ + "☝️ The namespace " <> P.blue (P.shown path) <> " is empty." + -- RenameOutput rootPath oldName newName r -> do + -- nameChange "rename" "renamed" oldName newName r + -- AliasOutput rootPath existingName newName r -> do + -- nameChange "alias" "aliased" existingName newName r + DeletedEverything -> + pure . P.wrap . P.lines $ + [ "Okay, I deleted everything except the history.", + "Use " <> IP.makeExample' IP.undo <> " to undo, or " + <> IP.makeExample' IP.mergeBuiltins + <> " to restore the absolute " + <> "basics to the current path." + ] + DeleteEverythingConfirmation -> + pure . P.warnCallout . P.lines $ + [ "Are you sure you want to clear away everything?", + "You could use " <> IP.makeExample' IP.cd + <> " to switch to a new namespace instead." + ] + DeleteBranchConfirmation _uniqueDeletions -> error "todo" + -- let + -- pretty (branchName, (ppe, results)) = + -- header $ listOfDefinitions' ppe False results + -- where + -- header = plural uniqueDeletions id ((P.text branchName <> ":") `P.hang`) + -- + -- in putPrettyLn . P.warnCallout + -- $ P.wrap ("The" + -- <> plural uniqueDeletions "namespace contains" "namespaces contain" + -- <> "definitions that don't exist in any other branches:") + -- <> P.border 2 (mconcat (fmap pretty uniqueDeletions)) + -- <> P.newline + -- <> P.wrap "Please repeat the same command to confirm the deletion." + ListOfDefinitions ppe detailed results -> + listOfDefinitions ppe detailed results + ListOfLinks ppe results -> + listOfLinks ppe [(name, tm) | (name, _ref, tm) <- results] + ListNames _len [] [] -> + pure . P.callout "😶" $ + P.wrap "I couldn't find anything by that name." + ListNames len types terms -> + pure . P.sepNonEmpty "\n\n" $ + [ formatTypes types, + formatTerms terms + ] + where + formatTerms tms = + P.lines . P.nonEmpty $ P.plural tms (P.blue "Term") : (go <$> tms) + where + go (ref, hqs) = + P.column2 + [ ("Hash:", P.syntaxToColor (prettyReferent len ref)), + ("Names: ", P.group (P.spaced (P.bold . P.syntaxToColor . prettyHashQualified' <$> toList hqs))) + ] + formatTypes types = + P.lines . P.nonEmpty $ P.plural types (P.blue "Type") : (go <$> types) + where + go (ref, hqs) = + P.column2 + [ ("Hash:", P.syntaxToColor (prettyReference len ref)), + ("Names:", P.group (P.spaced (P.bold . P.syntaxToColor . prettyHashQualified' <$> toList hqs))) + ] + -- > names foo + -- Terms: + -- Hash: #asdflkjasdflkjasdf + -- Names: .util.frobnicate foo blarg.mcgee + -- + -- Term (with hash #asldfkjsdlfkjsdf): .util.frobnicate, foo, blarg.mcgee + -- Types (with hash #hsdflkjsdfsldkfj): Optional, Maybe, foo + ListShallow ppe entries -> + -- todo: make a version of prettyNumberedResult to support 3-columns + pure $ + if null entries + then P.lit "nothing to show" + else numberedEntries entries + where + numberedEntries :: [ShallowListEntry v a] -> P.Pretty P.ColorText + numberedEntries entries = + (P.column3 . fmap f) ([(1 :: Integer) ..] `zip` fmap formatEntry entries) + where + f (i, (p1, p2)) = (P.hiBlack . fromString $ show i <> ".", p1, p2) + formatEntry :: ShallowListEntry v a -> (P.Pretty P.ColorText, P.Pretty P.ColorText) + formatEntry = \case + ShallowTermEntry (TermEntry _r hq ot _) -> + ( P.syntaxToColor . prettyHashQualified' . fmap Name.fromSegment $ hq, + P.lit "(" <> maybe "type missing" (TypePrinter.pretty ppe) ot <> P.lit ")" + ) + ShallowTypeEntry (TypeEntry r hq _) -> + ( P.syntaxToColor . prettyHashQualified' . fmap Name.fromSegment $ hq, + isBuiltin r + ) + ShallowBranchEntry ns _ count -> + ( (P.syntaxToColor . prettyName . Name.fromSegment) ns <> "/", + case count of + 1 -> P.lit "(1 definition)" + _n -> P.lit "(" <> P.shown count <> P.lit " definitions)" + ) + ShallowPatchEntry ns -> + ( (P.syntaxToColor . prettyName . Name.fromSegment) ns, + P.lit "(patch)" + ) + isBuiltin = \case + Reference.Builtin {} -> P.lit "(builtin type)" + Reference.DerivedId {} -> P.lit "(type)" + SlurpOutput input ppe s -> + let isPast = case input of + Input.AddI {} -> True + Input.UpdateI {} -> True + _ -> False + in pure $ SlurpResult.pretty isPast ppe s + NoExactTypeMatches -> + pure . P.callout "☝️" $ P.wrap "I couldn't find exact type matches, resorting to fuzzy matching..." + TypeParseError src e -> + pure . P.fatalCallout $ + P.lines + [ P.wrap "I couldn't parse the type you supplied:", + "", + prettyParseError src e + ] + ParseResolutionFailures src es -> + pure $ + prettyResolutionFailures src es + TypeHasFreeVars typ -> + pure . P.warnCallout $ + P.lines + [ P.wrap "The type uses these names, but I'm not sure what they are:", + P.sep ", " (map (P.text . Var.name) . toList $ ABT.freeVars typ) + ] + ParseErrors src es -> + pure . P.sep "\n\n" $ prettyParseError (Text.unpack src) <$> es + TypeErrors src ppenv notes -> do + let showNote = + intercalateMap "\n\n" (printNoteWithSource ppenv (Text.unpack src)) + . map Result.TypeError + pure . showNote $ notes + CompilerBugs src env bugs -> pure $ intercalateMap "\n\n" bug bugs + where + bug = renderCompilerBug env (Text.unpack src) + Evaluated fileContents ppe bindings watches -> + if null watches + then pure "\n" + else -- todo: hashqualify binding names if necessary to distinguish them from + -- defs in the codebase. In some cases it's fine for bindings to + -- shadow codebase names, but you don't want it to capture them in + -- the decompiled output. + + let prettyBindings = + P.bracket . P.lines $ + P.wrap "The watch expression(s) reference these definitions:" : + "" : + [ (P.syntaxToColor $ TermPrinter.prettyBinding ppe (HQ.unsafeFromVar v) b) + | (v, b) <- bindings + ] + prettyWatches = + P.sep + "\n\n" + [ watchPrinter fileContents ppe ann kind evald isCacheHit + | (ann, kind, evald, isCacheHit) <- + sortOn (\(a, _, _, _) -> a) . toList $ watches + ] + in -- todo: use P.nonempty + pure $ + if null bindings + then prettyWatches + else prettyBindings <> "\n" <> prettyWatches + DisplayConflicts termNamespace typeNamespace -> + pure $ + P.sepNonEmpty + "\n\n" + [ showConflicts "terms" terms, + showConflicts "types" types + ] + where + terms = R.dom termNamespace + types = R.dom typeNamespace + showConflicts :: Foldable f => Pretty -> f Name -> Pretty + showConflicts thingsName things = + if (null things) + then mempty + else + P.lines + [ "These " <> thingsName <> " have conflicts: ", + "", + P.lines [(" " <> prettyName x) | x <- toList things] + ] + -- TODO: Present conflicting TermEdits and TypeEdits + -- if we ever allow users to edit hashes directly. + Typechecked sourceName ppe slurpResult uf -> do + let fileStatusMsg = SlurpResult.pretty False ppe slurpResult + let containsWatchExpressions = notNull $ UF.watchComponents uf + if UF.nonEmpty uf + then do + fileName <- renderFileName $ Text.unpack sourceName + pure $ + P.linesNonEmpty + ( [ if fileStatusMsg == mempty + then P.okCallout $ fileName <> " changed." + else + if SlurpResult.isAllDuplicates slurpResult + then + P.wrap $ + "I found and" + <> P.bold "typechecked" + <> "the definitions in " + <> P.group (fileName <> ".") + <> "This file " + <> P.bold "has been previously added" + <> "to the codebase." + else + P.linesSpaced $ + [ P.wrap $ + "I found and" + <> P.bold "typechecked" + <> "these definitions in " + <> P.group (fileName <> ".") + <> "If you do an " + <> IP.makeExample' IP.add + <> " or " + <> P.group (IP.makeExample' IP.update <> ",") + <> "here's how your codebase would" + <> "change:", + P.indentN 2 $ SlurpResult.pretty False ppe slurpResult + ] + ] + ++ if containsWatchExpressions + then + [ "", + P.wrap $ + "Now evaluating any watch expressions" + <> "(lines starting with `>`)... " + <> P.group (P.hiBlack "Ctrl+C cancels.") + ] + else [] + ) + else + if (null $ UF.watchComponents uf) + then + pure . P.wrap $ + "I loaded " <> P.text sourceName <> " and didn't find anything." + else pure mempty + TodoOutput names todo -> pure (todoOutput names todo) + GitError e -> pure $ case e of + GitSqliteCodebaseError e -> case e of + UnrecognizedSchemaVersion repo localPath (SchemaVersion v) -> + P.wrap $ + "I don't know how to interpret schema version " <> P.shown v + <> "in the repository at" + <> prettyReadRepo repo + <> "in the cache directory at" + <> P.backticked' (P.string localPath) "." + GitCouldntParseRootBranchHash repo s -> + P.wrap $ + "I couldn't parse the string" + <> P.red (P.string s) + <> "into a namespace hash, when opening the repository at" + <> P.group (prettyReadRepo repo <> ".") + GitProtocolError e -> case e of + NoGit -> + P.wrap $ + "I couldn't find git. Make sure it's installed and on your path." + CleanupError e -> + P.wrap $ + "I encountered an exception while trying to clean up a git cache directory:" + <> P.group (P.shown e) + CloneException repo msg -> + P.wrap $ + "I couldn't clone the repository at" <> prettyReadRepo repo <> ";" + <> "the error was:" + <> (P.indentNAfterNewline 2 . P.group . P.string) msg + PushNoOp repo -> + P.wrap $ + "The repository at" <> prettyWriteRepo repo <> "is already up-to-date." + PushException repo msg -> + P.wrap $ + "I couldn't push to the repository at" <> prettyWriteRepo repo <> ";" + <> "the error was:" + <> (P.indentNAfterNewline 2 . P.group . P.string) msg + UnrecognizableCacheDir uri localPath -> + P.wrap $ + "A cache directory for" + <> P.backticked (P.text $ RemoteRepo.printReadRepo uri) + <> "already exists at" + <> P.backticked' (P.string localPath) "," + <> "but it doesn't seem to" + <> "be a git repository, so I'm not sure what to do next. Delete it?" + UnrecognizableCheckoutDir uri localPath -> + P.wrap $ + "I tried to clone" + <> P.backticked (P.text $ RemoteRepo.printReadRepo uri) + <> "into a cache directory at" + <> P.backticked' (P.string localPath) "," + <> "but I can't recognize the" + <> "result as a git repository, so I'm not sure what to do next." + PushDestinationHasNewStuff repo -> + P.callout "⏸" . P.lines $ + [ P.wrap $ + "The repository at" <> prettyWriteRepo repo + <> "has some changes I don't know about.", + "", + P.wrap $ "Try" <> pull <> "to merge these changes locally, then" <> push <> "again." + ] + where + push = P.group . P.backticked . IP.patternName $ IP.push + pull = P.group . P.backticked . IP.patternName $ IP.pull + GitCodebaseError e -> case e of + CouldntLoadRootBranch repo hash -> + P.wrap $ + "I couldn't load the designated root hash" + <> P.group ("(" <> P.text (Hash.base32Hex $ Causal.unRawHash hash) <> ")") + <> "from the repository at" + <> prettyReadRepo repo + CouldntLoadSyncedBranch ns h -> + P.wrap $ + "I just finished importing the branch" <> P.red (P.shown h) + <> "from" + <> P.red (prettyRemoteNamespace ns) + <> "but now I can't find it." + CouldntFindRemoteBranch repo path -> + P.wrap $ + "I couldn't find the remote branch at" + <> P.shown path + <> "in the repository at" + <> prettyReadRepo repo + NoRemoteNamespaceWithHash repo sbh -> + P.wrap $ + "The repository at" <> prettyReadRepo repo + <> "doesn't contain a namespace with the hash prefix" + <> (P.blue . P.text . SBH.toText) sbh + RemoteNamespaceHashAmbiguous repo sbh hashes -> + P.lines + [ P.wrap $ + "The namespace hash" <> prettySBH sbh + <> "at" + <> prettyReadRepo repo + <> "is ambiguous." + <> "Did you mean one of these hashes?", + "", + P.indentN 2 $ + P.lines + ( prettySBH . SBH.fromHash ((Text.length . SBH.toText) sbh * 2) + <$> Set.toList hashes + ), + "", + P.wrap "Try again with a few more hash characters to disambiguate." + ] + ListEdits patch ppe -> do + let types = Patch._typeEdits patch + terms = Patch._termEdits patch + + prettyTermEdit (r, TermEdit.Deprecate) = + ( P.syntaxToColor . prettyHashQualified . PPE.termName ppe . Referent.Ref $ r, + "-> (deprecated)" + ) + prettyTermEdit (r, TermEdit.Replace r' _typing) = + ( P.syntaxToColor . prettyHashQualified . PPE.termName ppe . Referent.Ref $ r, + "-> " <> (P.syntaxToColor . prettyHashQualified . PPE.termName ppe . Referent.Ref $ r') + ) + prettyTypeEdit (r, TypeEdit.Deprecate) = + ( P.syntaxToColor . prettyHashQualified $ PPE.typeName ppe r, + "-> (deprecated)" + ) + prettyTypeEdit (r, TypeEdit.Replace r') = + ( P.syntaxToColor . prettyHashQualified $ PPE.typeName ppe r, + "-> " <> (P.syntaxToColor . prettyHashQualified . PPE.typeName ppe $ r') + ) + pure $ + P.sepNonEmpty + "\n\n" + [ if R.null types + then mempty + else + "Edited Types:" + `P.hang` P.column2 (prettyTypeEdit <$> R.toList types), + if R.null terms + then mempty + else + "Edited Terms:" + `P.hang` P.column2 (prettyTermEdit <$> R.toList terms), + if R.null types && R.null terms + then "This patch is empty." + else + tip . P.string $ + "To remove entries from a patch, use " + <> IP.deleteTermReplacementCommand + <> " or " + <> IP.deleteTypeReplacementCommand + <> ", as appropriate." + ] + BustedBuiltins (Set.toList -> new) (Set.toList -> old) -> + -- todo: this could be prettier! Have a nice list like `find` gives, but + -- that requires querying the codebase to determine term types. Probably + -- the only built-in types will be primitive types like `Int`, so no need + -- to look up decl types. + -- When we add builtin terms, they may depend on new derived types, so + -- these derived types should be added to the branch too; but not + -- necessarily ever be automatically deprecated. (A library curator might + -- deprecate them; more work needs to go into the idea of sharing deprecations and stuff. + pure . P.warnCallout . P.lines $ + case (new, old) of + ([], []) -> error "BustedBuiltins busted, as there were no busted builtins." + ([], old) -> + P.wrap ("This codebase includes some builtins that are considered deprecated. Use the " <> makeExample' IP.updateBuiltins <> " command when you're ready to work on eliminating them from your codebase:") : + "" : + fmap (P.text . Reference.toText) old + (new, []) -> + P.wrap ("This version of Unison provides builtins that are not part of your codebase. Use " <> makeExample' IP.updateBuiltins <> " to add them:") : + "" : fmap (P.text . Reference.toText) new + (new@(_ : _), old@(_ : _)) -> + [ P.wrap + ( "Sorry and/or good news! This version of Unison supports a different set of builtins than this codebase uses. You can use " + <> makeExample' IP.updateBuiltins + <> " to add the ones you're missing and deprecate the ones I'm missing. 😉" + ), + "You're missing:" `P.hang` P.lines (fmap (P.text . Reference.toText) new), + "I'm missing:" `P.hang` P.lines (fmap (P.text . Reference.toText) old) + ] + ListOfPatches patches -> + pure $ + if null patches + then P.lit "nothing to show" + else numberedPatches patches + where + numberedPatches :: Set Name -> P.Pretty P.ColorText + numberedPatches patches = + (P.column2 . fmap format) ([(1 :: Integer) ..] `zip` (toList patches)) + where + format (i, p) = (P.hiBlack . fromString $ show i <> ".", prettyName p) + ConfiguredMetadataParseError p md err -> + pure . P.fatalCallout . P.lines $ + [ P.wrap $ + "I couldn't understand the default metadata that's set for " + <> prettyPath' p + <> " in .unisonConfig.", + P.wrap $ + "The value I found was" + <> (P.backticked . P.blue . P.string) md + <> "but I encountered the following error when trying to parse it:", + "", + err + ] + NoConfiguredGitUrl pp p -> + pure . P.fatalCallout . P.wrap $ + "I don't know where to " + <> pushPull "push to!" "pull from!" pp + <> ( if Path.isRoot' p + then "" + else + "Add a line like `GitUrl." <> P.shown p + <> " = ' to .unisonConfig. " + ) + <> "Type `help " + <> pushPull "push" "pull" pp + <> "` for more information." + -- | ConfiguredGitUrlParseError PushPull Path' Text String + ConfiguredGitUrlParseError pp p url err -> + pure . P.fatalCallout . P.lines $ + [ P.wrap $ + "I couldn't understand the GitUrl that's set for" + <> prettyPath' p + <> "in .unisonConfig", + P.wrap $ + "The value I found was" <> (P.backticked . P.blue . P.text) url + <> "but I encountered the following error when trying to parse it:", + "", + P.string err, + "", + P.wrap $ + "Type" <> P.backticked ("help " <> pushPull "push" "pull" pp) + <> "for more information." + ] + NoBranchWithHash _h -> + pure . P.callout "😶" $ + P.wrap $ "I don't know of a namespace with that hash." + NotImplemented -> pure $ P.wrap "That's not implemented yet. Sorry! 😬" + BranchAlreadyExists p -> + pure . P.wrap $ + "The namespace" <> prettyPath' p <> "already exists." + LabeledReferenceNotFound hq -> + pure . P.callout "\129300" . P.wrap . P.syntaxToColor $ + "Sorry, I couldn't find anything named" <> prettyHashQualified hq <> "." + LabeledReferenceAmbiguous hashLen hq (LD.partition -> (tps, tms)) -> + pure . P.callout "\129300" . P.lines $ + [ P.wrap "That name is ambiguous. It could refer to any of the following definitions:", + "", + P.indentN 2 (P.lines (map qualifyTerm tms ++ map qualifyType tps)) + ] + where + qualifyTerm :: Referent -> P.Pretty P.ColorText + qualifyTerm = + P.syntaxToColor . case hq of + HQ.NameOnly n -> prettyNamedReferent hashLen n + HQ.HashQualified n _ -> prettyNamedReferent hashLen n + HQ.HashOnly _ -> prettyReferent hashLen + qualifyType :: Reference -> P.Pretty P.ColorText + qualifyType = + P.syntaxToColor . case hq of + HQ.NameOnly n -> prettyNamedReference hashLen n + HQ.HashQualified n _ -> prettyNamedReference hashLen n + HQ.HashOnly _ -> prettyReference hashLen + DeleteNameAmbiguous hashLen p tms tys -> + pure . P.callout "\129300" . P.lines $ + [ P.wrap "That name is ambiguous. It could refer to any of the following definitions:", + "", + P.indentN 2 (P.lines (map qualifyTerm (Set.toList tms) ++ map qualifyType (Set.toList tys))), + "", + P.wrap "You may:", + "", + P.indentN 2 . P.bulleted $ + [ P.wrap "Delete one by an unambiguous name, given above.", + P.wrap "Delete them all by re-issuing the previous command." + ] + ] + where + name :: Name + name = Path.toName' (HQ'.toName (Path.unsplitHQ' p)) + qualifyTerm :: Referent -> P.Pretty P.ColorText + qualifyTerm = P.syntaxToColor . prettyNamedReferent hashLen name + qualifyType :: Reference -> P.Pretty P.ColorText + qualifyType = P.syntaxToColor . prettyNamedReference hashLen name + TermAmbiguous _ _ -> pure "That term is ambiguous." + HashAmbiguous h rs -> + pure . P.callout "\129300" . P.lines $ + [ P.wrap $ + "The hash" <> prettyShortHash h <> "is ambiguous." + <> "Did you mean one of these hashes?", + "", + P.indentN 2 $ P.lines (P.shown <$> Set.toList rs), + "", + P.wrap "Try again with a few more hash characters to disambiguate." + ] + BranchHashAmbiguous h rs -> + pure . P.callout "\129300" . P.lines $ + [ P.wrap $ + "The namespace hash" <> prettySBH h <> "is ambiguous." + <> "Did you mean one of these hashes?", + "", + P.indentN 2 $ P.lines (prettySBH <$> Set.toList rs), + "", + P.wrap "Try again with a few more hash characters to disambiguate." + ] + BadName n -> + pure . P.wrap $ P.string n <> " is not a kind of name I understand." + TermNotFound' sh -> + pure $ + "I could't find a term with hash " + <> (prettyShortHash sh) + TypeNotFound' sh -> + pure $ + "I could't find a type with hash " + <> (prettyShortHash sh) + NothingToPatch _patchPath dest -> + pure $ + P.callout "😶" . P.wrap $ + "This had no effect. Perhaps the patch has already been applied" + <> "or it doesn't intersect with the definitions in" + <> P.group (prettyPath' dest <> ".") + PatchNeedsToBeConflictFree -> + pure . P.wrap $ + "I tried to auto-apply the patch, but couldn't because it contained" + <> "contradictory entries." + PatchInvolvesExternalDependents _ _ -> + pure "That patch involves external dependents." + ShowReflog [] -> pure . P.warnCallout $ "The reflog appears to be empty!" + ShowReflog entries -> + pure $ + P.lines + [ P.wrap $ + "Here is a log of the root namespace hashes," + <> "starting with the most recent," + <> "along with the command that got us there." + <> "Try:", + "", + -- `head . tail` is safe: entries never has 1 entry, and [] is handled above + let e2 = head . tail $ entries + in P.indentN 2 . P.wrapColumn2 $ + [ ( IP.makeExample IP.forkLocal ["2", ".old"], + "" + ), + ( IP.makeExample IP.forkLocal [prettySBH . Output.hash $ e2, ".old"], + "to make an old namespace accessible again," + ), + (mempty, mempty), + ( IP.makeExample IP.resetRoot [prettySBH . Output.hash $ e2], + "to reset the root namespace and its history to that of the specified" + <> "namespace." + ) + ], + "", + P.numberedList . fmap renderEntry $ entries + ] + where + renderEntry :: Output.ReflogEntry -> P.Pretty CT.ColorText + renderEntry (Output.ReflogEntry hash reason) = + P.wrap $ + P.blue (prettySBH hash) <> " : " <> P.text reason + History _cap history tail -> + pure $ + P.lines + [ note $ "The most recent namespace hash is immediately below this message.", + "", + P.sep "\n\n" [go h diff | (h, diff) <- reverse history], + "", + tailMsg + ] + where + tailMsg = case tail of + E.EndOfLog h -> + P.lines + [ "□ " <> prettySBH h <> " (start of history)" + ] + E.MergeTail h hs -> + P.lines + [ P.wrap $ "This segment of history starts with a merge." <> ex, + "", + "⊙ " <> prettySBH h, + "⑃", + P.lines (prettySBH <$> hs) + ] + E.PageEnd h _n -> + P.lines + [ P.wrap $ "There's more history before the versions shown here." <> ex, + "", + dots, + "", + "⊙ " <> prettySBH h, + "" + ] + dots = "⠇" + go hash diff = + P.lines + [ "⊙ " <> prettySBH hash, + "", + P.indentN 2 $ prettyDiff diff + ] + ex = + "Use" <> IP.makeExample IP.history ["#som3n4m3space"] + <> "to view history starting from a given namespace hash." + StartOfCurrentPathHistory -> + pure $ + P.wrap "You're already at the very beginning! 🙂" + PullAlreadyUpToDate ns dest -> + pure . P.callout "😶" $ + P.wrap $ + prettyPath' dest <> "was already up-to-date with" + <> P.group (prettyRemoteNamespace ns <> ".") + MergeAlreadyUpToDate src dest -> + pure . P.callout "😶" $ + P.wrap $ + prettyPath' dest <> "was already up-to-date with" + <> P.group (prettyPath' src <> ".") + PreviewMergeAlreadyUpToDate src dest -> + pure . P.callout "😶" $ + P.wrap $ + prettyPath' dest <> "is already up-to-date with" + <> P.group (prettyPath' src <> ".") + DumpNumberedArgs args -> pure . P.numberedList $ fmap P.string args + NoConflictsOrEdits -> + pure (P.okCallout "No conflicts or edits in progress.") + NoOp -> pure $ P.string "I didn't make any changes." + DefaultMetadataNotification -> pure $ P.wrap "I added some default metadata." + DumpBitBooster head map -> + let go output [] = output + go output (head : queue) = case Map.lookup head map of + Nothing -> go (renderLine head [] : output) queue + Just tails -> go (renderLine head tails : output) (queue ++ tails) + where + renderHash = take 10 . Text.unpack . Hash.base32Hex . Causal.unRawHash + renderLine head tail = + (renderHash head) ++ "|" ++ intercalateMap " " renderHash tail + ++ case Map.lookup (Hash.base32Hex . Causal.unRawHash $ head) tags of + Just t -> "|tag: " ++ t + Nothing -> "" + -- some specific hashes that we want to label in the output + tags :: Map Text String + tags = + Map.fromList . fmap swap $ + [ ("unisonbase 2019/8/6", "54s9qjhaonotuo4sp6ujanq7brngk32f30qt5uj61jb461h9fcca6vv5levnoo498bavne4p65lut6k6a7rekaruruh9fsl19agu8j8"), + ("unisonbase 2019/8/5", "focmbmg7ca7ht7opvjaqen58fobu3lijfa9adqp7a1l1rlkactd7okoimpfmd0ftfmlch8gucleh54t3rd1e7f13fgei86hnsr6dt1g"), + ("unisonbase 2019/7/31", "jm2ltsg8hh2b3c3re7aru6e71oepkqlc3skr2v7bqm4h1qgl3srucnmjcl1nb8c9ltdv56dpsgpdur1jhpfs6n5h43kig5bs4vs50co"), + ("unisonbase 2019/7/25", "an1kuqsa9ca8tqll92m20tvrmdfk0eksplgjbda13evdlngbcn5q72h8u6nb86ojr7cvnemjp70h8cq1n95osgid1koraq3uk377g7g"), + ("ucm m1b", "o6qocrqcqht2djicb1gcmm5ct4nr45f8g10m86bidjt8meqablp0070qae2tvutnvk4m9l7o1bkakg49c74gduo9eati20ojf0bendo"), + ("ucm m1, m1a", "auheev8io1fns2pdcnpf85edsddj27crpo9ajdujum78dsncvfdcdu5o7qt186bob417dgmbd26m8idod86080bfivng1edminu3hug") + ] + in pure $ + P.lines + [ P.lines (fmap fromString . reverse . nubOrd $ go [] [head]), + "", + "Paste that output into http://bit-booster.com/graph.html" + ] + ListDependents hqLength ld names missing -> + pure $ + if names == mempty && missing == mempty + then c (prettyLabeledDependency hqLength ld) <> " doesn't have any dependents." + else + "Dependents of " <> c (prettyLabeledDependency hqLength ld) <> ":\n\n" + <> (P.indentN 2 (P.numberedColumn2Header num pairs)) + where + num n = P.hiBlack $ P.shown n <> "." + header = (P.hiBlack "Reference", P.hiBlack "Name") + pairs = + header : + ( fmap (first c . second c) $ + [(p $ Reference.toShortHash r, prettyName n) | (n, r) <- names] + ++ [(p $ Reference.toShortHash r, "(no name available)") | r <- toList missing] + ) + p = prettyShortHash . SH.take hqLength + c = P.syntaxToColor + -- this definition is identical to the previous one, apart from the word + -- "Dependencies", but undecided about whether or how to refactor + ListDependencies hqLength ld names missing -> + pure $ + if names == mempty && missing == mempty + then c (prettyLabeledDependency hqLength ld) <> " doesn't have any dependencies." + else + "Dependencies of " <> c (prettyLabeledDependency hqLength ld) <> ":\n\n" + <> (P.indentN 2 (P.numberedColumn2Header num pairs)) + where + num n = P.hiBlack $ P.shown n <> "." + header = (P.hiBlack "Reference", P.hiBlack "Name") + pairs = + header : + ( fmap (first c . second c) $ + [(p $ Reference.toShortHash r, prettyName n) | (n, r) <- names] + ++ [(p $ Reference.toShortHash r, "(no name available)") | r <- toList missing] + ) + p = prettyShortHash . SH.take hqLength + c = P.syntaxToColor + DumpUnisonFileHashes hqLength datas effects terms -> + pure . P.syntaxToColor . P.lines $ + ( effects <&> \(n, r) -> + "ability " + <> prettyHashQualified' (HQ'.take hqLength . HQ'.fromNamedReference n $ Reference.DerivedId r) + ) + <> ( datas <&> \(n, r) -> + "type " + <> prettyHashQualified' (HQ'.take hqLength . HQ'.fromNamedReference n $ Reference.DerivedId r) + ) + <> ( terms <&> \(n, r) -> + prettyHashQualified' (HQ'.take hqLength . HQ'.fromNamedReference n $ Reference.DerivedId r) + ) + RefusedToPush pushBehavior -> + (pure . P.warnCallout . P.lines) case pushBehavior of + PushBehavior.RequireEmpty -> + [ "The remote namespace is not empty.", + "", + "Did you mean to use " <> IP.makeExample' IP.push <> " instead?" + ] + PushBehavior.RequireNonEmpty -> + [ "The remote namespace is empty.", + "", + "Did you mean to use " <> IP.makeExample' IP.pushCreate <> " instead?" + ] + where + _nameChange _cmd _pastTenseCmd _oldName _newName _r = error "todo" + +-- do +-- when (not . Set.null $ E.changedSuccessfully r) . putPrettyLn . P.okCallout $ +-- P.wrap $ "I" <> pastTenseCmd <> "the" +-- <> ns (E.changedSuccessfully r) +-- <> P.blue (prettyName oldName) +-- <> "to" <> P.group (P.green (prettyName newName) <> ".") +-- when (not . Set.null $ E.oldNameConflicted r) . putPrettyLn . P.warnCallout $ +-- (P.wrap $ "I couldn't" <> cmd <> "the" +-- <> ns (E.oldNameConflicted r) +-- <> P.blue (prettyName oldName) +-- <> "to" <> P.green (prettyName newName) +-- <> "because of conflicts.") +-- <> "\n\n" +-- <> tip ("Use " <> makeExample' IP.todo <> " to view more information on conflicts and remaining work.") +-- when (not . Set.null $ E.newNameAlreadyExists r) . putPrettyLn . P.warnCallout $ +-- (P.wrap $ "I couldn't" <> cmd <> P.blue (prettyName oldName) +-- <> "to" <> P.green (prettyName newName) +-- <> "because the " +-- <> ns (E.newNameAlreadyExists r) +-- <> "already exist(s).") +-- <> "\n\n" +-- <> tip +-- ("Use" <> makeExample IP.rename [prettyName newName, ""] <> "to make" <> prettyName newName <> "available.") +-- where +-- ns targets = P.oxfordCommas $ +-- map (fromString . Names.renderNameTarget) (toList targets) + +prettyPath' :: Path.Path' -> Pretty +prettyPath' p' = + if Path.isCurrentPath p' + then "the current namespace" + else P.blue (P.shown p') + +prettyRelative :: Path.Relative -> Pretty +prettyRelative = P.blue . P.shown + +prettySBH :: IsString s => ShortBranchHash -> P.Pretty s +prettySBH hash = P.group $ "#" <> P.text (SBH.toText hash) + +formatMissingStuff :: + (Show tm, Show typ) => + [(HQ.HashQualified Name, tm)] -> + [(HQ.HashQualified Name, typ)] -> + Pretty +formatMissingStuff terms types = + ( unlessM (null terms) . P.fatalCallout $ + P.wrap "The following terms have a missing or corrupted type signature:" + <> "\n\n" + <> P.column2 [(P.syntaxToColor $ prettyHashQualified name, fromString (show ref)) | (name, ref) <- terms] + ) + <> ( unlessM (null types) . P.fatalCallout $ + P.wrap "The following types weren't found in the codebase:" + <> "\n\n" + <> P.column2 [(P.syntaxToColor $ prettyHashQualified name, fromString (show ref)) | (name, ref) <- types] + ) + +displayDefinitions' :: + Var v => + Ord a1 => + PPE.PrettyPrintEnvDecl -> + Map Reference.Reference (DisplayObject () (DD.Decl v a1)) -> + Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) -> + Pretty +displayDefinitions' ppe0 types terms = P.syntaxToColor $ P.sep "\n\n" (prettyTypes <> prettyTerms) + where + ppeBody r = PPE.declarationPPE ppe0 r + ppeDecl = PPE.unsuffixifiedPPE ppe0 + prettyTerms = + map go . Map.toList + -- sort by name + $ + Map.mapKeys (first (PPE.termName ppeDecl . Referent.Ref) . dupe) terms + prettyTypes = + map go2 . Map.toList $ + Map.mapKeys (first (PPE.typeName ppeDecl) . dupe) types + go ((n, r), dt) = + case dt of + MissingObject r -> missing n r + BuiltinObject typ -> + P.hang + ("builtin " <> prettyHashQualified n <> " :") + (TypePrinter.prettySyntax (ppeBody r) typ) + UserObject tm -> TermPrinter.prettyBinding (ppeBody r) n tm + go2 ((n, r), dt) = + case dt of + MissingObject r -> missing n r + BuiltinObject _ -> builtin n + UserObject decl -> DeclPrinter.prettyDecl (PPE.declarationPPEDecl ppe0 r) r n decl + builtin n = P.wrap $ "--" <> prettyHashQualified n <> " is built-in." + missing n r = + P.wrap + ( "-- The name " <> prettyHashQualified n <> " is assigned to the " + <> "reference " + <> fromString (show r ++ ",") + <> "which is missing from the codebase." + ) + <> P.newline + <> tip "You might need to repair the codebase manually." + +displayRendered :: Maybe FilePath -> Pretty -> IO Pretty +displayRendered outputLoc pp = + maybe (pure pp) scratchAndDisplay outputLoc + where + scratchAndDisplay path = do + path' <- canonicalizePath path + prependToFile pp path' + pure (message pp path') + where + prependToFile pp path = do + existingContents <- do + exists <- doesFileExist path + if exists + then readFile path + else pure "" + writeFile path . Text.pack . P.toPlain 80 $ + P.lines [pp, "", P.text existingContents] + message pp path = + P.callout "☝️" $ + P.lines + [ P.wrap $ "I added this to the top of " <> fromString path, + "", + P.indentN 2 pp + ] + +displayDefinitions :: + Var v => + Ord a1 => + Maybe FilePath -> + PPE.PrettyPrintEnvDecl -> + Map Reference.Reference (DisplayObject () (DD.Decl v a1)) -> + Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) -> + IO Pretty +displayDefinitions _outputLoc _ppe types terms + | Map.null types && Map.null terms = + pure $ P.callout "😶" "No results to display." +displayDefinitions outputLoc ppe types terms = + maybe displayOnly scratchAndDisplay outputLoc + where + displayOnly = pure code + scratchAndDisplay path = do + path' <- canonicalizePath path + prependToFile code path' + pure (message code path') + where + prependToFile code path = do + existingContents <- do + exists <- doesFileExist path + if exists + then readFile path + else pure "" + writeFile path . Text.pack . P.toPlain 80 $ + P.lines + [ code, + "", + "---- " <> "Anything below this line is ignored by Unison.", + "", + P.text existingContents + ] + message code path = + P.callout "☝️" $ + P.lines + [ P.wrap $ "I added these definitions to the top of " <> fromString path, + "", + P.indentN 2 code, + "", + P.wrap $ + "You can edit them there, then do" <> makeExample' IP.update + <> "to replace the definitions currently in this namespace." + ] + code = + P.syntaxToColor $ P.sep "\n\n" (prettyTypes <> prettyTerms) + where + ppeBody r = PPE.declarationPPE ppe r + ppeDecl = PPE.unsuffixifiedPPE ppe + prettyTerms = + map go . Map.toList $ + -- sort by name + Map.mapKeys (first (PPE.termName ppeDecl . Referent.Ref) . dupe) terms + prettyTypes = + map go2 . Map.toList $ + Map.mapKeys (first (PPE.typeName ppeDecl) . dupe) types + go ((n, r), dt) = + case dt of + MissingObject r -> missing n r + BuiltinObject typ -> + P.hang + ("builtin " <> prettyHashQualified n <> " :") + (TypePrinter.prettySyntax (ppeBody r) typ) + UserObject tm -> TermPrinter.prettyBinding (ppeBody r) n tm + go2 ((n, r), dt) = + case dt of + MissingObject r -> missing n r + BuiltinObject _ -> builtin n + UserObject decl -> DeclPrinter.prettyDecl (PPE.declarationPPEDecl ppe r) r n decl + builtin n = P.wrap $ "--" <> prettyHashQualified n <> " is built-in." + missing n r = + P.wrap + ( "-- The name " <> prettyHashQualified n <> " is assigned to the " + <> "reference " + <> fromString (show r ++ ",") + <> "which is missing from the codebase." + ) + <> P.newline + <> tip "You might need to repair the codebase manually." + +displayTestResults :: + Bool -> -- whether to show the tip + PPE.PrettyPrintEnv -> + [(Reference, Text)] -> + [(Reference, Text)] -> + Pretty +displayTestResults showTip ppe oksUnsorted failsUnsorted = + let oks = Name.sortByText fst [(name r, msg) | (r, msg) <- oksUnsorted] + fails = Name.sortByText fst [(name r, msg) | (r, msg) <- failsUnsorted] + name r = HQ.toText $ PPE.termName ppe (Referent.Ref r) + okMsg = + if null oks + then mempty + else P.column2 [(P.green "◉ " <> P.text r, " " <> P.green (P.text msg)) | (r, msg) <- oks] + okSummary = + if null oks + then mempty + else "✅ " <> P.bold (P.num (length oks)) <> P.green " test(s) passing" + failMsg = + if null fails + then mempty + else P.column2 [(P.red "✗ " <> P.text r, " " <> P.red (P.text msg)) | (r, msg) <- fails] + failSummary = + if null fails + then mempty + else "🚫 " <> P.bold (P.num (length fails)) <> P.red " test(s) failing" + tipMsg = + if not showTip || (null oks && null fails) + then mempty + else + tip $ + "Use " <> P.blue ("view " <> P.text (fst $ head (fails ++ oks))) + <> "to view the source of a test." + in if null oks && null fails + then "😶 No tests available." + else + P.sep "\n\n" . P.nonEmpty $ + [ okMsg, + failMsg, + P.sep ", " . P.nonEmpty $ [failSummary, okSummary], + tipMsg + ] + +unsafePrettyTermResultSig' :: + Var v => + PPE.PrettyPrintEnv -> + SR'.TermResult' v a -> + Pretty +unsafePrettyTermResultSig' ppe = \case + SR'.TermResult' name (Just typ) r _aliases -> + head (TypePrinter.prettySignatures' ppe [(r, name, typ)]) + _ -> error "Don't pass Nothing" + +-- produces: +-- -- #5v5UtREE1fTiyTsTK2zJ1YNqfiF25SkfUnnji86Lms#0 +-- Optional.None, Maybe.Nothing : Maybe a +unsafePrettyTermResultSigFull' :: + Var v => + PPE.PrettyPrintEnv -> + SR'.TermResult' v a -> + Pretty +unsafePrettyTermResultSigFull' ppe = \case + SR'.TermResult' hq (Just typ) r aliases -> + P.lines + [ P.hiBlack "-- " <> greyHash (HQ.fromReferent r), + P.group $ + P.commas (fmap greyHash $ hq : map HQ'.toHQ (toList aliases)) <> " : " + <> (P.syntaxToColor $ TypePrinter.pretty0 ppe mempty (-1) typ), + mempty + ] + _ -> error "Don't pass Nothing" + where + greyHash = styleHashQualified' id P.hiBlack + +prettyTypeResultHeader' :: Var v => SR'.TypeResult' v a -> Pretty +prettyTypeResultHeader' (SR'.TypeResult' name dt r _aliases) = + prettyDeclTriple (name, r, dt) + +-- produces: +-- -- #5v5UtREE1fTiyTsTK2zJ1YNqfiF25SkfUnnji86Lms +-- type Optional +-- type Maybe +prettyTypeResultHeaderFull' :: Var v => SR'.TypeResult' v a -> Pretty +prettyTypeResultHeaderFull' (SR'.TypeResult' name dt r aliases) = + P.lines stuff <> P.newline + where + stuff = + (P.hiBlack "-- " <> greyHash (HQ.fromReference r)) : + fmap + (\name -> prettyDeclTriple (name, r, dt)) + (name : map HQ'.toHQ (toList aliases)) + where + greyHash = styleHashQualified' id P.hiBlack + +prettyDeclTriple :: + Var v => + (HQ.HashQualified Name, Reference.Reference, DisplayObject () (DD.Decl v a)) -> + Pretty +prettyDeclTriple (name, _, displayDecl) = case displayDecl of + BuiltinObject _ -> P.hiBlack "builtin " <> P.hiBlue "type " <> P.blue (P.syntaxToColor $ prettyHashQualified name) + MissingObject _ -> mempty -- these need to be handled elsewhere + UserObject decl -> P.syntaxToColor $ DeclPrinter.prettyDeclHeader name decl + +prettyDeclPair :: + Var v => + PPE.PrettyPrintEnv -> + (Reference, DisplayObject () (DD.Decl v a)) -> + Pretty +prettyDeclPair ppe (r, dt) = prettyDeclTriple (PPE.typeName ppe r, r, dt) + +renderNameConflicts :: Set.Set Name -> Set.Set Name -> Pretty +renderNameConflicts conflictedTypeNames conflictedTermNames = + unlessM (null allNames) $ + P.callout "❓" . P.sep "\n\n" . P.nonEmpty $ + [ showConflictedNames "types" conflictedTypeNames, + showConflictedNames "terms" conflictedTermNames, + tip $ + "This occurs when merging branches that both independently introduce the same name. Use " + <> makeExample IP.view (prettyName <$> take 3 allNames) + <> "to see the conflicting definitions, then use " + <> makeExample' + ( if (not . null) conflictedTypeNames + then IP.renameType + else IP.renameTerm + ) + <> "to resolve the conflicts." + ] + where + allNames = toList (conflictedTermNames <> conflictedTypeNames) + showConflictedNames things conflictedNames = + unlessM (Set.null conflictedNames) $ + P.wrap ("These" <> P.bold (things <> "have conflicting definitions:")) + `P.hang` P.commas (P.blue . prettyName <$> toList conflictedNames) + +renderEditConflicts :: + PPE.PrettyPrintEnv -> Patch -> Pretty +renderEditConflicts ppe Patch {..} = + unlessM (null editConflicts) . P.callout "❓" . P.sep "\n\n" $ + [ P.wrap $ + "These" <> P.bold "definitions were edited differently" + <> "in namespaces that have been merged into this one." + <> "You'll have to tell me what to use as the new definition:", + P.indentN 2 (P.lines (formatConflict <$> editConflicts)) + -- , tip $ "Use " <> makeExample IP.resolve [name (head editConflicts), " "] <> " to pick a replacement." -- todo: eventually something with `edit` + ] + where + -- todo: could possibly simplify all of this, but today is a copy/paste day. + editConflicts :: [Either (Reference, Set TypeEdit.TypeEdit) (Reference, Set TermEdit.TermEdit)] + editConflicts = + (fmap Left . Map.toList . R.toMultimap . R.filterManyDom $ _typeEdits) + <> (fmap Right . Map.toList . R.toMultimap . R.filterManyDom $ _termEdits) + typeName r = styleHashQualified P.bold (PPE.typeName ppe r) + termName r = styleHashQualified P.bold (PPE.termName ppe (Referent.Ref r)) + formatTypeEdits (r, toList -> es) = + P.wrap $ + "The type" <> typeName r <> "was" + <> ( if TypeEdit.Deprecate `elem` es + then "deprecated and also replaced with" + else "replaced with" + ) + <> P.oxfordCommas [typeName r | TypeEdit.Replace r <- es] + formatTermEdits (r, toList -> es) = + P.wrap $ + "The term" <> termName r <> "was" + <> ( if TermEdit.Deprecate `elem` es + then "deprecated and also replaced with" + else "replaced with" + ) + <> P.oxfordCommas [termName r | TermEdit.Replace r _ <- es] + formatConflict = either formatTypeEdits formatTermEdits + +type Numbered = State.State (Int, Seq.Seq String) + +todoOutput :: Var v => PPE.PrettyPrintEnvDecl -> TO.TodoOutput v a -> Pretty +todoOutput ppe todo = + todoConflicts <> todoEdits + where + ppeu = PPE.unsuffixifiedPPE ppe + ppes = PPE.suffixifiedPPE ppe + (frontierTerms, frontierTypes) = TO.todoFrontier todo + (dirtyTerms, dirtyTypes) = TO.todoFrontierDependents todo + corruptTerms = + [(PPE.termName ppeu (Referent.Ref r), r) | (r, Nothing) <- frontierTerms] + corruptTypes = + [(PPE.typeName ppeu r, r) | (r, MissingObject _) <- frontierTypes] + goodTerms ts = + [(Referent.Ref r, PPE.termName ppeu (Referent.Ref r), typ) | (r, Just typ) <- ts] + todoConflicts = + if TO.noConflicts todo + then mempty + else + P.lines . P.nonEmpty $ + [ renderEditConflicts ppeu (TO.editConflicts todo), + renderNameConflicts conflictedTypeNames conflictedTermNames + ] + where + -- If a conflict is both an edit and a name conflict, we show it in the edit + -- conflicts section + c :: Names + c = removeEditConflicts (TO.editConflicts todo) (TO.nameConflicts todo) + conflictedTypeNames = (R.dom . Names.types) c + conflictedTermNames = (R.dom . Names.terms) c + -- e.g. `foo#a` has been independently updated to `foo#b` and `foo#c`. + -- This means there will be a name conflict: + -- foo -> #b + -- foo -> #c + -- as well as an edit conflict: + -- #a -> #b + -- #a -> #c + -- We want to hide/ignore the name conflicts that are also targets of an + -- edit conflict, so that the edit conflict will be dealt with first. + -- For example, if hash `h` has multiple edit targets { #x, #y, #z, ...}, + -- we'll temporarily remove name conflicts pointing to { #x, #y, #z, ...}. + removeEditConflicts :: Patch -> Names -> Names + removeEditConflicts Patch {..} Names {..} = Names terms' types' + where + terms' = R.filterRan (`Set.notMember` conflictedTermEditTargets) terms + types' = R.filterRan (`Set.notMember` conflictedTypeEditTargets) types + conflictedTypeEditTargets :: Set Reference + conflictedTypeEditTargets = + Set.fromList $ toList (R.ran typeEditConflicts) >>= TypeEdit.references + conflictedTermEditTargets :: Set Referent.Referent + conflictedTermEditTargets = + Set.fromList . fmap Referent.Ref $ + toList (R.ran termEditConflicts) >>= TermEdit.references + typeEditConflicts = R.filterDom (`R.manyDom` _typeEdits) _typeEdits + termEditConflicts = R.filterDom (`R.manyDom` _termEdits) _termEdits + + todoEdits = + unlessM (TO.noEdits todo) . P.callout "🚧" . P.sep "\n\n" . P.nonEmpty $ + [ P.wrap + ( "The namespace has" <> fromString (show (TO.todoScore todo)) + <> "transitive dependent(s) left to upgrade." + <> "Your edit frontier is the dependents of these definitions:" + ), + P.indentN 2 . P.lines $ + ( (prettyDeclPair ppeu <$> toList frontierTypes) + ++ TypePrinter.prettySignatures' ppes (goodTerms frontierTerms) + ), + P.wrap "I recommend working on them in the following order:", + P.numberedList $ + let unscore (_score, a, b) = (a, b) + in (prettyDeclPair ppeu . unscore <$> toList dirtyTypes) + ++ TypePrinter.prettySignatures' + ppes + (goodTerms $ unscore <$> dirtyTerms), + formatMissingStuff corruptTerms corruptTypes + ] + +listOfDefinitions :: + Var v => PPE.PrettyPrintEnv -> E.ListDetailed -> [SR'.SearchResult' v a] -> IO Pretty +listOfDefinitions ppe detailed results = + pure $ listOfDefinitions' ppe detailed results + +listOfLinks :: + Var v => PPE.PrettyPrintEnv -> [(HQ.HashQualified Name, Maybe (Type v a))] -> IO Pretty +listOfLinks _ [] = + pure . P.callout "😶" . P.wrap $ + "No results. Try using the " + <> IP.makeExample IP.link [] + <> "command to add metadata to a definition." +listOfLinks ppe results = + pure $ + P.lines + [ P.numberedColumn2 + num + [ (P.syntaxToColor $ prettyHashQualified hq, ": " <> prettyType typ) | (hq, typ) <- results + ], + "", + tip $ + "Try using" <> IP.makeExample IP.display ["1"] + <> "to display the first result or" + <> IP.makeExample IP.view ["1"] + <> "to view its source." + ] + where + num i = P.hiBlack $ P.shown i <> "." + prettyType Nothing = "❓ (missing a type for this definition)" + prettyType (Just t) = TypePrinter.pretty ppe t + +data ShowNumbers = ShowNumbers | HideNumbers + +-- | `ppe` is just for rendering type signatures +-- `oldPath, newPath :: Path.Absolute` are just for producing fully-qualified +-- numbered args +showDiffNamespace :: + forall v. + Var v => + ShowNumbers -> + PPE.PrettyPrintEnv -> + Path.Absolute -> + Path.Absolute -> + OBD.BranchDiffOutput v Ann -> + (Pretty, NumberedArgs) +showDiffNamespace _ _ _ _ diffOutput + | OBD.isEmpty diffOutput = + ("The namespaces are identical.", mempty) +showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput {..} = + (P.sepNonEmpty "\n\n" p, toList args) + where + (p, (menuSize, args)) = + (`State.runState` (0 :: Int, Seq.empty)) $ + sequence + [ if (not . null) newTypeConflicts + || (not . null) newTermConflicts + then do + prettyUpdatedTypes :: [Pretty] <- traverse prettyUpdateType newTypeConflicts + prettyUpdatedTerms :: [Pretty] <- traverse prettyUpdateTerm newTermConflicts + pure $ + P.sepNonEmpty + "\n\n" + [ P.red "New name conflicts:", + P.indentN 2 . P.sepNonEmpty "\n\n" $ prettyUpdatedTypes <> prettyUpdatedTerms + ] + else pure mempty, + if (not . null) resolvedTypeConflicts + || (not . null) resolvedTermConflicts + then do + prettyUpdatedTypes :: [Pretty] <- traverse prettyUpdateType resolvedTypeConflicts + prettyUpdatedTerms :: [Pretty] <- traverse prettyUpdateTerm resolvedTermConflicts + pure $ + P.sepNonEmpty + "\n\n" + [ P.bold "Resolved name conflicts:", + P.indentN 2 . P.sepNonEmpty "\n\n" $ prettyUpdatedTypes <> prettyUpdatedTerms + ] + else pure mempty, + if (not . null) updatedTypes + || (not . null) updatedTerms + || propagatedUpdates > 0 + || (not . null) updatedPatches + then do + prettyUpdatedTypes :: [Pretty] <- traverse prettyUpdateType updatedTypes + prettyUpdatedTerms :: [Pretty] <- traverse prettyUpdateTerm updatedTerms + prettyUpdatedPatches :: [Pretty] <- traverse (prettySummarizePatch newPath) updatedPatches + pure $ + P.sepNonEmpty + "\n\n" + [ P.bold "Updates:", + P.indentNonEmptyN 2 . P.sepNonEmpty "\n\n" $ prettyUpdatedTypes <> prettyUpdatedTerms, + if propagatedUpdates > 0 + then + P.indentN 2 $ + P.wrap + ( P.hiBlack $ + "There were " + <> P.shown propagatedUpdates + <> "auto-propagated updates." + ) + else mempty, + P.indentNonEmptyN 2 . P.linesNonEmpty $ prettyUpdatedPatches + ] + else pure mempty, + if (not . null) addedTypes + || (not . null) addedTerms + || (not . null) addedPatches + then do + prettyAddedTypes :: Pretty <- prettyAddTypes addedTypes + prettyAddedTerms :: Pretty <- prettyAddTerms addedTerms + prettyAddedPatches :: [Pretty] <- traverse (prettySummarizePatch newPath) addedPatches + pure $ + P.sepNonEmpty + "\n\n" + [ P.bold "Added definitions:", + P.indentNonEmptyN 2 $ P.linesNonEmpty [prettyAddedTypes, prettyAddedTerms], + P.indentNonEmptyN 2 $ P.lines prettyAddedPatches + ] + else pure mempty, + if (not . null) removedTypes + || (not . null) removedTerms + || (not . null) removedPatches + then do + prettyRemovedTypes :: Pretty <- prettyRemoveTypes removedTypes + prettyRemovedTerms :: Pretty <- prettyRemoveTerms removedTerms + prettyRemovedPatches :: [Pretty] <- traverse (prettyNamePatch oldPath) removedPatches + pure $ + P.sepNonEmpty + "\n\n" + [ P.bold "Removed definitions:", + P.indentN 2 $ + P.linesNonEmpty + [ prettyRemovedTypes, + prettyRemovedTerms, + P.linesNonEmpty prettyRemovedPatches + ] + ] + else pure mempty, + if (not . null) renamedTypes + || (not . null) renamedTerms + then do + results <- prettyRenameGroups renamedTypes renamedTerms + pure $ + P.sepNonEmpty + "\n\n" + [ P.bold "Name changes:", + P.indentN 2 . P.sepNonEmpty "\n\n" $ results + ] + else -- todo: change separator to just '\n' here if all the results are 1 to 1 + pure mempty + ] + + {- new implementation + 23. X ┐ => (added) 24. X' + 25. X2 ┘ (removed) 26. X2 + -} + prettyRenameGroups :: + [OBD.RenameTypeDisplay v a] -> + [OBD.RenameTermDisplay v a] -> + Numbered [Pretty] + prettyRenameGroups types terms = + (<>) + <$> traverse + (prettyGroup . (over (_1 . _1) Referent.Ref)) + (types `zip` [0 ..]) + <*> traverse prettyGroup (terms `zip` [length types ..]) + where + leftNamePad :: P.Width = + foldl1' max $ + map + (foldl1' max . map (P.Width . HQ'.nameLength) . toList . view _3) + terms + <> map + (foldl1' max . map (P.Width . HQ'.nameLength) . toList . view _3) + types + prettyGroup :: + ( (Referent, b, Set (HQ'.HashQualified Name), Set (HQ'.HashQualified Name)), + Int + ) -> + Numbered Pretty + prettyGroup ((r, _, olds, news), i) = + let -- [ "peach ┐" + -- , "peach' ┘"] + olds' :: [Numbered Pretty] = + map (\(oldhq, oldp) -> numHQ' oldPath oldhq r <&> (\n -> n <> " " <> oldp)) + . (zip (toList olds)) + . P.boxRight + . map (P.rightPad leftNamePad . phq') + $ toList olds + + added' = toList $ Set.difference news olds + removed' = toList $ Set.difference olds news + -- [ "(added) 24. X'" + -- , "(removed) 26. X2" + -- ] + + news' :: [Numbered Pretty] = + map (number addedLabel) added' ++ map (number removedLabel) removed' + where + addedLabel = "(added)" + removedLabel = "(removed)" + number label name = + numHQ' newPath name r + <&> (\num -> num <> " " <> phq' name <> " " <> label) + + buildTable :: [Numbered Pretty] -> [Numbered Pretty] -> Numbered Pretty + buildTable lefts rights = + let hlefts = + if i == 0 + then pure (P.bold "Original") : lefts + else lefts + hrights = if i == 0 then pure (P.bold "Changes") : rights else rights + in P.column2UnzippedM @Numbered mempty hlefts hrights + in buildTable olds' news' + + prettyUpdateType :: OBD.UpdateTypeDisplay v a -> Numbered Pretty + {- + 1. ability Foo#pqr x y + 2. - AllRightsReserved : License + 3. + MIT : License + 4. ability Foo#abc + 5. - apiDocs : License + 6. + MIT : License + -} + prettyUpdateType (Nothing, mdUps) = + P.column2 <$> traverse (mdTypeLine newPath) mdUps + {- + 1. ┌ ability Foo#pqr x y + 2. └ ability Foo#xyz a b + ⧩ + 4. ┌ ability Foo#abc + │ 5. - apiDocs : Doc + │ 6. + MIT : License + 7. └ ability Foo#def + 8. - apiDocs : Doc + 9. + MIT : License + + 1. ┌ foo#abc : Nat -> Nat -> Poop + 2. └ foo#xyz : Nat + ↓ + 4. foo : Poop + 5. + foo.docs : Doc + -} + prettyUpdateType (Just olds, news) = + do + olds <- traverse (mdTypeLine oldPath) [(name, r, decl, mempty) | (name, r, decl) <- olds] + news <- traverse (mdTypeLine newPath) news + let (oldnums, olddatas) = unzip olds + let (newnums, newdatas) = unzip news + pure . P.column2 $ + zip + (oldnums <> [""] <> newnums) + (P.boxLeft olddatas <> [downArrow] <> P.boxLeft newdatas) + + {- + 13. ┌ability Yyz (+1 metadata) + 14. └ability copies.Yyz (+2 metadata) + -} + prettyAddTypes :: [OBD.AddedTypeDisplay v a] -> Numbered Pretty + prettyAddTypes = fmap P.lines . traverse prettyGroup + where + prettyGroup :: OBD.AddedTypeDisplay v a -> Numbered Pretty + prettyGroup (hqmds, r, odecl) = do + pairs <- traverse (prettyLine r odecl) hqmds + let (nums, decls) = unzip pairs + let boxLeft = case hqmds of _ : _ : _ -> P.boxLeft; _ -> id + pure . P.column2 $ zip nums (boxLeft decls) + prettyLine :: Reference -> Maybe (DD.DeclOrBuiltin v a) -> (HQ'.HashQualified Name, [OBD.MetadataDisplay v a]) -> Numbered (Pretty, Pretty) + prettyLine r odecl (hq, mds) = do + n <- numHQ' newPath hq (Referent.Ref r) + pure . (n,) $ + prettyDecl hq odecl <> case length mds of + 0 -> mempty + c -> " (+" <> P.shown c <> " metadata)" + + prettyAddTerms :: [OBD.AddedTermDisplay v a] -> Numbered Pretty + prettyAddTerms = fmap (P.column3 . mconcat) . traverse prettyGroup . reorderTerms + where + reorderTerms = sortOn (not . Referent.isConstructor . view _2) + prettyGroup :: OBD.AddedTermDisplay v a -> Numbered [(Pretty, Pretty, Pretty)] + prettyGroup (hqmds, r, otype) = do + pairs <- traverse (prettyLine r otype) hqmds + let (nums, names, decls) = unzip3 pairs + boxLeft = case hqmds of _ : _ : _ -> P.boxLeft; _ -> id + pure $ zip3 nums (boxLeft names) decls + prettyLine r otype (hq, mds) = do + n <- numHQ' newPath hq r + pure . (n,phq' hq,) $ + ": " <> prettyType otype <> case length mds of + 0 -> mempty + c -> " (+" <> P.shown c <> " metadata)" + + prettySummarizePatch, prettyNamePatch :: Path.Absolute -> OBD.PatchDisplay -> Numbered Pretty + -- 12. patch p (added 3 updates, deleted 1) + prettySummarizePatch prefix (name, patchDiff) = do + n <- numPatch prefix name + let addCount = + (R.size . view Patch.addedTermEdits) patchDiff + + (R.size . view Patch.addedTypeEdits) patchDiff + delCount = + (R.size . view Patch.removedTermEdits) patchDiff + + (R.size . view Patch.removedTypeEdits) patchDiff + messages = + (if addCount > 0 then ["added " <> P.shown addCount] else []) + ++ (if delCount > 0 then ["deleted " <> P.shown addCount] else []) + message = case messages of + [] -> mempty + x : ys -> " (" <> P.commas (x <> " updates" : ys) <> ")" + pure $ n <> P.bold " patch " <> prettyName name <> message + -- 18. patch q + prettyNamePatch prefix (name, _patchDiff) = do + n <- numPatch prefix name + pure $ n <> P.bold " patch " <> prettyName name + + {- + Removes: + + 10. ┌ oldn'busted : Nat -> Nat -> Poop + 11. └ oldn'busted' + 12. ability BadType + 13. patch defunctThingy + -} + prettyRemoveTypes :: [OBD.RemovedTypeDisplay v a] -> Numbered Pretty + prettyRemoveTypes = fmap P.lines . traverse prettyGroup + where + prettyGroup :: OBD.RemovedTypeDisplay v a -> Numbered Pretty + prettyGroup (hqs, r, odecl) = do + lines <- traverse (prettyLine r odecl) hqs + let (nums, decls) = unzip lines + boxLeft = case hqs of _ : _ : _ -> P.boxLeft; _ -> id + pure . P.column2 $ zip nums (boxLeft decls) + prettyLine r odecl hq = do + n <- numHQ' newPath hq (Referent.Ref r) + pure (n, prettyDecl hq odecl) + + prettyRemoveTerms :: [OBD.RemovedTermDisplay v a] -> Numbered Pretty + prettyRemoveTerms = fmap (P.column3 . mconcat) . traverse prettyGroup . reorderTerms + where + reorderTerms = sortOn (not . Referent.isConstructor . view _2) + prettyGroup :: OBD.RemovedTermDisplay v a -> Numbered [(Pretty, Pretty, Pretty)] + prettyGroup ([], r, _) = + error $ "trying to remove " <> show r <> " without any names." + prettyGroup (hq1 : hqs, r, otype) = do + line1 <- prettyLine1 r otype hq1 + lines <- traverse (prettyLine r) hqs + let (nums, names, decls) = unzip3 (line1 : lines) + boxLeft = case hqs of _ : _ -> P.boxLeft; _ -> id + pure $ zip3 nums (boxLeft names) decls + prettyLine1 r otype hq = do + n <- numHQ' newPath hq r + pure (n, phq' hq, ": " <> prettyType otype) + prettyLine r hq = do + n <- numHQ' newPath hq r + pure (n, phq' hq, mempty) + + downArrow = P.bold "↓" + mdTypeLine :: Path.Absolute -> OBD.TypeDisplay v a -> Numbered (Pretty, Pretty) + mdTypeLine p (hq, r, odecl, mddiff) = do + n <- numHQ' p hq (Referent.Ref r) + fmap ((n,) . P.linesNonEmpty) . sequence $ + [ pure $ prettyDecl hq odecl, + P.indentN leftNumsWidth <$> prettyMetadataDiff mddiff + ] + + -- + 2. MIT : License + -- - 3. AllRightsReserved : License + mdTermLine :: + Path.Absolute -> + P.Width -> + OBD.TermDisplay v a -> + Numbered (Pretty, Pretty) + mdTermLine p namesWidth (hq, r, otype, mddiff) = do + n <- numHQ' p hq r + fmap ((n,) . P.linesNonEmpty) + . sequence + $ [ pure $ P.rightPad namesWidth (phq' hq) <> " : " <> prettyType otype, + prettyMetadataDiff mddiff + ] + + prettyUpdateTerm :: OBD.UpdateTermDisplay v a -> Numbered Pretty + prettyUpdateTerm (Nothing, newTerms) = + if null newTerms + then error "Super invalid UpdateTermDisplay" + else fmap P.column2 $ traverse (mdTermLine newPath namesWidth) newTerms + where + namesWidth = foldl1' max $ fmap (P.Width . HQ'.nameLength . view _1) newTerms + prettyUpdateTerm (Just olds, news) = fmap P.column2 $ do + olds <- + traverse + (mdTermLine oldPath namesWidth) + [(name, r, typ, mempty) | (name, r, typ) <- olds] + news <- traverse (mdTermLine newPath namesWidth) news + let (oldnums, olddatas) = unzip olds + let (newnums, newdatas) = unzip news + pure $ + zip + (oldnums <> [""] <> newnums) + (P.boxLeft olddatas <> [downArrow] <> P.boxLeft newdatas) + where + namesWidth = + foldl1' max $ + fmap (P.Width . HQ'.nameLength . view _1) news + <> fmap (P.Width . HQ'.nameLength . view _1) olds + + prettyMetadataDiff :: OBD.MetadataDiff (OBD.MetadataDisplay v a) -> Numbered Pretty + prettyMetadataDiff OBD.MetadataDiff {..} = + P.column2M $ + map (elem oldPath "- ") removedMetadata + <> map (elem newPath "+ ") addedMetadata + where + elem p x (hq, r, otype) = do + num <- numHQ p hq r + pure (x <> num <> " " <> phq hq, ": " <> prettyType otype) + + prettyType = maybe (P.red "type not found") (TypePrinter.pretty ppe) + prettyDecl hq = + maybe + (P.red "type not found") + (P.syntaxToColor . DeclPrinter.prettyDeclOrBuiltinHeader (HQ'.toHQ hq)) + phq' :: _ -> Pretty = P.syntaxToColor . prettyHashQualified' + phq :: _ -> Pretty = P.syntaxToColor . prettyHashQualified + -- + -- DeclPrinter.prettyDeclHeader : HQ -> Either + numPatch :: Path.Absolute -> Name -> Numbered Pretty + numPatch prefix name = + addNumberedArg . Name.toString . Name.makeAbsolute $ Path.prefixName prefix name + + numHQ :: Path.Absolute -> HQ.HashQualified Name -> Referent -> Numbered Pretty + numHQ prefix hq r = addNumberedArg (HQ.toString hq') + where + hq' = HQ.requalify (fmap (Name.makeAbsolute . Path.prefixName prefix) hq) r + + numHQ' :: Path.Absolute -> HQ'.HashQualified Name -> Referent -> Numbered Pretty + numHQ' prefix hq r = addNumberedArg (HQ'.toString hq') + where + hq' = HQ'.requalify (fmap (Name.makeAbsolute . Path.prefixName prefix) hq) r + + addNumberedArg :: String -> Numbered Pretty + addNumberedArg s = case sn of + ShowNumbers -> do + (n, args) <- State.get + State.put (n + 1, args Seq.|> s) + pure $ padNumber (n + 1) + HideNumbers -> pure mempty + + padNumber :: Int -> Pretty + padNumber n = P.hiBlack . P.rightPad leftNumsWidth $ P.shown n <> "." + + leftNumsWidth = P.Width $ length (show menuSize) + length ("." :: String) + +noResults :: Pretty +noResults = + P.callout "😶" $ + P.wrap $ + "No results. Check your spelling, or try using tab completion " + <> "to supply command arguments." + +listOfDefinitions' :: + Var v => + PPE.PrettyPrintEnv -> -- for printing types of terms :-\ + E.ListDetailed -> + [SR'.SearchResult' v a] -> + Pretty +listOfDefinitions' ppe detailed results = + if null results + then noResults + else + P.lines + . P.nonEmpty + $ prettyNumberedResults : + [ formatMissingStuff termsWithMissingTypes missingTypes, + unlessM (null missingBuiltins) + . bigproblem + $ P.wrap + "I encountered an inconsistency in the codebase; these definitions refer to built-ins that this version of unison doesn't know about:" + `P.hang` P.column2 + ( (P.bold "Name", P.bold "Built-in") + -- : ("-", "-") + : + fmap + ( bimap + (P.syntaxToColor . prettyHashQualified) + (P.text . Referent.toText) + ) + missingBuiltins + ) + ] + where + prettyNumberedResults = P.numberedList prettyResults + -- todo: group this by namespace + prettyResults = + map + (SR'.foldResult' renderTerm renderType) + (filter (not . missingType) results) + where + (renderTerm, renderType) = + if detailed + then (unsafePrettyTermResultSigFull' ppe, prettyTypeResultHeaderFull') + else (unsafePrettyTermResultSig' ppe, prettyTypeResultHeader') + missingType (SR'.Tm _ Nothing _ _) = True + missingType (SR'.Tp _ (MissingObject _) _ _) = True + missingType _ = False + -- termsWithTypes = [(name,t) | (name, Just t) <- sigs0 ] + -- where sigs0 = (\(name, _, typ) -> (name, typ)) <$> terms + termsWithMissingTypes = + [ (name, Reference.idToShortHash r) + | SR'.Tm name Nothing (Referent.Ref (Reference.DerivedId r)) _ <- results + ] + missingTypes = + nubOrdOn snd $ + [(name, r) | SR'.Tp name (MissingObject r) _ _ <- results] + <> [ (name, Reference.toShortHash r) + | SR'.Tm name Nothing (Referent.toTypeReference -> Just r) _ <- results + ] + missingBuiltins = + results >>= \case + SR'.Tm name Nothing r@(Referent.Ref (Reference.Builtin _)) _ -> + [(name, r)] + _ -> [] + +watchPrinter :: + Var v => + Text -> + PPE.PrettyPrintEnv -> + Ann -> + WK.WatchKind -> + Term v () -> + Runtime.IsCacheHit -> + Pretty +watchPrinter src ppe ann kind term isHit = + P.bracket $ + let lines = Text.lines src + lineNum = fromMaybe 1 $ startingLine ann + lineNumWidth = length (show lineNum) + extra = " " <> replicate (length kind) ' ' -- for the ` | > ` after the line number + line = lines !! (lineNum - 1) + addCache p = if isHit then p <> " (cached)" else p + renderTest (Term.App' (Term.Constructor' _ id) (Term.Text' msg)) = + "\n" + <> if id == DD.okConstructorId + then + addCache + (P.green "✅ " <> P.bold "Passed" <> P.green (P.text msg')) + else + if id == DD.failConstructorId + then + addCache + (P.red "🚫 " <> P.bold "FAILED" <> P.red (P.text msg')) + else P.red "❓ " <> TermPrinter.pretty ppe term + where + msg' = + if Text.take 1 msg == " " + then msg + else " " <> msg + renderTest x = + fromString $ "\n Unison bug: " <> show x <> " is not a test." + in P.lines + [ fromString (show lineNum) <> " | " <> P.text line, + case (kind, term) of + (WK.TestWatch, Term.List' tests) -> foldMap renderTest tests + _ -> + P.lines + [ fromString (replicate lineNumWidth ' ') + <> fromString extra + <> (if isHit then id else P.purple) "⧩", + P.indentN (P.Width (lineNumWidth + length extra)) + . (if isHit then id else P.bold) + $ TermPrinter.pretty ppe term + ] + ] + +filestatusTip :: Pretty +filestatusTip = tip "Use `help filestatus` to learn more." + +prettyDiff :: Names.Diff -> Pretty +prettyDiff diff = + let orig = Names.originalNames diff + adds = Names.addedNames diff + removes = Names.removedNames diff + + addedTerms = + [ (n, r) | (n, r) <- R.toList (Names.terms adds), not $ R.memberRan r (Names.terms removes) + ] + addedTypes = + [ (n, r) | (n, r) <- R.toList (Names.types adds), not $ R.memberRan r (Names.types removes) + ] + added = sort (hqTerms ++ hqTypes) + where + hqTerms = [Names.hqName adds n (Right r) | (n, r) <- addedTerms] + hqTypes = [Names.hqName adds n (Left r) | (n, r) <- addedTypes] + + removedTerms = + [ (n, r) | (n, r) <- R.toList (Names.terms removes), not $ R.memberRan r (Names.terms adds), Set.notMember n addedTermsSet + ] + where + addedTermsSet = Set.fromList (map fst addedTerms) + removedTypes = + [ (n, r) | (n, r) <- R.toList (Names.types removes), not $ R.memberRan r (Names.types adds), Set.notMember n addedTypesSet + ] + where + addedTypesSet = Set.fromList (map fst addedTypes) + removed = sort (hqTerms ++ hqTypes) + where + hqTerms = [Names.hqName removes n (Right r) | (n, r) <- removedTerms] + hqTypes = [Names.hqName removes n (Left r) | (n, r) <- removedTypes] + + movedTerms = + [ (n, n2) | (n, r) <- R.toList (Names.terms removes), n2 <- toList (R.lookupRan r (Names.terms adds)) + ] + movedTypes = + [ (n, n2) | (n, r) <- R.toList (Names.types removes), n2 <- toList (R.lookupRan r (Names.types adds)) + ] + moved = Name.sortNamed fst . nubOrd $ (movedTerms <> movedTypes) + + copiedTerms = + List.multimap + [ (n, n2) | (n2, r) <- R.toList (Names.terms adds), not (R.memberRan r (Names.terms removes)), n <- toList (R.lookupRan r (Names.terms orig)) + ] + copiedTypes = + List.multimap + [ (n, n2) | (n2, r) <- R.toList (Names.types adds), not (R.memberRan r (Names.types removes)), n <- toList (R.lookupRan r (Names.types orig)) + ] + copied = + Name.sortNamed fst $ + Map.toList (Map.unionWith (<>) copiedTerms copiedTypes) + in P.sepNonEmpty + "\n\n" + [ if not $ null added + then + P.lines + [ -- todo: split out updates + P.green "+ Adds / updates:", + "", + P.indentN 2 . P.wrap $ + P.sep " " (P.syntaxToColor . prettyHashQualified' <$> added) + ] + else mempty, + if not $ null removed + then + P.lines + [ P.hiBlack "- Deletes:", + "", + P.indentN 2 . P.wrap $ + P.sep " " (P.syntaxToColor . prettyHashQualified' <$> removed) + ] + else mempty, + if not $ null moved + then + P.lines + [ P.purple "> Moves:", + "", + P.indentN 2 $ + P.column2 $ + (P.hiBlack "Original name", P.hiBlack "New name") : + [(prettyName n, prettyName n2) | (n, n2) <- moved] + ] + else mempty, + if not $ null copied + then + P.lines + [ P.yellow "= Copies:", + "", + P.indentN 2 $ + P.column2 $ + (P.hiBlack "Original name", P.hiBlack "New name(s)") : + [ (prettyName n, P.sep " " (prettyName <$> ns)) + | (n, ns) <- copied + ] + ] + else mempty + ] + +prettyTermName :: PPE.PrettyPrintEnv -> Referent -> Pretty +prettyTermName ppe r = + P.syntaxToColor $ + prettyHashQualified (PPE.termName ppe r) + +prettyReadRepo :: ReadRepo -> Pretty +prettyReadRepo (RemoteRepo.ReadGitRepo url) = P.blue (P.text url) + +prettyWriteRepo :: WriteRepo -> Pretty +prettyWriteRepo (RemoteRepo.WriteGitRepo url) = P.blue (P.text url) + +isTestOk :: Term v Ann -> Bool +isTestOk tm = case tm of + Term.List' ts -> all isSuccess ts + where + isSuccess (Term.App' (Term.Constructor' ref cid) _) = + cid == DD.okConstructorId + && ref == DD.testResultRef + isSuccess _ = False + _ -> False diff --git a/parser-typechecker/src/Unison/CommandLine/Welcome.hs b/unison-cli/src/Unison/CommandLine/Welcome.hs similarity index 100% rename from parser-typechecker/src/Unison/CommandLine/Welcome.hs rename to unison-cli/src/Unison/CommandLine/Welcome.hs diff --git a/unison-cli/tests/Main.hs b/unison-cli/tests/Main.hs new file mode 100644 index 0000000000..65cd9683fd --- /dev/null +++ b/unison-cli/tests/Main.hs @@ -0,0 +1,30 @@ +module Main where + +import EasyTest +import System.Environment (getArgs) +import System.IO +import qualified Unison.Test.ClearCache as ClearCache +import qualified Unison.Test.CommandLine as CommandLine +import qualified Unison.Test.GitSync as GitSync +import qualified Unison.Test.UriParser as UriParser +import qualified Unison.Test.VersionParser as VersionParser + +test :: Test () +test = + tests + [ ClearCache.test, + CommandLine.test, + GitSync.test, + UriParser.test, + VersionParser.test + ] + +main :: IO () +main = do + args <- getArgs + mapM_ (`hSetEncoding` utf8) [stdout, stdin, stderr] + case args of + [] -> runOnly "" test + [prefix] -> runOnly prefix test + [seed, prefix] -> rerunOnly (read seed) prefix test + _ -> error "expected no args, a prefix, or a seed and a prefix" diff --git a/parser-typechecker/tests/Unison/Test/ClearCache.hs b/unison-cli/tests/Unison/Test/ClearCache.hs similarity index 100% rename from parser-typechecker/tests/Unison/Test/ClearCache.hs rename to unison-cli/tests/Unison/Test/ClearCache.hs diff --git a/parser-typechecker/tests/Unison/Test/CommandLine.hs b/unison-cli/tests/Unison/Test/CommandLine.hs similarity index 100% rename from parser-typechecker/tests/Unison/Test/CommandLine.hs rename to unison-cli/tests/Unison/Test/CommandLine.hs diff --git a/parser-typechecker/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs similarity index 95% rename from parser-typechecker/tests/Unison/Test/GitSync.hs rename to unison-cli/tests/Unison/Test/GitSync.hs index f698ad0b6e..225ca504a7 100644 --- a/parser-typechecker/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -41,7 +41,7 @@ test = scope "gitsync22" . tests $ .> alias.type ##Nat builtin.Nat .> history .> history builtin - .> push ${repo} + .> push.create ${repo} ``` |]) (\repo -> [i| @@ -62,7 +62,7 @@ test = scope "gitsync22" . tests $ ```ucm .> add .> history - .> push ${repo} + .> push.create ${repo} ``` |]) (\repo -> [i| @@ -87,7 +87,7 @@ test = scope "gitsync22" . tests $ .> link doc y .> links y .> history - .> push ${repo} + .> push.create ${repo} ``` |]) (\repo -> [i| @@ -106,7 +106,7 @@ test = scope "gitsync22" . tests $ .> add .> alias.type ##Nat Nat .> link doc Nat - .> push ${repo} + .> push.create ${repo} ``` |]) (\repo -> [i| @@ -127,7 +127,7 @@ test = scope "gitsync22" . tests $ ``` ```ucm .> add - .> push ${repo} + .> push.create ${repo} ``` |]) (\repo -> [i| @@ -163,7 +163,7 @@ test = scope "gitsync22" . tests $ ``` ```ucm .> view.patch patch - .> push ${repo} + .> push.create ${repo} ``` |]) (\repo -> [i| @@ -187,7 +187,7 @@ test = scope "gitsync22" . tests $ ```ucm .> update .> history - .> push ${repo} + .> push.create ${repo} ``` |]) (\repo -> [i| @@ -215,7 +215,7 @@ test = scope "gitsync22" . tests $ ```ucm .> debug.file .> add - .> push ${repo} + .> push.create ${repo} ``` |]) -- simplest-user @@ -238,7 +238,7 @@ test = scope "gitsync22" . tests $ ```ucm .> debug.file .myLib> add - .myLib> push ${repo} + .myLib> push.create ${repo} ``` |]) -- simplest-user @@ -260,7 +260,7 @@ test = scope "gitsync22" . tests $ ```ucm .myLib> debug.file .myLib> add - .myLib> push ${repo} + .myLib> push.create ${repo} ``` |]) -- simplest-user @@ -294,7 +294,7 @@ test = scope "gitsync22" . tests $ ``` ```ucm .workaround1552.myLib.v2> update - .workaround1552.myLib> push ${repo} + .workaround1552.myLib> push.create ${repo} ``` |]) (\repo -> [i| @@ -338,7 +338,7 @@ test = scope "gitsync22" . tests $ .patches> replace .defns.A .defns.B .patches> alias.type .defns.A A .patches> replace .defns.x .defns.y - .patches> push ${repo} + .patches> push.create ${repo} ``` |]) (\repo -> [i| @@ -359,7 +359,7 @@ test = scope "gitsync22" . tests $ ``` ```ucm .> add - .> push ${repo} + .> push.create ${repo} ``` |]) (\repo -> [i| @@ -383,7 +383,7 @@ CallStack (from HasCallStack): ```ucm .> alias.type ##Nat builtin.Nat2 .> alias.type ##Int builtin.Int2 - .> push ${repo}:.foo.bar + .> push.create ${repo}:.foo.bar ``` |]) (\repo -> [i| @@ -405,8 +405,8 @@ CallStack (from HasCallStack): ```ucm .> alias.type ##Nat builtin.Nat2 .> alias.type ##Int builtin.Int2 - .> push ${repo} - .> push ${repo}:.foo.bar + .> push.create ${repo} + .> push.create ${repo}:.foo.bar ``` |]) (\repo -> [i| @@ -519,7 +519,7 @@ fastForwardPush = scope "fastforward-push" do void $ Ucm.runTranscript author [i| ```ucm .lib> alias.type ##Nat Nat - .lib> push ${repo} + .lib> push.create ${repo} .lib> alias.type ##Int Int .lib> push ${repo} ``` @@ -549,13 +549,13 @@ destroyedRemote = scope "destroyed-remote" do void $ Ucm.runTranscript codebase [i| ```ucm .lib> alias.type ##Nat Nat - .lib> push ${repo} + .lib> push.create ${repo} ``` |] reinitRepo repo void $ Ucm.runTranscript codebase [i| ```ucm - .lib> push ${repo} + .lib> push.create ${repo} ``` |] ok diff --git a/parser-typechecker/tests/Unison/Test/Ucm.hs b/unison-cli/tests/Unison/Test/Ucm.hs similarity index 97% rename from parser-typechecker/tests/Unison/Test/Ucm.hs rename to unison-cli/tests/Unison/Test/Ucm.hs index 5d58f1bd2b..194eded968 100644 --- a/parser-typechecker/tests/Unison/Test/Ucm.hs +++ b/unison-cli/tests/Unison/Test/Ucm.hs @@ -79,7 +79,7 @@ runTranscript (Codebase codebasePath fmt) transcript = do output <- flip (either err) (TR.parse "transcript" (Text.pack . stripMargin $ unTranscript transcript)) $ \stanzas -> fmap Text.unpack $ - TR.run + TR.run "Unison.Test.Ucm.runTranscript Invalid Version String" codebasePath configFile stanzas diff --git a/parser-typechecker/tests/Unison/Test/UriParser.hs b/unison-cli/tests/Unison/Test/UriParser.hs similarity index 100% rename from parser-typechecker/tests/Unison/Test/UriParser.hs rename to unison-cli/tests/Unison/Test/UriParser.hs diff --git a/parser-typechecker/tests/Unison/Test/VersionParser.hs b/unison-cli/tests/Unison/Test/VersionParser.hs similarity index 100% rename from parser-typechecker/tests/Unison/Test/VersionParser.hs rename to unison-cli/tests/Unison/Test/VersionParser.hs diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index 7e13fa7f5a..6de1e502dc 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -21,6 +21,90 @@ flag optimized manual: True default: False +library + exposed-modules: + Unison.Codebase.Editor.AuthorInfo + Unison.Codebase.Editor.Command + Unison.Codebase.Editor.HandleCommand + Unison.Codebase.Editor.HandleInput + Unison.Codebase.Editor.Input + Unison.Codebase.Editor.Output + Unison.Codebase.Editor.Output.BranchDiff + Unison.Codebase.Editor.Output.DumpNamespace + Unison.Codebase.Editor.Propagate + Unison.Codebase.Editor.SlurpComponent + Unison.Codebase.Editor.SlurpResult + Unison.Codebase.Editor.TodoOutput + Unison.Codebase.Editor.UriParser + Unison.Codebase.Editor.VersionParser + Unison.Codebase.TranscriptParser + Unison.CommandLine + Unison.CommandLine.DisplayValues + Unison.CommandLine.FuzzySelect + Unison.CommandLine.Globbing + Unison.CommandLine.InputPattern + Unison.CommandLine.InputPatterns + Unison.CommandLine.Main + Unison.CommandLine.OutputMessages + Unison.CommandLine.Welcome + other-modules: + Paths_unison_cli + hs-source-dirs: + src + default-extensions: + ApplicativeDo + BangPatterns + BlockArguments + DeriveFunctor + DeriveGeneric + DerivingStrategies + DoAndIfThenElse + FlexibleContexts + FlexibleInstances + GeneralizedNewtypeDeriving + LambdaCase + MultiParamTypeClasses + NamedFieldPuns + OverloadedStrings + PatternSynonyms + RankNTypes + ScopedTypeVariables + TupleSections + TypeApplications + ViewPatterns + ghc-options: -Wall + build-depends: + ListLike + , async + , base + , bytestring + , configurator + , containers >=0.6.3 + , cryptonite + , directory + , errors + , extra + , filepath + , haskeline + , lens + , megaparsec >=5.0.0 && <7.0.0 + , mtl + , open-browser + , random >=1.2.0 + , regex-tdfa + , stm + , text + , unison-codebase-sqlite + , unison-core1 + , unison-parser-typechecker + , unison-prelude + , unison-util + , unison-util-relation + , unliftio + if flag(optimized) + ghc-options: -O2 -funbox-strict-fields + default-language: Haskell2010 + executable integration-tests main-is: Suite.hs other-modules: @@ -30,27 +114,58 @@ executable integration-tests integration-tests default-extensions: ApplicativeDo + BangPatterns BlockArguments DeriveFunctor + DeriveGeneric DerivingStrategies DoAndIfThenElse FlexibleContexts FlexibleInstances + GeneralizedNewtypeDeriving LambdaCase MultiParamTypeClasses + NamedFieldPuns + OverloadedStrings + PatternSynonyms + RankNTypes ScopedTypeVariables TupleSections TypeApplications - ghc-options: -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0 + ViewPatterns + ghc-options: -Wall -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0 build-depends: - base + ListLike + , async + , base + , bytestring + , configurator + , containers >=0.6.3 + , cryptonite + , directory , easytest + , errors + , extra + , filepath + , haskeline + , lens + , megaparsec >=5.0.0 && <7.0.0 + , mtl + , open-browser , process + , random >=1.2.0 + , regex-tdfa , shellmet + , stm , text + , time + , unison-codebase-sqlite , unison-core1 , unison-parser-typechecker , unison-prelude + , unison-util + , unison-util-relation + , unliftio if flag(optimized) ghc-options: -O2 -funbox-strict-fields default-language: Haskell2010 @@ -63,31 +178,59 @@ executable transcripts transcripts default-extensions: ApplicativeDo + BangPatterns BlockArguments DeriveFunctor + DeriveGeneric DerivingStrategies DoAndIfThenElse FlexibleContexts FlexibleInstances + GeneralizedNewtypeDeriving LambdaCase MultiParamTypeClasses + NamedFieldPuns + OverloadedStrings + PatternSynonyms + RankNTypes ScopedTypeVariables TupleSections TypeApplications - ghc-options: -threaded -rtsopts -with-rtsopts=-N -v0 + ViewPatterns + ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -v0 build-tools: unison build-depends: - base + ListLike + , async + , base + , bytestring + , configurator + , containers >=0.6.3 + , cryptonite , directory , easytest + , errors + , extra , filepath + , haskeline + , lens + , megaparsec >=5.0.0 && <7.0.0 + , mtl + , open-browser , process + , random >=1.2.0 + , regex-tdfa , shellmet + , stm , text + , unison-codebase-sqlite , unison-core1 , unison-parser-typechecker , unison-prelude + , unison-util + , unison-util-relation + , unliftio if flag(optimized) ghc-options: -O2 -funbox-strict-fields default-language: Haskell2010 @@ -104,35 +247,58 @@ executable unison unison default-extensions: ApplicativeDo + BangPatterns BlockArguments DeriveFunctor + DeriveGeneric DerivingStrategies DoAndIfThenElse FlexibleContexts FlexibleInstances + GeneralizedNewtypeDeriving LambdaCase MultiParamTypeClasses + NamedFieldPuns + OverloadedStrings + PatternSynonyms + RankNTypes ScopedTypeVariables TupleSections TypeApplications - ghc-options: -threaded -rtsopts -with-rtsopts=-I0 -optP-Wno-nonportable-include-path + ViewPatterns + ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-I0 -optP-Wno-nonportable-include-path build-depends: - base + ListLike + , async + , base , bytestring , configurator + , containers >=0.6.3 + , cryptonite , directory , errors + , extra , filepath - , megaparsec + , haskeline + , lens + , megaparsec >=5.0.0 && <7.0.0 , mtl + , open-browser , optparse-applicative >=0.16.1.0 + , random >=1.2.0 + , regex-tdfa , shellmet + , stm , template-haskell , temporary , text + , unison-cli + , unison-codebase-sqlite , unison-core1 , unison-parser-typechecker , unison-prelude + , unison-util + , unison-util-relation , unliftio if flag(optimized) ghc-options: -O2 -funbox-strict-fields @@ -140,3 +306,75 @@ executable unison build-depends: unix default-language: Haskell2010 + +test-suite tests + type: exitcode-stdio-1.0 + main-is: Main.hs + other-modules: + Unison.Test.ClearCache + Unison.Test.CommandLine + Unison.Test.GitSync + Unison.Test.Ucm + Unison.Test.UriParser + Unison.Test.VersionParser + Paths_unison_cli + hs-source-dirs: + tests + default-extensions: + ApplicativeDo + BangPatterns + BlockArguments + DeriveFunctor + DeriveGeneric + DerivingStrategies + DoAndIfThenElse + FlexibleContexts + FlexibleInstances + GeneralizedNewtypeDeriving + LambdaCase + MultiParamTypeClasses + NamedFieldPuns + OverloadedStrings + PatternSynonyms + RankNTypes + ScopedTypeVariables + TupleSections + TypeApplications + ViewPatterns + ghc-options: -Wall + build-depends: + ListLike + , async + , base + , bytestring + , configurator + , containers >=0.6.3 + , cryptonite + , directory + , easytest + , errors + , extra + , filepath + , haskeline + , here + , lens + , megaparsec >=5.0.0 && <7.0.0 + , mtl + , open-browser + , random >=1.2.0 + , regex-tdfa + , shellmet + , stm + , temporary + , text + , unison-cli + , unison-codebase-sqlite + , unison-core1 + , unison-parser-typechecker + , unison-prelude + , unison-util + , unison-util-relation + , unliftio + if flag(optimized) + ghc-options: -O2 -funbox-strict-fields + default-language: Haskell2010 diff --git a/unison-cli/unison/ArgParse.hs b/unison-cli/unison/ArgParse.hs index 3adda73929..f7f19d5e46 100644 --- a/unison-cli/unison/ArgParse.hs +++ b/unison-cli/unison/ArgParse.hs @@ -80,8 +80,8 @@ data ShouldSaveCodebase | DontSaveCodebase deriving (Show, Eq) -data CodebasePathOption - = CreateCodebaseWhenMissing FilePath +data CodebasePathOption + = CreateCodebaseWhenMissing FilePath | DontCreateCodebaseWhenMissing FilePath deriving (Show, Eq) @@ -97,7 +97,7 @@ data Command | PrintVersion -- @deprecated in trunk after M2g. Remove the Init command completely after M2h has been released | Init - | Run RunSource + | Run RunSource [String] | Transcript ShouldForkCodebase ShouldSaveCodebase (NonEmpty FilePath ) deriving (Show, Eq) @@ -150,24 +150,48 @@ versionCommand = command "version" (info versionParser (fullDesc <> progDesc "Pr initCommand :: Mod CommandFields Command initCommand = command "init" (info initParser (progDesc initHelp)) where - initHelp = + initHelp = "This command is has been removed. Use --codebase-create instead to create a codebase in the specified directory when starting the UCM." + +runDesc :: String -> String -> String +runDesc cmd location = + "Execute a definition from " <> location <> ", passing on the provided arguments. " + <> " To pass flags to your program, use `" + <> cmd + <> " -- --my-flag`" + runSymbolCommand :: Mod CommandFields Command runSymbolCommand = - command "run" (info runSymbolParser (fullDesc <> progDesc "Execute a definition from the codebase")) + command "run" (info runSymbolParser (fullDesc <> progDesc help)) + where + help = + "Execute a definition from the codebase, passing on the provided arguments. " + <> " To pass flags to your program, use `run -- --my-flag`" runFileCommand :: Mod CommandFields Command runFileCommand = - command "run.file" (info runFileParser (fullDesc <> progDesc "Execute a definition from a file")) + command "run.file" (info runFileParser (fullDesc <> progDesc help)) + where + help = + "Execute a definition from a file, passing on the provided arguments. " + <> " To pass flags to your program, use `run.file -- --my-flag`" runPipeCommand :: Mod CommandFields Command runPipeCommand = - command "run.pipe" (info runPipeParser (fullDesc <> progDesc "Execute code from stdin")) + command "run.pipe" (info runPipeParser (fullDesc <> progDesc help)) + where + help = + "Execute a definition from stdin, passing on the provided arguments. " + <> " To pass flags to your program, use `run -- --my-flag`" runCompiledCommand :: Mod CommandFields Command runCompiledCommand = - command "run.compiled" (info runCompiledParser (fullDesc <> progDesc "Execute previously compiled output")) + command "run.compiled" (info runCompiledParser (fullDesc <> progDesc help)) + where + help = + "Execute a definition from a previously compiled file, passing on the provided arguments. " + <> " To pass flags to your program, use `run -- --my-flag`" transcriptCommand :: Mod CommandFields Command transcriptCommand = @@ -208,7 +232,7 @@ commandParser envOpts = , transcriptForkCommand , launchHeadlessCommand envOpts ] - + globalOptionsParser :: Parser GlobalOptions globalOptionsParser = do -- ApplicativeDo codebasePathOption <- codebasePathParser <|> codebaseCreateParser @@ -216,13 +240,13 @@ globalOptionsParser = do -- ApplicativeDo pure GlobalOptions{codebasePathOption = codebasePathOption} codebasePathParser :: Parser (Maybe CodebasePathOption) -codebasePathParser = do +codebasePathParser = do optString <- optional . strOption $ long "codebase" <> metavar "codebase/path" <> help "The path to an existing codebase" pure (fmap DontCreateCodebaseWhenMissing optString) - + codebaseCreateParser :: Parser (Maybe CodebasePathOption) codebaseCreateParser = do path <- optional . strOption $ @@ -278,28 +302,31 @@ launchParser envOpts isHeadless = do -- ApplicativeDo pure (Launch isHeadless codebaseServerOpts downloadBase) initParser :: Parser Command -initParser = pure Init +initParser = pure Init versionParser :: Parser Command versionParser = pure PrintVersion +runArgumentParser :: Parser [String] +runArgumentParser = many (strArgument (metavar "RUN-ARGS")) + runSymbolParser :: Parser Command runSymbolParser = - Run . RunFromSymbol <$> strArgument (metavar "SYMBOL") + Run . RunFromSymbol <$> strArgument (metavar "SYMBOL") <*> runArgumentParser runFileParser :: Parser Command -runFileParser = do -- ApplicativeDo - pathTofile <- fileArgument "path/to/file" - symbolName <- strArgument (metavar "SYMBOL") - pure $ Run (RunFromFile pathTofile symbolName) +runFileParser = + Run <$> (RunFromFile <$> fileArgument "path/to/file" + <*> strArgument (metavar "SYMBOL")) + <*> runArgumentParser runPipeParser :: Parser Command runPipeParser = - Run . RunFromPipe <$> strArgument (metavar "SYMBOL") + Run . RunFromPipe <$> strArgument (metavar "SYMBOL") <*> runArgumentParser runCompiledParser :: Parser Command runCompiledParser = - Run . RunCompiled <$> fileArgument "path/to/file" + Run . RunCompiled <$> fileArgument "path/to/file" <*> runArgumentParser saveCodebaseFlag :: Parser ShouldSaveCodebase saveCodebaseFlag = flag DontSaveCodebase SaveCodebase (long "save-codebase" <> help saveHelp) diff --git a/unison-cli/unison/Main.hs b/unison-cli/unison/Main.hs index df6ea4c6c4..12d14ff94f 100644 --- a/unison-cli/unison/Main.hs +++ b/unison-cli/unison/Main.hs @@ -16,7 +16,7 @@ import qualified Data.ByteString.Lazy as BL import qualified Data.Text as Text import qualified GHC.Conc import System.Directory (canonicalizePath, getCurrentDirectory, removeDirectoryRecursive) -import System.Environment (getProgName) +import System.Environment (getProgName, withArgs) import qualified System.Exit as Exit import qualified System.FilePath as FP import System.IO.Error (catchIOError) @@ -94,12 +94,12 @@ main = do , P.wrap ("will " <> P.bold "always" <> " create a codebase in your home directory if one does not already exist.") ]) - Run (RunFromSymbol mainName) -> do + Run (RunFromSymbol mainName) args -> do ((closeCodebase, theCodebase),_) <- getCodebaseOrExit mCodePathOption runtime <- RTI.startRuntime Version.gitDescribeWithDate - execute theCodebase runtime mainName + withArgs args $ execute theCodebase runtime mainName closeCodebase - Run (RunFromFile file mainName) + Run (RunFromFile file mainName) args | not (isDotU file) -> PT.putPrettyLn $ P.callout "⚠️" "Files must have a .u extension." | otherwise -> do e <- safeReadUtf8 file @@ -109,9 +109,9 @@ main = do ((closeCodebase, theCodebase), initRes) <- getCodebaseOrExit mCodePathOption rt <- RTI.startRuntime Version.gitDescribeWithDate let fileEvent = Input.UnisonFileChanged (Text.pack file) contents - launch currentDir config rt theCodebase [Left fileEvent, Right $ Input.ExecuteI mainName, Right Input.QuitI] Nothing ShouldNotDownloadBase initRes + launch currentDir config rt theCodebase [Left fileEvent, Right $ Input.ExecuteI mainName args, Right Input.QuitI] Nothing ShouldNotDownloadBase initRes closeCodebase - Run (RunFromPipe mainName) -> do + Run (RunFromPipe mainName) args -> do e <- safeReadUtf8StdIn case e of Left _ -> PT.putPrettyLn $ P.callout "⚠️" "I had trouble reading this input." @@ -121,12 +121,12 @@ main = do let fileEvent = Input.UnisonFileChanged (Text.pack "") contents launch currentDir config rt theCodebase - [Left fileEvent, Right $ Input.ExecuteI mainName, Right Input.QuitI] + [Left fileEvent, Right $ Input.ExecuteI mainName args, Right Input.QuitI] Nothing ShouldNotDownloadBase initRes closeCodebase - Run (RunCompiled file) -> + Run (RunCompiled file) args -> BL.readFile file >>= \bs -> try (evaluate $ RTI.decodeStandalone bs) >>= \case Left (PE _cs err) -> do @@ -151,7 +151,7 @@ main = do \ program. The parser generated an unrecognized error." Right (Right (v, rf, w, sto)) | not vmatch -> mismatchMsg - | otherwise -> RTI.runStandalone sto w + | otherwise -> withArgs args $ RTI.runStandalone sto w where vmatch = v == Text.pack Version.gitDescribeWithDate ws s = P.wrap (P.text s) @@ -248,7 +248,7 @@ runTranscripts' mcodepath transcriptDir args = do configFilePath <- getConfigFilePath mcodepath -- We don't need to create a codebase through `getCodebaseOrExit` as we've already done so previously. ((closeCodebase, theCodebase),_) <- getCodebaseOrExit (Just (DontCreateCodebaseWhenMissing transcriptDir)) - mdOut <- TR.run transcriptDir configFilePath stanzas theCodebase + mdOut <- TR.run Version.gitDescribeWithDate transcriptDir configFilePath stanzas theCodebase closeCodebase let out = currentDir FP. FP.addExtension (FP.dropExtension fileName ++ ".output") diff --git a/unison-core/package.yaml b/unison-core/package.yaml index a744d1da1d..77738de834 100644 --- a/unison-core/package.yaml +++ b/unison-core/package.yaml @@ -21,10 +21,10 @@ library: - mtl - rfc5051 - safe - - sandi - text - transformers - unison-prelude + - unison-util - unison-util-relation - util - vector diff --git a/unison-core/src/Unison/Hash.hs b/unison-core/src/Unison/Hash.hs index f9fb8a6078..015d02caaa 100644 --- a/unison-core/src/Unison/Hash.hs +++ b/unison-core/src/Unison/Hash.hs @@ -2,123 +2,27 @@ {-# LANGUAGE OverloadedStrings #-} module Unison.Hash - ( Hash(Hash) - , toBytes - , base32Hex - , base32Hexs - , fromBase32Hex - , fromBytes - , fromByteString - , toByteString - , unsafeFromBase32Hex - , showBase32Hex - , validBase32HexChars - ) where - + ( Hash (Hash), + base32Hex, + fromBase32Hex, + Hash.toByteString, + validBase32HexChars, + ) +where + +import qualified U.Util.Base32Hex as Base32Hex +import U.Util.Hash (Hash (Hash)) +import qualified U.Util.Hash as Hash import Unison.Prelude -import Data.ByteString.Builder (doubleBE, word64BE, int64BE, toLazyByteString) -import qualified Data.ByteArray as BA - -import qualified Crypto.Hash as CH -import qualified Data.ByteString as B -import qualified Data.ByteString.Lazy as BL -import qualified Data.ByteString.Short as SBS - -import qualified Unison.Hashable as H -import qualified Codec.Binary.Base32Hex as Base32Hex -import qualified Data.Text as Text -import qualified Data.Set as Set - --- | Hash which uniquely identifies a Unison type or term -newtype Hash = Hash { toBytes :: SBS.ShortByteString } deriving (Eq,Ord,Generic) - -instance Show Hash where - show h = take 999 $ Text.unpack (base32Hex h) - -instance H.Hashable Hash where - tokens h = [H.Bytes (toByteString h)] - -fromByteString :: ByteString -> Hash -fromByteString = fromBytes - -toByteString :: Hash -> ByteString -toByteString = SBS.fromShort . toBytes - -instance H.Accumulate Hash where - accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit where - go :: CH.Context CH.SHA3_512 -> [H.Token Hash] -> CH.Context CH.SHA3_512 - go acc tokens = CH.hashUpdates acc (tokens >>= toBS) - toBS (H.Tag b) = [B.singleton b] - toBS (H.Bytes bs) = [encodeLength $ B.length bs, bs] - toBS (H.Int i) = BL.toChunks . toLazyByteString . int64BE $ i - toBS (H.Nat i) = BL.toChunks . toLazyByteString . word64BE $ i - toBS (H.Double d) = BL.toChunks . toLazyByteString . doubleBE $ d - toBS (H.Text txt) = - let tbytes = encodeUtf8 txt - in [encodeLength (B.length tbytes), tbytes] - toBS (H.Hashed h) = [toByteString h] - encodeLength :: Integral n => n -> B.ByteString - encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral - fromBytes = fromByteString - toBytes = toByteString - -- | Return the lowercase unpadded base32Hex encoding of this 'Hash'. -- Multibase prefix would be 'v', see https://github.com/multiformats/multibase base32Hex :: Hash -> Text -base32Hex (Hash h) - -- we're using an uppercase encoder that adds padding, so we drop the - -- padding and convert it to lowercase - = Text.toLower - . Text.dropWhileEnd (== '=') - . decodeUtf8 - . Base32Hex.encode - $ SBS.fromShort h - -validBase32HexChars :: Set Char -validBase32HexChars = Set.fromList $ ['0' .. '9'] ++ ['a' .. 'v'] +base32Hex = Base32Hex.toText . Hash.toBase32Hex -- | Produce a 'Hash' from a base32hex-encoded version of its binary representation fromBase32Hex :: Text -> Maybe Hash -fromBase32Hex txt = case Base32Hex.decode (encodeUtf8 $ Text.toUpper txt <> paddingChars) of - Left (_, _rem) -> Nothing - Right h -> pure $ Hash (SBS.toShort h) - where - -- The decoder we're using is a base32 uppercase decoder that expects padding, - -- so we provide it with the appropriate number of padding characters for the - -- expected hash length. - -- - -- The decoder requires 40 bit (8 5-bit characters) chunks, so if the number - -- of characters of the input is not a multiple of 8, we add '=' padding chars - -- until it is. - -- - -- See https://tools.ietf.org/html/rfc4648#page-8 - paddingChars :: Text - paddingChars = case Text.length txt `mod` 8 of - 0 -> "" - n -> Text.replicate (8 - n) "=" - - hashLength :: Int - hashLength = 512 - - _paddingChars :: Text - _paddingChars = case hashLength `mod` 40 of - 0 -> "" - 8 -> "======" - 16 -> "====" - 24 -> "===" - 32 -> "=" - i -> error $ "impossible hash length `mod` 40 not in {0,8,16,24,32}: " <> show i +fromBase32Hex = fmap Hash.fromBase32Hex . Base32Hex.fromText -base32Hexs :: Hash -> String -base32Hexs = Text.unpack . base32Hex - -unsafeFromBase32Hex :: Text -> Hash -unsafeFromBase32Hex txt = - fromMaybe (error $ "invalid base32Hex value: " ++ Text.unpack txt) $ fromBase32Hex txt - -fromBytes :: ByteString -> Hash -fromBytes = Hash . SBS.toShort - -showBase32Hex :: H.Hashable t => t -> String -showBase32Hex = base32Hexs . H.accumulate' +validBase32HexChars :: Set Char +validBase32HexChars = Base32Hex.validChars diff --git a/unison-core/src/Unison/HashQualified'.hs b/unison-core/src/Unison/HashQualified'.hs index fe472baa44..b05ed048c6 100644 --- a/unison-core/src/Unison/HashQualified'.hs +++ b/unison-core/src/Unison/HashQualified'.hs @@ -118,6 +118,13 @@ requalify hq r = case hq of NameOnly n -> fromNamedReferent n r HashQualified n _ -> fromNamedReferent n r +-- | Sort the list of names by length of segments: smaller number of segments is listed first. NameOnly < HashQualified +sortByLength :: [HashQualified Name] -> [HashQualified Name] +sortByLength = + sortOn \case + NameOnly name -> (length (Name.reverseSegments name), Nothing, Name.isAbsolute name) + HashQualified name hash -> (length (Name.reverseSegments name), Just hash, Name.isAbsolute name) + -- `HashQualified` is usually used for display, so we sort it alphabetically instance Name.Alphabetical n => Ord (HashQualified n) where compare (NameOnly n) (NameOnly n2) = Name.compareAlphabetical n n2 diff --git a/unison-core/src/Unison/HashQualified.hs b/unison-core/src/Unison/HashQualified.hs index c747623516..aac0f28243 100644 --- a/unison-core/src/Unison/HashQualified.hs +++ b/unison-core/src/Unison/HashQualified.hs @@ -50,10 +50,9 @@ toName = \case -- [.foo.bar, foo.bar] -> [foo.bar, .foo.bar] sortByLength :: [HashQualified Name] -> [HashQualified Name] sortByLength hs = sortOn f hs where - f (NameOnly n) = (countDots n, 0, Left n) - f (HashQualified n _h) = (countDots n, 1, Left n) + f (NameOnly n) = (length (Name.reverseSegments n), 0, Left n) + f (HashQualified n _h) = (length (Name.reverseSegments n), 1, Left n) f (HashOnly h) = (maxBound, 0, Right h) - countDots n = Text.count "." (Text.dropEnd 1 (Name.toText n)) hasName, hasHash :: HashQualified Name -> Bool hasName = isJust . toName diff --git a/unison-core/src/Unison/Hashable.hs b/unison-core/src/Unison/Hashable.hs index b6ab2248d6..c73b482622 100644 --- a/unison-core/src/Unison/Hashable.hs +++ b/unison-core/src/Unison/Hashable.hs @@ -2,14 +2,22 @@ module Unison.Hashable where import Unison.Prelude +import qualified Crypto.Hash as CH +import qualified Data.ByteArray as BA +import qualified Data.ByteString as B +import Data.ByteString.Builder (doubleBE, int64BE, toLazyByteString, word64BE) +import qualified Data.ByteString.Lazy as BL import qualified Data.Map as Map import qualified Data.Set as Set +import qualified U.Util.Hash as H import Unison.Util.Relation (Relation) -import Unison.Util.Relation3 (Relation3) -import Unison.Util.Relation4 (Relation4) import qualified Unison.Util.Relation as Relation +import Unison.Util.Relation3 (Relation3) import qualified Unison.Util.Relation3 as Relation3 +import Unison.Util.Relation4 (Relation4) import qualified Unison.Util.Relation4 as Relation4 +import U.Util.Hash (Hash) +import qualified U.Util.Hash as Hash data Token h = Tag !Word8 @@ -108,3 +116,24 @@ instance Hashable Int64 where instance Hashable Bool where tokens b = [Tag . fromIntegral $ fromEnum b] + +instance Hashable Hash where + tokens h = [Bytes (Hash.toByteString h)] + +instance Accumulate Hash where + accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit where + go :: CH.Context CH.SHA3_512 -> [Token Hash] -> CH.Context CH.SHA3_512 + go acc tokens = CH.hashUpdates acc (tokens >>= toBS) + toBS (Tag b) = [B.singleton b] + toBS (Bytes bs) = [encodeLength $ B.length bs, bs] + toBS (Int i) = BL.toChunks . toLazyByteString . int64BE $ i + toBS (Nat i) = BL.toChunks . toLazyByteString . word64BE $ i + toBS (Double d) = BL.toChunks . toLazyByteString . doubleBE $ d + toBS (Text txt) = + let tbytes = encodeUtf8 txt + in [encodeLength (B.length tbytes), tbytes] + toBS (Hashed h) = [H.toByteString h] + encodeLength :: Integral n => n -> B.ByteString + encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral + fromBytes = H.fromByteString + toBytes = H.toByteString diff --git a/unison-core/src/Unison/NamesWithHistory.hs b/unison-core/src/Unison/NamesWithHistory.hs index 9329c217e3..206622aee6 100644 --- a/unison-core/src/Unison/NamesWithHistory.hs +++ b/unison-core/src/Unison/NamesWithHistory.hs @@ -232,8 +232,8 @@ termName length r NamesWithHistory{..} = where hq n = HQ'.take length (HQ'.fromNamedReferent n r) isConflicted n = R.manyDom n (Names.terms currentNames) -suffixedTypeName :: Int -> Reference -> NamesWithHistory -> [HQ.HashQualified Name] -suffixedTermName :: Int -> Referent -> NamesWithHistory -> [HQ.HashQualified Name] +suffixedTypeName :: Int -> Reference -> NamesWithHistory -> [HQ'.HashQualified Name] +suffixedTermName :: Int -> Referent -> NamesWithHistory -> [HQ'.HashQualified Name] (suffixedTermName,suffixedTypeName) = ( suffixedName termName (Names.terms . currentNames) HQ'.fromNamedReferent , suffixedName typeName (Names.types . currentNames) HQ'.fromNamedReference ) @@ -241,19 +241,19 @@ suffixedTermName :: Int -> Referent -> NamesWithHistory -> [HQ.HashQualified Nam suffixedName fallback getRel hq' length r ns@(getRel -> rel) = if R.memberRan r rel then go $ toList (R.lookupRan r rel) - else sort $ map Name.convert $ Set.toList (fallback length r ns) + else sort $ Set.toList (fallback length r ns) where -- Orders names, using these criteria, in this order: -- 1. NameOnly comes before HashQualified, -- 2. Shorter names (in terms of segment count) come before longer ones -- 3. If same on attributes 1 and 2, compare alphabetically - go :: [Name] -> [HashQualified Name] + go :: [Name] -> [HQ'.HashQualified Name] go fqns = map (view _4) . sort $ map f fqns where f fqn = let n' = Name.shortestUniqueSuffix fqn r rel isHQ'd = R.manyDom fqn rel -- it is conflicted hq n = HQ'.take length (hq' n r) - hqn = Name.convert $ if isHQ'd then hq n' else HQ'.fromName n' + hqn = if isHQ'd then hq n' else HQ'.fromName n' in (isHQ'd, Name.countSegments fqn, Name.isAbsolute n', hqn) -- Set HashQualified -> Branch m -> Action' m v Names diff --git a/unison-core/unison-core1.cabal b/unison-core/unison-core1.cabal index 623d3e7048..c411ccaf19 100644 --- a/unison-core/unison-core1.cabal +++ b/unison-core/unison-core1.cabal @@ -99,10 +99,10 @@ library , prelude-extras , rfc5051 , safe - , sandi , text , transformers , unison-prelude + , unison-util , unison-util-relation , util , vector diff --git a/unison-src/transcripts/alias-many.output.md b/unison-src/transcripts/alias-many.output.md index 30f60cd2d2..d89ec68d6e 100644 --- a/unison-src/transcripts/alias-many.output.md +++ b/unison-src/transcripts/alias-many.output.md @@ -201,270 +201,271 @@ Let's try it! 171. io2.IO.fileExists.impl : Text ->{IO} Either Failure Boolean 172. io2.IO.forkComp : '{IO} a ->{IO} ThreadId - 173. io2.IO.getBuffering.impl : Handle + 173. io2.IO.getArgs.impl : '{IO} Either Failure [Text] + 174. io2.IO.getBuffering.impl : Handle ->{IO} Either Failure BufferMode - 174. io2.IO.getBytes.impl : Handle + 175. io2.IO.getBytes.impl : Handle -> Nat ->{IO} Either Failure Bytes - 175. io2.IO.getCurrentDirectory.impl : '{IO} Either + 176. io2.IO.getCurrentDirectory.impl : '{IO} Either Failure Text - 176. io2.IO.getEnv.impl : Text ->{IO} Either Failure Text - 177. io2.IO.getFileSize.impl : Text ->{IO} Either Failure Nat - 178. io2.IO.getFileTimestamp.impl : Text + 177. io2.IO.getEnv.impl : Text ->{IO} Either Failure Text + 178. io2.IO.getFileSize.impl : Text ->{IO} Either Failure Nat + 179. io2.IO.getFileTimestamp.impl : Text ->{IO} Either Failure Nat - 179. io2.IO.getLine.impl : Handle ->{IO} Either Failure Text - 180. io2.IO.getTempDirectory.impl : '{IO} Either Failure Text - 181. io2.IO.handlePosition.impl : Handle + 180. io2.IO.getLine.impl : Handle ->{IO} Either Failure Text + 181. io2.IO.getTempDirectory.impl : '{IO} Either Failure Text + 182. io2.IO.handlePosition.impl : Handle ->{IO} Either Failure Nat - 182. io2.IO.isDirectory.impl : Text + 183. io2.IO.isDirectory.impl : Text ->{IO} Either Failure Boolean - 183. io2.IO.isFileEOF.impl : Handle + 184. io2.IO.isFileEOF.impl : Handle ->{IO} Either Failure Boolean - 184. io2.IO.isFileOpen.impl : Handle + 185. io2.IO.isFileOpen.impl : Handle ->{IO} Either Failure Boolean - 185. io2.IO.isSeekable.impl : Handle + 186. io2.IO.isSeekable.impl : Handle ->{IO} Either Failure Boolean - 186. io2.IO.kill.impl : ThreadId ->{IO} Either Failure () - 187. io2.IO.listen.impl : Socket ->{IO} Either Failure () - 188. io2.IO.openFile.impl : Text + 187. io2.IO.kill.impl : ThreadId ->{IO} Either Failure () + 188. io2.IO.listen.impl : Socket ->{IO} Either Failure () + 189. io2.IO.openFile.impl : Text -> FileMode ->{IO} Either Failure Handle - 189. io2.IO.putBytes.impl : Handle + 190. io2.IO.putBytes.impl : Handle -> Bytes ->{IO} Either Failure () - 190. io2.IO.ref : a ->{IO} Ref {IO} a - 191. io2.IO.removeDirectory.impl : Text + 191. io2.IO.ref : a ->{IO} Ref {IO} a + 192. io2.IO.removeDirectory.impl : Text ->{IO} Either Failure () - 192. io2.IO.removeFile.impl : Text ->{IO} Either Failure () - 193. io2.IO.renameDirectory.impl : Text + 193. io2.IO.removeFile.impl : Text ->{IO} Either Failure () + 194. io2.IO.renameDirectory.impl : Text -> Text ->{IO} Either Failure () - 194. io2.IO.renameFile.impl : Text + 195. io2.IO.renameFile.impl : Text -> Text ->{IO} Either Failure () - 195. io2.IO.seekHandle.impl : Handle + 196. io2.IO.seekHandle.impl : Handle -> SeekMode -> Int ->{IO} Either Failure () - 196. io2.IO.serverSocket.impl : Optional Text + 197. io2.IO.serverSocket.impl : Optional Text -> Text ->{IO} Either Failure Socket - 197. io2.IO.setBuffering.impl : Handle + 198. io2.IO.setBuffering.impl : Handle -> BufferMode ->{IO} Either Failure () - 198. io2.IO.setCurrentDirectory.impl : Text + 199. io2.IO.setCurrentDirectory.impl : Text ->{IO} Either Failure () - 199. io2.IO.socketAccept.impl : Socket + 200. io2.IO.socketAccept.impl : Socket ->{IO} Either Failure Socket - 200. io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat - 201. io2.IO.socketReceive.impl : Socket + 201. io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat + 202. io2.IO.socketReceive.impl : Socket -> Nat ->{IO} Either Failure Bytes - 202. io2.IO.socketSend.impl : Socket + 203. io2.IO.socketSend.impl : Socket -> Bytes ->{IO} Either Failure () - 203. io2.IO.stdHandle : StdHandle -> Handle - 204. io2.IO.systemTime.impl : '{IO} Either Failure Nat - 205. io2.IO.systemTimeMicroseconds : '{IO} Int - 206. unique type io2.IOError - 207. io2.IOError.AlreadyExists : IOError - 208. io2.IOError.EOF : IOError - 209. io2.IOError.IllegalOperation : IOError - 210. io2.IOError.NoSuchThing : IOError - 211. io2.IOError.PermissionDenied : IOError - 212. io2.IOError.ResourceBusy : IOError - 213. io2.IOError.ResourceExhausted : IOError - 214. io2.IOError.UserError : IOError - 215. unique type io2.IOFailure - 216. builtin type io2.MVar - 217. io2.MVar.isEmpty : MVar a ->{IO} Boolean - 218. io2.MVar.new : a ->{IO} MVar a - 219. io2.MVar.newEmpty : '{IO} MVar a - 220. io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure () - 221. io2.MVar.read.impl : MVar a ->{IO} Either Failure a - 222. io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a - 223. io2.MVar.take.impl : MVar a ->{IO} Either Failure a - 224. io2.MVar.tryPut.impl : MVar a + 204. io2.IO.stdHandle : StdHandle -> Handle + 205. io2.IO.systemTime.impl : '{IO} Either Failure Nat + 206. io2.IO.systemTimeMicroseconds : '{IO} Int + 207. unique type io2.IOError + 208. io2.IOError.AlreadyExists : IOError + 209. io2.IOError.EOF : IOError + 210. io2.IOError.IllegalOperation : IOError + 211. io2.IOError.NoSuchThing : IOError + 212. io2.IOError.PermissionDenied : IOError + 213. io2.IOError.ResourceBusy : IOError + 214. io2.IOError.ResourceExhausted : IOError + 215. io2.IOError.UserError : IOError + 216. unique type io2.IOFailure + 217. builtin type io2.MVar + 218. io2.MVar.isEmpty : MVar a ->{IO} Boolean + 219. io2.MVar.new : a ->{IO} MVar a + 220. io2.MVar.newEmpty : '{IO} MVar a + 221. io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure () + 222. io2.MVar.read.impl : MVar a ->{IO} Either Failure a + 223. io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a + 224. io2.MVar.take.impl : MVar a ->{IO} Either Failure a + 225. io2.MVar.tryPut.impl : MVar a -> a ->{IO} Either Failure Boolean - 225. io2.MVar.tryRead.impl : MVar a + 226. io2.MVar.tryRead.impl : MVar a ->{IO} Either Failure (Optional a) - 226. io2.MVar.tryTake : MVar a ->{IO} Optional a - 227. unique type io2.SeekMode - 228. io2.SeekMode.AbsoluteSeek : SeekMode - 229. io2.SeekMode.RelativeSeek : SeekMode - 230. io2.SeekMode.SeekFromEnd : SeekMode - 231. builtin type io2.Socket - 232. unique type io2.StdHandle - 233. io2.StdHandle.StdErr : StdHandle - 234. io2.StdHandle.StdIn : StdHandle - 235. io2.StdHandle.StdOut : StdHandle - 236. builtin type io2.STM - 237. io2.STM.atomically : '{STM} a ->{IO} a - 238. io2.STM.retry : '{STM} a - 239. builtin type io2.ThreadId - 240. builtin type io2.Tls - 241. builtin type io2.Tls.Cipher - 242. builtin type io2.Tls.ClientConfig - 243. io2.Tls.ClientConfig.certificates.set : [SignedCert] + 227. io2.MVar.tryTake : MVar a ->{IO} Optional a + 228. unique type io2.SeekMode + 229. io2.SeekMode.AbsoluteSeek : SeekMode + 230. io2.SeekMode.RelativeSeek : SeekMode + 231. io2.SeekMode.SeekFromEnd : SeekMode + 232. builtin type io2.Socket + 233. unique type io2.StdHandle + 234. io2.StdHandle.StdErr : StdHandle + 235. io2.StdHandle.StdIn : StdHandle + 236. io2.StdHandle.StdOut : StdHandle + 237. builtin type io2.STM + 238. io2.STM.atomically : '{STM} a ->{IO} a + 239. io2.STM.retry : '{STM} a + 240. builtin type io2.ThreadId + 241. builtin type io2.Tls + 242. builtin type io2.Tls.Cipher + 243. builtin type io2.Tls.ClientConfig + 244. io2.Tls.ClientConfig.certificates.set : [SignedCert] -> ClientConfig -> ClientConfig - 244. io2.TLS.ClientConfig.ciphers.set : [Cipher] + 245. io2.TLS.ClientConfig.ciphers.set : [Cipher] -> ClientConfig -> ClientConfig - 245. io2.Tls.ClientConfig.default : Text + 246. io2.Tls.ClientConfig.default : Text -> Bytes -> ClientConfig - 246. io2.Tls.ClientConfig.versions.set : [Version] + 247. io2.Tls.ClientConfig.versions.set : [Version] -> ClientConfig -> ClientConfig - 247. io2.Tls.decodeCert.impl : Bytes + 248. io2.Tls.decodeCert.impl : Bytes -> Either Failure SignedCert - 248. io2.Tls.decodePrivateKey : Bytes -> [PrivateKey] - 249. io2.Tls.encodeCert : SignedCert -> Bytes - 250. io2.Tls.encodePrivateKey : PrivateKey -> Bytes - 251. io2.Tls.handshake.impl : Tls ->{IO} Either Failure () - 252. io2.Tls.newClient.impl : ClientConfig + 249. io2.Tls.decodePrivateKey : Bytes -> [PrivateKey] + 250. io2.Tls.encodeCert : SignedCert -> Bytes + 251. io2.Tls.encodePrivateKey : PrivateKey -> Bytes + 252. io2.Tls.handshake.impl : Tls ->{IO} Either Failure () + 253. io2.Tls.newClient.impl : ClientConfig -> Socket ->{IO} Either Failure Tls - 253. io2.Tls.newServer.impl : ServerConfig + 254. io2.Tls.newServer.impl : ServerConfig -> Socket ->{IO} Either Failure Tls - 254. builtin type io2.Tls.PrivateKey - 255. io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes - 256. io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure () - 257. builtin type io2.Tls.ServerConfig - 258. io2.Tls.ServerConfig.certificates.set : [SignedCert] + 255. builtin type io2.Tls.PrivateKey + 256. io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes + 257. io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure () + 258. builtin type io2.Tls.ServerConfig + 259. io2.Tls.ServerConfig.certificates.set : [SignedCert] -> ServerConfig -> ServerConfig - 259. io2.Tls.ServerConfig.ciphers.set : [Cipher] + 260. io2.Tls.ServerConfig.ciphers.set : [Cipher] -> ServerConfig -> ServerConfig - 260. io2.Tls.ServerConfig.default : [SignedCert] + 261. io2.Tls.ServerConfig.default : [SignedCert] -> PrivateKey -> ServerConfig - 261. io2.Tls.ServerConfig.versions.set : [Version] + 262. io2.Tls.ServerConfig.versions.set : [Version] -> ServerConfig -> ServerConfig - 262. builtin type io2.Tls.SignedCert - 263. io2.Tls.terminate.impl : Tls ->{IO} Either Failure () - 264. builtin type io2.Tls.Version - 265. unique type io2.TlsFailure - 266. builtin type io2.TVar - 267. io2.TVar.new : a ->{STM} TVar a - 268. io2.TVar.newIO : a ->{IO} TVar a - 269. io2.TVar.read : TVar a ->{STM} a - 270. io2.TVar.readIO : TVar a ->{IO} a - 271. io2.TVar.swap : TVar a -> a ->{STM} a - 272. io2.TVar.write : TVar a -> a ->{STM} () - 273. unique type IsPropagated - 274. IsPropagated.IsPropagated : IsPropagated - 275. unique type IsTest - 276. IsTest.IsTest : IsTest - 277. unique type Link - 278. builtin type Link.Term - 279. Link.Term : Term -> Link - 280. Link.Term.toText : Term -> Text - 281. builtin type Link.Type - 282. Link.Type : Type -> Link - 283. builtin type List - 284. List.++ : [a] -> [a] -> [a] - 285. List.+: : a -> [a] -> [a] - 286. List.:+ : [a] -> a -> [a] - 287. List.at : Nat -> [a] -> Optional a - 288. List.cons : a -> [a] -> [a] - 289. List.drop : Nat -> [a] -> [a] - 290. List.empty : [a] - 291. List.size : [a] -> Nat - 292. List.snoc : [a] -> a -> [a] - 293. List.take : Nat -> [a] -> [a] - 294. metadata.isPropagated : IsPropagated - 295. metadata.isTest : IsTest - 296. builtin type Nat - 297. Nat.* : Nat -> Nat -> Nat - 298. Nat.+ : Nat -> Nat -> Nat - 299. Nat./ : Nat -> Nat -> Nat - 300. Nat.and : Nat -> Nat -> Nat - 301. Nat.complement : Nat -> Nat - 302. Nat.drop : Nat -> Nat -> Nat - 303. Nat.eq : Nat -> Nat -> Boolean - 304. Nat.fromText : Text -> Optional Nat - 305. Nat.gt : Nat -> Nat -> Boolean - 306. Nat.gteq : Nat -> Nat -> Boolean - 307. Nat.increment : Nat -> Nat - 308. Nat.isEven : Nat -> Boolean - 309. Nat.isOdd : Nat -> Boolean - 310. Nat.leadingZeros : Nat -> Nat - 311. Nat.lt : Nat -> Nat -> Boolean - 312. Nat.lteq : Nat -> Nat -> Boolean - 313. Nat.mod : Nat -> Nat -> Nat - 314. Nat.or : Nat -> Nat -> Nat - 315. Nat.popCount : Nat -> Nat - 316. Nat.pow : Nat -> Nat -> Nat - 317. Nat.shiftLeft : Nat -> Nat -> Nat - 318. Nat.shiftRight : Nat -> Nat -> Nat - 319. Nat.sub : Nat -> Nat -> Int - 320. Nat.toFloat : Nat -> Float - 321. Nat.toInt : Nat -> Int - 322. Nat.toText : Nat -> Text - 323. Nat.trailingZeros : Nat -> Nat - 324. Nat.xor : Nat -> Nat -> Nat - 325. structural type Optional a - 326. Optional.None : Optional a - 327. Optional.Some : a -> Optional a - 328. builtin type Ref - 329. Ref.read : Ref g a ->{g} a - 330. Ref.write : Ref g a -> a ->{g} () - 331. builtin type Request - 332. builtin type Scope - 333. Scope.ref : a ->{Scope s} Ref {Scope s} a - 334. Scope.run : (∀ s. '{g, Scope s} r) ->{g} r - 335. structural type SeqView a b - 336. SeqView.VElem : a -> b -> SeqView a b - 337. SeqView.VEmpty : SeqView a b - 338. unique type Test.Result - 339. Test.Result.Fail : Text -> Result - 340. Test.Result.Ok : Text -> Result - 341. builtin type Text - 342. Text.!= : Text -> Text -> Boolean - 343. Text.++ : Text -> Text -> Text - 344. Text.drop : Nat -> Text -> Text - 345. Text.empty : Text - 346. Text.eq : Text -> Text -> Boolean - 347. Text.fromCharList : [Char] -> Text - 348. Text.fromUtf8.impl : Bytes -> Either Failure Text - 349. Text.gt : Text -> Text -> Boolean - 350. Text.gteq : Text -> Text -> Boolean - 351. Text.lt : Text -> Text -> Boolean - 352. Text.lteq : Text -> Text -> Boolean - 353. Text.repeat : Nat -> Text -> Text - 354. Text.size : Text -> Nat - 355. Text.take : Nat -> Text -> Text - 356. Text.toCharList : Text -> [Char] - 357. Text.toUtf8 : Text -> Bytes - 358. Text.uncons : Text -> Optional (Char, Text) - 359. Text.unsnoc : Text -> Optional (Text, Char) - 360. todo : a -> b - 361. structural type Tuple a b - 362. Tuple.Cons : a -> b -> Tuple a b - 363. structural type Unit - 364. Unit.Unit : () - 365. Universal.< : a -> a -> Boolean - 366. Universal.<= : a -> a -> Boolean - 367. Universal.== : a -> a -> Boolean - 368. Universal.> : a -> a -> Boolean - 369. Universal.>= : a -> a -> Boolean - 370. Universal.compare : a -> a -> Int - 371. unsafe.coerceAbilities : (a ->{e1} b) -> a ->{e2} b - 372. builtin type Value - 373. Value.dependencies : Value -> [Term] - 374. Value.deserialize : Bytes -> Either Text Value - 375. Value.load : Value ->{IO} Either [Term] a - 376. Value.serialize : Value -> Bytes - 377. Value.value : a -> Value + 263. builtin type io2.Tls.SignedCert + 264. io2.Tls.terminate.impl : Tls ->{IO} Either Failure () + 265. builtin type io2.Tls.Version + 266. unique type io2.TlsFailure + 267. builtin type io2.TVar + 268. io2.TVar.new : a ->{STM} TVar a + 269. io2.TVar.newIO : a ->{IO} TVar a + 270. io2.TVar.read : TVar a ->{STM} a + 271. io2.TVar.readIO : TVar a ->{IO} a + 272. io2.TVar.swap : TVar a -> a ->{STM} a + 273. io2.TVar.write : TVar a -> a ->{STM} () + 274. unique type IsPropagated + 275. IsPropagated.IsPropagated : IsPropagated + 276. unique type IsTest + 277. IsTest.IsTest : IsTest + 278. unique type Link + 279. builtin type Link.Term + 280. Link.Term : Term -> Link + 281. Link.Term.toText : Term -> Text + 282. builtin type Link.Type + 283. Link.Type : Type -> Link + 284. builtin type List + 285. List.++ : [a] -> [a] -> [a] + 286. List.+: : a -> [a] -> [a] + 287. List.:+ : [a] -> a -> [a] + 288. List.at : Nat -> [a] -> Optional a + 289. List.cons : a -> [a] -> [a] + 290. List.drop : Nat -> [a] -> [a] + 291. List.empty : [a] + 292. List.size : [a] -> Nat + 293. List.snoc : [a] -> a -> [a] + 294. List.take : Nat -> [a] -> [a] + 295. metadata.isPropagated : IsPropagated + 296. metadata.isTest : IsTest + 297. builtin type Nat + 298. Nat.* : Nat -> Nat -> Nat + 299. Nat.+ : Nat -> Nat -> Nat + 300. Nat./ : Nat -> Nat -> Nat + 301. Nat.and : Nat -> Nat -> Nat + 302. Nat.complement : Nat -> Nat + 303. Nat.drop : Nat -> Nat -> Nat + 304. Nat.eq : Nat -> Nat -> Boolean + 305. Nat.fromText : Text -> Optional Nat + 306. Nat.gt : Nat -> Nat -> Boolean + 307. Nat.gteq : Nat -> Nat -> Boolean + 308. Nat.increment : Nat -> Nat + 309. Nat.isEven : Nat -> Boolean + 310. Nat.isOdd : Nat -> Boolean + 311. Nat.leadingZeros : Nat -> Nat + 312. Nat.lt : Nat -> Nat -> Boolean + 313. Nat.lteq : Nat -> Nat -> Boolean + 314. Nat.mod : Nat -> Nat -> Nat + 315. Nat.or : Nat -> Nat -> Nat + 316. Nat.popCount : Nat -> Nat + 317. Nat.pow : Nat -> Nat -> Nat + 318. Nat.shiftLeft : Nat -> Nat -> Nat + 319. Nat.shiftRight : Nat -> Nat -> Nat + 320. Nat.sub : Nat -> Nat -> Int + 321. Nat.toFloat : Nat -> Float + 322. Nat.toInt : Nat -> Int + 323. Nat.toText : Nat -> Text + 324. Nat.trailingZeros : Nat -> Nat + 325. Nat.xor : Nat -> Nat -> Nat + 326. structural type Optional a + 327. Optional.None : Optional a + 328. Optional.Some : a -> Optional a + 329. builtin type Ref + 330. Ref.read : Ref g a ->{g} a + 331. Ref.write : Ref g a -> a ->{g} () + 332. builtin type Request + 333. builtin type Scope + 334. Scope.ref : a ->{Scope s} Ref {Scope s} a + 335. Scope.run : (∀ s. '{g, Scope s} r) ->{g} r + 336. structural type SeqView a b + 337. SeqView.VElem : a -> b -> SeqView a b + 338. SeqView.VEmpty : SeqView a b + 339. unique type Test.Result + 340. Test.Result.Fail : Text -> Result + 341. Test.Result.Ok : Text -> Result + 342. builtin type Text + 343. Text.!= : Text -> Text -> Boolean + 344. Text.++ : Text -> Text -> Text + 345. Text.drop : Nat -> Text -> Text + 346. Text.empty : Text + 347. Text.eq : Text -> Text -> Boolean + 348. Text.fromCharList : [Char] -> Text + 349. Text.fromUtf8.impl : Bytes -> Either Failure Text + 350. Text.gt : Text -> Text -> Boolean + 351. Text.gteq : Text -> Text -> Boolean + 352. Text.lt : Text -> Text -> Boolean + 353. Text.lteq : Text -> Text -> Boolean + 354. Text.repeat : Nat -> Text -> Text + 355. Text.size : Text -> Nat + 356. Text.take : Nat -> Text -> Text + 357. Text.toCharList : Text -> [Char] + 358. Text.toUtf8 : Text -> Bytes + 359. Text.uncons : Text -> Optional (Char, Text) + 360. Text.unsnoc : Text -> Optional (Text, Char) + 361. todo : a -> b + 362. structural type Tuple a b + 363. Tuple.Cons : a -> b -> Tuple a b + 364. structural type Unit + 365. Unit.Unit : () + 366. Universal.< : a -> a -> Boolean + 367. Universal.<= : a -> a -> Boolean + 368. Universal.== : a -> a -> Boolean + 369. Universal.> : a -> a -> Boolean + 370. Universal.>= : a -> a -> Boolean + 371. Universal.compare : a -> a -> Int + 372. unsafe.coerceAbilities : (a ->{e1} b) -> a ->{e2} b + 373. builtin type Value + 374. Value.dependencies : Value -> [Term] + 375. Value.deserialize : Bytes -> Either Text Value + 376. Value.load : Value ->{IO} Either [Term] a + 377. Value.serialize : Value -> Bytes + 378. Value.value : a -> Value .builtin> alias.many 94-104 .mylib diff --git a/unison-src/transcripts/builtins-merge.output.md b/unison-src/transcripts/builtins-merge.output.md index 5b02fc736a..0770245f47 100644 --- a/unison-src/transcripts/builtins-merge.output.md +++ b/unison-src/transcripts/builtins-merge.output.md @@ -61,7 +61,7 @@ The `builtins.merge` command adds the known builtins to a `builtin` subnamespace 50. Value/ (5 definitions) 51. bug (a -> b) 52. crypto/ (12 definitions) - 53. io2/ (123 definitions) + 53. io2/ (124 definitions) 54. metadata/ (2 definitions) 55. todo (a -> b) 56. unsafe/ (1 definition) diff --git a/unison-src/transcripts/diff.output.md b/unison-src/transcripts/diff.output.md index 9cec95d7f2..2a0caca921 100644 --- a/unison-src/transcripts/diff.output.md +++ b/unison-src/transcripts/diff.output.md @@ -153,19 +153,9 @@ Here's what we've done so far: ```ucm .> diff.namespace nothing ns1 - Added definitions: + ⚠️ - 1. structural type A a - 2. structural ability X a1 a2 - 3. A.A : Nat -> A a - 4. X.x : {X a1 a2} Nat - 5. b : Nat - 6. bdependent : Nat - 7. c : Nat - 8. ┌ fromJust : Nat (+1 metadata) - 9. └ fromJust' : Nat (+1 metadata) - 10. ┌ helloWorld : Text - 11. └ helloWorld2 : Text + The namespace .nothing is empty. Was there a typo? .> diff.namespace ns1 ns2 diff --git a/unison-src/transcripts/emptyCodebase.output.md b/unison-src/transcripts/emptyCodebase.output.md index b286008b8d..d03bc46bd8 100644 --- a/unison-src/transcripts/emptyCodebase.output.md +++ b/unison-src/transcripts/emptyCodebase.output.md @@ -23,7 +23,7 @@ Technically, the definitions all exist, but they have no names. `builtins.merge` .foo> ls - 1. builtin/ (377 definitions) + 1. builtin/ (378 definitions) ``` And for a limited time, you can get even more builtin goodies: @@ -35,7 +35,7 @@ And for a limited time, you can get even more builtin goodies: .foo> ls - 1. builtin/ (545 definitions) + 1. builtin/ (546 definitions) ``` More typically, you'd start out by pulling `base. diff --git a/unison-src/transcripts/fix2567.md b/unison-src/transcripts/fix2567.md new file mode 100644 index 0000000000..bcadf240cc --- /dev/null +++ b/unison-src/transcripts/fix2567.md @@ -0,0 +1,18 @@ +Regression test for https://github.com/unisonweb/unison/issues/2567 + +```ucm:hide +.> alias.type ##Nat .foo.bar.Nat +``` + +```unison:hide +structural ability Foo where + blah : Nat -> Nat + zing.woot : Nat -> (Nat,Nat) -> Nat +``` + +```ucm +.some.subnamespace> add +.some.subnamespace> alias.term Foo.zing.woot Foo.woot +.> view Foo +.somewhere> view Foo +``` \ No newline at end of file diff --git a/unison-src/transcripts/fix2567.output.md b/unison-src/transcripts/fix2567.output.md new file mode 100644 index 0000000000..507556154b --- /dev/null +++ b/unison-src/transcripts/fix2567.output.md @@ -0,0 +1,36 @@ +Regression test for https://github.com/unisonweb/unison/issues/2567 + +```unison +structural ability Foo where + blah : Nat -> Nat + zing.woot : Nat -> (Nat,Nat) -> Nat +``` + +```ucm + ☝️ The namespace .some.subnamespace is empty. + +.some.subnamespace> add + + ⍟ I've added these definitions: + + structural ability Foo + +.some.subnamespace> alias.term Foo.zing.woot Foo.woot + + Done. + +.> view Foo + + structural ability some.subnamespace.Foo where + woot : Nat -> (Nat, Nat) ->{some.subnamespace.Foo} Nat + blah : Nat ->{some.subnamespace.Foo} Nat + + ☝️ The namespace .somewhere is empty. + +.somewhere> view Foo + + structural ability .some.subnamespace.Foo where + woot : Nat -> (Nat, Nat) ->{.some.subnamespace.Foo} Nat + blah : Nat ->{.some.subnamespace.Foo} Nat + +``` diff --git a/unison-src/transcripts/globbing.md b/unison-src/transcripts/globbing.md index 3a919323bd..599d72ea4a 100644 --- a/unison-src/transcripts/globbing.md +++ b/unison-src/transcripts/globbing.md @@ -15,12 +15,13 @@ Add some definitions which we can match over: ```unison:hide convertToThing = 1 convertFromThing = 2 +otherTerm = 3 -- Nested definitions -nested.toList = 3 -nested.toMap = 4 -othernest.toList = 5 -othernest.toMap = 6 +nested.toList = 4 +nested.toMap = 5 +othernest.toList = 6 +othernest.toMap = 7 ``` ```ucm:hide @@ -52,7 +53,7 @@ You may have up to one glob per name segment. Globbing only expands to the appropriate argument type. E.g. `view` should not see glob expansions for namespaces. -This should expand to the empty argument and silently succeed. +This should expand to only the otherTerm. ```ucm .> view other? diff --git a/unison-src/transcripts/globbing.output.md b/unison-src/transcripts/globbing.output.md index b9ad5dbb75..2021a94ab4 100644 --- a/unison-src/transcripts/globbing.output.md +++ b/unison-src/transcripts/globbing.output.md @@ -15,12 +15,13 @@ Add some definitions which we can match over: ```unison convertToThing = 1 convertFromThing = 2 +otherTerm = 3 -- Nested definitions -nested.toList = 3 -nested.toMap = 4 -othernest.toList = 5 -othernest.toMap = 6 +nested.toList = 4 +nested.toMap = 5 +othernest.toList = 6 +othernest.toMap = 7 ``` Globbing as a prefix, infix, or suffix wildcard. @@ -57,18 +58,18 @@ Globbing can occur in any name segment. .> view ?.toList nested.toList : ##Nat - nested.toList = 3 + nested.toList = 4 othernest.toList : ##Nat - othernest.toList = 5 + othernest.toList = 6 .> view nested.to? nested.toList : ##Nat - nested.toList = 3 + nested.toList = 4 nested.toMap : ##Nat - nested.toMap = 4 + nested.toMap = 5 ``` You may have up to one glob per name segment. @@ -77,26 +78,29 @@ You may have up to one glob per name segment. .> view ?.to? nested.toList : ##Nat - nested.toList = 3 + nested.toList = 4 nested.toMap : ##Nat - nested.toMap = 4 + nested.toMap = 5 othernest.toList : ##Nat - othernest.toList = 5 + othernest.toList = 6 othernest.toMap : ##Nat - othernest.toMap = 6 + othernest.toMap = 7 ``` Globbing only expands to the appropriate argument type. E.g. `view` should not see glob expansions for namespaces. -This should expand to the empty argument and silently succeed. +This should expand to only the otherTerm. ```ucm .> view other? + otherTerm : ##Nat + otherTerm = 3 + ``` Globbing should work from within a namespace with both absolute and relative patterns. @@ -104,17 +108,17 @@ Globbing should work from within a namespace with both absolute and relative pat .nested> view .othernest.to? .othernest.toList : ##Nat - .othernest.toList = 5 + .othernest.toList = 6 .othernest.toMap : ##Nat - .othernest.toMap = 6 + .othernest.toMap = 7 .nested> view to? toList : ##Nat - toList = 3 + toList = 4 toMap : ##Nat - toMap = 4 + toMap = 5 ``` diff --git a/unison-src/transcripts/io.md b/unison-src/transcripts/io.md index 3284aebf04..65170c1282 100644 --- a/unison-src/transcripts/io.md +++ b/unison-src/transcripts/io.md @@ -271,3 +271,63 @@ testHomeEnvVar _ = .> add .> io.test testHomeEnvVar ``` + +### Read command line args + +`runMeWithNoArgs`, `runMeWithOneArg`, and `runMeWithTwoArgs` raise exceptions +unless they called with the right number of arguments. + +```unison:hide +testGetArgs.fail : Text -> Failure +testGetArgs.fail descr = Failure (typeLink IOFailure) descr !Any + +testGetArgs.runMeWithNoArgs : '{io2.IO, Exception} () +testGetArgs.runMeWithNoArgs = 'let + args = reraise !getArgs.impl + match args with + [] -> printLine "called with no args" + _ -> raise (fail "called with args") + +testGetArgs.runMeWithOneArg : '{io2.IO, Exception} () +testGetArgs.runMeWithOneArg = 'let + args = reraise !getArgs.impl + match args with + [] -> raise (fail "called with no args") + [_] -> printLine "called with one arg" + _ -> raise (fail "called with too many args") + +testGetArgs.runMeWithTwoArgs : '{io2.IO, Exception} () +testGetArgs.runMeWithTwoArgs = 'let + args = reraise !getArgs.impl + match args with + [] -> raise (fail "called with no args") + [_] -> raise (fail "called with one arg") + [_, _] -> printLine "called with two args" + _ -> raise (fail "called with too many args") +``` + +Test that they can be run with the right number of args. +```ucm +.> add +.> cd testGetArgs +.> run runMeWithNoArgs +.> run runMeWithOneArg foo +.> run runMeWithTwoArgs foo bar +``` + +Calling our examples with the wrong number of args will error. + +```ucm:error +.> run runMeWithNoArgs foo +``` + +```ucm:error +.> run runMeWithOneArg +``` +```ucm:error +.> run runMeWithOneArg foo bar +``` + +```ucm:error +.> run runMeWithTwoArgs +``` diff --git a/unison-src/transcripts/io.output.md b/unison-src/transcripts/io.output.md index 2a7438dd86..ed3664a043 100644 --- a/unison-src/transcripts/io.output.md +++ b/unison-src/transcripts/io.output.md @@ -442,3 +442,100 @@ testHomeEnvVar _ = Tip: Use view testHomeEnvVar to view the source of a test. ``` +### Read command line args + +`runMeWithNoArgs`, `runMeWithOneArg`, and `runMeWithTwoArgs` raise exceptions +unless they called with the right number of arguments. + +```unison +testGetArgs.fail : Text -> Failure +testGetArgs.fail descr = Failure (typeLink IOFailure) descr !Any + +testGetArgs.runMeWithNoArgs : '{io2.IO, Exception} () +testGetArgs.runMeWithNoArgs = 'let + args = reraise !getArgs.impl + match args with + [] -> printLine "called with no args" + _ -> raise (fail "called with args") + +testGetArgs.runMeWithOneArg : '{io2.IO, Exception} () +testGetArgs.runMeWithOneArg = 'let + args = reraise !getArgs.impl + match args with + [] -> raise (fail "called with no args") + [_] -> printLine "called with one arg" + _ -> raise (fail "called with too many args") + +testGetArgs.runMeWithTwoArgs : '{io2.IO, Exception} () +testGetArgs.runMeWithTwoArgs = 'let + args = reraise !getArgs.impl + match args with + [] -> raise (fail "called with no args") + [_] -> raise (fail "called with one arg") + [_, _] -> printLine "called with two args" + _ -> raise (fail "called with too many args") +``` + +Test that they can be run with the right number of args. +```ucm +.> add + + ⍟ I've added these definitions: + + testGetArgs.fail : Text -> Failure + testGetArgs.runMeWithNoArgs : '{IO, Exception} () + testGetArgs.runMeWithOneArg : '{IO, Exception} () + testGetArgs.runMeWithTwoArgs : '{IO, Exception} () + +.> cd testGetArgs + +.> run runMeWithNoArgs + +.> run runMeWithOneArg foo + +.> run runMeWithTwoArgs foo bar + +``` +Calling our examples with the wrong number of args will error. + +```ucm +.> run runMeWithNoArgs foo + + 💔💥 + + The program halted with an unhandled exception: + + Failure (typeLink IOFailure) "called with args" !Any + +``` +```ucm +.> run runMeWithOneArg + + 💔💥 + + The program halted with an unhandled exception: + + Failure (typeLink IOFailure) "called with no args" !Any + +``` +```ucm +.> run runMeWithOneArg foo bar + + 💔💥 + + The program halted with an unhandled exception: + + Failure + (typeLink IOFailure) "called with too many args" !Any + +``` +```ucm +.> run runMeWithTwoArgs + + 💔💥 + + The program halted with an unhandled exception: + + Failure (typeLink IOFailure) "called with no args" !Any + +``` diff --git a/unison-src/transcripts/merges.output.md b/unison-src/transcripts/merges.output.md index 8316706e84..e814435fe0 100644 --- a/unison-src/transcripts/merges.output.md +++ b/unison-src/transcripts/merges.output.md @@ -112,13 +112,13 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Note: The most recent namespace hash is immediately below this message. - ⊙ #04qr4r9bem + ⊙ #4dmogr46d2 - Deletes: feature1.y - ⊙ #1i02tmn0q3 + ⊙ #fe6mqhfcun + Adds / updates: @@ -129,26 +129,26 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Original name New name(s) feature1.y master.y - ⊙ #h623ft74rq + ⊙ #qlrc4272pk + Adds / updates: feature1.y - ⊙ #h5umgqnj9q + ⊙ #mu2ju8e2ic > Moves: Original name New name x master.x - ⊙ #dolvpjs9jb + ⊙ #4adnodif8j + Adds / updates: x - □ #264ttii5lu (start of history) + □ #ucb56c3fgj (start of history) ``` To resurrect an old version of a namespace, you can learn its hash via the `history` command, then use `fork #namespacehash .newname`. diff --git a/unison-src/transcripts/reflog.output.md b/unison-src/transcripts/reflog.output.md index 7e33f8608e..af3d7b7a6b 100644 --- a/unison-src/transcripts/reflog.output.md +++ b/unison-src/transcripts/reflog.output.md @@ -59,17 +59,17 @@ y = 2 most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #23gofh3nso .old` to make an old namespace + `fork #o7ncp4f3j1 .old` to make an old namespace accessible again, - `reset-root #23gofh3nso` to reset the root namespace and + `reset-root #o7ncp4f3j1` to reset the root namespace and its history to that of the specified namespace. - 1. #cje83urajg : add - 2. #23gofh3nso : add - 3. #264ttii5lu : builtins.merge - 4. #1juguqe7eo : (initial reflogged namespace) + 1. #nc81qsj2br : add + 2. #o7ncp4f3j1 : add + 3. #ucb56c3fgj : builtins.merge + 4. #sjg2v58vn2 : (initial reflogged namespace) ``` If we `reset-root` to its previous value, `y` disappears. diff --git a/unison-src/transcripts/squash.output.md b/unison-src/transcripts/squash.output.md index ec21d5cb23..4971a2e62f 100644 --- a/unison-src/transcripts/squash.output.md +++ b/unison-src/transcripts/squash.output.md @@ -13,7 +13,7 @@ Let's look at some examples. We'll start with a namespace with just the builtins - □ #8323ahgrae (start of history) + □ #edu0qq546n (start of history) .> fork builtin builtin2 @@ -42,21 +42,21 @@ Now suppose we `fork` a copy of builtin, then rename `Nat.+` to `frobnicate`, th Note: The most recent namespace hash is immediately below this message. - ⊙ #2q1rds10q7 + ⊙ #u7d9er9k2f > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #5up39bm5ps + ⊙ #4dcjftvejg > Moves: Original name New name Nat.+ Nat.frobnicate - □ #8323ahgrae (start of history) + □ #edu0qq546n (start of history) ``` If we merge that back into `builtin`, we get that same chain of history: @@ -71,21 +71,21 @@ If we merge that back into `builtin`, we get that same chain of history: Note: The most recent namespace hash is immediately below this message. - ⊙ #2q1rds10q7 + ⊙ #u7d9er9k2f > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #5up39bm5ps + ⊙ #4dcjftvejg > Moves: Original name New name Nat.+ Nat.frobnicate - □ #8323ahgrae (start of history) + □ #edu0qq546n (start of history) ``` Let's try again, but using a `merge.squash` (or just `squash`) instead. The history will be unchanged: @@ -106,7 +106,7 @@ Let's try again, but using a `merge.squash` (or just `squash`) instead. The hist - □ #8323ahgrae (start of history) + □ #edu0qq546n (start of history) ``` The churn that happened in `mybuiltin` namespace ended up back in the same spot, so the squash merge of that namespace with our original namespace had no effect. @@ -191,47 +191,47 @@ At this point, Alice and Bob both have some history beyond what's in trunk: - □ #hdqr1brb29 (start of history) + □ #hkrqt3tm05 (start of history) .> history alice Note: The most recent namespace hash is immediately below this message. - ⊙ #bin0oov6bc + ⊙ #uollchacf2 > Moves: Original name New name neatoFun productionReadyId - ⊙ #ccsg56jgoh + ⊙ #7b6lii2lmc > Moves: Original name New name radNumber superRadNumber - ⊙ #a7iq2ak3tk + ⊙ #1l7bsgu3om + Adds / updates: bodaciousNumero neatoFun radNumber - □ #hdqr1brb29 (start of history) + □ #hkrqt3tm05 (start of history) .> history bob Note: The most recent namespace hash is immediately below this message. - ⊙ #h90udgqpmb + ⊙ #aicts31vr6 + Adds / updates: babyDon'tHurtMe no whatIsLove - □ #hdqr1brb29 (start of history) + □ #hkrqt3tm05 (start of history) ``` Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob both made their changes in one single commit. @@ -257,13 +257,13 @@ Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob bot Note: The most recent namespace hash is immediately below this message. - ⊙ #ui8bdim4lp + ⊙ #gjfd096e1s + Adds / updates: bodaciousNumero productionReadyId superRadNumber - □ #hdqr1brb29 (start of history) + □ #hkrqt3tm05 (start of history) .> merge.squash bob trunk @@ -285,19 +285,19 @@ Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob bot Note: The most recent namespace hash is immediately below this message. - ⊙ #ibjuc8j93d + ⊙ #k7bfk3l7uv + Adds / updates: babyDon'tHurtMe no whatIsLove - ⊙ #ui8bdim4lp + ⊙ #gjfd096e1s + Adds / updates: bodaciousNumero productionReadyId superRadNumber - □ #hdqr1brb29 (start of history) + □ #hkrqt3tm05 (start of history) ``` Since squash merges don't produce any merge nodes, we can `undo` a couple times to get back to our starting state: @@ -336,7 +336,7 @@ Since squash merges don't produce any merge nodes, we can `undo` a couple times - □ #hdqr1brb29 (start of history) + □ #hkrqt3tm05 (start of history) ``` This time, we'll first squash Alice and Bob's changes together before squashing their combined changes into `trunk`. The resulting `trunk` will have just a single entry in it, combining both Alice and Bob's changes: @@ -380,14 +380,14 @@ This time, we'll first squash Alice and Bob's changes together before squashing Note: The most recent namespace hash is immediately below this message. - ⊙ #q9stiuk3ke + ⊙ #ka70nifphh + Adds / updates: babyDon'tHurtMe bodaciousNumero no productionReadyId superRadNumber whatIsLove - □ #hdqr1brb29 (start of history) + □ #hkrqt3tm05 (start of history) ``` So, there you have it. With squashing, you can control the granularity of your history. @@ -420,7 +420,7 @@ Another thing we can do is `squash` into an empty namespace. This effectively ma - □ #3vvamd3psi (start of history) + □ #sui24env59 (start of history) ``` There's nothing really special here, `squash src dest` discards `src` history that comes after the LCA of `src` and `dest`, it's just that in the case of an empty namespace, that LCA is the beginning of time (the empty namespace), so all the history of `src` is discarded. @@ -485,13 +485,13 @@ This checks to see that squashing correctly preserves deletions: Note: The most recent namespace hash is immediately below this message. - ⊙ #8sh52cri4a + ⊙ #0kh907mpqb - Deletes: Nat.* Nat.+ - □ #8323ahgrae (start of history) + □ #edu0qq546n (start of history) ``` Notice that `Nat.+` and `Nat.*` are deleted by the squash, and we see them deleted in one atomic step in the history. From 7a9c8e03dce12c8c884f8cafd5b1f25adddccb32 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 11 Nov 2021 10:49:42 -0600 Subject: [PATCH 109/297] More notes --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index b7e9d3bf47..d26c16f882 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -95,6 +95,8 @@ import Unison.Var (Var) -- * [ ] Delete V1 Hashing to ensure it's unused -- * [x] Salt V2 hashes with version number -- * [ ] confirm that pulls are handled ok +-- * [ ] Update the schema version in the database after migrating so we only migrate +-- once. migrateSchema12 :: forall a m v. (MonadIO m, Var v) => Connection -> Codebase m v a -> m () migrateSchema12 conn codebase = do From dd1c97c2152e9d3b0ea590f0bf593fbaa2ab1c54 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Thu, 11 Nov 2021 13:10:32 -0500 Subject: [PATCH 110/297] fix delete logic --- .../U/Codebase/Sqlite/Queries.hs | 59 +++++++++++-------- .../SqliteCodebase/MigrateSchema12.hs | 5 +- 2 files changed, 36 insertions(+), 28 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 8c7bf8e68d..a1f139ac7b 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -56,7 +56,6 @@ module U.Codebase.Sqlite.Queries ( loadObjectWithTypeById, loadObjectWithHashIdAndTypeById, updateObjectBlob, -- unused - deleteObject, -- * namespace_root table loadMaybeNamespaceRoot, @@ -86,7 +85,6 @@ module U.Codebase.Sqlite.Queries ( clearWatches, -- * indexes - deleteIndexesForObject, -- ** dependents index addToDependentsIndex, getDependentsForDependency, @@ -108,6 +106,9 @@ module U.Codebase.Sqlite.Queries ( namespaceHashIdByBase32Prefix, causalHashIdByBase32Prefix, + -- * garbage collection + garbageCollectObjectsWithoutHashes, + -- * db misc createSchema, schemaVersion, @@ -413,16 +414,6 @@ updateObjectBlob oId bs = execute sql (oId, bs) where sql = [here| UPDATE object SET bytes = ? WHERE id = ? |] --- | Delete a row in the @object@ table. -deleteObject :: DB m => ObjectId -> m () -deleteObject oid = - execute - [here| - DELETE FROM object - WHERE id = ? - |] - (Only oid) - -- |Maybe we would generalize this to something other than NamespaceHash if we -- end up wanting to store other kinds of Causals here too. saveCausal :: DB m => CausalHashId -> BranchHashId -> m () @@ -661,28 +652,46 @@ getTypeMentionsReferencesForComponent r = fixupTypeIndexRow :: Reference' TextId HashId :. Referent.Id -> (Reference' TextId HashId, Referent.Id) fixupTypeIndexRow (rh :. ri) = (rh, ri) --- | Delete all mentions of an object in index tables. -deleteIndexesForObject :: DB m => ObjectId -> m () -deleteIndexesForObject oid = do - execute +-- | Delete objects without hashes. An object typically *would* have a hash, but (for example) during a migration in which an object's hash +-- may change, its corresponding hash_object row may be updated to point at a new version of that object. This procedure clears out all +-- references to objects that do not have any corresponding hash_object rows. +garbageCollectObjectsWithoutHashes :: DB m => m () +garbageCollectObjectsWithoutHashes = do + execute_ + [here| + CREATE TEMPORARY TABLE object_without_hash AS + SELECT id + FROM object + WHERE id NOT IN ( + SELECT object_id + FROM hash_object + ) + |] + execute_ [here| DELETE FROM dependents_index - WHERE dependency_object_id = ? - OR dependent_object_id = ? + WHERE dependency_object_id IN object_without_hash + OR dependent_object_id IN object_without_hash |] - (oid, oid) - execute + execute_ [here| DELETE FROM find_type_index - WHERE term_referent_object_id = ? + WHERE term_referent_object_id IN object_without_hash |] - (Only oid) - execute + execute_ [here| DELETE FROM find_type_mentions_index - WHERE term_referent_object_id = ? + WHERE term_referent_object_id IN object_without_hash + |] + execute_ + [here| + DELETE FROM object + WHERE id IN object_without_hash + |] + execute_ + [here| + DROP TABLE object_without_hash |] - (Only oid) addToDependentsIndex :: DB m => Reference.Reference -> Reference.Id -> m () addToDependentsIndex dependency dependent = execute sql (dependency :. dependent) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index b7e9d3bf47..05cdce4daa 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -110,9 +110,8 @@ migrateSchema12 conn codebase = do ifor_ (objLookup migrationState) \oldObjId (newObjId, _, _, _) -> do (runDB conn . liftQ) do Q.recordObjectRehash oldObjId newObjId - Q.deleteIndexesForObject oldObjId - -- what about deleting old watches? - Q.deleteObject oldObjId + -- what about deleting old watches? + runDB conn (liftQ Q.garbageCollectObjectsWithoutHashes) where progress :: Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity progress = From 86dfc171cf82f8cd76f554ea4af66dc3049efcd6 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 11 Nov 2021 23:10:33 -0500 Subject: [PATCH 111/297] pull Hashable into Hashing.V2 internals and out of Causal --- .../src/Unison/Codebase/Branch.hs | 41 +-- .../src/Unison/Codebase/Branch/Type.hs | 4 +- .../src/Unison/Codebase/Causal.hs | 274 ++++----------- .../src/Unison/Codebase/Causal/Type.hs | 127 +++++++ .../src/Unison/Codebase/SqliteCodebase.hs | 2 +- .../Codebase/SqliteCodebase/Conversions.hs | 2 +- .../MigrateSchema12/DbHelpers.hs | 8 +- .../src/Unison/Hashing/V2/ABT.hs | 4 +- .../src/Unison/Hashing/V2/Branch.hs | 15 +- .../src/Unison/Hashing/V2/Causal.hs | 4 +- .../src/Unison/Hashing/V2/Convert.hs | 80 +++-- .../src/Unison/Hashing/V2/DataDeclaration.hs | 5 +- .../src/Unison/Hashing/V2/Hashable.hs | 139 ++++++++ .../src/Unison/Hashing/V2/Kind.hs | 4 +- .../src/Unison/Hashing/V2/Patch.hs | 9 +- .../src/Unison/Hashing/V2/Pattern.hs | 2 +- .../src/Unison/Hashing/V2/Reference.hs | 8 +- .../src/Unison/Hashing/V2/Reference/Util.hs | 2 +- .../src/Unison/Hashing/V2/Referent.hs | 4 +- .../src/Unison/Hashing/V2/Term.hs | 4 +- .../src/Unison/Hashing/V2/TermEdit.hs | 4 +- .../src/Unison/Hashing/V2/Type.hs | 4 +- .../src/Unison/Hashing/V2/TypeEdit.hs | 6 +- .../src/Unison/Server/Backend.hs | 2 +- parser-typechecker/src/Unison/Util/Star3.hs | 9 - parser-typechecker/tests/Suite.hs | 3 - .../tests/Unison/Test/Codebase/Causal.hs | 318 ------------------ .../unison-parser-typechecker.cabal | 3 +- stack.yaml | 2 +- unison-core/src/Unison/NameSegment.hs | 4 - 30 files changed, 441 insertions(+), 652 deletions(-) create mode 100644 parser-typechecker/src/Unison/Codebase/Causal/Type.hs create mode 100644 parser-typechecker/src/Unison/Hashing/V2/Hashable.hs delete mode 100644 parser-typechecker/tests/Unison/Test/Codebase/Causal.hs diff --git a/parser-typechecker/src/Unison/Codebase/Branch.hs b/parser-typechecker/src/Unison/Codebase/Branch.hs index 7bb39a65f7..af18a7665a 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch.hs @@ -74,10 +74,10 @@ import Unison.Prelude hiding (empty) import Prelude hiding (head,read,subtract) -import Control.Lens hiding ( children, cons, transform, uncons ) -import Data.Bifunctor ( second ) -import qualified Data.Map as Map -import qualified Data.Set as Set +import Control.Lens hiding (children, cons, transform, uncons) +import Data.Bifunctor (second) +import qualified Data.Map as Map +import qualified Data.Set as Set import Unison.Codebase.Branch.Raw (Raw (Raw)) import Unison.Codebase.Branch.Type ( Branch (..), @@ -91,26 +91,24 @@ import Unison.Codebase.Branch.Type headHash, history, ) -import qualified Unison.Codebase.Patch as Patch -import Unison.Codebase.Patch ( Patch ) -import qualified Unison.Codebase.Causal as Causal import Unison.Codebase.Causal (Causal) -import Unison.Codebase.Path ( Path(..) ) -import qualified Unison.Codebase.Path as Path -import Unison.NameSegment ( NameSegment ) -import qualified Unison.Codebase.Metadata as Metadata -import Unison.Hashable ( Hashable ) -import qualified Unison.Hashable as H +import qualified Unison.Codebase.Causal as Causal +import qualified Unison.Codebase.Metadata as Metadata +import Unison.Codebase.Patch (Patch) +import qualified Unison.Codebase.Patch as Patch +import Unison.Codebase.Path (Path (..)) +import qualified Unison.Codebase.Path as Path import qualified Unison.Hashing.V2.Convert as H -import Unison.Name ( Name ) -import qualified Unison.Name as Name -import Unison.Reference ( Reference ) -import Unison.Referent ( Referent ) -import qualified Unison.Util.Relation as R -import qualified Unison.Util.Relation4 as R4 -import qualified Unison.Util.Star3 as Star3 +import Unison.Name (Name) +import qualified Unison.Name as Name +import Unison.NameSegment (NameSegment) +import Unison.Reference (Reference) +import Unison.Referent (Referent) import qualified Unison.Util.List as List import Unison.Util.Relation (Relation) +import qualified Unison.Util.Relation as R +import qualified Unison.Util.Relation4 as R4 +import qualified Unison.Util.Star3 as Star3 deepReferents :: Branch0 m -> Set Referent deepReferents = R.dom . deepTerms @@ -451,9 +449,6 @@ stepManyAt0M actions b = go (toList actions) b where c2 <- stepChildren (view children b) currentAction (set children c2 b) -instance Hashable (Branch0 m) where - tokens = H.tokensBranch0 - -- todo: consider inlining these into Actions2 addTermName :: Referent -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m diff --git a/parser-typechecker/src/Unison/Codebase/Branch/Type.hs b/parser-typechecker/src/Unison/Codebase/Branch/Type.hs index f7442f016e..44126b115b 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch/Type.hs @@ -6,8 +6,8 @@ import Control.Lens (makeLenses, makeLensesFor) import Data.Map (Map) import Data.Set (Set) import Unison.Codebase.Branch.Raw (Raw) -import Unison.Codebase.Causal (Causal) -import qualified Unison.Codebase.Causal as Causal +import Unison.Codebase.Causal.Type (Causal) +import qualified Unison.Codebase.Causal.Type as Causal import qualified Unison.Codebase.Metadata as Metadata import Unison.Codebase.Patch (Patch) import Unison.Codebase.Path (Path) diff --git a/parser-typechecker/src/Unison/Codebase/Causal.hs b/parser-typechecker/src/Unison/Codebase/Causal.hs index 29f6e4eef9..11baf27834 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal.hs @@ -4,26 +4,20 @@ module Unison.Codebase.Causal ( Causal (..), - Raw (..), - RawHash (..), + RawHash (RawHash, unRawHash), head_, one, cons, cons', consDistinct, uncons, - hash, children, - Deserialize, - Serialize, - cachedRead, threeWayMerge, threeWayMerge', squashMerge', lca, stepDistinct, stepDistinctM, - sync, transform, unsafeMapHashPreserving, before, @@ -34,195 +28,44 @@ where import Unison.Prelude import qualified Control.Monad.Extra as Monad (anyM) -import Control.Monad.State (StateT) -import qualified Control.Monad.State as State import qualified Control.Monad.Reader as Reader +import qualified Control.Monad.State as State import qualified Data.Map as Map -import Data.Sequence (ViewL (..)) -import qualified Data.Sequence as Seq import qualified Data.Set as Set -import qualified U.Util.Cache as Cache -import Unison.Hash (Hash) -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as Hashable +import Unison.Codebase.Branch.Type (Branch0, UnwrappedBranch) +import Unison.Codebase.Causal.Type + ( Causal + ( Cons, + Merge, + One, + currentHash, + head, + tail, + tails + ), + RawHash (RawHash, unRawHash), + before, + children, + head_, + lca, + ) +import qualified Unison.Hashing.V2.Convert as Hashing import Prelude hiding (head, read, tail) -import qualified Control.Lens as Lens - -{- -`Causal a` has 5 operations, specified algebraically here: - -* `before : Causal m a -> Causal m a -> m Bool` defines a partial order on - `Causal`. -* `head : Causal m a -> a`, which represents the "latest" `a` value in a causal - chain. -* `one : a -> Causal m a`, satisfying `head (one hd) == hd` -* `cons : a -> Causal a -> Causal a`, satisfying `head (cons hd tl) == hd` and - also `before tl (cons hd tl)`. -* `merge : CommutativeSemigroup a => Causal a -> Causal a -> Causal a`, which is - commutative (but not associative) and satisfies: - * `before c1 (merge c1 c2)` - * `before c2 (merge c1 c2)` -* `sequence : Causal a -> Causal a -> Causal a`, which is defined as - `sequence c1 c2 = cons (head c2) (merge c1 c2)`. - * `before c1 (sequence c1 c2)` - * `head (sequence c1 c2) == head c2` --} - -newtype RawHash a = RawHash { unRawHash :: Hash } - deriving (Eq, Ord, Generic) - -instance Show (RawHash a) where - show = show . unRawHash - -instance Show e => Show (Causal m h e) where - show = \case - One h e -> "One " ++ (take 3 . show) h ++ " " ++ show e - Cons h e t -> "Cons " ++ (take 3 . show) h ++ " " ++ show e ++ " " ++ (take 3 . show) (fst t) - Merge h e ts -> "Merge " ++ (take 3 . show) h ++ " " ++ show e ++ " " ++ (show . fmap (take 3 . show) . toList) (Map.keysSet ts) - --- h is the type of the pure data structure that will be hashed and used as --- an index; e.g. h = Branch00, e = Branch0 m -data Causal m h e - = One { currentHash :: RawHash h - , head :: e - } - | Cons { currentHash :: RawHash h - , head :: e - , tail :: (RawHash h, m (Causal m h e)) - } - -- The merge operation `<>` flattens and normalizes for order - | Merge { currentHash :: RawHash h - , head :: e - , tails :: Map (RawHash h) (m (Causal m h e)) - } - -Lens.makeLensesFor [("head", "head_")] ''Causal - --- A serializer `Causal m h e`. Nonrecursive -- only responsible for --- writing a single node of the causal structure. -data Raw h e - = RawOne e - | RawCons e (RawHash h) - | RawMerge e (Set (RawHash h)) - -type Deserialize m h e = RawHash h -> m (Raw h e) - -cachedRead :: MonadIO m - => Cache.Cache (RawHash h) (Causal m h e) - -> Deserialize m h e - -> RawHash h -> m (Causal m h e) -cachedRead cache deserializeRaw h = Cache.lookup cache h >>= \case - Nothing -> do - raw <- deserializeRaw h - causal <- pure $ case raw of - RawOne e -> One h e - RawCons e tailHash -> Cons h e (tailHash, read tailHash) - RawMerge e tailHashes -> Merge h e $ - Map.fromList [(h, read h) | h <- toList tailHashes ] - Cache.insert cache h causal - pure causal - Just causal -> pure causal - where - read = cachedRead cache deserializeRaw - -type Serialize m h e = RawHash h -> Raw h e -> m () - --- Sync a causal to some persistent store, stopping when hitting a Hash which --- has already been written, according to the `exists` function provided. -sync - :: forall m h e - . Monad m - => (RawHash h -> m Bool) - -> Serialize (StateT (Set (RawHash h)) m) h e - -> Causal m h e - -> StateT (Set (RawHash h)) m () -sync exists serialize c = do - queued <- State.get - itExists <- if Set.member (currentHash c) queued then pure True - else lift . exists $ currentHash c - unless itExists $ go c - where - go :: Causal m h e -> StateT (Set (RawHash h)) m () - go c = do - queued <- State.get - when (Set.notMember (currentHash c) queued) $ do - State.modify (Set.insert $ currentHash c) - case c of - One currentHash head -> serialize currentHash $ RawOne head - Cons currentHash head (tailHash, tailm) -> do - -- write out the tail first, so what's on disk is always valid - b <- lift $ exists tailHash - unless b $ go =<< lift tailm - serialize currentHash (RawCons head tailHash) - Merge currentHash head tails -> do - for_ (Map.toList tails) $ \(hash, cm) -> do - b <- lift $ exists hash - unless b $ go =<< lift cm - serialize currentHash (RawMerge head (Map.keysSet tails)) - -instance Eq (Causal m h a) where - a == b = currentHash a == currentHash b - -instance Ord (Causal m h a) where - a <= b = currentHash a <= currentHash b - -instance Hashable (RawHash h) where - tokens (RawHash h) = Hashable.tokens h - --- Find the lowest common ancestor of two causals. -lca :: Monad m => Causal m h e -> Causal m h e -> m (Maybe (Causal m h e)) -lca a b = - lca' (Seq.singleton $ pure a) (Seq.singleton $ pure b) - --- `lca' xs ys` finds the lowest common ancestor of any element of `xs` and any --- element of `ys`. --- This is a breadth-first search used in the implementation of `lca a b`. -lca' - :: Monad m - => Seq (m (Causal m h e)) - -> Seq (m (Causal m h e)) - -> m (Maybe (Causal m h e)) -lca' = go Set.empty Set.empty where - go seenLeft seenRight remainingLeft remainingRight = - case Seq.viewl remainingLeft of - Seq.EmptyL -> search seenLeft remainingRight - a :< as -> do - left <- a - if Set.member (currentHash left) seenRight - then pure $ Just left - -- Note: swapping position of left and right when we recurse so that - -- we search each side equally. This avoids having to case on both - -- arguments, and the order shouldn't really matter. - else go seenRight - (Set.insert (currentHash left) seenLeft) - remainingRight - (as <> children left) - search seen remaining = case Seq.viewl remaining of - Seq.EmptyL -> pure Nothing - a :< as -> do - current <- a - if Set.member (currentHash current) seen - then pure $ Just current - else search seen (as <> children current) - -children :: Causal m h e -> Seq (m (Causal m h e)) -children (One _ _ ) = Seq.empty -children (Cons _ _ (_, t)) = Seq.singleton t -children (Merge _ _ ts ) = Seq.fromList $ Map.elems ts +import qualified Unison.Codebase.Branch.Raw as Branch -- A `squashMerge combine c1 c2` gives the same resulting `e` -- as a `threeWayMerge`, but doesn't introduce a merge node for the -- result. Instead, the resulting causal is a simple `Cons` onto `c2` -- (or is equal to `c2` if `c1` changes nothing). squashMerge' - :: forall m h e - . (Monad m, Hashable e, Eq e) - => (Causal m h e -> Causal m h e -> m (Maybe (Causal m h e))) - -> (e -> m e) - -> (Maybe e -> e -> e -> m e) - -> Causal m h e - -> Causal m h e - -> m (Causal m h e) + :: forall m + . Monad m + => (UnwrappedBranch m -> UnwrappedBranch m -> m (Maybe (UnwrappedBranch m))) + -> (Branch0 m -> m (Branch0 m)) + -> (Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)) + -> UnwrappedBranch m + -> UnwrappedBranch m + -> m (UnwrappedBranch m) squashMerge' lca discardHistory combine c1 c2 = do theLCA <- lca c1 c2 let done newHead = consDistinct newHead c2 @@ -233,22 +76,22 @@ squashMerge' lca discardHistory combine c1 c2 = do | lca == c2 -> done <$> discardHistory (head c1) | otherwise -> done <$> combine (Just $ head lca) (head c1) (head c2) -threeWayMerge :: forall m h e - . (Monad m, Hashable e) - => (Maybe e -> e -> e -> m e) - -> Causal m h e - -> Causal m h e - -> m (Causal m h e) +threeWayMerge :: forall m + . Monad m + => (Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)) + -> UnwrappedBranch m + -> UnwrappedBranch m + -> m (UnwrappedBranch m) threeWayMerge = threeWayMerge' lca threeWayMerge' - :: forall m h e - . (Monad m, Hashable e) - => (Causal m h e -> Causal m h e -> m (Maybe (Causal m h e))) - -> (Maybe e -> e -> e -> m e) - -> Causal m h e - -> Causal m h e - -> m (Causal m h e) + :: forall m + . Monad m + => (UnwrappedBranch m -> UnwrappedBranch m -> m (Maybe (UnwrappedBranch m))) + -> (Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)) + -> UnwrappedBranch m + -> UnwrappedBranch m + -> m (UnwrappedBranch m) threeWayMerge' lca combine c1 c2 = do theLCA <- lca c1 c2 case theLCA of @@ -260,12 +103,10 @@ threeWayMerge' lca combine c1 c2 = do where children = Map.fromList [(currentHash c1, pure c1), (currentHash c2, pure c2)] - done :: e -> Causal m h e + done :: Branch0 m -> UnwrappedBranch m done newHead = - Merge (RawHash (hash (newHead, Map.keys children))) newHead children - -before :: Monad m => Causal m h e -> Causal m h e -> m Bool -before a b = (== Just a) <$> lca a b + let h = Hashing.hashCausal newHead (Map.keysSet children) + in Merge (RawHash h) newHead children -- `True` if `h` is found in the history of `c` within `maxDepth` path length -- from the tip of `c` @@ -285,27 +126,28 @@ beforeHash maxDepth h c = State.modify' (<> Set.fromList cs) Monad.anyM (Reader.local (1+) . go) unseens -hash :: Hashable e => e -> Hash -hash = Hashable.accumulate' - -stepDistinct :: (Applicative m, Eq e, Hashable e) => (e -> e) -> Causal m h e -> Causal m h e +stepDistinct :: Applicative m => (Branch0 m -> Branch0 m) -> UnwrappedBranch m -> UnwrappedBranch m stepDistinct f c = f (head c) `consDistinct` c stepDistinctM - :: (Applicative m, Functor n, Eq e, Hashable e) - => (e -> n e) -> Causal m h e -> n (Causal m h e) + :: (Applicative m, Functor n) + => (Branch0 m -> n (Branch0 m)) -> UnwrappedBranch m -> n (UnwrappedBranch m) stepDistinctM f c = (`consDistinct` c) <$> f (head c) -one :: Hashable e => e -> Causal m h e -one e = One (RawHash $ hash e) e +one :: Branch0 m -> UnwrappedBranch m +one e = + let h = Hashing.hashCausal e mempty + in One (RawHash h) e -cons :: (Applicative m, Hashable e) => e -> Causal m h e -> Causal m h e +cons :: Applicative m => Branch0 m -> UnwrappedBranch m -> UnwrappedBranch m cons e tl = cons' e (currentHash tl) (pure tl) -cons' :: Hashable e => e -> RawHash h -> m (Causal m h e) -> Causal m h e -cons' e ht mt = Cons (RawHash $ hash [hash e, unRawHash ht]) e (ht, mt) +cons' :: Branch0 m -> RawHash Branch.Raw -> m (UnwrappedBranch m) -> UnwrappedBranch m +cons' b0 hTail mTail = + let h = Hashing.hashCausal b0 (Set.singleton hTail) + in Cons (RawHash h) b0 (hTail, mTail) -consDistinct :: (Applicative m, Eq e, Hashable e) => e -> Causal m h e -> Causal m h e +consDistinct :: Applicative m => Branch0 m -> UnwrappedBranch m -> UnwrappedBranch m consDistinct e tl = if head tl == e then tl else cons e tl diff --git a/parser-typechecker/src/Unison/Codebase/Causal/Type.hs b/parser-typechecker/src/Unison/Codebase/Causal/Type.hs new file mode 100644 index 0000000000..2c05ea402c --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/Causal/Type.hs @@ -0,0 +1,127 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE TemplateHaskell #-} + +module Unison.Codebase.Causal.Type + ( Causal (..), + RawHash (..), + before, + children, + lca, + head_, + ) +where + +import qualified Control.Lens as Lens +import qualified Data.Map as Map +import qualified Data.Sequence as Seq +import qualified Data.Set as Set +import Unison.Hash (Hash) +import Unison.Prelude +import Prelude hiding (head, read, tail) + +{- +`Causal a` has 5 operations, specified algebraically here: + +* `before : Causal m a -> Causal m a -> m Bool` defines a partial order on + `Causal`. +* `head : Causal m a -> a`, which represents the "latest" `a` value in a causal + chain. +* `one : a -> Causal m a`, satisfying `head (one hd) == hd` +* `cons : a -> Causal a -> Causal a`, satisfying `head (cons hd tl) == hd` and + also `before tl (cons hd tl)`. +* `merge : CommutativeSemigroup a => Causal a -> Causal a -> Causal a`, which is + commutative (but not associative) and satisfies: + * `before c1 (merge c1 c2)` + * `before c2 (merge c1 c2)` +* `sequence : Causal a -> Causal a -> Causal a`, which is defined as + `sequence c1 c2 = cons (head c2) (merge c1 c2)`. + * `before c1 (sequence c1 c2)` + * `head (sequence c1 c2) == head c2` +-} + +newtype RawHash a = RawHash {unRawHash :: Hash} + deriving (Eq, Ord, Generic) + +instance Show (RawHash a) where + show = show . unRawHash + +instance Show e => Show (Causal m h e) where + show = \case + One h e -> "One " ++ (take 3 . show) h ++ " " ++ show e + Cons h e t -> "Cons " ++ (take 3 . show) h ++ " " ++ show e ++ " " ++ (take 3 . show) (fst t) + Merge h e ts -> "Merge " ++ (take 3 . show) h ++ " " ++ show e ++ " " ++ (show . fmap (take 3 . show) . toList) (Map.keysSet ts) + +-- h is the type of the pure data structure that will be hashed and used as +-- an index; e.g. h = Branch00, e = Branch0 m +data Causal m h e + = One + { currentHash :: RawHash h, + head :: e + } + | Cons + { currentHash :: RawHash h, + head :: e, + tail :: (RawHash h, m (Causal m h e)) + } + | -- The merge operation `<>` flattens and normalizes for order + Merge + { currentHash :: RawHash h, + head :: e, + tails :: Map (RawHash h) (m (Causal m h e)) + } + +Lens.makeLensesFor [("head", "head_")] ''Causal + +children :: Causal m h e -> Seq (m (Causal m h e)) +children (One _ _) = Seq.empty +children (Cons _ _ (_, t)) = Seq.singleton t +children (Merge _ _ ts) = Seq.fromList $ Map.elems ts + +before :: Monad m => Causal m h e -> Causal m h e -> m Bool +before a b = (== Just a) <$> lca a b + +-- Find the lowest common ancestor of two causals. +lca :: Monad m => Causal m h e -> Causal m h e -> m (Maybe (Causal m h e)) +lca a b = + lca' (Seq.singleton $ pure a) (Seq.singleton $ pure b) + +-- `lca' xs ys` finds the lowest common ancestor of any element of `xs` and any +-- element of `ys`. +-- This is a breadth-first search used in the implementation of `lca a b`. +lca' :: + Monad m => + Seq (m (Causal m h e)) -> + Seq (m (Causal m h e)) -> + m (Maybe (Causal m h e)) +lca' = go Set.empty Set.empty + where + go seenLeft seenRight remainingLeft remainingRight = + case Seq.viewl remainingLeft of + Seq.EmptyL -> search seenLeft remainingRight + a Seq.:< as -> do + left <- a + if Set.member (currentHash left) seenRight + then pure $ Just left + else -- Note: swapping position of left and right when we recurse so that + -- we search each side equally. This avoids having to case on both + -- arguments, and the order shouldn't really matter. + + go + seenRight + (Set.insert (currentHash left) seenLeft) + remainingRight + (as <> children left) + search seen remaining = case Seq.viewl remaining of + Seq.EmptyL -> pure Nothing + a Seq.:< as -> do + current <- a + if Set.member (currentHash current) seen + then pure $ Just current + else search seen (as <> children current) + +instance Eq (Causal m h a) where + a == b = currentHash a == currentHash b + +instance Ord (Causal m h a) where + a <= b = currentHash a <= currentHash b diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 184b5858c5..e066e930d9 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -74,7 +74,7 @@ import Unison.Codebase (Codebase, CodebasePath) import qualified Unison.Codebase as Codebase1 import Unison.Codebase.Branch (Branch (..)) import qualified Unison.Codebase.Branch as Branch -import qualified Unison.Codebase.Causal as Causal +import qualified Unison.Codebase.Causal.Type as Causal import Unison.Codebase.Editor.Git (gitIn, gitTextIn, pullBranch) import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo (WriteGitRepo), printWriteRepo, writeToRead) import qualified Unison.Codebase.GitError as GitError diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index fa126dd9e4..3625704cf5 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -32,7 +32,7 @@ import qualified U.Util.Hash as V2.Hash import qualified U.Util.Map as Map import qualified Unison.ABT as V1.ABT import qualified Unison.Codebase.Branch as V1.Branch -import qualified Unison.Codebase.Causal as V1.Causal +import qualified Unison.Codebase.Causal.Type as V1.Causal import qualified Unison.Codebase.Metadata as V1.Metadata import qualified Unison.Codebase.Patch as V1 import qualified Unison.Codebase.ShortBranchHash as V1 diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs index e59250f267..74e0df3afe 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12/DbHelpers.hs @@ -26,9 +26,10 @@ import qualified U.Util.Map as Map import qualified U.Util.Set as Set import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv import Unison.Hash (Hash) -import qualified Unison.Hashable as H +import Unison.Hashing.V2.Branch (NameSegment (..)) import qualified Unison.Hashing.V2.Branch as Hashing.Branch import qualified Unison.Hashing.V2.Patch as Hashing (Patch (..)) +import qualified Unison.Hashing.V2.Patch as Hashing.Patch import qualified Unison.Hashing.V2.Reference as Hashing (Reference) import qualified Unison.Hashing.V2.Reference as Hashing.Reference import qualified Unison.Hashing.V2.Referent as Hashing (Referent) @@ -37,12 +38,11 @@ import qualified Unison.Hashing.V2.TermEdit as Hashing (TermEdit) import qualified Unison.Hashing.V2.TermEdit as Hashing.TermEdit import qualified Unison.Hashing.V2.TypeEdit as Hashing (TypeEdit) import qualified Unison.Hashing.V2.TypeEdit as Hashing.TypeEdit -import Unison.NameSegment (NameSegment (..)) import Unison.Prelude dbBranchHash :: EDB m => S.DbBranch -> m Hash dbBranchHash (S.Branch.Full.Branch tms tps patches children) = - fmap H.accumulate' $ + fmap Hashing.Branch.hashBranch $ Hashing.Branch.Raw <$> doTerms tms <*> doTypes tps @@ -74,7 +74,7 @@ dbBranchHash (S.Branch.Full.Branch tms tps patches children) = dbPatchHash :: forall m. EDB m => S.Patch -> m Hash dbPatchHash S.Patch {S.termEdits, S.typeEdits} = - fmap H.accumulate' $ + fmap Hashing.Patch.hashPatch $ Hashing.Patch <$> doTermEdits termEdits <*> doTypeEdits typeEdits diff --git a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs index 54bba0b4f0..adb9909b81 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs @@ -14,11 +14,11 @@ import Unison.ABT import Data.List hiding (cycle, find) import Data.Vector ((!)) import Prelude hiding (abs,cycle) -import Unison.Hashable (Accumulate,Hashable1,hash1) +import Unison.Hashing.V2.Hashable (Accumulate,Hashable1,hash1) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Vector as Vector -import qualified Unison.Hashable as Hashable +import qualified Unison.Hashing.V2.Hashable as Hashable -- Hash a strongly connected component and sort its definitions into a canonical order. hashComponent :: diff --git a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs index 0fefd7ffed..15c4ad1ff8 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs @@ -5,14 +5,13 @@ {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE ViewPatterns #-} -module Unison.Hashing.V2.Branch (Raw (..), MdValues (..)) where +module Unison.Hashing.V2.Branch (NameSegment(..), Raw (..), MdValues (..), hashBranch) where import Unison.Hash (Hash) -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as H +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as H import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) -import Unison.NameSegment (NameSegment) import Unison.Prelude type MetadataValue = Reference @@ -21,6 +20,11 @@ newtype MdValues = MdValues (Set MetadataValue) deriving (Eq, Ord, Show) deriving (Hashable) via Set MetadataValue +newtype NameSegment = NameSegment Text deriving (Eq, Ord, Show) + +hashBranch :: Raw -> Hash +hashBranch = H.accumulate' + data Raw = Raw { terms :: Map NameSegment (Map Referent MdValues), types :: Map NameSegment (Map Reference MdValues), @@ -35,3 +39,6 @@ instance Hashable Raw where H.accumulateToken (children b), H.accumulateToken (patches b) ] + +instance H.Hashable NameSegment where + tokens (NameSegment t) = [H.Text t] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index eff2129716..0b4fcba401 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -11,8 +11,8 @@ where import Data.Set (Set) import qualified Data.Set as Set import Unison.Hash (Hash) -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as H +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as H hashCausal :: H.Accumulate h => Causal -> h hashCausal = H.accumulate' diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index e85bdc1957..0af81ee24d 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -3,7 +3,8 @@ module Unison.Hashing.V2.Convert ( ResolutionResult, - tokensBranch0, + hashBranch, + hashCausal, hashDataDecls, hashDecls, hashPatch, @@ -29,7 +30,7 @@ import Data.Set (Set) import qualified Data.Set as Set import qualified Unison.ABT as ABT import qualified Unison.Codebase.Branch.Type as Memory.Branch -import qualified Unison.Codebase.Causal as Memory.Causal +import qualified Unison.Codebase.Causal.Type as Memory.Causal import qualified Unison.Codebase.Patch as Memory.Patch import qualified Unison.Codebase.TermEdit as Memory.TermEdit import qualified Unison.Codebase.TypeEdit as Memory.TypeEdit @@ -37,8 +38,6 @@ import qualified Unison.ConstructorType as CT import qualified Unison.ConstructorType as Memory.ConstructorType import qualified Unison.DataDeclaration as Memory.DD import Unison.Hash (Hash) -import Unison.Hashable (Accumulate, Token) -import qualified Unison.Hashable as H import qualified Unison.Hashing.V2.Branch as Hashing.Branch import qualified Unison.Hashing.V2.Causal as Hashing.Causal import qualified Unison.Hashing.V2.DataDeclaration as Hashing.DD @@ -52,7 +51,7 @@ import qualified Unison.Hashing.V2.TermEdit as Hashing.TermEdit import qualified Unison.Hashing.V2.Type as Hashing.Type import qualified Unison.Hashing.V2.TypeEdit as Hashing.TypeEdit import qualified Unison.Kind as Memory.Kind -import Unison.NameSegment (NameSegment) +import qualified Unison.NameSegment as Memory.NameSegment import Unison.Names.ResolutionResult (ResolutionResult) import qualified Unison.Pattern as Memory.Pattern import qualified Unison.Reference as Memory.Reference @@ -61,6 +60,7 @@ import qualified Unison.Term as Memory.Term import qualified Unison.Type as Memory.Type import qualified Unison.Util.Relation as Relation import qualified Unison.Util.Star3 as Memory.Star3 +import qualified U.Util.Map as Map import Unison.Var (Var) typeToReference :: Var v => Memory.Type.Type v a -> Memory.Reference.Reference @@ -347,33 +347,34 @@ m2hPatch (Memory.Patch.Patch termEdits typeEdits) = Memory.TypeEdit.Deprecate -> Hashing.TypeEdit.Deprecate hashPatch :: Memory.Patch.Patch -> Hash -hashPatch = H.accumulate' . m2hPatch +hashPatch = Hashing.Patch.hashPatch . m2hPatch -tokensBranch0 :: Accumulate h => Memory.Branch.Branch0 m -> [Token h] -tokensBranch0 = H.tokens . m2hBranch +hashBranch0 :: Memory.Branch.Branch0 m -> Hash +hashBranch0 = Hashing.Branch.hashBranch . m2hBranch0 -- hashing of branches isn't currently delegated here, because it's enmeshed -- with every `cons` or `step` function. I think it would be good to do while -- we're updating the hash function, but I'm also not looking forward to doing it -- and it's not clearly a problem yet. -_hashBranch :: Memory.Branch.Branch m -> Hash -_hashBranch = H.accumulate . _tokensBranch - -_tokensBranch :: Accumulate h => Memory.Branch.Branch m -> [Token h] -_tokensBranch = H.tokens . _m2hCausal . Memory.Branch._history - -_m2hCausal :: Memory.Branch.UnwrappedBranch m -> Hashing.Causal.Causal -- Hashing.Branch.Raw -_m2hCausal = undefined -- TODO: re-implement - -- \case - --Memory.Causal.One _h e -> - -- Hashing.Causal.Causal (m2hBranch e) mempty - --Memory.Causal.Cons _h e (ht, _) -> - -- Hashing.Causal.Causal (m2hBranch e) $ Set.singleton (Memory.Causal.unRawHash ht) - --Memory.Causal.Merge _h e ts -> - -- Hashing.Causal.Causal (m2hBranch e) $ Set.map Memory.Causal.unRawHash (Map.keysSet ts) - -m2hBranch :: Memory.Branch.Branch0 m -> Hashing.Branch.Raw -m2hBranch b = +hashBranch :: Memory.Branch.Branch m -> Hash +hashBranch = Hashing.Causal.hashCausal . m2hCausal . Memory.Branch._history + +hashCausal :: Memory.Branch.Branch0 m -> Set (Memory.Causal.RawHash h) -> Hash +hashCausal b0 tails = + Hashing.Causal.hashCausal $ + Hashing.Causal.Causal (hashBranch0 b0) (Set.map Memory.Causal.unRawHash tails) + +m2hCausal :: Memory.Branch.UnwrappedBranch m -> Hashing.Causal.Causal +m2hCausal = \case + Memory.Causal.One _h e -> + Hashing.Causal.Causal (hashBranch0 e) mempty + Memory.Causal.Cons _h e (ht, _) -> + Hashing.Causal.Causal (hashBranch0 e) $ Set.singleton (Memory.Causal.unRawHash ht) + Memory.Causal.Merge _h e ts -> + Hashing.Causal.Causal (hashBranch0 e) $ Set.map Memory.Causal.unRawHash (Map.keysSet ts) + +m2hBranch0 :: Memory.Branch.Branch0 m -> Hashing.Branch.Raw +m2hBranch0 b = Hashing.Branch.Raw (doTerms (Memory.Branch._terms b)) (doTypes (Memory.Branch._types b)) @@ -381,10 +382,12 @@ m2hBranch b = (doChildren (Memory.Branch._children b)) where -- is there a more readable way to structure these that's also linear? - doTerms :: Memory.Branch.Star Memory.Referent.Referent NameSegment -> Map NameSegment (Map Hashing.Referent.Referent Hashing.Branch.MdValues) + doTerms :: + Memory.Branch.Star Memory.Referent.Referent Memory.NameSegment.NameSegment -> + Map Hashing.Branch.NameSegment (Map Hashing.Referent.Referent Hashing.Branch.MdValues) doTerms s = Map.fromList - [ (ns, m2) + [ (m2hNameSegment ns, m2) | ns <- toList . Relation.ran $ Memory.Star3.d1 s, let m2 = Map.fromList @@ -395,10 +398,12 @@ m2hBranch b = ] ] - doTypes :: Memory.Branch.Star Memory.Reference.Reference NameSegment -> Map NameSegment (Map Hashing.Reference.Reference Hashing.Branch.MdValues) + doTypes :: + Memory.Branch.Star Memory.Reference.Reference Memory.NameSegment.NameSegment -> + Map Hashing.Branch.NameSegment (Map Hashing.Reference.Reference Hashing.Branch.MdValues) doTypes s = Map.fromList - [ (ns, m2) + [ (m2hNameSegment ns, m2) | ns <- toList . Relation.ran $ Memory.Star3.d1 s, let m2 = Map.fromList @@ -410,8 +415,15 @@ m2hBranch b = ] ] - doPatches :: Map NameSegment (Memory.Branch.EditHash, m Memory.Patch.Patch) -> Map NameSegment Hash - doPatches = Map.map fst + doPatches :: + Map Memory.NameSegment.NameSegment (Memory.Branch.EditHash, m Memory.Patch.Patch) -> + Map Hashing.Branch.NameSegment Hash + doPatches = Map.bimap m2hNameSegment fst - doChildren :: Map NameSegment (Memory.Branch.Branch m) -> Map NameSegment Hash - doChildren = Map.map (Memory.Causal.unRawHash . Memory.Branch.headHash) + doChildren :: + Map Memory.NameSegment.NameSegment (Memory.Branch.Branch m) -> + Map Hashing.Branch.NameSegment Hash + doChildren = Map.bimap m2hNameSegment (Memory.Causal.unRawHash . Memory.Branch.headHash) + +m2hNameSegment :: Memory.NameSegment.NameSegment -> Hashing.Branch.NameSegment +m2hNameSegment (Memory.NameSegment.NameSegment s) = Hashing.Branch.NameSegment s diff --git a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs index dd7586004e..cf30b5f2db 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs @@ -20,9 +20,9 @@ import Data.Bifunctor (first, second) import qualified Data.Map as Map import qualified Unison.ABT as ABT import Unison.Hash (Hash) -import Unison.Hashable (Hashable1) -import qualified Unison.Hashable as Hashable import qualified Unison.Hashing.V2.ABT as ABT +import Unison.Hashing.V2.Hashable (Hashable1) +import qualified Unison.Hashing.V2.Hashable as Hashable import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference import qualified Unison.Hashing.V2.Reference.Util as Reference.Util @@ -33,6 +33,7 @@ import qualified Unison.Names.ResolutionResult as Names import Unison.Prelude import Unison.Var (Var) import Prelude hiding (cycle) + type Decl v a = Either (EffectDeclaration v a) (DataDeclaration v a) data Modifier = Structural | Unique Text -- | Opaque (Set Reference) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs new file mode 100644 index 0000000000..9632bedad0 --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs @@ -0,0 +1,139 @@ +module Unison.Hashing.V2.Hashable where + +import qualified Crypto.Hash as CH +import qualified Data.ByteArray as BA +import qualified Data.ByteString as B +import Data.ByteString.Builder (doubleBE, int64BE, toLazyByteString, word64BE) +import qualified Data.ByteString.Lazy as BL +import qualified Data.Map as Map +import qualified Data.Set as Set +import U.Util.Hash (Hash) +import qualified U.Util.Hash as Hash +import Unison.Prelude +import Unison.Util.Relation (Relation) +import qualified Unison.Util.Relation as Relation +import Unison.Util.Relation3 (Relation3) +import qualified Unison.Util.Relation3 as Relation3 +import Unison.Util.Relation4 (Relation4) +import qualified Unison.Util.Relation4 as Relation4 + +data Token h + = Tag !Word8 + | Bytes !ByteString + | Int !Int64 + | Text !Text + | Double !Double + | Hashed !h + | Nat !Word64 + +class Accumulate h where + accumulate :: [Token h] -> h + fromBytes :: ByteString -> h + toBytes :: h -> ByteString + +accumulateToken :: (Accumulate h, Hashable t) => t -> Token h +accumulateToken = Hashed . accumulate' + +accumulate' :: (Accumulate h, Hashable t) => t -> h +accumulate' = accumulate . (hashVersion :) . tokens + where + hashVersion = Tag 2 + +class Hashable t where + tokens :: Accumulate h => t -> [Token h] + +instance Hashable a => Hashable [a] where + tokens = map accumulateToken + +instance (Hashable a, Hashable b) => Hashable (a, b) where + tokens (a, b) = [accumulateToken a, accumulateToken b] + +instance (Hashable a) => Hashable (Set.Set a) where + tokens = tokens . Set.toList + +instance (Hashable k, Hashable v) => Hashable (Map.Map k v) where + tokens = tokens . Map.toList + +instance (Hashable a, Hashable b) => Hashable (Relation a b) where + tokens = tokens . Relation.toList + +instance (Hashable d1, Hashable d2, Hashable d3) => Hashable (Relation3 d1 d2 d3) where + tokens s = [accumulateToken $ Relation3.toNestedList s] + +instance (Hashable d1, Hashable d2, Hashable d3, Hashable d4) => Hashable (Relation4 d1 d2 d3 d4) where + tokens s = [accumulateToken $ Relation4.toNestedList s] + +class Functor f => Hashable1 f where + -- | Produce a hash for an `f a`, given a hashing function for `a`. + -- If there is a notion of order-independence in some aspect of a subterm + -- of `f`, then the first argument (`hashUnordered :: [a] -> ([h], a -> h)`) + -- should be used to impose an order, and then apply that order in further hashing. + -- Otherwise the second argument (`hash :: a -> h`) should be used. + -- + -- Example 1: A simple functor with no unordered components. Hashable1 instance + -- just uses `hash`: + -- + -- data T a = One a | Two a a deriving Functor + -- + -- instance Hashable1 T where + -- hash1 _ hash t = case t of + -- One a -> accumulate [Tag 0, Hashed (hash a)] + -- Two a a2 -> accumulate [Tag 1, Hashed (hash a), Hashed (hash a2)] + -- + -- Example 2: A functor with unordered components. For hashing, we need to + -- pick a canonical ordering of the unordered components, so we + -- use `hashUnordered`: + -- + -- data U a = U { unordered :: [a], uno :: a, dos :: a } deriving Functor + -- + -- instance Hashable1 U where + -- hash1 hashUnordered _ (U unordered uno dos) = + -- let (hs, hash) = hashUnordered unordered + -- in accumulate $ map Hashed hs ++ [Hashed (hash uno), Hashed (hash dos)] + hash1 :: (Ord h, Accumulate h) => ([a] -> ([h], a -> h)) -> (a -> h) -> f a -> h + +instance Hashable () where + tokens _ = [] + +instance Hashable Double where + tokens d = [Double d] + +instance Hashable Text where + tokens s = [Text s] + +instance Hashable Char where + tokens c = [Nat $ fromIntegral $ fromEnum c] + +instance Hashable ByteString where + tokens bs = [Bytes bs] + +instance Hashable Word64 where + tokens w = [Nat w] + +instance Hashable Int64 where + tokens w = [Int w] + +instance Hashable Bool where + tokens b = [Tag . fromIntegral $ fromEnum b] + +instance Hashable Hash where + tokens h = [Bytes (Hash.toByteString h)] + +instance Accumulate Hash where + accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit + where + go :: CH.Context CH.SHA3_512 -> [Token Hash] -> CH.Context CH.SHA3_512 + go acc tokens = CH.hashUpdates acc (tokens >>= toBS) + toBS (Tag b) = [B.singleton b] + toBS (Bytes bs) = [encodeLength $ B.length bs, bs] + toBS (Int i) = BL.toChunks . toLazyByteString . int64BE $ i + toBS (Nat i) = BL.toChunks . toLazyByteString . word64BE $ i + toBS (Double d) = BL.toChunks . toLazyByteString . doubleBE $ d + toBS (Text txt) = + let tbytes = encodeUtf8 txt + in [encodeLength (B.length tbytes), tbytes] + toBS (Hashed h) = [Hash.toByteString h] + encodeLength :: Integral n => n -> B.ByteString + encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral + fromBytes = Hash.fromByteString + toBytes = Hash.toByteString diff --git a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs index 6cdc6224f5..e7b082704d 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs @@ -4,8 +4,8 @@ module Unison.Hashing.V2.Kind where import Unison.Prelude -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as Hashable +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as Hashable data Kind = Star | Arrow Kind Kind deriving (Eq,Ord,Read,Show,Generic) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs index a26faebc1d..58ad7e7268 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs @@ -2,17 +2,20 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TemplateHaskell #-} -module Unison.Hashing.V2.Patch (Patch (..)) where +module Unison.Hashing.V2.Patch (Patch (..), hashPatch) where import Data.Map (Map) import Data.Set (Set) -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as H +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as H import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.TermEdit (TermEdit) import Unison.Hashing.V2.TypeEdit (TypeEdit) +hashPatch :: H.Accumulate h => Patch -> h +hashPatch = H.accumulate' + data Patch = Patch { termEdits :: Map Referent (Set TermEdit), typeEdits :: Map Reference (Set TypeEdit) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs index 843bf646fd..eb3000ea4c 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs @@ -9,7 +9,7 @@ import Data.Foldable as Foldable hiding (foldMap') import Data.List (intercalate) import qualified Data.Set as Set import Unison.DataDeclaration.ConstructorId (ConstructorId) -import qualified Unison.Hashable as H +import qualified Unison.Hashing.V2.Hashable as H import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Type as Type import Unison.Prelude diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs index b40fc1f26c..a7b66b34c9 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs @@ -10,7 +10,6 @@ module Unison.Hashing.V2.Reference pattern DerivedId, Id (..), components, - toText, ) where @@ -18,8 +17,8 @@ import Unison.Prelude import qualified Data.Text as Text import qualified Unison.Hash as H -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as Hashable +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as Hashable import Unison.ShortHash (ShortHash) import qualified Unison.ShortHash as SH @@ -56,9 +55,6 @@ toShortHash (Derived h i) = SH.ShortHash (H.base32Hex h) (Just $ showSuffix i) N showSuffix :: Pos -> Text showSuffix = Text.pack . show -toText :: Reference -> Text -toText = SH.toText . toShortHash - component :: H.Hash -> [k] -> [(k, Id)] component h ks = let in [ (k, (Id h i)) | (k, i) <- ks `zip` [0..]] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs index 0e7f6376cd..b81da536b2 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs @@ -3,7 +3,7 @@ module Unison.Hashing.V2.Reference.Util where import Unison.Prelude import qualified Unison.Hashing.V2.Reference as Reference -import Unison.Hashable (Hashable1) +import Unison.Hashing.V2.Hashable (Hashable1) import Unison.ABT (Var) import qualified Unison.Hashing.V2.ABT as ABT import qualified Data.Map as Map diff --git a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs index f19c75b0e0..2ca2aaf69f 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs @@ -10,8 +10,8 @@ module Unison.Hashing.V2.Referent where import Unison.DataDeclaration.ConstructorId (ConstructorId) -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as H +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as H import Unison.Hashing.V2.Reference (Reference) data Referent = Ref Reference | Con Reference ConstructorId diff --git a/parser-typechecker/src/Unison/Hashing/V2/Term.hs b/parser-typechecker/src/Unison/Hashing/V2/Term.hs index e106db4e26..cc45f8add8 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Term.hs @@ -25,8 +25,8 @@ import qualified Unison.ABT as ABT import qualified Unison.Blank as B import Unison.DataDeclaration.ConstructorId (ConstructorId) import qualified Unison.Hash as Hash -import Unison.Hashable (Hashable1, accumulateToken) -import qualified Unison.Hashable as Hashable +import Unison.Hashing.V2.Hashable (Hashable1, accumulateToken) +import qualified Unison.Hashing.V2.Hashable as Hashable import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Pattern (Pattern) import Unison.Hashing.V2.Reference (Reference) diff --git a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs index b75773c09c..d05000257d 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs @@ -1,7 +1,7 @@ module Unison.Hashing.V2.TermEdit (TermEdit (..)) where -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as H +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as H import Unison.Hashing.V2.Referent (Referent) data TermEdit = Replace Referent | Deprecate diff --git a/parser-typechecker/src/Unison/Hashing/V2/Type.hs b/parser-typechecker/src/Unison/Hashing/V2/Type.hs index 3813073454..a2f5232f93 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Type.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Type.hs @@ -27,8 +27,8 @@ module Unison.Hashing.V2.Type ( import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.ABT as ABT -import Unison.Hashable (Hashable1) -import qualified Unison.Hashable as Hashable +import Unison.Hashing.V2.Hashable (Hashable1) +import qualified Unison.Hashing.V2.Hashable as Hashable import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference diff --git a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs index bc9bddfb54..328e82a8b3 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs @@ -1,7 +1,7 @@ -module Unison.Hashing.V2.TypeEdit (TypeEdit(..)) where +module Unison.Hashing.V2.TypeEdit (TypeEdit (..)) where -import Unison.Hashable (Hashable) -import qualified Unison.Hashable as H +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as H import Unison.Hashing.V2.Reference (Reference) data TypeEdit = Replace Reference | Deprecate diff --git a/parser-typechecker/src/Unison/Server/Backend.hs b/parser-typechecker/src/Unison/Server/Backend.hs index efdd6de2a9..570861c737 100644 --- a/parser-typechecker/src/Unison/Server/Backend.hs +++ b/parser-typechecker/src/Unison/Server/Backend.hs @@ -36,7 +36,7 @@ import qualified Unison.Codebase as Codebase import Unison.Codebase.Branch (Branch, Branch0) import qualified Unison.Codebase.Branch as Branch import qualified Unison.Codebase.Branch.Names as Branch -import qualified Unison.Codebase.Causal (RawHash(RawHash)) +import qualified Unison.Codebase.Causal.Type (RawHash(RawHash)) import Unison.Codebase.Editor.DisplayObject import qualified Unison.Codebase.Metadata as Metadata import Unison.Codebase.Path (Path) diff --git a/parser-typechecker/src/Unison/Util/Star3.hs b/parser-typechecker/src/Unison/Util/Star3.hs index 8b5bbde33b..d32ef021df 100644 --- a/parser-typechecker/src/Unison/Util/Star3.hs +++ b/parser-typechecker/src/Unison/Util/Star3.hs @@ -6,7 +6,6 @@ import Unison.Prelude import Unison.Util.Relation (Relation) import qualified Data.Set as Set -import qualified Unison.Hashable as H import qualified Unison.Util.Relation as R import qualified Data.Map as Map @@ -224,11 +223,3 @@ instance (Ord fact, Ord d1, Ord d2, Ord d3) => Monoid (Star3 fact d1 d2 d3) wher d1' = d1 s1 <> d1 s2 d2' = d2 s1 <> d2 s2 d3' = d3 s1 <> d3 s2 - -instance (H.Hashable fact, H.Hashable d1, H.Hashable d2, H.Hashable d3) - => H.Hashable (Star3 fact d1 d2 d3) where - tokens s = - [ H.accumulateToken (fact s) - , H.accumulateToken (d1 s) - , H.accumulateToken (d2 s) - , H.accumulateToken (d3 s) ] diff --git a/parser-typechecker/tests/Suite.hs b/parser-typechecker/tests/Suite.hs index 16e1a8d839..c208d544db 100644 --- a/parser-typechecker/tests/Suite.hs +++ b/parser-typechecker/tests/Suite.hs @@ -10,7 +10,6 @@ import qualified Unison.Core.Test.Name as Name import qualified Unison.Test.ABT as ABT import qualified Unison.Test.Cache as Cache import qualified Unison.Test.Codebase.Branch as Branch -import qualified Unison.Test.Codebase.Causal as Causal import qualified Unison.Test.Codebase.Path as Path import qualified Unison.Test.ColorText as ColorText import qualified Unison.Test.DataDeclaration as DataDeclaration @@ -36,7 +35,6 @@ import qualified Unison.Test.Var as Var import qualified Unison.Test.ANF as ANF import qualified Unison.Test.MCode as MCode import qualified Unison.Test.CodebaseInit as CodebaseInit --- import qualified Unison.Test.BaseUpgradePushPullTest as BaseUpgradePushPullTest test :: Test () test = tests @@ -57,7 +55,6 @@ test = tests , Text.test , Relation.test , Path.test - , Causal.test , Referent.test , ABT.test , ANF.test diff --git a/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs b/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs deleted file mode 100644 index 005e520848..0000000000 --- a/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs +++ /dev/null @@ -1,318 +0,0 @@ -{-# LANGUAGE PartialTypeSignatures #-} - -module Unison.Test.Codebase.Causal where - -import EasyTest -import Unison.Codebase.Causal ( Causal(Cons, Merge) - , RawHash(..) - , one - , currentHash - , before - ) -import qualified Unison.Codebase.Causal as Causal -import qualified Unison.Codebase.Causal.FoldHistory as Causal -import Control.Monad.Trans.State (State, state, put) -import Data.Int (Int64) -import qualified Data.Map as Map -import qualified Data.Set as Set -import Control.Monad (replicateM_) -import Control.Monad.Extra (ifM) -import Control.Applicative (liftA2) -import Data.List (foldl1') -import Data.Functor ((<&>)) -import Unison.Hashable (Hashable) -import Data.Set (Set) -import Data.Functor.Identity -import Unison.Hash (Hash) - -c :: M (Causal M Int64 [Int64]) -c = merge (foldr cons (one [1]) t1) - (foldr cons (foldr cons (one [1]) t2) t3) - where - t1, t2, t3 :: [[Int64]] - t1 = fmap pure [5,4..2] - t2 = fmap pure [100..105] - t3 = fmap pure [999,998] - -c2 :: M (Causal M Int64 [Int64]) -c2 = merge (foldr cons (one [1]) t1) - (foldr cons (foldr cons (one [1]) t2) t3) - where - t1, t2, t3 :: [[Int64]] - t1 = fmap pure [5,4..2] - t2 = fmap pure [10,9..2] - t3 = fmap pure [999,998] - -{- -λ> show Unison.Test.Codebase.Causal.c -"Identity Merge 4gP [999,5] [\"3rG\",\"58U\"]" -λ> runIdentity Unison.Test.Codebase.Causal.result -step a=fromList [1,10] seen=[] rest=fromList [Merge 4gP [999,5] ["3rG","58U"]] -step a=fromList [1,10] seen=["4gP"] rest=fromList [Cons 3rG [999] 4LX,Cons 58U [5] 4vC] -step a=fromList [1,10] seen=["3rG","4gP"] rest=fromList [Cons 58U [5] 4vC,Cons 4LX [998] 26J] -step a=fromList [1,10] seen=["3rG","4gP","58U"] rest=fromList [Cons 4LX [998] 26J,Cons 4vC [4] yFt] -step a=fromList [1,10] seen=["3rG","4LX","4gP","58U"] rest=fromList [Cons 4vC [4] yFt,Cons 26J [100] 4FR] -step a=fromList [1,10] seen=["3rG","4LX","4gP","4vC","58U"] rest=fromList [Cons 26J [100] 4FR,Cons yFt [3] 3So] -step a=fromList [1,10] seen=["26J","3rG","4LX","4gP","4vC","58U"] rest=fromList [Cons yFt [3] 3So,Cons 4FR [101] 4az] -step a=fromList [1,10] seen=["yFt","26J","3rG","4LX","4gP","4vC","58U"] rest=fromList [Cons 4FR [101] 4az,Cons 3So [2] 5Lu] -step a=fromList [1,10] seen=["yFt","26J","3rG","4FR","4LX","4gP","4vC","58U"] rest=fromList [Cons 3So [2] 5Lu,Cons 4az [102] 2V3] -step a=fromList [1,10] seen=["yFt","26J","3So","3rG","4FR","4LX","4gP","4vC","58U"] rest=fromList [Cons 4az [102] 2V3,One 5Lu [1]] -step a=fromList [1,10] seen=["yFt","26J","3So","3rG","4FR","4LX","4az","4gP","4vC","58U"] rest=fromList [One 5Lu [1],Cons 2V3 [103] 5pS] -step a=fromList [10] seen=["yFt","26J","3So","3rG","4FR","4LX","4az","4gP","4vC","58U","5Lu"] rest=fromList [Cons 2V3 [103] 5pS] -step a=fromList [10] seen=["yFt","26J","2V3","3So","3rG","4FR","4LX","4az","4gP","4vC","58U","5Lu"] rest=fromList [Cons 5pS [104] 2tq] -step a=fromList [10] seen=["yFt","26J","2V3","3So","3rG","4FR","4LX","4az","4gP","4vC","58U","5Lu","5pS"] rest=fromList [Cons 2tq [105] 5Lu] -step a=fromList [10] seen=["yFt","26J","2V3","2tq","3So","3rG","4FR","4LX","4az","4gP","4vC","58U","5Lu","5pS"] rest=fromList [One 5Lu [1]] -step a=fromList [10] seen=["yFt","26J","2V3","2tq","3So","3rG","4FR","4LX","4az","4gP","4vC","58U","5Lu","5pS"] rest=fromList [] -Unsatisfied (fromList [10]) - -λ> runIdentity Unison.Test.Codebase.Causal.result (with c2) -step a=fromList [1,10] seen=[] rest=fromList [Cons 2tg [999] 3AW] -step a=fromList [1,10] seen=["2tg"] rest=fromList [Cons 3AW [998] 33b] -step a=fromList [1,10] seen=["2tg","3AW"] rest=fromList [Cons 33b [10] 2NF] -step a=fromList [1] seen=["2tg","33b","3AW"] rest=fromList [Cons 2NF [9] 57i] -step a=fromList [1] seen=["2NF","2tg","33b","3AW"] rest=fromList [Cons 57i [8] ipV] -step a=fromList [1] seen=["2NF","2tg","33b","3AW","57i"] rest=fromList [Cons ipV [7] 3BZ] -step a=fromList [1] seen=["ipV","2NF","2tg","33b","3AW","57i"] rest=fromList [Cons 3BZ [6] 58U] -step a=fromList [1] seen=["ipV","2NF","2tg","33b","3AW","3BZ","57i"] rest=fromList [Cons 58U [5] 4vC] -step a=fromList [1] seen=["ipV","2NF","2tg","33b","3AW","3BZ","57i","58U"] rest=fromList [Cons 4vC [4] yFt] -step a=fromList [1] seen=["ipV","2NF","2tg","33b","3AW","3BZ","4vC","57i","58U"] rest=fromList [Cons yFt [3] 3So] -step a=fromList [1] seen=["ipV","yFt","2NF","2tg","33b","3AW","3BZ","4vC","57i","58U"] rest=fromList [Cons 3So [2] 5Lu] -step a=fromList [1] seen=["ipV","yFt","2NF","2tg","33b","3AW","3BZ","3So","4vC","57i","58U"] rest=fromList [One 5Lu [1]] -Satisfied (fromList []) -λ> - --} - -test :: Test () -test = - scope "causal" - . tests - $ [ scope "threeWayMerge.ex1" - . expect - $ Causal.head testThreeWay - == Set.fromList [3, 4] - , scope "threeWayMerge.idempotent" - . expect - $ testIdempotent oneCausal -- == oneCausal - -- $ prop_mergeIdempotent - - , scope "threeWayMerge.identity" - . expect - $ testIdentity oneCausal emptyCausal - -- $ prop_mergeIdentity - , scope "threeWayMerge.commutative" - . expect - $ testCommutative (Set.fromList [3,4]) oneRemoved - -- $ prop_mergeCommutative - {- , scope "threeWayMerge.commonAncestor" - . expect - $ testCommonAncestor - -- $ prop_mergeCommonAncestor --} - , scope "lca.hasLca" lcaPairTest - , scope "lca.noLca" noLcaPairTest - , scope "beforeHash" $ beforeHashTests - ] - -beforeHashTests :: Test () -beforeHashTests = do - -- c1 and c2 have unrelated histories - c1 <- pure $ Causal.one (0 :: Int64) - c2 <- pure $ Causal.one (1 :: Int64) - -- c1' and c2' are extension of c1 and c2, respectively - c1' <- pure $ Causal.cons 2 c1 - c2' <- pure $ Causal.cons 3 c2 - c12 <- Causal.threeWayMerge sillyMerge c1' c2' - - -- verifying basic properties of `before` for these examples - expect' =<< before c1 c1 - expect' =<< before c1 c12 - expect' =<< before c2 c2 - expect' =<< before c2 c12 - expect' =<< before c2 c2' - expect' =<< before c1 c1' - expect' . not =<< before c1 c2 - expect' . not =<< before c2 c1 - - -- make sure the search cutoff works - - -- even though both start with `Causal.one 0`, that's - -- more than 10 steps back from `longCausal 1000`, so we - -- want this to be false - expect' . not =<< before c1 (longCausal (1000 :: Int64)) - ok - where - before h c = Causal.beforeHash 10 (Causal.currentHash h) c - sillyMerge _lca l _r = pure l - longCausal 0 = Causal.one 0 - longCausal n = Causal.cons n (longCausal (n - 1)) - -int64 :: Test Int64 -int64 = random - -extend - :: Int - -> Causal Identity Hash Int64 - -> Test (Causal Identity Hash Int64) -extend 0 ca = pure ca -extend n ca = do - i <- int64 - extend (n-1) (Causal.cons i ca) - -lcaPair :: Test (Causal Identity Hash Int64, Causal Identity Hash Int64) -lcaPair = do - base <- one <$> int64 - ll <- int' 0 20 - lr <- int' 0 20 - (,) <$> extend ll base <*> extend lr base - -lcaPairTest :: Test () -lcaPairTest = replicateM_ 50 test >> ok - where - test = runIdentity . uncurry Causal.lca <$> lcaPair >>= \case - Just _ -> pure () - Nothing -> crash "expected lca" - -noLcaPair - :: Test (Causal Identity Hash Int64, Causal Identity Hash Int64) -noLcaPair = do - basel <- one <$> int64 - baser <- one <$> int64 - ll <- int' 0 20 - lr <- int' 0 20 - (,) <$> extend ll basel <*> extend lr baser - -noLcaPairTest :: Test () -noLcaPairTest = replicateM_ 50 test >> ok - where - test = runIdentity . uncurry Causal.lca <$> noLcaPair >>= \case - Nothing -> pure () - Just _ -> crash "expected no lca" - -oneRemoved :: Causal Identity Hash (Set Int64) -oneRemoved = foldr Causal.cons - (one (Set.singleton 1)) - (Set.fromList <$> [[2, 3, 4], [1, 2, 3, 4], [1, 2]]) - -twoRemoved :: Causal Identity Hash (Set Int64) -twoRemoved = foldr Causal.cons - (one (Set.singleton 1)) - (Set.fromList <$> [[1, 3, 4], [1, 2, 3], [1, 2]]) - -testThreeWay :: Causal Identity Hash (Set Int64) -testThreeWay = runIdentity - $ threeWayMerge' oneRemoved twoRemoved - -setCombine :: Applicative m => Ord a => Set a -> Set a -> m (Set a) -setCombine a b = pure $ a <> b - -setDiff :: Applicative m => Ord a => Set a -> Set a -> m (Set a, Set a) -setDiff old new = pure (Set.difference new old, Set.difference old new) - -setPatch :: Applicative m => Ord a => Set a -> (Set a, Set a) -> m (Set a) -setPatch s (added, removed) = pure (added <> Set.difference s removed) - --- merge x x == x, should not add a new head, and also the value at the head should be the same of course -testIdempotent :: Causal Identity Hash (Set Int64) -> Bool -- Causal Identity Hash (Set Int64) -testIdempotent causal = - runIdentity (threeWayMerge' causal causal) - == causal - --- prop_mergeIdempotent :: Bool --- prop_mergeIdempotent = and (map testIdempotent (take 1000 generateRandomCausals)) - -oneCausal :: Causal Identity Hash (Set Int64) -oneCausal = Causal.one (Set.fromList [1]) - --- generateRandomCausals :: Causal Identity Hash (Set Int64) --- generateRandomCausals = undefined - -easyCombine - :: (Monad m, Semigroup d) - => (e -> e -> m e) - -> (e -> e -> m d) - -> (e -> d -> m e) - -> (Maybe e -> e -> e -> m e) -easyCombine comb _ _ Nothing l r = comb l r -easyCombine _ diff appl (Just ca) l r = do - dl <- diff ca l - dr <- diff ca r - appl ca (dl <> dr) - -threeWayMerge' - :: Causal Identity Hash (Set Int64) - -> Causal Identity Hash (Set Int64) - -> Identity (Causal Identity Hash (Set Int64)) -threeWayMerge' = Causal.threeWayMerge (easyCombine setCombine setDiff setPatch) - --- merge x mempty == x, merge mempty x == x -testIdentity :: Causal Identity Hash (Set Int64) -> Causal Identity Hash (Set Int64) -> Bool -testIdentity causal mempty = - (threeWayMerge' causal mempty) - == (threeWayMerge' mempty causal) - -emptyCausal :: Causal Identity Hash (Set Int64) -emptyCausal = one (Set.empty) - --- merge (cons hd tl) tl == cons hd tl, merge tl (cons hd tl) == cons hd tl -testCommutative :: Set Int64 -> Causal Identity Hash (Set Int64) -> Bool -testCommutative hd tl = (threeWayMerge' (Causal.cons hd tl) tl) - == (threeWayMerge' tl (Causal.cons hd tl)) - - -{- -testCommonAncestor :: -testCommonAncestor = --} - - - --- [ scope "foldHistoryUntil" . expect $ execState c mempty == Set.fromList [3,2,1]] - ---result :: M (Causal.FoldHistoryResult (Set Int64)) ---result = Causal.foldHistoryUntil f (Set.fromList [10, 1]) =<< c2 where --- f s e = let s' = Set.difference s (Set.fromList e) in (s', Set.null s') - -result, result2 :: M (Causal.FoldHistoryResult (Set Int64)) -(result, result2) = - (Causal.foldHistoryUntil f (Set.fromList [10, 1]) =<< (do c' <- c; put mempty ; pure c') - ,Causal.foldHistoryUntil f (Set.fromList [10, 1]) =<< (do c' <- c2; put mempty ; pure c')) - where f s e = let s' = Set.difference s (Set.fromList e) in (s', Set.null s') - ----- special cons and merge that mess with state monad for logging -type M = State [[Int64]] -cons :: [Int64] - -> Causal M h [Int64] - -> Causal M h [Int64] - -merge :: Causal M h [Int64] - -> Causal M h [Int64] - -> M (Causal M h [Int64]) - -(cons, merge) = (cons'' pure, merge'' pure) - where - pure :: Causal m h [Int64] -> M (Causal m h [Int64]) - pure c = state (\s -> (c, Causal.head c : s)) - -cons'' :: Hashable e1 - => (Causal m1 h e2 -> m2 (Causal m2 h e1)) - -> e1 -> Causal m1 h e2 -> Causal m2 h e1 -cons'' pure e tl = - Cons (RawHash $ Causal.hash [Causal.hash e, unRawHash . currentHash $ tl]) e (currentHash tl, pure tl) - -merge'' :: (Monad m, Semigroup e) - => (Causal m h e -> m (Causal m h e)) - -> Causal m h e -> Causal m h e -> m (Causal m h e) -merge'' pure a b = - ifM (before a b) (pure b) . ifM (before b a) (pure a) $ case (a, b) of - (Merge _ _ tls, Merge _ _ tls2) -> merge0 $ Map.union tls tls2 - (Merge _ _ tls, b) -> merge0 $ Map.insert (currentHash b) (pure b) tls - (b, Merge _ _ tls) -> merge0 $ Map.insert (currentHash b) (pure b) tls - (a, b) -> - merge0 $ Map.fromList [(currentHash a, pure a), (currentHash b, pure b)] - where - merge0 m = - let e = if Map.null m - then error "Causal.merge0 empty map" - else foldl1' (liftA2 (<>)) (fmap Causal.head <$> Map.elems m) - h = Causal.hash (Map.keys m) -- sorted order - in e <&> \e -> Merge (RawHash h) e m - diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index ef17d0a928..d83a349309 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -38,6 +38,7 @@ library Unison.Codebase.BuiltinAnnotation Unison.Codebase.Causal Unison.Codebase.Causal.FoldHistory + Unison.Codebase.Causal.Type Unison.Codebase.CodeLookup Unison.Codebase.CodeLookup.Util Unison.Codebase.Editor.DisplayObject @@ -82,6 +83,7 @@ library Unison.Hashing.V2.Causal Unison.Hashing.V2.Convert Unison.Hashing.V2.DataDeclaration + Unison.Hashing.V2.Hashable Unison.Hashing.V2.Kind Unison.Hashing.V2.Patch Unison.Hashing.V2.Pattern @@ -352,7 +354,6 @@ executable tests Unison.Test.ANF Unison.Test.Cache Unison.Test.Codebase.Branch - Unison.Test.Codebase.Causal Unison.Test.Codebase.Path Unison.Test.CodebaseInit Unison.Test.ColorText diff --git a/stack.yaml b/stack.yaml index b5c9e00188..9c1b8ad8b8 100644 --- a/stack.yaml +++ b/stack.yaml @@ -44,7 +44,7 @@ extra-deps: ghc-options: # All packages - "$locals": -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info #-freverse-errors + "$locals": -Wall -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info #-freverse-errors # See https://github.com/haskell/haskell-language-server/issues/208 "$everything": -haddock diff --git a/unison-core/src/Unison/NameSegment.hs b/unison-core/src/Unison/NameSegment.hs index 1ccfdbe6b5..a5b2c49105 100644 --- a/unison-core/src/Unison/NameSegment.hs +++ b/unison-core/src/Unison/NameSegment.hs @@ -7,7 +7,6 @@ import Unison.Prelude import qualified Data.Text as Text import qualified Data.Text.Lazy.Builder as Text (Builder) import qualified Data.Text.Lazy.Builder as Text.Builder -import qualified Unison.Hashable as H import Unison.Util.Alphabetical (Alphabetical, compareAlphabetical) -- Represents the parts of a name between the `.`s @@ -46,9 +45,6 @@ reverseSegments' = go rem = Text.dropEnd (Text.length seg + 1) t in seg : go rem -instance H.Hashable NameSegment where - tokens s = [H.Text (toText s)] - isEmpty :: NameSegment -> Bool isEmpty ns = toText ns == mempty From f0ab98764c6289f91ceedaef8f2b5be165300716 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 12 Nov 2021 08:59:01 -0600 Subject: [PATCH 112/297] Set new namespace root after migrating, but before cleanup --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 584f81a836..3544047289 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -109,6 +109,8 @@ migrateSchema12 conn codebase = do (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) `runReaderT` Env {db = conn, codebase} `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty + let (_, newRootCausalHashId) = causalMapping migrationState ^?! ix rootCausalHashId + runDB conn . liftQ $ Q.setNamespaceRoot newRootCausalHashId ifor_ (objLookup migrationState) \oldObjId (newObjId, _, _, _) -> do (runDB conn . liftQ) do Q.recordObjectRehash oldObjId newObjId @@ -124,7 +126,7 @@ migrateSchema12 conn codebase = do error :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () error e = liftIO $ putStrLn $ "Error: " ++ show e allDone :: ReaderT (Env m v a) (StateT MigrationState m) () - allDone = liftIO $ putStrLn $ "All Done" + allDone = liftIO $ putStrLn $ "Finished migrating, initiating cleanup." in Sync.Progress {need, done, error, allDone} type Old a = a From 18da0e9cc95cd20f0c27c8b008c6003581ce5f6e Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 12 Nov 2021 09:58:46 -0600 Subject: [PATCH 113/297] Use savepoints in migration --- .../U/Codebase/Sqlite/Queries.hs | 33 +++++++++++++- .../SqliteCodebase/MigrateSchema12.hs | 45 ++++++++++++------- 2 files changed, 61 insertions(+), 17 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index a1f139ac7b..75d82019fa 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -120,6 +120,8 @@ module U.Codebase.Sqlite.Queries ( savepoint, release, rollbackRelease, + rollbackTo, + withSavepoint, setJournalMode, traceConnectionFile, @@ -186,6 +188,7 @@ import U.Util.Hash (Hash) import qualified U.Util.Hash as Hash import UnliftIO (MonadUnliftIO, throwIO, try, tryAny, withRunInIO) import UnliftIO.Concurrent (myThreadId) +import UnliftIO.Exception (bracket_, onException) -- * types type DB m = (MonadIO m, MonadReader Connection m) @@ -918,12 +921,40 @@ withImmediateTransaction action = do -- | low-level transaction stuff -savepoint, release, rollbackTo, rollbackRelease :: DB m => String -> m () + +-- | Create a savepoint, which is a named transaction which may wrap many nested +-- sub-transactions. +savepoint :: DB m => String -> m () savepoint name = execute_ (fromString $ "SAVEPOINT " ++ name) + +-- | Release a savepoint, which will commit the results once all +-- wrapping transactions/savepoints are commited. +release :: DB m => String -> m () release name = execute_ (fromString $ "RELEASE " ++ name) + +-- | Roll the database back to its state from when the savepoint was created. +-- Note: this also re-starts the savepoint and it must still be released if that is the +-- intention. See 'rollbackRelease'. +rollbackTo :: DB m => String -> m () rollbackTo name = execute_ (fromString $ "ROLLBACK TO " ++ name) + +-- | Roll back the savepoint and immediately release it. +-- This effectively _aborts_ the savepoint, useful if an irrecoverable error is +-- encountered. +rollbackRelease :: DB m => String -> m () rollbackRelease name = rollbackTo name *> release name +-- | Runs the provided action within a savepoint. +-- Releases the savepoint on completion. +-- If an exception occurs, the savepoint will be rolled-back and released, +-- abandoning all changes. +withSavepoint :: (DB m, MonadUnliftIO m) => String -> m a -> m a +withSavepoint name act = + bracket_ + (savepoint name) + (release name) + (act `onException` rollbackTo name) + -- * orphan instances deriving via Text instance ToField Base32Hex diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 3544047289..40a596d3bf 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -75,6 +75,8 @@ import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type import Unison.Var (Var) +import UnliftIO.Exception (bracket_, onException) +import UnliftIO (MonadUnliftIO) -- todo: -- * write a harness to call & seed algorithm @@ -98,25 +100,36 @@ import Unison.Var (Var) -- * [ ] Update the schema version in the database after migrating so we only migrate -- once. -migrateSchema12 :: forall a m v. (MonadIO m, Var v) => Connection -> Codebase m v a -> m () +migrateSchema12 :: forall a m v. (MonadUnliftIO m, Var v) => Connection -> Codebase m v a -> m () migrateSchema12 conn codebase = do - rootCausalHashId <- runDB conn (liftQ Q.loadNamespaceRoot) - watches <- - foldMapM - (\watchKind -> map (W watchKind) <$> Codebase.watches codebase (Cv.watchKind2to1 watchKind)) - [WK.RegularWatch, WK.TestWatch] migrationState <- - (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) - `runReaderT` Env {db = conn, codebase} - `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty - let (_, newRootCausalHashId) = causalMapping migrationState ^?! ix rootCausalHashId - runDB conn . liftQ $ Q.setNamespaceRoot newRootCausalHashId - ifor_ (objLookup migrationState) \oldObjId (newObjId, _, _, _) -> do - (runDB conn . liftQ) do - Q.recordObjectRehash oldObjId newObjId - -- what about deleting old watches? - runDB conn (liftQ Q.garbageCollectObjectsWithoutHashes) + withinSavepoint "MIGRATE12" $ do + rootCausalHashId <- runDB conn (liftQ Q.loadNamespaceRoot) + watches <- + foldMapM + (\watchKind -> map (W watchKind) <$> Codebase.watches codebase (Cv.watchKind2to1 watchKind)) + [WK.RegularWatch, WK.TestWatch] + migrationState <- + (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) + `runReaderT` Env {db = conn, codebase} + `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty + let (_, newRootCausalHashId) = causalMapping migrationState ^?! ix rootCausalHashId + runDB conn . liftQ $ Q.setNamespaceRoot newRootCausalHashId + pure migrationState + + withinSavepoint "MIGRATE12_CLEANUP" do + ifor_ (objLookup migrationState) \oldObjId (newObjId, _, _, _) -> do + (runDB conn . liftQ) do + Q.recordObjectRehash oldObjId newObjId + -- what about deleting old watches? + runDB conn (liftQ Q.garbageCollectObjectsWithoutHashes) where + withinSavepoint :: (String -> m c -> m c) + withinSavepoint name act = + bracket_ + (runDB conn $ Q.savepoint name) + (runDB conn $ Q.release name) + (act `onException` runDB conn (Q.rollbackTo name)) progress :: Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity progress = let need :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () From f4d92aedecd15a555223cac3c1d5f582e0ffd769 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 12 Nov 2021 10:26:29 -0600 Subject: [PATCH 114/297] Propagate UnliftIO --- .../src/Unison/Codebase/SqliteCodebase.hs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 184b5858c5..c3fae5cfc1 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -109,7 +109,7 @@ import Unison.Type (Type) import qualified Unison.Type as Type import qualified Unison.Util.Pretty as P import qualified Unison.WatchKind as UF -import UnliftIO (MonadIO, catchIO, finally, liftIO) +import UnliftIO (MonadIO, catchIO, finally, liftIO, MonadUnliftIO) import UnliftIO.Directory (canonicalizePath, createDirectoryIfMissing, doesDirectoryExist, doesFileExist) import UnliftIO.STM @@ -124,11 +124,11 @@ codebasePath = ".unison" "v2" "unison.sqlite3" v2dir :: FilePath -> FilePath v2dir root = root ".unison" "v2" -init :: HasCallStack => (MonadIO m, MonadCatch m) => Codebase.Init m Symbol Ann +init :: HasCallStack => (MonadUnliftIO m, MonadCatch m) => Codebase.Init m Symbol Ann init = Codebase.Init getCodebaseOrError createCodebaseOrError v2dir createCodebaseOrError :: - (MonadIO m, MonadCatch m) => + (MonadUnliftIO m, MonadCatch m) => Codebase.DebugName -> CodebasePath -> m (Either Codebase1.CreateCodebaseError (m (), Codebase m Symbol Ann)) @@ -148,7 +148,7 @@ data CreateCodebaseError deriving (Show) createCodebaseOrError' :: - (MonadIO m, MonadCatch m) => + (MonadUnliftIO m, MonadCatch m) => Codebase.DebugName -> CodebasePath -> m (Either CreateCodebaseError (m (), Codebase m Symbol Ann)) @@ -179,7 +179,7 @@ openOrCreateCodebaseConnection debugName path = do unsafeGetConnection debugName path -- get the codebase in dir -getCodebaseOrError :: forall m. (MonadIO m, MonadCatch m) => Codebase.DebugName -> CodebasePath -> m (Either Codebase1.Pretty (m (), Codebase m Symbol Ann)) +getCodebaseOrError :: forall m. (MonadUnliftIO m, MonadCatch m) => Codebase.DebugName -> CodebasePath -> m (Either Codebase1.Pretty (m (), Codebase m Symbol Ann)) getCodebaseOrError debugName dir = do prettyDir <- liftIO $ P.string <$> canonicalizePath dir let prettyError v = P.wrap $ "I don't know how to handle " <> P.shown v <> "in" <> P.backticked' prettyDir "." @@ -274,7 +274,7 @@ shutdownConnection conn = do sqliteCodebase :: forall m. - (MonadIO m, MonadCatch m) => + (MonadUnliftIO m, MonadCatch m) => Codebase.DebugName -> CodebasePath -> m (Either SchemaVersion (m (), Codebase m Symbol Ann)) @@ -1057,7 +1057,7 @@ syncProgress = Sync.Progress need done warn allDone viewRemoteBranch' :: forall m. - (MonadIO m, MonadCatch m) => + (MonadUnliftIO m, MonadCatch m) => ReadRemoteNamespace -> m (Either C.GitError (m (), Branch m, CodebasePath)) viewRemoteBranch' (repo, sbh, path) = runExceptT @C.GitError do From a39d898589e057ec884dfd0009ab2ced7f73767d Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 12 Nov 2021 10:39:29 -0600 Subject: [PATCH 115/297] Make migration transaction safe and upgrade to schema version 2 after completion. --- .../U/Codebase/Sqlite/Queries.hs | 9 +++++ .../SqliteCodebase/MigrateSchema12.hs | 35 +++++++++---------- 2 files changed, 25 insertions(+), 19 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 75d82019fa..abe6ce9731 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -112,6 +112,7 @@ module U.Codebase.Sqlite.Queries ( -- * db misc createSchema, schemaVersion, + setSchemaVersion, setFlags, DataVersion, @@ -259,6 +260,14 @@ schemaVersion = queryAtoms_ sql >>= \case vs -> error $ show (MultipleSchemaVersions vs) where sql = "SELECT version from schema_version;" +setSchemaVersion :: DB m => SchemaVersion -> m () +setSchemaVersion schemaVersion = execute sql (Only schemaVersion) + where + sql = [here| + UPDATE schema_version + SET version = ? + |] + saveHash :: DB m => Base32Hex -> m HashId saveHash base32 = execute sql (Only base32) >> queryOne (loadHashId base32) where sql = [here| diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 40a596d3bf..5c0928a403 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -80,11 +80,11 @@ import UnliftIO (MonadUnliftIO) -- todo: -- * write a harness to call & seed algorithm --- * [ ] embed migration in a transaction/savepoint and ensure that we never leave the codebase in a +-- * [x] embed migration in a transaction/savepoint and ensure that we never leave the codebase in a -- weird state even if we crash. -- * [x] may involve writing a `Progress` -- * raw DB things: --- * [ ] write new namespace root after migration. +-- * [x] write new namespace root after migration. -- * [ ] overwrite object_id column in hash_object table to point at new objects <-- mitchell has started -- * [ ] delete references to old objects in index tables (where else?) -- * [ ] delete old objects @@ -97,32 +97,29 @@ import UnliftIO (MonadUnliftIO) -- * [ ] Delete V1 Hashing to ensure it's unused -- * [x] Salt V2 hashes with version number -- * [ ] confirm that pulls are handled ok --- * [ ] Update the schema version in the database after migrating so we only migrate +-- * [x] Update the schema version in the database after migrating so we only migrate -- once. migrateSchema12 :: forall a m v. (MonadUnliftIO m, Var v) => Connection -> Codebase m v a -> m () migrateSchema12 conn codebase = do - migrationState <- - withinSavepoint "MIGRATE12" $ do - rootCausalHashId <- runDB conn (liftQ Q.loadNamespaceRoot) - watches <- - foldMapM - (\watchKind -> map (W watchKind) <$> Codebase.watches codebase (Cv.watchKind2to1 watchKind)) - [WK.RegularWatch, WK.TestWatch] - migrationState <- - (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) - `runReaderT` Env {db = conn, codebase} - `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty - let (_, newRootCausalHashId) = causalMapping migrationState ^?! ix rootCausalHashId - runDB conn . liftQ $ Q.setNamespaceRoot newRootCausalHashId - pure migrationState - - withinSavepoint "MIGRATE12_CLEANUP" do + withinSavepoint "MIGRATESCHEMA12" $ do + rootCausalHashId <- runDB conn (liftQ Q.loadNamespaceRoot) + watches <- + foldMapM + (\watchKind -> map (W watchKind) <$> Codebase.watches codebase (Cv.watchKind2to1 watchKind)) + [WK.RegularWatch, WK.TestWatch] + migrationState <- + (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) + `runReaderT` Env {db = conn, codebase} + `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty + let (_, newRootCausalHashId) = causalMapping migrationState ^?! ix rootCausalHashId + runDB conn . liftQ $ Q.setNamespaceRoot newRootCausalHashId ifor_ (objLookup migrationState) \oldObjId (newObjId, _, _, _) -> do (runDB conn . liftQ) do Q.recordObjectRehash oldObjId newObjId -- what about deleting old watches? runDB conn (liftQ Q.garbageCollectObjectsWithoutHashes) + runDB conn . liftQ $ Q.setSchemaVersion 2 where withinSavepoint :: (String -> m c -> m c) withinSavepoint name act = From dd6d82da211abd2f45d3be8ffc9a7af310fedfcc Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 12 Nov 2021 11:05:15 -0600 Subject: [PATCH 116/297] Garbage collect watches without objects --- .../codebase-sqlite/U/Codebase/Sqlite/Queries.hs | 13 +++++++++++++ .../Codebase/SqliteCodebase/MigrateSchema12.hs | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index abe6ce9731..2c30983000 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -108,6 +108,7 @@ module U.Codebase.Sqlite.Queries ( -- * garbage collection garbageCollectObjectsWithoutHashes, + garbageCollectWatchesWithoutObjects, -- * db misc createSchema, @@ -704,6 +705,18 @@ garbageCollectObjectsWithoutHashes = do [here| DROP TABLE object_without_hash |] + execute_ "VACUUM" + +-- | Delete all +garbageCollectWatchesWithoutObjects :: DB m => m () +garbageCollectWatchesWithoutObjects = do + execute_ + [here| + DELETE FROM watch + WHERE watch.hash_id NOT IN + (SELECT hash_object.hash_id FROM hash_object) + |] + execute_ "VACUUM" addToDependentsIndex :: DB m => Reference.Reference -> Reference.Id -> m () addToDependentsIndex dependency dependent = execute sql (dependency :. dependent) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 5c0928a403..c43fc91ddd 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -117,8 +117,8 @@ migrateSchema12 conn codebase = do ifor_ (objLookup migrationState) \oldObjId (newObjId, _, _, _) -> do (runDB conn . liftQ) do Q.recordObjectRehash oldObjId newObjId - -- what about deleting old watches? runDB conn (liftQ Q.garbageCollectObjectsWithoutHashes) + runDB conn (liftQ Q.garbageCollectWatchesWithoutObjects) runDB conn . liftQ $ Q.setSchemaVersion 2 where withinSavepoint :: (String -> m c -> m c) From 56666e05200d98da2f35a123f8101f2131ac6911 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 12 Nov 2021 13:26:13 -0600 Subject: [PATCH 117/297] Move vacuum out of transactions --- codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs | 9 ++++++--- .../Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 1 + 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 2c30983000..bdab1346de 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -107,6 +107,7 @@ module U.Codebase.Sqlite.Queries ( causalHashIdByBase32Prefix, -- * garbage collection + vacuum, garbageCollectObjectsWithoutHashes, garbageCollectWatchesWithoutObjects, @@ -565,7 +566,6 @@ clearWatches :: DB m => m () clearWatches = do execute_ "DELETE FROM watch_result" execute_ "DELETE FROM watch" - execute_ "VACUUM" -- * Index-building addToTypeIndex :: DB m => Reference' TextId HashId -> Referent.Id -> m () @@ -705,7 +705,6 @@ garbageCollectObjectsWithoutHashes = do [here| DROP TABLE object_without_hash |] - execute_ "VACUUM" -- | Delete all garbageCollectWatchesWithoutObjects :: DB m => m () @@ -716,7 +715,11 @@ garbageCollectWatchesWithoutObjects = do WHERE watch.hash_id NOT IN (SELECT hash_object.hash_id FROM hash_object) |] - execute_ "VACUUM" + +-- | Clean the database and recover disk space. +-- This is an expensive operation. Also note that it cannot be executed within a transaction. +vacuum :: DB m => m () +vacuum = execute_ "VACUUM" addToDependentsIndex :: DB m => Reference.Reference -> Reference.Id -> m () addToDependentsIndex dependency dependent = execute sql (dependency :. dependent) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index c43fc91ddd..69a147c6ad 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -120,6 +120,7 @@ migrateSchema12 conn codebase = do runDB conn (liftQ Q.garbageCollectObjectsWithoutHashes) runDB conn (liftQ Q.garbageCollectWatchesWithoutObjects) runDB conn . liftQ $ Q.setSchemaVersion 2 + runDB conn (liftQ Q.vacuum) where withinSavepoint :: (String -> m c -> m c) withinSavepoint name act = From f69a2e5c3fde6203cf17e4639c9a30aacd15fa37 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 15 Nov 2021 12:14:45 -0600 Subject: [PATCH 118/297] Update checklist --- .../Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 69a147c6ad..5e6b1c4984 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -85,9 +85,9 @@ import UnliftIO (MonadUnliftIO) -- * [x] may involve writing a `Progress` -- * raw DB things: -- * [x] write new namespace root after migration. --- * [ ] overwrite object_id column in hash_object table to point at new objects <-- mitchell has started --- * [ ] delete references to old objects in index tables (where else?) --- * [ ] delete old objects +-- * [x] overwrite object_id column in hash_object table to point at new objects +-- * [x] delete references to old objects in index tables (where else?) +-- * [x] delete old objects -- -- * refer to github megaticket https://github.com/unisonweb/unison/issues/2471 -- ☢️ [x] incorporate type signature into hash of term <- chris/arya have started ☢️ @@ -95,8 +95,11 @@ import UnliftIO (MonadUnliftIO) -- * [ ] Refactor Causal helper functions to use V2 hashing -- * [ ] I guess move Hashable to V2.Hashing pseudo-package -- * [ ] Delete V1 Hashing to ensure it's unused --- * [x] Salt V2 hashes with version number +-- * [ ] Salt V2 hashes with version number -- * [ ] confirm that pulls are handled ok +-- * [ ] Make a backup of the v1 codebase before migrating, in a temp directory. +-- Include a message explaining where we put it. +-- * [ ] Improved error message (don't crash) if loading a codebase newer than your ucm -- * [x] Update the schema version in the database after migrating so we only migrate -- once. From 7ad6d4001cd870dca82fb4b4574351c60b6b1377 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 15 Nov 2021 14:02:53 -0500 Subject: [PATCH 119/297] delete unused thing --- .../Codebase/SqliteCodebase/MigrateSchema12.hs | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 5e6b1c4984..be31ef40dc 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -843,21 +843,5 @@ someReferenceIdToEntity = \case -- Constructors are migrated by their decl component. (ConstructorReference ref _conId) -> DeclComponent (Reference.idToHash ref) --- -- | migrate sqlite codebase from version 1 to 2, return False and rollback on failure --- migrateSchema12 :: Applicative m => Connection -> m Bool --- migrateSchema12 _db = do --- -- todo: drop and recreate corrected type/mentions index schema --- -- do we want to garbage collect at this time? ✅ --- -- or just convert everything without going in dependency order? ✅ --- error "todo: go through " --- -- todo: double-hash all the types and produce an constructor mapping --- -- object ids will stay the same --- -- todo: rehash all the terms using the new constructor mapping --- -- and adding the type to the term --- -- do we want to diff namespaces at this time? ❌ --- -- do we want to look at supporting multiple simultaneous representations of objects at this time? --- pure "todo: migrate12" --- pure True - foldSetter :: LensLike (Writer [a]) s t a a -> s -> [a] foldSetter t s = execWriter (s & t %%~ \a -> tell [a] *> pure a) From 3a26addd71ce9e94e702f188172c52a07043d0eb Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 15 Nov 2021 14:21:06 -0500 Subject: [PATCH 120/297] Make `codebaseExists` not migrate codebase as a side-effect --- .../src/Unison/Codebase/SqliteCodebase.hs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index c3fae5cfc1..9f14ecc2a6 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -109,7 +109,7 @@ import Unison.Type (Type) import qualified Unison.Type as Type import qualified Unison.Util.Pretty as P import qualified Unison.WatchKind as UF -import UnliftIO (MonadIO, catchIO, finally, liftIO, MonadUnliftIO) +import UnliftIO (MonadIO, catchIO, finally, try, liftIO, MonadUnliftIO) import UnliftIO.Directory (canonicalizePath, createDirectoryIfMissing, doesDirectoryExist, doesFileExist) import UnliftIO.STM @@ -203,12 +203,9 @@ initSchemaIfNotExist path = liftIO do codebaseExists :: MonadIO m => CodebasePath -> m Bool codebaseExists root = liftIO do Monad.when debug $ traceM $ "codebaseExists " ++ root - Control.Exception.catch @Sqlite.SQLError - ( sqliteCodebase "codebaseExists" root >>= \case - Left _ -> pure False - Right (close, _codebase) -> close $> True - ) - (const $ pure False) + try (Sqlite.open root) <&> \case + Left (_ :: Sqlite.SQLError) -> False + Right _ -> True -- 1) buffer up the component -- 2) in the event that the component is complete, then what? From 43138b6131cadb3e4f83950e916723f6dcacf0d1 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Wed, 17 Nov 2021 11:45:11 -0500 Subject: [PATCH 121/297] back up codebase before migrating --- .../src/Unison/Codebase/SqliteCodebase.hs | 30 ++++++++++++++++--- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 9f14ecc2a6..f9fd0e3e8a 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -42,11 +42,14 @@ import qualified Data.Set as Set import Data.Text (Text) import qualified Data.Text as Text import qualified Data.Text.IO as TextIO +import Data.Time (NominalDiffTime) +import Data.Time.Clock.POSIX (getPOSIXTime) import Data.Traversable (for) import Data.Word (Word64) import qualified Database.SQLite.Simple as Sqlite import GHC.Stack (HasCallStack) import qualified System.Console.ANSI as ANSI +import System.Directory (copyFile) import System.FilePath (()) import qualified System.FilePath as FilePath import U.Codebase.HashTags (CausalHash (CausalHash, unCausalHash)) @@ -121,6 +124,10 @@ debugCommitFailedTransaction = False codebasePath :: FilePath codebasePath = ".unison" "v2" "unison.sqlite3" +backupCodebasePath :: NominalDiffTime -> FilePath +backupCodebasePath now = + codebasePath ++ "." ++ show @Int (floor now) + v2dir :: FilePath -> FilePath v2dir root = root ".unison" "v2" @@ -169,7 +176,7 @@ createCodebaseOrError' debugName path = do Right () -> pure () ) - fmap (Either.mapLeft CreateCodebaseUnknownSchemaVersion) (sqliteCodebase debugName path) + fmap (Either.mapLeft CreateCodebaseUnknownSchemaVersion) (sqliteCodebase debugName path Local) openOrCreateCodebaseConnection :: MonadIO m => Codebase.DebugName -> FilePath -> m Connection openOrCreateCodebaseConnection debugName path = do @@ -187,7 +194,7 @@ getCodebaseOrError debugName dir = do -- If the codebase file doesn't exist, just return any string. The string is currently ignored (see -- Unison.Codebase.Init.getCodebaseOrExit). False -> pure (Left "codebase doesn't exist") - True -> fmap (Either.mapLeft prettyError) (sqliteCodebase debugName dir) + True -> fmap (Either.mapLeft prettyError) (sqliteCodebase debugName dir Local) initSchemaIfNotExist :: MonadIO m => FilePath -> m () initSchemaIfNotExist path = liftIO do @@ -269,13 +276,20 @@ shutdownConnection conn = do Monad.when debug $ traceM $ "shutdown connection " ++ show conn liftIO $ Sqlite.close (Connection.underlying conn) +-- | Whether a codebase is local or remote. +data LocalOrRemote + = Local + | Remote + sqliteCodebase :: forall m. (MonadUnliftIO m, MonadCatch m) => Codebase.DebugName -> CodebasePath -> + -- | When local, back up the existing codebase before migrating, in case there's a catastrophic bug in the migration. + LocalOrRemote -> m (Either SchemaVersion (m (), Codebase m Symbol Ann)) -sqliteCodebase debugName root = do +sqliteCodebase debugName root localOrRemote = do Monad.when debug $ traceM $ "sqliteCodebase " ++ debugName ++ " " ++ root conn <- unsafeGetConnection debugName root termCache <- Cache.semispaceCache 8192 -- pure Cache.nullCache -- to disable @@ -831,6 +845,14 @@ sqliteCodebase debugName root = do SchemaVersion 2 -> Right <$> startCodebase SchemaVersion 1 -> do (cleanup, codebase) <- startCodebase + case localOrRemote of + Local -> + liftIO do + backupPath <- backupCodebasePath <$> getPOSIXTime + copyFile (root codebasePath) (root backupPath) + -- FIXME prettify + putStrLn ("I backed up your codebase to " ++ (root backupPath)) + Remote -> pure () migrateSchema12 conn codebase -- it's ok to pass codebase along; whatever it cached during the migration won't break anything pure (Right (cleanup, codebase)) @@ -1063,7 +1085,7 @@ viewRemoteBranch' (repo, sbh, path) = runExceptT @C.GitError do ifM @(ExceptT C.GitError m) (codebaseExists remotePath) do - lift (sqliteCodebase "viewRemoteBranch.gitCache" remotePath) >>= \case + lift (sqliteCodebase "viewRemoteBranch.gitCache" remotePath Remote) >>= \case Left sv -> ExceptT . pure . Left . C.GitSqliteCodebaseError $ GitError.UnrecognizedSchemaVersion repo remotePath sv Right (closeCodebase, codebase) -> do -- try to load the requested branch from it From 861087d76a75953817f50e3a83b16bdadde075e8 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 19 Nov 2021 10:47:47 -0500 Subject: [PATCH 122/297] misc PR feedback --- parser-typechecker/src/Unison/Hashing/V2/Convert.hs | 4 ---- parser-typechecker/src/Unison/Hashing/V2/Hashable.hs | 2 +- stack.yaml | 2 +- unison-core/src/Unison/Hashable.hs | 2 +- 4 files changed, 3 insertions(+), 7 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 0af81ee24d..80fe60888e 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -352,10 +352,6 @@ hashPatch = Hashing.Patch.hashPatch . m2hPatch hashBranch0 :: Memory.Branch.Branch0 m -> Hash hashBranch0 = Hashing.Branch.hashBranch . m2hBranch0 --- hashing of branches isn't currently delegated here, because it's enmeshed --- with every `cons` or `step` function. I think it would be good to do while --- we're updating the hash function, but I'm also not looking forward to doing it --- and it's not clearly a problem yet. hashBranch :: Memory.Branch.Branch m -> Hash hashBranch = Hashing.Causal.hashCausal . m2hCausal . Memory.Branch._history diff --git a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs index 9632bedad0..390b3fe131 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs @@ -63,7 +63,7 @@ instance (Hashable d1, Hashable d2, Hashable d3) => Hashable (Relation3 d1 d2 d3 instance (Hashable d1, Hashable d2, Hashable d3, Hashable d4) => Hashable (Relation4 d1 d2 d3 d4) where tokens s = [accumulateToken $ Relation4.toNestedList s] -class Functor f => Hashable1 f where +class Hashable1 f where -- | Produce a hash for an `f a`, given a hashing function for `a`. -- If there is a notion of order-independence in some aspect of a subterm -- of `f`, then the first argument (`hashUnordered :: [a] -> ([h], a -> h)`) diff --git a/stack.yaml b/stack.yaml index 9c1b8ad8b8..b5c9e00188 100644 --- a/stack.yaml +++ b/stack.yaml @@ -44,7 +44,7 @@ extra-deps: ghc-options: # All packages - "$locals": -Wall -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info #-freverse-errors + "$locals": -Wall -Werror -Wno-name-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info #-freverse-errors # See https://github.com/haskell/haskell-language-server/issues/208 "$everything": -haddock diff --git a/unison-core/src/Unison/Hashable.hs b/unison-core/src/Unison/Hashable.hs index c73b482622..a99eee9d69 100644 --- a/unison-core/src/Unison/Hashable.hs +++ b/unison-core/src/Unison/Hashable.hs @@ -64,7 +64,7 @@ instance (Hashable d1, Hashable d2, Hashable d3) => Hashable (Relation3 d1 d2 d3 instance (Hashable d1, Hashable d2, Hashable d3, Hashable d4) => Hashable (Relation4 d1 d2 d3 d4) where tokens s = [ accumulateToken $ Relation4.toNestedList s ] -class Functor f => Hashable1 f where +class Hashable1 f where -- | Produce a hash for an `f a`, given a hashing function for `a`. -- If there is a notion of order-independence in some aspect of a subterm -- of `f`, then the first argument (`hashUnordered :: [a] -> ([h], a -> h)`) From 9f61875a1d7e1992b90880ac07a8ba90b2f8bdce Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 19 Nov 2021 10:53:48 -0500 Subject: [PATCH 123/297] update checklist --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index be31ef40dc..66d05a93a6 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -92,10 +92,10 @@ import UnliftIO (MonadUnliftIO) -- * refer to github megaticket https://github.com/unisonweb/unison/issues/2471 -- ☢️ [x] incorporate type signature into hash of term <- chris/arya have started ☢️ -- [x] store type annotation in the term --- * [ ] Refactor Causal helper functions to use V2 hashing --- * [ ] I guess move Hashable to V2.Hashing pseudo-package --- * [ ] Delete V1 Hashing to ensure it's unused --- * [ ] Salt V2 hashes with version number +-- * [x] Refactor Causal helper functions to use V2 hashing +-- * [x] I guess move Hashable to V2.Hashing pseudo-package +-- * [x] Delete V1 Hashing to ensure it's unused +-- * [x] Salt V2 hashes with version number -- * [ ] confirm that pulls are handled ok -- * [ ] Make a backup of the v1 codebase before migrating, in a temp directory. -- Include a message explaining where we put it. From f05549bbad077600d6749e9754e5a0ff03a5d519 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Sat, 20 Nov 2021 23:18:34 -0500 Subject: [PATCH 124/297] bang on ConstructorReference --- unison-core/src/Unison/ConstructorReference.hs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/unison-core/src/Unison/ConstructorReference.hs b/unison-core/src/Unison/ConstructorReference.hs index b5803b4d9f..d1ef912d14 100644 --- a/unison-core/src/Unison/ConstructorReference.hs +++ b/unison-core/src/Unison/ConstructorReference.hs @@ -18,9 +18,7 @@ import qualified Unison.ShortHash as ShortHash -- | A reference to a constructor is represented by a reference to its type declaration, plus the ordinal constructor id. data GConstructorReference r - -- Implementation note: a bang pattern on `r` would be nice, but currently (as of 21-11-10) trips a bug in the runtime where we pass an - -- `undefined`, which itself could probably be refactored to be less trixy. Anyway, for now, no bang pattern. - = ConstructorReference r !ConstructorId + = ConstructorReference !r !ConstructorId deriving stock (Eq, Functor, Ord, Show) type ConstructorReference = GConstructorReference TypeReference From c33ec67f65c3e4e67a0e17da9b64765494942b1f Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 22 Nov 2021 10:24:08 -0600 Subject: [PATCH 125/297] Update checklist --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 656e210042..36a96d7a64 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -97,7 +97,7 @@ import UnliftIO (MonadUnliftIO) -- * [ ] Delete V1 Hashing to ensure it's unused -- * [ ] Salt V2 hashes with version number -- * [ ] confirm that pulls are handled ok --- * [ ] Make a backup of the v1 codebase before migrating, in a temp directory. +-- * [x] Make a backup of the v1 codebase before migrating, in a temp directory. -- Include a message explaining where we put it. -- * [ ] Improved error message (don't crash) if loading a codebase newer than your ucm -- * [x] Update the schema version in the database after migrating so we only migrate From e49938fd6130e5b89343c266ebc0b97e5dad0b7b Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 29 Nov 2021 15:24:06 -1000 Subject: [PATCH 126/297] un-un-generalize Causal.hs functions, and make Hashable less extensible It may be worth it to un-split it from Causal.Type.hs also. Moved Hashable.hs guts into BuildHashable.hs un-un-specialize Unison.Hashable.hs un-delete Unison.Test.Codebase.Causal.hs (checked out from #2690) --- .../src/Unison/Codebase/Branch.hs | 5 +- .../src/Unison/Codebase/Causal.hs | 65 +++--- .../src/Unison/Hashing/V2/ABT.hs | 6 +- .../src/Unison/Hashing/V2/Branch.hs | 4 +- .../src/Unison/Hashing/V2/BuildHashable.hs | 140 +++++++++++++ .../src/Unison/Hashing/V2/Causal.hs | 5 +- .../src/Unison/Hashing/V2/Convert.hs | 26 +-- .../src/Unison/Hashing/V2/DataDeclaration.hs | 4 +- .../src/Unison/Hashing/V2/Hashable.hs | 148 ++----------- .../src/Unison/Hashing/V2/Kind.hs | 4 +- .../src/Unison/Hashing/V2/Patch.hs | 4 +- .../src/Unison/Hashing/V2/Pattern.hs | 2 +- .../src/Unison/Hashing/V2/Reference.hs | 7 +- .../src/Unison/Hashing/V2/Reference/Util.hs | 2 +- .../src/Unison/Hashing/V2/Referent.hs | 4 +- .../src/Unison/Hashing/V2/Term.hs | 4 +- .../src/Unison/Hashing/V2/TermEdit.hs | 4 +- .../src/Unison/Hashing/V2/Type.hs | 4 +- .../src/Unison/Hashing/V2/TypeEdit.hs | 4 +- .../tests/Unison/Test/Codebase/Causal.hs | 194 ++++++++++++++++++ .../unison-parser-typechecker.cabal | 2 + unison-core/src/Unison/Hashable.hs | 6 +- 22 files changed, 426 insertions(+), 218 deletions(-) create mode 100644 parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs create mode 100644 parser-typechecker/tests/Unison/Test/Codebase/Causal.hs diff --git a/parser-typechecker/src/Unison/Codebase/Branch.hs b/parser-typechecker/src/Unison/Codebase/Branch.hs index af18a7665a..71478e3671 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch.hs @@ -109,6 +109,7 @@ import Unison.Util.Relation (Relation) import qualified Unison.Util.Relation as R import qualified Unison.Util.Relation4 as R4 import qualified Unison.Util.Star3 as Star3 +import qualified Unison.Hashing.V2.Hashable as H deepReferents :: Branch0 m -> Set Referent deepReferents = R.dom . deepTerms @@ -488,8 +489,10 @@ transform f b = case _history b of -> Causal m Raw (Branch0 n) transformB0s f = Causal.unsafeMapHashPreserving (transformB0 f) - -- | Traverse the head branch of all direct children. -- The index of the traversal is the name of that child branch according to the parent. children0 :: IndexedTraversal' NameSegment (Branch0 m) (Branch0 m) children0 = children .> itraversed <. (history . Causal.head_) + +instance H.Hashable (Branch0 m) where + hash = H.hashBranch0 diff --git a/parser-typechecker/src/Unison/Codebase/Causal.hs b/parser-typechecker/src/Unison/Codebase/Causal.hs index 11baf27834..a6f8e6f2d6 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal.hs @@ -32,7 +32,6 @@ import qualified Control.Monad.Reader as Reader import qualified Control.Monad.State as State import qualified Data.Map as Map import qualified Data.Set as Set -import Unison.Codebase.Branch.Type (Branch0, UnwrappedBranch) import Unison.Codebase.Causal.Type ( Causal ( Cons, @@ -50,22 +49,22 @@ import Unison.Codebase.Causal.Type lca, ) import qualified Unison.Hashing.V2.Convert as Hashing +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as Hashable import Prelude hiding (head, read, tail) -import qualified Unison.Codebase.Branch.Raw as Branch - -- A `squashMerge combine c1 c2` gives the same resulting `e` -- as a `threeWayMerge`, but doesn't introduce a merge node for the -- result. Instead, the resulting causal is a simple `Cons` onto `c2` -- (or is equal to `c2` if `c1` changes nothing). squashMerge' - :: forall m - . Monad m - => (UnwrappedBranch m -> UnwrappedBranch m -> m (Maybe (UnwrappedBranch m))) - -> (Branch0 m -> m (Branch0 m)) - -> (Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)) - -> UnwrappedBranch m - -> UnwrappedBranch m - -> m (UnwrappedBranch m) + :: forall m h e + . (Monad m, Hashable e, Eq e) + => (Causal m h e -> Causal m h e -> m (Maybe (Causal m h e))) + -> (e -> m e) + -> (Maybe e -> e -> e -> m e) + -> Causal m h e + -> Causal m h e + -> m (Causal m h e) squashMerge' lca discardHistory combine c1 c2 = do theLCA <- lca c1 c2 let done newHead = consDistinct newHead c2 @@ -76,22 +75,22 @@ squashMerge' lca discardHistory combine c1 c2 = do | lca == c2 -> done <$> discardHistory (head c1) | otherwise -> done <$> combine (Just $ head lca) (head c1) (head c2) -threeWayMerge :: forall m - . Monad m - => (Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)) - -> UnwrappedBranch m - -> UnwrappedBranch m - -> m (UnwrappedBranch m) +threeWayMerge :: forall m h e + . (Monad m, Hashable e) + => (Maybe e -> e -> e -> m e) + -> Causal m h e + -> Causal m h e + -> m (Causal m h e) threeWayMerge = threeWayMerge' lca threeWayMerge' - :: forall m - . Monad m - => (UnwrappedBranch m -> UnwrappedBranch m -> m (Maybe (UnwrappedBranch m))) - -> (Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)) - -> UnwrappedBranch m - -> UnwrappedBranch m - -> m (UnwrappedBranch m) + :: forall m h e + . (Monad m, Hashable e) + => (Causal m h e -> Causal m h e -> m (Maybe (Causal m h e))) + -> (Maybe e -> e -> e -> m e) + -> Causal m h e + -> Causal m h e + -> m (Causal m h e) threeWayMerge' lca combine c1 c2 = do theLCA <- lca c1 c2 case theLCA of @@ -103,7 +102,7 @@ threeWayMerge' lca combine c1 c2 = do where children = Map.fromList [(currentHash c1, pure c1), (currentHash c2, pure c2)] - done :: Branch0 m -> UnwrappedBranch m + done :: e -> Causal m h e done newHead = let h = Hashing.hashCausal newHead (Map.keysSet children) in Merge (RawHash h) newHead children @@ -126,28 +125,28 @@ beforeHash maxDepth h c = State.modify' (<> Set.fromList cs) Monad.anyM (Reader.local (1+) . go) unseens -stepDistinct :: Applicative m => (Branch0 m -> Branch0 m) -> UnwrappedBranch m -> UnwrappedBranch m +stepDistinct :: (Applicative m, Eq e, Hashable e) => (e -> e) -> Causal m h e -> Causal m h e stepDistinct f c = f (head c) `consDistinct` c stepDistinctM - :: (Applicative m, Functor n) - => (Branch0 m -> n (Branch0 m)) -> UnwrappedBranch m -> n (UnwrappedBranch m) + :: (Applicative m, Functor n, Eq e, Hashable e) + => (e -> n e) -> Causal m h e -> n (Causal m h e) stepDistinctM f c = (`consDistinct` c) <$> f (head c) -one :: Branch0 m -> UnwrappedBranch m +one :: Hashable e => e -> Causal m h e one e = - let h = Hashing.hashCausal e mempty + let h = Hashable.hash e in One (RawHash h) e -cons :: Applicative m => Branch0 m -> UnwrappedBranch m -> UnwrappedBranch m +cons :: (Applicative m, Hashable e) => e -> Causal m h e -> Causal m h e cons e tl = cons' e (currentHash tl) (pure tl) -cons' :: Branch0 m -> RawHash Branch.Raw -> m (UnwrappedBranch m) -> UnwrappedBranch m +cons' :: Hashable e => e -> RawHash h -> m (Causal m h e) -> Causal m h e cons' b0 hTail mTail = let h = Hashing.hashCausal b0 (Set.singleton hTail) in Cons (RawHash h) b0 (hTail, mTail) -consDistinct :: Applicative m => Branch0 m -> UnwrappedBranch m -> UnwrappedBranch m +consDistinct :: (Applicative m, Eq e, Hashable e) => e -> Causal m h e -> Causal m h e consDistinct e tl = if head tl == e then tl else cons e tl diff --git a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs index adb9909b81..bb02f0a1bb 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs @@ -13,12 +13,12 @@ import Unison.ABT import Data.List hiding (cycle, find) import Data.Vector ((!)) -import Prelude hiding (abs,cycle) -import Unison.Hashing.V2.Hashable (Accumulate,Hashable1,hash1) +import Prelude hiding (abs, cycle) +import Unison.Hashing.V2.BuildHashable (Accumulate, Hashable1, hash1) +import qualified Unison.Hashing.V2.BuildHashable as Hashable import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Vector as Vector -import qualified Unison.Hashing.V2.Hashable as Hashable -- Hash a strongly connected component and sort its definitions into a canonical order. hashComponent :: diff --git a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs index 15c4ad1ff8..cb2b3be3c8 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs @@ -8,8 +8,8 @@ module Unison.Hashing.V2.Branch (NameSegment(..), Raw (..), MdValues (..), hashBranch) where import Unison.Hash (Hash) -import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as H +import Unison.Hashing.V2.BuildHashable (Hashable) +import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) import Unison.Prelude diff --git a/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs b/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs new file mode 100644 index 0000000000..737626e888 --- /dev/null +++ b/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs @@ -0,0 +1,140 @@ +module Unison.Hashing.V2.BuildHashable where + +import qualified Crypto.Hash as CH +import qualified Data.ByteArray as BA +import qualified Data.ByteString as B +import Data.ByteString.Builder (doubleBE, int64BE, toLazyByteString, word64BE) +import qualified Data.ByteString.Lazy as BL +import qualified Data.Map as Map +import qualified Data.Set as Set +import U.Util.Hash (Hash) +import qualified U.Util.Hash as Hash +import Unison.Prelude +import Unison.Util.Relation (Relation) +import qualified Unison.Util.Relation as Relation +import Unison.Util.Relation3 (Relation3) +import qualified Unison.Util.Relation3 as Relation3 +import Unison.Util.Relation4 (Relation4) +import qualified Unison.Util.Relation4 as Relation4 + +data Token h + = Tag !Word8 + | Bytes !ByteString + | Int !Int64 + | Text !Text + | Double !Double + | Hashed !h + | Nat !Word64 + +class Accumulate h where + accumulate :: [Token h] -> h + fromBytes :: ByteString -> h + toBytes :: h -> ByteString + +accumulateToken :: (Accumulate h, Hashable t) => t -> Token h +accumulateToken = Hashed . accumulate' + +hash, accumulate' :: (Accumulate h, Hashable t) => t -> h +hash = accumulate' +accumulate' = accumulate . (hashVersion :) . tokens + where + hashVersion = Tag 2 + +class Hashable t where + tokens :: Accumulate h => t -> [Token h] + +instance Hashable a => Hashable [a] where + tokens = map accumulateToken + +instance (Hashable a, Hashable b) => Hashable (a, b) where + tokens (a, b) = [accumulateToken a, accumulateToken b] + +instance (Hashable a) => Hashable (Set.Set a) where + tokens = tokens . Set.toList + +instance (Hashable k, Hashable v) => Hashable (Map.Map k v) where + tokens = tokens . Map.toList + +instance (Hashable a, Hashable b) => Hashable (Relation a b) where + tokens = tokens . Relation.toList + +instance (Hashable d1, Hashable d2, Hashable d3) => Hashable (Relation3 d1 d2 d3) where + tokens s = [accumulateToken $ Relation3.toNestedList s] + +instance (Hashable d1, Hashable d2, Hashable d3, Hashable d4) => Hashable (Relation4 d1 d2 d3 d4) where + tokens s = [accumulateToken $ Relation4.toNestedList s] + +class Hashable1 f where + -- | Produce a hash for an `f a`, given a hashing function for `a`. + -- If there is a notion of order-independence in some aspect of a subterm + -- of `f`, then the first argument (`hashUnordered :: [a] -> ([h], a -> h)`) + -- should be used to impose an order, and then apply that order in further hashing. + -- Otherwise the second argument (`hash :: a -> h`) should be used. + -- + -- Example 1: A simple functor with no unordered components. Hashable1 instance + -- just uses `hash`: + -- + -- data T a = One a | Two a a deriving Functor + -- + -- instance Hashable1 T where + -- hash1 _ hash t = case t of + -- One a -> accumulate [Tag 0, Hashed (hash a)] + -- Two a a2 -> accumulate [Tag 1, Hashed (hash a), Hashed (hash a2)] + -- + -- Example 2: A functor with unordered components. For hashing, we need to + -- pick a canonical ordering of the unordered components, so we + -- use `hashUnordered`: + -- + -- data U a = U { unordered :: [a], uno :: a, dos :: a } deriving Functor + -- + -- instance Hashable1 U where + -- hash1 hashUnordered _ (U unordered uno dos) = + -- let (hs, hash) = hashUnordered unordered + -- in accumulate $ map Hashed hs ++ [Hashed (hash uno), Hashed (hash dos)] + hash1 :: (Ord h, Accumulate h) => ([a] -> ([h], a -> h)) -> (a -> h) -> f a -> h + +instance Hashable () where + tokens _ = [] + +instance Hashable Double where + tokens d = [Double d] + +instance Hashable Text where + tokens s = [Text s] + +instance Hashable Char where + tokens c = [Nat $ fromIntegral $ fromEnum c] + +instance Hashable ByteString where + tokens bs = [Bytes bs] + +instance Hashable Word64 where + tokens w = [Nat w] + +instance Hashable Int64 where + tokens w = [Int w] + +instance Hashable Bool where + tokens b = [Tag . fromIntegral $ fromEnum b] + +instance Hashable Hash where + tokens h = [Bytes (Hash.toByteString h)] + +instance Accumulate Hash where + accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit + where + go :: CH.Context CH.SHA3_512 -> [Token Hash] -> CH.Context CH.SHA3_512 + go acc tokens = CH.hashUpdates acc (tokens >>= toBS) + toBS (Tag b) = [B.singleton b] + toBS (Bytes bs) = [encodeLength $ B.length bs, bs] + toBS (Int i) = BL.toChunks . toLazyByteString . int64BE $ i + toBS (Nat i) = BL.toChunks . toLazyByteString . word64BE $ i + toBS (Double d) = BL.toChunks . toLazyByteString . doubleBE $ d + toBS (Text txt) = + let tbytes = encodeUtf8 txt + in [encodeLength (B.length tbytes), tbytes] + toBS (Hashed h) = [Hash.toByteString h] + encodeLength :: Integral n => n -> B.ByteString + encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral + fromBytes = Hash.fromByteString + toBytes = Hash.toByteString diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index 0b4fcba401..f2507ac208 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -11,13 +11,12 @@ where import Data.Set (Set) import qualified Data.Set as Set import Unison.Hash (Hash) -import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as H +import qualified Unison.Hashing.V2.BuildHashable as H hashCausal :: H.Accumulate h => Causal -> h hashCausal = H.accumulate' data Causal = Causal {branchHash :: Hash, parents :: Set Hash} -instance Hashable Causal where +instance H.Hashable Causal where tokens c = H.tokens $ branchHash c : Set.toList (parents c) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index 80fe60888e..d9693a9546 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -3,7 +3,7 @@ module Unison.Hashing.V2.Convert ( ResolutionResult, - hashBranch, + hashBranch0, hashCausal, hashDataDecls, hashDecls, @@ -28,6 +28,7 @@ import Data.Map (Map) import qualified Data.Map as Map import Data.Set (Set) import qualified Data.Set as Set +import qualified U.Util.Map as Map import qualified Unison.ABT as ABT import qualified Unison.Codebase.Branch.Type as Memory.Branch import qualified Unison.Codebase.Causal.Type as Memory.Causal @@ -41,6 +42,8 @@ import Unison.Hash (Hash) import qualified Unison.Hashing.V2.Branch as Hashing.Branch import qualified Unison.Hashing.V2.Causal as Hashing.Causal import qualified Unison.Hashing.V2.DataDeclaration as Hashing.DD +import Unison.Hashing.V2.Hashable (Hashable) +import qualified Unison.Hashing.V2.Hashable as Hashable import qualified Unison.Hashing.V2.Kind as Hashing.Kind import qualified Unison.Hashing.V2.Patch as Hashing.Patch import qualified Unison.Hashing.V2.Pattern as Hashing.Pattern @@ -60,7 +63,6 @@ import qualified Unison.Term as Memory.Term import qualified Unison.Type as Memory.Type import qualified Unison.Util.Relation as Relation import qualified Unison.Util.Star3 as Memory.Star3 -import qualified U.Util.Map as Map import Unison.Var (Var) typeToReference :: Var v => Memory.Type.Type v a -> Memory.Reference.Reference @@ -223,7 +225,7 @@ h2mReferent getCT = \case Hashing.Referent.Ref ref -> Memory.Referent.Ref (h2mReference ref) Hashing.Referent.Con ref n -> let mRef = h2mReference ref - in Memory.Referent.Con mRef n (getCT mRef) + in Memory.Referent.Con mRef n (getCT mRef) hashDataDecls :: Var v => @@ -352,22 +354,10 @@ hashPatch = Hashing.Patch.hashPatch . m2hPatch hashBranch0 :: Memory.Branch.Branch0 m -> Hash hashBranch0 = Hashing.Branch.hashBranch . m2hBranch0 -hashBranch :: Memory.Branch.Branch m -> Hash -hashBranch = Hashing.Causal.hashCausal . m2hCausal . Memory.Branch._history - -hashCausal :: Memory.Branch.Branch0 m -> Set (Memory.Causal.RawHash h) -> Hash -hashCausal b0 tails = +hashCausal :: Hashable e => e -> Set (Memory.Causal.RawHash h) -> Hash +hashCausal e tails = Hashing.Causal.hashCausal $ - Hashing.Causal.Causal (hashBranch0 b0) (Set.map Memory.Causal.unRawHash tails) - -m2hCausal :: Memory.Branch.UnwrappedBranch m -> Hashing.Causal.Causal -m2hCausal = \case - Memory.Causal.One _h e -> - Hashing.Causal.Causal (hashBranch0 e) mempty - Memory.Causal.Cons _h e (ht, _) -> - Hashing.Causal.Causal (hashBranch0 e) $ Set.singleton (Memory.Causal.unRawHash ht) - Memory.Causal.Merge _h e ts -> - Hashing.Causal.Causal (hashBranch0 e) $ Set.map Memory.Causal.unRawHash (Map.keysSet ts) + Hashing.Causal.Causal (Hashable.hash e) (Set.map Memory.Causal.unRawHash tails) m2hBranch0 :: Memory.Branch.Branch0 m -> Hashing.Branch.Raw m2hBranch0 b = diff --git a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs index cf30b5f2db..aa368ef674 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs @@ -21,8 +21,8 @@ import qualified Data.Map as Map import qualified Unison.ABT as ABT import Unison.Hash (Hash) import qualified Unison.Hashing.V2.ABT as ABT -import Unison.Hashing.V2.Hashable (Hashable1) -import qualified Unison.Hashing.V2.Hashable as Hashable +import Unison.Hashing.V2.BuildHashable (Hashable1) +import qualified Unison.Hashing.V2.BuildHashable as Hashable import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference import qualified Unison.Hashing.V2.Reference.Util as Reference.Util diff --git a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs index 390b3fe131..63e2920d4c 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs @@ -1,139 +1,21 @@ -module Unison.Hashing.V2.Hashable where +module Unison.Hashing.V2.Hashable + ( Hashable, + hash, + ) +where -import qualified Crypto.Hash as CH -import qualified Data.ByteArray as BA -import qualified Data.ByteString as B -import Data.ByteString.Builder (doubleBE, int64BE, toLazyByteString, word64BE) -import qualified Data.ByteString.Lazy as BL -import qualified Data.Map as Map -import qualified Data.Set as Set -import U.Util.Hash (Hash) -import qualified U.Util.Hash as Hash -import Unison.Prelude -import Unison.Util.Relation (Relation) -import qualified Unison.Util.Relation as Relation -import Unison.Util.Relation3 (Relation3) -import qualified Unison.Util.Relation3 as Relation3 -import Unison.Util.Relation4 (Relation4) -import qualified Unison.Util.Relation4 as Relation4 - -data Token h - = Tag !Word8 - | Bytes !ByteString - | Int !Int64 - | Text !Text - | Double !Double - | Hashed !h - | Nat !Word64 - -class Accumulate h where - accumulate :: [Token h] -> h - fromBytes :: ByteString -> h - toBytes :: h -> ByteString - -accumulateToken :: (Accumulate h, Hashable t) => t -> Token h -accumulateToken = Hashed . accumulate' - -accumulate' :: (Accumulate h, Hashable t) => t -> h -accumulate' = accumulate . (hashVersion :) . tokens - where - hashVersion = Tag 2 +import Data.Int (Int64) +import Unison.Hash (Hash) +import qualified Unison.Hashing.V2.BuildHashable as BuildHashable +import Data.Set (Set) class Hashable t where - tokens :: Accumulate h => t -> [Token h] - -instance Hashable a => Hashable [a] where - tokens = map accumulateToken - -instance (Hashable a, Hashable b) => Hashable (a, b) where - tokens (a, b) = [accumulateToken a, accumulateToken b] - -instance (Hashable a) => Hashable (Set.Set a) where - tokens = tokens . Set.toList - -instance (Hashable k, Hashable v) => Hashable (Map.Map k v) where - tokens = tokens . Map.toList - -instance (Hashable a, Hashable b) => Hashable (Relation a b) where - tokens = tokens . Relation.toList - -instance (Hashable d1, Hashable d2, Hashable d3) => Hashable (Relation3 d1 d2 d3) where - tokens s = [accumulateToken $ Relation3.toNestedList s] + hash :: t -> Hash -instance (Hashable d1, Hashable d2, Hashable d3, Hashable d4) => Hashable (Relation4 d1 d2 d3 d4) where - tokens s = [accumulateToken $ Relation4.toNestedList s] - -class Hashable1 f where - -- | Produce a hash for an `f a`, given a hashing function for `a`. - -- If there is a notion of order-independence in some aspect of a subterm - -- of `f`, then the first argument (`hashUnordered :: [a] -> ([h], a -> h)`) - -- should be used to impose an order, and then apply that order in further hashing. - -- Otherwise the second argument (`hash :: a -> h`) should be used. - -- - -- Example 1: A simple functor with no unordered components. Hashable1 instance - -- just uses `hash`: - -- - -- data T a = One a | Two a a deriving Functor - -- - -- instance Hashable1 T where - -- hash1 _ hash t = case t of - -- One a -> accumulate [Tag 0, Hashed (hash a)] - -- Two a a2 -> accumulate [Tag 1, Hashed (hash a), Hashed (hash a2)] - -- - -- Example 2: A functor with unordered components. For hashing, we need to - -- pick a canonical ordering of the unordered components, so we - -- use `hashUnordered`: - -- - -- data U a = U { unordered :: [a], uno :: a, dos :: a } deriving Functor - -- - -- instance Hashable1 U where - -- hash1 hashUnordered _ (U unordered uno dos) = - -- let (hs, hash) = hashUnordered unordered - -- in accumulate $ map Hashed hs ++ [Hashed (hash uno), Hashed (hash dos)] - hash1 :: (Ord h, Accumulate h) => ([a] -> ([h], a -> h)) -> (a -> h) -> f a -> h - -instance Hashable () where - tokens _ = [] - -instance Hashable Double where - tokens d = [Double d] - -instance Hashable Text where - tokens s = [Text s] - -instance Hashable Char where - tokens c = [Nat $ fromIntegral $ fromEnum c] - -instance Hashable ByteString where - tokens bs = [Bytes bs] - -instance Hashable Word64 where - tokens w = [Nat w] +instance BuildHashable.Hashable a => Hashable [a] where + hash = BuildHashable.hash +instance BuildHashable.Hashable a => Hashable (Set a) where + hash = BuildHashable.hash instance Hashable Int64 where - tokens w = [Int w] - -instance Hashable Bool where - tokens b = [Tag . fromIntegral $ fromEnum b] - -instance Hashable Hash where - tokens h = [Bytes (Hash.toByteString h)] - -instance Accumulate Hash where - accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit - where - go :: CH.Context CH.SHA3_512 -> [Token Hash] -> CH.Context CH.SHA3_512 - go acc tokens = CH.hashUpdates acc (tokens >>= toBS) - toBS (Tag b) = [B.singleton b] - toBS (Bytes bs) = [encodeLength $ B.length bs, bs] - toBS (Int i) = BL.toChunks . toLazyByteString . int64BE $ i - toBS (Nat i) = BL.toChunks . toLazyByteString . word64BE $ i - toBS (Double d) = BL.toChunks . toLazyByteString . doubleBE $ d - toBS (Text txt) = - let tbytes = encodeUtf8 txt - in [encodeLength (B.length tbytes), tbytes] - toBS (Hashed h) = [Hash.toByteString h] - encodeLength :: Integral n => n -> B.ByteString - encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral - fromBytes = Hash.fromByteString - toBytes = Hash.toByteString + hash = BuildHashable.hash diff --git a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs index e7b082704d..062217cb28 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs @@ -4,8 +4,8 @@ module Unison.Hashing.V2.Kind where import Unison.Prelude -import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as Hashable +import Unison.Hashing.V2.BuildHashable (Hashable) +import qualified Unison.Hashing.V2.BuildHashable as Hashable data Kind = Star | Arrow Kind Kind deriving (Eq,Ord,Read,Show,Generic) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs index 58ad7e7268..5b1e79afa6 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs @@ -6,8 +6,8 @@ module Unison.Hashing.V2.Patch (Patch (..), hashPatch) where import Data.Map (Map) import Data.Set (Set) -import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as H +import Unison.Hashing.V2.BuildHashable (Hashable) +import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.TermEdit (TermEdit) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs index eb3000ea4c..75600dd469 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs @@ -9,7 +9,7 @@ import Data.Foldable as Foldable hiding (foldMap') import Data.List (intercalate) import qualified Data.Set as Set import Unison.DataDeclaration.ConstructorId (ConstructorId) -import qualified Unison.Hashing.V2.Hashable as H +import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Type as Type import Unison.Prelude diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs index a7b66b34c9..f343188e5b 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs @@ -17,8 +17,8 @@ import Unison.Prelude import qualified Data.Text as Text import qualified Unison.Hash as H -import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as Hashable +import Unison.Hashing.V2.BuildHashable (Hashable) +import qualified Unison.Hashing.V2.BuildHashable as Hashable import Unison.ShortHash (ShortHash) import qualified Unison.ShortHash as SH @@ -45,8 +45,7 @@ pattern Derived h i = DerivedId (Id h i) -- | @Pos@ is a position into a cycle of size @Size@, as cycles are hashed together. data Id = Id H.Hash Pos deriving (Eq, Ord) --- todo: move these to ShortHash module? --- but Show Reference currently depends on SH +-- todo: delete these, but `instance Show Reference` currently depends on SH toShortHash :: Reference -> ShortHash toShortHash (Builtin b) = SH.Builtin b toShortHash (Derived h 0) = SH.ShortHash (H.base32Hex h) Nothing Nothing diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs index b81da536b2..2d574467cd 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs @@ -3,7 +3,7 @@ module Unison.Hashing.V2.Reference.Util where import Unison.Prelude import qualified Unison.Hashing.V2.Reference as Reference -import Unison.Hashing.V2.Hashable (Hashable1) +import Unison.Hashing.V2.BuildHashable (Hashable1) import Unison.ABT (Var) import qualified Unison.Hashing.V2.ABT as ABT import qualified Data.Map as Map diff --git a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs index 2ca2aaf69f..38c0c15eed 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs @@ -10,8 +10,8 @@ module Unison.Hashing.V2.Referent where import Unison.DataDeclaration.ConstructorId (ConstructorId) -import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as H +import Unison.Hashing.V2.BuildHashable (Hashable) +import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) data Referent = Ref Reference | Con Reference ConstructorId diff --git a/parser-typechecker/src/Unison/Hashing/V2/Term.hs b/parser-typechecker/src/Unison/Hashing/V2/Term.hs index cc45f8add8..5e61de64fc 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Term.hs @@ -25,8 +25,8 @@ import qualified Unison.ABT as ABT import qualified Unison.Blank as B import Unison.DataDeclaration.ConstructorId (ConstructorId) import qualified Unison.Hash as Hash -import Unison.Hashing.V2.Hashable (Hashable1, accumulateToken) -import qualified Unison.Hashing.V2.Hashable as Hashable +import Unison.Hashing.V2.BuildHashable (Hashable1, accumulateToken) +import qualified Unison.Hashing.V2.BuildHashable as Hashable import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Pattern (Pattern) import Unison.Hashing.V2.Reference (Reference) diff --git a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs index d05000257d..88cccde40a 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs @@ -1,7 +1,7 @@ module Unison.Hashing.V2.TermEdit (TermEdit (..)) where -import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as H +import Unison.Hashing.V2.BuildHashable (Hashable) +import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Referent (Referent) data TermEdit = Replace Referent | Deprecate diff --git a/parser-typechecker/src/Unison/Hashing/V2/Type.hs b/parser-typechecker/src/Unison/Hashing/V2/Type.hs index a2f5232f93..f2a5fe4671 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Type.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Type.hs @@ -27,8 +27,8 @@ module Unison.Hashing.V2.Type ( import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.ABT as ABT -import Unison.Hashing.V2.Hashable (Hashable1) -import qualified Unison.Hashing.V2.Hashable as Hashable +import Unison.Hashing.V2.BuildHashable (Hashable1) +import qualified Unison.Hashing.V2.BuildHashable as Hashable import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference diff --git a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs index 328e82a8b3..e51fff891a 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs @@ -1,7 +1,7 @@ module Unison.Hashing.V2.TypeEdit (TypeEdit (..)) where -import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as H +import Unison.Hashing.V2.BuildHashable (Hashable) +import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) data TypeEdit = Replace Reference | Deprecate diff --git a/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs b/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs new file mode 100644 index 0000000000..6ac2f5be5f --- /dev/null +++ b/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs @@ -0,0 +1,194 @@ +{-# LANGUAGE PartialTypeSignatures #-} + +module Unison.Test.Codebase.Causal (test) where + +import Control.Monad (replicateM_) +import Data.Functor.Identity (Identity (runIdentity)) +import Data.Int (Int64) +import Data.Set (Set) +import qualified Data.Set as Set +import EasyTest +import Unison.Codebase.Causal (Causal, one) +import qualified Unison.Codebase.Causal as Causal +import Unison.Hash (Hash) + +test :: Test () +test = + scope "causal" + . tests + $ [ scope "threeWayMerge.ex1" + . expect + $ Causal.head testThreeWay + == Set.fromList [3, 4], + scope "threeWayMerge.idempotent" + . expect + $ testIdempotent oneCausal, -- == oneCausal + -- $ prop_mergeIdempotent + scope "threeWayMerge.identity" + . expect + $ testIdentity oneCausal emptyCausal, + -- $ prop_mergeIdentity + scope "threeWayMerge.commutative" + . expect + $ testCommutative (Set.fromList [3, 4]) oneRemoved, + -- $ prop_mergeCommutative + {- , scope "threeWayMerge.commonAncestor" + . expect + $ testCommonAncestor + -- $ prop_mergeCommonAncestor --} + scope "lca.hasLca" lcaPairTest, + scope "lca.noLca" noLcaPairTest, + scope "beforeHash" $ beforeHashTests + ] + +beforeHashTests :: Test () +beforeHashTests = do + -- c1 and c2 have unrelated histories + c1 <- pure $ Causal.one (0 :: Int64) + c2 <- pure $ Causal.one (1 :: Int64) + -- c1' and c2' are extension of c1 and c2, respectively + c1' <- pure $ Causal.cons 2 c1 + c2' <- pure $ Causal.cons 3 c2 + c12 <- Causal.threeWayMerge sillyMerge c1' c2' + + -- verifying basic properties of `before` for these examples + expect' =<< before c1 c1 + expect' =<< before c1 c12 + expect' =<< before c2 c2 + expect' =<< before c2 c12 + expect' =<< before c2 c2' + expect' =<< before c1 c1' + expect' . not =<< before c1 c2 + expect' . not =<< before c2 c1 + + -- make sure the search cutoff works - + -- even though both start with `Causal.one 0`, that's + -- more than 10 steps back from `longCausal 1000`, so we + -- want this to be false + expect' . not =<< before c1 (longCausal (1000 :: Int64)) + ok + where + before h c = Causal.beforeHash 10 (Causal.currentHash h) c + sillyMerge _lca l _r = pure l + longCausal 0 = Causal.one 0 + longCausal n = Causal.cons n (longCausal (n - 1)) + +int64 :: Test Int64 +int64 = random + +extend :: + Int -> + Causal Identity Hash Int64 -> + Test (Causal Identity Hash Int64) +extend 0 ca = pure ca +extend n ca = do + i <- int64 + extend (n -1) (Causal.cons i ca) + +lcaPair :: Test (Causal Identity Hash Int64, Causal Identity Hash Int64) +lcaPair = do + base <- one <$> int64 + ll <- int' 0 20 + lr <- int' 0 20 + (,) <$> extend ll base <*> extend lr base + +lcaPairTest :: Test () +lcaPairTest = replicateM_ 50 test >> ok + where + test = + runIdentity . uncurry Causal.lca <$> lcaPair >>= \case + Just _ -> pure () + Nothing -> crash "expected lca" + +noLcaPair :: + Test (Causal Identity Hash Int64, Causal Identity Hash Int64) +noLcaPair = do + basel <- one <$> int64 + baser <- one <$> int64 + ll <- int' 0 20 + lr <- int' 0 20 + (,) <$> extend ll basel <*> extend lr baser + +noLcaPairTest :: Test () +noLcaPairTest = replicateM_ 50 test >> ok + where + test = + runIdentity . uncurry Causal.lca <$> noLcaPair >>= \case + Nothing -> pure () + Just _ -> crash "expected no lca" + +oneRemoved :: Causal Identity Hash (Set Int64) +oneRemoved = + foldr + Causal.cons + (one (Set.singleton 1)) + (Set.fromList <$> [[2, 3, 4], [1, 2, 3, 4], [1, 2]]) + +twoRemoved :: Causal Identity Hash (Set Int64) +twoRemoved = + foldr + Causal.cons + (one (Set.singleton 1)) + (Set.fromList <$> [[1, 3, 4], [1, 2, 3], [1, 2]]) + +testThreeWay :: Causal Identity Hash (Set Int64) +testThreeWay = + runIdentity $ + threeWayMerge' oneRemoved twoRemoved + +setCombine :: Applicative m => Ord a => Set a -> Set a -> m (Set a) +setCombine a b = pure $ a <> b + +setDiff :: Applicative m => Ord a => Set a -> Set a -> m (Set a, Set a) +setDiff old new = pure (Set.difference new old, Set.difference old new) + +setPatch :: Applicative m => Ord a => Set a -> (Set a, Set a) -> m (Set a) +setPatch s (added, removed) = pure (added <> Set.difference s removed) + +-- merge x x == x, should not add a new head, and also the value at the head should be the same of course +testIdempotent :: Causal Identity Hash (Set Int64) -> Bool -- Causal Identity Hash (Set Int64) +testIdempotent causal = + runIdentity (threeWayMerge' causal causal) + == causal + +-- prop_mergeIdempotent :: Bool +-- prop_mergeIdempotent = and (map testIdempotent (take 1000 generateRandomCausals)) + +oneCausal :: Causal Identity Hash (Set Int64) +oneCausal = Causal.one (Set.fromList [1]) + +-- generateRandomCausals :: Causal Identity Hash (Set Int64) +-- generateRandomCausals = undefined + +easyCombine :: + (Monad m, Semigroup d) => + (e -> e -> m e) -> + (e -> e -> m d) -> + (e -> d -> m e) -> + (Maybe e -> e -> e -> m e) +easyCombine comb _ _ Nothing l r = comb l r +easyCombine _ diff appl (Just ca) l r = do + dl <- diff ca l + dr <- diff ca r + appl ca (dl <> dr) + +threeWayMerge' :: + Causal Identity Hash (Set Int64) -> + Causal Identity Hash (Set Int64) -> + Identity (Causal Identity Hash (Set Int64)) +threeWayMerge' = Causal.threeWayMerge (easyCombine setCombine setDiff setPatch) + +-- merge x mempty == x, merge mempty x == x +testIdentity :: Causal Identity Hash (Set Int64) -> Causal Identity Hash (Set Int64) -> Bool +testIdentity causal mempty = + (threeWayMerge' causal mempty) + == (threeWayMerge' mempty causal) + +emptyCausal :: Causal Identity Hash (Set Int64) +emptyCausal = one (Set.empty) + +-- merge (cons hd tl) tl == cons hd tl, merge tl (cons hd tl) == cons hd tl +testCommutative :: Set Int64 -> Causal Identity Hash (Set Int64) -> Bool +testCommutative hd tl = + (threeWayMerge' (Causal.cons hd tl) tl) + == (threeWayMerge' tl (Causal.cons hd tl)) diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index d83a349309..f6bf0fb3b1 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -80,6 +80,7 @@ library Unison.FileParsers Unison.Hashing.V2.ABT Unison.Hashing.V2.Branch + Unison.Hashing.V2.BuildHashable Unison.Hashing.V2.Causal Unison.Hashing.V2.Convert Unison.Hashing.V2.DataDeclaration @@ -354,6 +355,7 @@ executable tests Unison.Test.ANF Unison.Test.Cache Unison.Test.Codebase.Branch + Unison.Test.Codebase.Causal Unison.Test.Codebase.Path Unison.Test.CodebaseInit Unison.Test.ColorText diff --git a/unison-core/src/Unison/Hashable.hs b/unison-core/src/Unison/Hashable.hs index a99eee9d69..dace9fa305 100644 --- a/unison-core/src/Unison/Hashable.hs +++ b/unison-core/src/Unison/Hashable.hs @@ -36,9 +36,9 @@ class Accumulate h where accumulateToken :: (Accumulate h, Hashable t) => t -> Token h accumulateToken = Hashed . accumulate' -accumulate' :: (Accumulate h, Hashable t) => t -> h -accumulate' = accumulate . (hashVersion :). tokens - where hashVersion = Tag 2 +hash, accumulate' :: (Accumulate h, Hashable t) => t -> h +accumulate' = accumulate . tokens +hash = accumulate' class Hashable t where tokens :: Accumulate h => t -> [Token h] From b47c49606ec1f0656c6d17aabbaf420c43a56152 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 29 Nov 2021 16:57:17 -1000 Subject: [PATCH 127/297] build Causal hashes in Unison.Hashing.V2.Convert as Hashing --- parser-typechecker/src/Unison/Codebase/Causal.hs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Causal.hs b/parser-typechecker/src/Unison/Codebase/Causal.hs index a6f8e6f2d6..44f03aee31 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal.hs @@ -50,7 +50,6 @@ import Unison.Codebase.Causal.Type ) import qualified Unison.Hashing.V2.Convert as Hashing import Unison.Hashing.V2.Hashable (Hashable) -import qualified Unison.Hashing.V2.Hashable as Hashable import Prelude hiding (head, read, tail) -- A `squashMerge combine c1 c2` gives the same resulting `e` -- as a `threeWayMerge`, but doesn't introduce a merge node for the @@ -135,7 +134,7 @@ stepDistinctM f c = (`consDistinct` c) <$> f (head c) one :: Hashable e => e -> Causal m h e one e = - let h = Hashable.hash e + let h = Hashing.hashCausal e mempty in One (RawHash h) e cons :: (Applicative m, Hashable e) => e -> Causal m h e -> Causal m h e From 8d3d896498d352a3a2bf05c9ab0bae5bdcd84957 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 3 Dec 2021 11:14:47 -1000 Subject: [PATCH 128/297] just have one path through causal construction --- .../src/Unison/Codebase/Causal.hs | 54 +++++++++++-------- .../src/Unison/Codebase/Causal/FoldHistory.hs | 2 +- .../src/Unison/Codebase/Causal/Type.hs | 30 +++++++---- .../Codebase/SqliteCodebase/Conversions.hs | 6 +-- .../src/Unison/Codebase/Editor/HandleInput.hs | 4 +- 5 files changed, 58 insertions(+), 38 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Causal.hs b/parser-typechecker/src/Unison/Codebase/Causal.hs index 44f03aee31..daf084aa66 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal.hs @@ -3,12 +3,14 @@ {-# LANGUAGE TemplateHaskell #-} module Unison.Codebase.Causal - ( Causal (..), + ( Causal(currentHash, head, tail, tails), + pattern One, + pattern Cons, + pattern Merge, RawHash (RawHash, unRawHash), head_, one, cons, - cons', consDistinct, uncons, children, @@ -34,15 +36,18 @@ import qualified Data.Map as Map import qualified Data.Set as Set import Unison.Codebase.Causal.Type ( Causal - ( Cons, - Merge, - One, + ( UnsafeCons, + UnsafeMerge, + UnsafeOne, currentHash, head, tail, tails ), RawHash (RawHash, unRawHash), + pattern One, + pattern Cons, + pattern Merge, before, children, head_, @@ -51,6 +56,17 @@ import Unison.Codebase.Causal.Type import qualified Unison.Hashing.V2.Convert as Hashing import Unison.Hashing.V2.Hashable (Hashable) import Prelude hiding (head, read, tail) +import qualified Data.List.Extra as List + +fromList :: (Applicative m, Hashable e) => e -> [Causal m h e] -> Causal m h e +fromList e (List.nubOrdOn currentHash -> tails) = case tails of + [] -> UnsafeOne h e + t : [] -> UnsafeCons h e (tailPair t) + _ : _ : _ -> UnsafeMerge h e (Map.fromList $ map tailPair tails) + where + tailPair c = (currentHash c, pure c) + h = RawHash $ Hashing.hashCausal e (Set.fromList $ map currentHash tails) + -- A `squashMerge combine c1 c2` gives the same resulting `e` -- as a `threeWayMerge`, but doesn't introduce a merge node for the -- result. Instead, the resulting causal is a simple `Cons` onto `c2` @@ -99,12 +115,8 @@ threeWayMerge' lca combine c1 c2 = do | lca == c2 -> pure c1 | otherwise -> done <$> combine (Just $ head lca) (head c1) (head c2) where - children = - Map.fromList [(currentHash c1, pure c1), (currentHash c2, pure c2)] done :: e -> Causal m h e - done newHead = - let h = Hashing.hashCausal newHead (Map.keysSet children) - in Merge (RawHash h) newHead children + done newHead = fromList newHead [c1, c2] -- `True` if `h` is found in the history of `c` within `maxDepth` path length -- from the tip of `c` @@ -132,18 +144,14 @@ stepDistinctM => (e -> n e) -> Causal m h e -> n (Causal m h e) stepDistinctM f c = (`consDistinct` c) <$> f (head c) +-- duplicated logic here instead of delegating to `fromList` to avoid `Applicative m` constraint. one :: Hashable e => e -> Causal m h e one e = let h = Hashing.hashCausal e mempty - in One (RawHash h) e + in UnsafeOne (RawHash h) e cons :: (Applicative m, Hashable e) => e -> Causal m h e -> Causal m h e -cons e tl = cons' e (currentHash tl) (pure tl) - -cons' :: Hashable e => e -> RawHash h -> m (Causal m h e) -> Causal m h e -cons' b0 hTail mTail = - let h = Hashing.hashCausal b0 (Set.singleton hTail) - in Cons (RawHash h) b0 (hTail, mTail) +cons e tail = fromList e [tail] consDistinct :: (Applicative m, Eq e, Hashable e) => e -> Causal m h e -> Causal m h e consDistinct e tl = @@ -157,14 +165,14 @@ uncons c = case c of transform :: Functor m => (forall a . m a -> n a) -> Causal m h e -> Causal n h e transform nt c = case c of - One h e -> One h e - Cons h e (ht, tl) -> Cons h e (ht, nt (transform nt <$> tl)) - Merge h e tls -> Merge h e $ Map.map (\mc -> nt (transform nt <$> mc)) tls + One h e -> UnsafeOne h e + Cons h e (ht, tl) -> UnsafeCons h e (ht, nt (transform nt <$> tl)) + Merge h e tls -> UnsafeMerge h e $ Map.map (\mc -> nt (transform nt <$> mc)) tls unsafeMapHashPreserving :: Functor m => (e -> e2) -> Causal m h e -> Causal m h e2 unsafeMapHashPreserving f c = case c of - One h e -> One h (f e) - Cons h e (ht, tl) -> Cons h (f e) (ht, unsafeMapHashPreserving f <$> tl) - Merge h e tls -> Merge h (f e) $ Map.map (fmap $ unsafeMapHashPreserving f) tls + One h e -> UnsafeOne h (f e) + Cons h e (ht, tl) -> UnsafeCons h (f e) (ht, unsafeMapHashPreserving f <$> tl) + Merge h e tls -> UnsafeMerge h (f e) $ Map.map (fmap $ unsafeMapHashPreserving f) tls data FoldHistoryResult a = Satisfied a | Unsatisfied a deriving (Eq,Ord,Show) diff --git a/parser-typechecker/src/Unison/Codebase/Causal/FoldHistory.hs b/parser-typechecker/src/Unison/Codebase/Causal/FoldHistory.hs index 44a88465ec..83f304d1c7 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal/FoldHistory.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal/FoldHistory.hs @@ -5,7 +5,7 @@ module Unison.Codebase.Causal.FoldHistory (FoldHistoryResult (..), foldHistoryUn import Unison.Prelude -import Unison.Codebase.Causal ( Causal(..), RawHash ) +import Unison.Codebase.Causal (Causal(..), RawHash, pattern One, pattern Cons, pattern Merge) import Prelude hiding (tail, head) import qualified Data.Sequence as Seq import qualified Data.Set as Set diff --git a/parser-typechecker/src/Unison/Codebase/Causal/Type.hs b/parser-typechecker/src/Unison/Codebase/Causal/Type.hs index 2c05ea402c..dfeba8faff 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal/Type.hs @@ -5,6 +5,9 @@ module Unison.Codebase.Causal.Type ( Causal (..), RawHash (..), + pattern One, + pattern Cons, + pattern Merge, before, children, lca, @@ -48,35 +51,44 @@ instance Show (RawHash a) where instance Show e => Show (Causal m h e) where show = \case - One h e -> "One " ++ (take 3 . show) h ++ " " ++ show e - Cons h e t -> "Cons " ++ (take 3 . show) h ++ " " ++ show e ++ " " ++ (take 3 . show) (fst t) - Merge h e ts -> "Merge " ++ (take 3 . show) h ++ " " ++ show e ++ " " ++ (show . fmap (take 3 . show) . toList) (Map.keysSet ts) + UnsafeOne h e -> "One " ++ (take 3 . show) h ++ " " ++ show e + UnsafeCons h e t -> "Cons " ++ (take 3 . show) h ++ " " ++ show e ++ " " ++ (take 3 . show) (fst t) + UnsafeMerge h e ts -> "Merge " ++ (take 3 . show) h ++ " " ++ show e ++ " " ++ (show . fmap (take 3 . show) . toList) (Map.keysSet ts) -- h is the type of the pure data structure that will be hashed and used as -- an index; e.g. h = Branch00, e = Branch0 m data Causal m h e - = One + = UnsafeOne { currentHash :: RawHash h, head :: e } - | Cons + | UnsafeCons { currentHash :: RawHash h, head :: e, tail :: (RawHash h, m (Causal m h e)) } | -- The merge operation `<>` flattens and normalizes for order - Merge + UnsafeMerge { currentHash :: RawHash h, head :: e, tails :: Map (RawHash h) (m (Causal m h e)) } +pattern One :: RawHash h -> e -> Causal m h e +pattern One h e <- UnsafeOne h e +pattern Cons :: RawHash h -> e -> (RawHash h, m (Causal m h e)) -> Causal m h e +pattern Cons h e tail <- UnsafeCons h e tail +pattern Merge :: RawHash h -> e -> Map (RawHash h) (m (Causal m h e)) -> Causal m h e +pattern Merge h e tails <- UnsafeMerge h e tails +{-# COMPLETE One, Cons, Merge #-} + + Lens.makeLensesFor [("head", "head_")] ''Causal children :: Causal m h e -> Seq (m (Causal m h e)) -children (One _ _) = Seq.empty -children (Cons _ _ (_, t)) = Seq.singleton t -children (Merge _ _ ts) = Seq.fromList $ Map.elems ts +children (UnsafeOne _ _) = Seq.empty +children (UnsafeCons _ _ (_, t)) = Seq.singleton t +children (UnsafeMerge _ _ ts) = Seq.fromList $ Map.elems ts before :: Monad m => Causal m h e -> Causal m h e -> m Bool before a b = (== Just a) <$> lca a b diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index 3625704cf5..993d594c18 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -414,16 +414,16 @@ causalbranch2to1' :: Monad m => (V2.Reference -> m CT.ConstructorType) -> V2.Bra causalbranch2to1' lookupCT (V2.Causal hc _he (Map.toList -> parents) me) = do let currentHash = causalHash2to1 hc case parents of - [] -> V1.Causal.One currentHash <$> (me >>= branch2to1 lookupCT) + [] -> V1.Causal.UnsafeOne currentHash <$> (me >>= branch2to1 lookupCT) [(hp, mp)] -> do let parentHash = causalHash2to1 hp - V1.Causal.Cons currentHash + V1.Causal.UnsafeCons currentHash <$> (me >>= branch2to1 lookupCT) <*> pure (parentHash, causalbranch2to1' lookupCT =<< mp) merge -> do let tailsList = map (bimap causalHash2to1 (causalbranch2to1' lookupCT =<<)) merge e <- me - V1.Causal.Merge currentHash <$> branch2to1 lookupCT e <*> pure (Map.fromList tailsList) + V1.Causal.UnsafeMerge currentHash <$> branch2to1 lookupCT e <*> pure (Map.fromList tailsList) causalbranch1to2 :: forall m. Monad m => V1.Branch.Branch m -> V2.Branch.Causal m causalbranch1to2 (V1.Branch.Branch c) = causal1to2' hash1to2cb hash1to2c branch1to2 c diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 20076a6717..af089dcda3 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -856,9 +856,9 @@ loop = do else case Branch._history b of Causal.One {} -> respond $ History diffCap acc (EndOfLog . sbh $ Branch.headHash b) - Causal.Merge {Causal.tails} -> + Causal.Merge _ _ tails -> respond $ History diffCap acc (MergeTail (sbh $ Branch.headHash b) . map sbh $ Map.keys tails) - Causal.Cons {Causal.tail} -> do + Causal.Cons _ _ tail -> do b' <- fmap Branch.Branch . eval . Eval $ snd tail let elem = (sbh $ Branch.headHash b, Branch.namesDiff b' b) doHistory (n + 1) b' (elem : acc) From c74a074f6d04d07a308dd834d0d5fbd142bd2a17 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 3 Dec 2021 12:09:47 -1000 Subject: [PATCH 129/297] restore causal test suite --- parser-typechecker/tests/Suite.hs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/parser-typechecker/tests/Suite.hs b/parser-typechecker/tests/Suite.hs index fef13bac36..d1fee504e9 100644 --- a/parser-typechecker/tests/Suite.hs +++ b/parser-typechecker/tests/Suite.hs @@ -11,6 +11,7 @@ import qualified Unison.Core.Test.Name as Name import qualified Unison.Test.ABT as ABT import qualified Unison.Test.Cache as Cache import qualified Unison.Test.Codebase.Branch as Branch +import qualified Unison.Test.Codebase.Causal as Causal import qualified Unison.Test.Codebase.Path as Path import qualified Unison.Test.ColorText as ColorText import qualified Unison.Test.DataDeclaration as DataDeclaration @@ -56,6 +57,7 @@ test = tests , Text.test , Relation.test , Path.test + , Causal.test , Referent.test , ABT.test , ANF.test From dcaa9f280e38155947e778351156ce3e9ed2457e Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 3 Dec 2021 13:19:49 -1000 Subject: [PATCH 130/297] remove `Unison.Hashable.Hashable1` --- unison-core/src/Unison/Hashable.hs | 32 +----------------------------- 1 file changed, 1 insertion(+), 31 deletions(-) diff --git a/unison-core/src/Unison/Hashable.hs b/unison-core/src/Unison/Hashable.hs index d4761ad505..d846736f4c 100644 --- a/unison-core/src/Unison/Hashable.hs +++ b/unison-core/src/Unison/Hashable.hs @@ -1,5 +1,5 @@ {- ORMOLU_DISABLE -} -- Remove this when the file is ready to be auto-formatted -module Unison.Hashable where +module Unison.Hashable (accumulate', hash, toBytes) where import Unison.Prelude @@ -64,36 +64,6 @@ instance (Hashable d1, Hashable d2, Hashable d3) => Hashable (Relation3 d1 d2 d3 instance (Hashable d1, Hashable d2, Hashable d3, Hashable d4) => Hashable (Relation4 d1 d2 d3 d4) where tokens s = [ accumulateToken $ Relation4.toNestedList s ] - -class Hashable1 f where - -- | Produce a hash for an `f a`, given a hashing function for `a`. - -- If there is a notion of order-independence in some aspect of a subterm - -- of `f`, then the first argument (`hashUnordered :: [a] -> ([h], a -> h)`) - -- should be used to impose an order, and then apply that order in further hashing. - -- Otherwise the second argument (`hash :: a -> h`) should be used. - -- - -- Example 1: A simple functor with no unordered components. Hashable1 instance - -- just uses `hash`: - -- - -- data T a = One a | Two a a deriving Functor - -- - -- instance Hashable1 T where - -- hash1 _ hash t = case t of - -- One a -> accumulate [Tag 0, Hashed (hash a)] - -- Two a a2 -> accumulate [Tag 1, Hashed (hash a), Hashed (hash a2)] - -- - -- Example 2: A functor with unordered components. For hashing, we need to - -- pick a canonical ordering of the unordered components, so we - -- use `hashUnordered`: - -- - -- data U a = U { unordered :: [a], uno :: a, dos :: a } deriving Functor - -- - -- instance Hashable1 U where - -- hash1 hashUnordered _ (U unordered uno dos) = - -- let (hs, hash) = hashUnordered unordered - -- in accumulate $ map Hashed hs ++ [Hashed (hash uno), Hashed (hash dos)] - hash1 :: (Ord h, Accumulate h) => ([a] -> ([h], a -> h)) -> (a -> h) -> f a -> h - instance Hashable () where tokens _ = [] From 6055ca61844e2e50dd67c85f06c56fc83de2d5a5 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 6 Dec 2021 11:54:15 -1000 Subject: [PATCH 131/297] bring transcript outputs up to date --- unison-src/transcripts/ambiguous-metadata.output.md | 8 ++++---- unison-src/transcripts/copy-patch.output.md | 4 ++-- unison-src/transcripts/find-by-type.output.md | 4 ++-- .../transcripts/fix-1381-excess-propagate.output.md | 2 +- unison-src/transcripts/fix2567.output.md | 4 ++-- unison-src/transcripts/mergeloop.output.md | 6 +++--- unison-src/transcripts/names.output.md | 6 +++--- 7 files changed, 17 insertions(+), 17 deletions(-) diff --git a/unison-src/transcripts/ambiguous-metadata.output.md b/unison-src/transcripts/ambiguous-metadata.output.md index 0fd795b97b..4c85794172 100644 --- a/unison-src/transcripts/ambiguous-metadata.output.md +++ b/unison-src/transcripts/ambiguous-metadata.output.md @@ -14,10 +14,10 @@ x = 1 New name conflicts: - 1. doc#mqm91b53vp : #qpp8bgqet0 + 1. doc#lgjhda02he : #2k64c46bkd ↓ - 2. ┌ doc#a09ch66esd : #qpp8bgqet0 - 3. └ doc#mqm91b53vp : #qpp8bgqet0 + 2. ┌ doc#lgjhda02he : #2k64c46bkd + 3. └ doc#n1n1e95rjc : #2k64c46bkd Tip: You can use `todo` to see if this generated any work to do in this namespace and `test` to run the tests. Or you @@ -31,8 +31,8 @@ x = 1 I'm not sure which metadata value you're referring to since there are multiple matches: + doc#lgjhda02he foo.doc - doc#mqm91b53vp Tip: Try again and supply one of the above definitions explicitly. diff --git a/unison-src/transcripts/copy-patch.output.md b/unison-src/transcripts/copy-patch.output.md index 45a8217f8e..e742f22c84 100644 --- a/unison-src/transcripts/copy-patch.output.md +++ b/unison-src/transcripts/copy-patch.output.md @@ -39,7 +39,7 @@ Copy the patch and make sure it's still there. .> view.patch foo.patch - Edited Terms: #jk19sm5bf8 -> x + Edited Terms: #rrsqv1ogaq -> x Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as @@ -51,7 +51,7 @@ Copy the patch and make sure it's still there. .> view.patch bar.patch - Edited Terms: #jk19sm5bf8 -> x + Edited Terms: #rrsqv1ogaq -> x Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as diff --git a/unison-src/transcripts/find-by-type.output.md b/unison-src/transcripts/find-by-type.output.md index b473449bcf..0b47aa9771 100644 --- a/unison-src/transcripts/find-by-type.output.md +++ b/unison-src/transcripts/find-by-type.output.md @@ -49,8 +49,8 @@ baz = cases I couldn't find exact type matches, resorting to fuzzy matching... - 1. bar : Text -> A - 2. baz : A -> Text + 1. baz : A -> Text + 2. bar : Text -> A 3. A.A : Text -> A diff --git a/unison-src/transcripts/fix-1381-excess-propagate.output.md b/unison-src/transcripts/fix-1381-excess-propagate.output.md index 23e46270f7..0f21282e0c 100644 --- a/unison-src/transcripts/fix-1381-excess-propagate.output.md +++ b/unison-src/transcripts/fix-1381-excess-propagate.output.md @@ -37,7 +37,7 @@ As of the time of this writing, the history for `X` should be a single node, `#4 - □ #49c1c5kdrf (start of history) + □ #h2tkacp7a0 (start of history) ``` however, as of release/M1i, we saw an extraneous node appear. If your `ucm` is fixed, you won't see it below: diff --git a/unison-src/transcripts/fix2567.output.md b/unison-src/transcripts/fix2567.output.md index 507556154b..727ec42695 100644 --- a/unison-src/transcripts/fix2567.output.md +++ b/unison-src/transcripts/fix2567.output.md @@ -22,15 +22,15 @@ structural ability Foo where .> view Foo structural ability some.subnamespace.Foo where - woot : Nat -> (Nat, Nat) ->{some.subnamespace.Foo} Nat blah : Nat ->{some.subnamespace.Foo} Nat + woot : Nat -> (Nat, Nat) ->{some.subnamespace.Foo} Nat ☝️ The namespace .somewhere is empty. .somewhere> view Foo structural ability .some.subnamespace.Foo where - woot : Nat -> (Nat, Nat) ->{.some.subnamespace.Foo} Nat blah : Nat ->{.some.subnamespace.Foo} Nat + woot : Nat -> (Nat, Nat) ->{.some.subnamespace.Foo} Nat ``` diff --git a/unison-src/transcripts/mergeloop.output.md b/unison-src/transcripts/mergeloop.output.md index b081461c28..f4a5546a0d 100644 --- a/unison-src/transcripts/mergeloop.output.md +++ b/unison-src/transcripts/mergeloop.output.md @@ -135,9 +135,9 @@ b = 2 `history #som3n4m3space` to view history starting from a given namespace hash. - ⊙ #lt6ilkmejb + ⊙ #9ok9vp5hsh ⑃ - #d1o3h34d1d - #q5gf6eubo8 + #i2dsn51imn + #veuj72uce5 ``` diff --git a/unison-src/transcripts/names.output.md b/unison-src/transcripts/names.output.md index 66cab3a516..b265384c76 100644 --- a/unison-src/transcripts/names.output.md +++ b/unison-src/transcripts/names.output.md @@ -16,17 +16,17 @@ intTriple = IntTriple(+1, +1, +1) .> names IntTriple Type - Hash: #p1iakck1ol + Hash: #ap1scd256n Names: IntTriple namespc.another.TripleInt Term - Hash: #p1iakck1ol#0 + Hash: #ap1scd256n#0 Names: IntTriple.IntTriple .> names intTriple Term - Hash: #2quul9e9bo + Hash: #rliag116kp Names: intTriple namespc.another.tripleInt ``` From 52b2c81783c73182974bed39664c5555e0a1a023 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 6 Dec 2021 11:26:37 -1000 Subject: [PATCH 132/297] never assume builtins are known to the db fixes 107 failing transcripts --- .../U/Codebase/Sqlite/Operations.hs | 12 +- .../transcripts-using-base/doc.output.md | 12 +- .../transcripts-using-base/hashing.output.md | 4 +- .../transcripts-using-base/tls.output.md | 2 +- .../transcripts/bug-strange-closure.output.md | 24 ++-- .../transcripts/command-replace.output.md | 4 +- .../transcripts/delete-namespace.output.md | 2 +- unison-src/transcripts/delete.output.md | 38 +++--- ...ependents-dependencies-debugfile.output.md | 50 ++++---- unison-src/transcripts/diff.output.md | 120 ++++++++---------- .../transcripts/doc-formatting.output.md | 1 + unison-src/transcripts/docs.output.md | 18 ++- unison-src/transcripts/find-patch.output.md | 4 +- unison-src/transcripts/fix1334.output.md | 6 +- unison-src/transcripts/fix2000.output.md | 12 +- unison-src/transcripts/fix2053.output.md | 2 +- unison-src/transcripts/fix2254.output.md | 4 +- .../transcripts/isPropagated-exists.output.md | 6 +- unison-src/transcripts/link.output.md | 8 +- unison-src/transcripts/merges.output.md | 12 +- .../transcripts/name-selection.output.md | 22 ++-- .../pattern-pretty-print-2345.output.md | 6 +- unison-src/transcripts/propagate.output.md | 12 +- unison-src/transcripts/reflog.output.md | 12 +- unison-src/transcripts/squash.output.md | 52 ++++---- 25 files changed, 226 insertions(+), 219 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 39d54ee9d6..e24c6eb707 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -249,7 +249,6 @@ data Error = DecodeError DecodeError ByteString ErrString | DatabaseIntegrityError Q.Integrity | UnknownDependency H.Hash - | UnknownText Text | ExpectedBranch CausalHash BranchHash | ExpectedBranch' Db.CausalHashId | LegacyUnknownCycleLen H.Hash @@ -270,12 +269,6 @@ liftQ a = -- * Database lookups -lookupTextId :: EDB m => Text -> m Db.TextId -lookupTextId t = - Q.loadText t >>= \case - Just textId -> pure textId - Nothing -> throwError $ UnknownText t - loadTextById :: EDB m => Db.TextId -> m Text loadTextById = liftQ . Q.loadTextById @@ -335,8 +328,11 @@ loadMaybeRootCausalHash = -- ** read existing references +-- |Assumes that a derived reference would already exist in the database +-- (by virtue of dependencies being stored before dependents), but does +-- not assume a builtin reference would. c2sReference :: EDB m => C.Reference -> m S.Reference -c2sReference = bitraverse lookupTextId primaryHashToExistingObjectId +c2sReference = bitraverse Q.saveText primaryHashToExistingObjectId s2cReference :: EDB m => S.Reference -> m C.Reference s2cReference = bitraverse loadTextById loadHashByObjectId diff --git a/unison-src/transcripts-using-base/doc.output.md b/unison-src/transcripts-using-base/doc.output.md index 14c7f69e4b..5905af1fd7 100644 --- a/unison-src/transcripts-using-base/doc.output.md +++ b/unison-src/transcripts-using-base/doc.output.md @@ -335,16 +335,18 @@ and the rendered output using `display`: Unison definitions can be included in docs. For instance: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x Some rendering targets also support folded source: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x @@ -637,16 +639,18 @@ Lastly, it's common to build longer documents including subdocuments via `{{ sub Unison definitions can be included in docs. For instance: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x Some rendering targets also support folded source: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x diff --git a/unison-src/transcripts-using-base/hashing.output.md b/unison-src/transcripts-using-base/hashing.output.md index 8d9bf6e0a0..052c1baf1a 100644 --- a/unison-src/transcripts-using-base/hashing.output.md +++ b/unison-src/transcripts-using-base/hashing.output.md @@ -106,11 +106,11 @@ ex5 = crypto.hmac Sha2_256 mysecret f |> hex 25 | > ex4 ⧩ - "c9adb8fbda582aeab113379dbd8f6af3ea450df1782780d61d44ad1ef7bff76e" + "c0fb39ed837cfe691bda9944b0c01a1c8c09dfc1be8d8501848d8d8d7651dcaf" 26 | > ex5 ⧩ - "b198a72da9e6c11536c0a2118002760a9eac57db7f9a0ce9ebda8cd8a806bd06" + "e04090fa634d01ecc7db4422b3611735170930bc7ef3ebcdb8b4f3cf778f7475" ``` And here's the full API: diff --git a/unison-src/transcripts-using-base/tls.output.md b/unison-src/transcripts-using-base/tls.output.md index 4caf77050d..7c7acd619d 100644 --- a/unison-src/transcripts-using-base/tls.output.md +++ b/unison-src/transcripts-using-base/tls.output.md @@ -29,7 +29,7 @@ test> match (decodeCert.impl (toUtf8 self_signed_cert_pem) with ⍟ These new definitions are ok to `add`: - test.ko630itb5m (Unison bug, unknown term) + test.cf6t0id1ti (Unison bug, unknown term) Now evaluating any watch expressions (lines starting with `>`)... Ctrl+C cancels. diff --git a/unison-src/transcripts/bug-strange-closure.output.md b/unison-src/transcripts/bug-strange-closure.output.md index 3b1adc4add..6d507ea269 100644 --- a/unison-src/transcripts/bug-strange-closure.output.md +++ b/unison-src/transcripts/bug-strange-closure.output.md @@ -88,16 +88,18 @@ We can display the guide before and after adding it to the codebase: Unison definitions can be included in docs. For instance: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x Some rendering targets also support folded source: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x @@ -289,16 +291,18 @@ We can display the guide before and after adding it to the codebase: Unison definitions can be included in docs. For instance: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x Some rendering targets also support folded source: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x @@ -496,16 +500,18 @@ rendered = Pretty.get (docFormatConsole doc.guide) Unison definitions can be included in docs. For instance: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x Some rendering targets also support folded source: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x @@ -690,16 +696,18 @@ rendered = Pretty.get (docFormatConsole doc.guide) Unison definitions can be included in docs. For instance: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x Some rendering targets also support folded source: - structural type Optional a = Some a | None + structural type Optional a = None | Some a + sqr : Nat -> Nat sqr x = use Nat * x * x diff --git a/unison-src/transcripts/command-replace.output.md b/unison-src/transcripts/command-replace.output.md index 84f905e1e8..a5c331c833 100644 --- a/unison-src/transcripts/command-replace.output.md +++ b/unison-src/transcripts/command-replace.output.md @@ -67,9 +67,9 @@ Test that replace works with types .scratch> view.patch patch - Edited Types: X#d97e0jhkmd -> X + Edited Types: X#hhc6goudjq -> X - Edited Terms: #jk19sm5bf8 -> x + Edited Terms: #rrsqv1ogaq -> x Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as diff --git a/unison-src/transcripts/delete-namespace.output.md b/unison-src/transcripts/delete-namespace.output.md index 3bc453b4f6..5677c11f67 100644 --- a/unison-src/transcripts/delete-namespace.output.md +++ b/unison-src/transcripts/delete-namespace.output.md @@ -76,7 +76,7 @@ I should be able to view an affected dependency by number dependents.usage1 : Nat dependents.usage1 = use Nat + - #jk19sm5bf8 + #0ja1qfpej6 + #rrsqv1ogaq + #0t5t522gs3 ``` Deleting the root namespace should require confirmation if not forced. diff --git a/unison-src/transcripts/delete.output.md b/unison-src/transcripts/delete.output.md index 7570d774f5..79280dd640 100644 --- a/unison-src/transcripts/delete.output.md +++ b/unison-src/transcripts/delete.output.md @@ -49,7 +49,7 @@ structural type Foo = Foo Nat Removed definitions: - 1. Foo.Foo : Nat -> #d97e0jhkmd + 1. Foo.Foo : Nat -> #hhc6goudjq Tip: You can use `undo` or `reflog` to undo this change. @@ -90,10 +90,10 @@ foo = 2 New name conflicts: - 1. foo#jk19sm5bf8 : Nat + 1. foo#rrsqv1ogaq : Nat ↓ - 2. ┌ foo#0ja1qfpej6 : Nat - 3. └ foo#jk19sm5bf8 : Nat + 2. ┌ foo#0t5t522gs3 : Nat + 3. └ foo#rrsqv1ogaq : Nat Tip: You can use `todo` to see if this generated any work to do in this namespace and `test` to run the tests. Or you @@ -108,13 +108,13 @@ A delete should remove both versions of the term. Removed definitions: - 1. a.foo#jk19sm5bf8 : Nat + 1. a.foo#rrsqv1ogaq : Nat Name changes: Original Changes - 2. b.foo ┐ 3. a.foo#0ja1qfpej6 (removed) - 4. a.foo#0ja1qfpej6 ┘ + 2. b.foo ┐ 3. a.foo#0t5t522gs3 (removed) + 4. a.foo#0t5t522gs3 ┘ Tip: You can use `undo` or `reflog` to undo this change. @@ -157,18 +157,18 @@ structural type Foo = Foo Boolean New name conflicts: - 1. structural type Foo#d97e0jhkmd + 1. structural type Foo#hhc6goudjq ↓ - 2. ┌ structural type Foo#d97e0jhkmd + 2. ┌ structural type Foo#gf6ne3ran5 - 3. └ structural type Foo#gq9inhvg9h + 3. └ structural type Foo#hhc6goudjq - 4. Foo.Foo#d97e0jhkmd#0 : Nat -> Foo#d97e0jhkmd + 4. Foo.Foo#hhc6goudjq#0 : Nat -> Foo#hhc6goudjq ↓ - 5. ┌ Foo.Foo#d97e0jhkmd#0 : Nat -> Foo#d97e0jhkmd - 6. └ Foo.Foo#gq9inhvg9h#0 : Boolean -> Foo#gq9inhvg9h + 5. ┌ Foo.Foo#gf6ne3ran5#0 : Boolean -> Foo#gf6ne3ran5 + 6. └ Foo.Foo#hhc6goudjq#0 : Nat -> Foo#hhc6goudjq Tip: You can use `todo` to see if this generated any work to do in this namespace and `test` to run the tests. Or you @@ -181,13 +181,13 @@ structural type Foo = Foo Boolean Removed definitions: - 1. structural type a.Foo#d97e0jhkmd + 1. structural type a.Foo#hhc6goudjq Name changes: Original Changes - 2. b.Foo ┐ 3. a.Foo#gq9inhvg9h (removed) - 4. a.Foo#gq9inhvg9h ┘ + 2. b.Foo ┐ 3. a.Foo#gf6ne3ran5 (removed) + 4. a.Foo#gf6ne3ran5 ┘ Tip: You can use `undo` or `reflog` to undo this change. @@ -197,13 +197,13 @@ structural type Foo = Foo Boolean Removed definitions: - 1. a.Foo.Foo#d97e0jhkmd#0 : Nat -> #d97e0jhkmd + 1. a.Foo.Foo#hhc6goudjq#0 : Nat -> #hhc6goudjq Name changes: Original Changes - 2. b.Foo.Foo ┐ 3. a.Foo.Foo#gq9inhvg9h#0 (removed) - 4. a.Foo.Foo#gq9inhvg9h#0 ┘ + 2. b.Foo.Foo ┐ 3. a.Foo.Foo#gf6ne3ran5#0 (removed) + 4. a.Foo.Foo#gf6ne3ran5#0 ┘ Tip: You can use `undo` or `reflog` to undo this change. diff --git a/unison-src/transcripts/dependents-dependencies-debugfile.output.md b/unison-src/transcripts/dependents-dependencies-debugfile.output.md index 5ef6560de7..ceed7787d8 100644 --- a/unison-src/transcripts/dependents-dependencies-debugfile.output.md +++ b/unison-src/transcripts/dependents-dependencies-debugfile.output.md @@ -17,14 +17,14 @@ inside.r = d ```ucm .> debug.file - type inside.M#ld0okei52l - type outside.A#e6mpjfecmg - type outside.B#muulibntaq - inside.p#fiupm7pl7o - inside.q#l5pndeifuh - inside.r#im2kiu2hmn - outside.c#msp7bv40rv - outside.d#6cdi7g1oi2 + type inside.M#goba2va40r + type outside.A#ihqhr4prbp + type outside.B#mm8h095nrg + inside.p#h63obi5rb4 + inside.q#1qtbral9uo + inside.r#9guss29ljv + outside.c#fs7la111vn + outside.d#p7dvt0ka99 ``` This will help me make progress in some situations when UCM is being deficient or broken. @@ -47,47 +47,49 @@ But wait, there's more. I can check the dependencies and dependents of a defini .> dependents q - #l5pndeifuh doesn't have any named dependents. + #1qtbral9uo doesn't have any named dependents. .> dependencies q - Dependencies of #l5pndeifuh: + Dependencies of #1qtbral9uo: Reference Name - 1. ##Nat.* builtin.Nat.* - 2. ##Nat.+ builtin.Nat.+ - 3. #fiupm7pl7o inside.p + 1. ##Nat builtin.Nat + 2. ##Nat.* builtin.Nat.* + 3. ##Nat.+ builtin.Nat.+ + 4. #h63obi5rb4 inside.p .> dependencies B - Dependencies of #muulibntaq: + Dependencies of #mm8h095nrg: Reference Name 1. ##Int builtin.Int - Dependencies of #muulibntaq#0: + Dependencies of #mm8h095nrg#0: Reference Name - 1. #muulibntaq outside.B + 1. #mm8h095nrg outside.B 2. ##Int builtin.Int .> dependencies d - Dependencies of #6cdi7g1oi2: + Dependencies of #p7dvt0ka99: Reference Name - 1. ##Nat builtin.Nat - 2. ##Nat.+ builtin.Nat.+ - 3. ##Universal.< builtin.Universal.< - 4. #msp7bv40rv outside.c - 5. #fiupm7pl7o inside.p + 1. ##Boolean builtin.Boolean + 2. ##Nat builtin.Nat + 3. ##Nat.+ builtin.Nat.+ + 4. ##Universal.< builtin.Universal.< + 5. #fs7la111vn outside.c + 6. #h63obi5rb4 inside.p .> dependents d - Dependents of #6cdi7g1oi2: + Dependents of #p7dvt0ka99: Reference Name - 1. #im2kiu2hmn inside.r + 1. #9guss29ljv inside.r ``` We don't have an index for dependents of constructors, but iirc if you ask for that, it will show you dependents of the structural type that provided the constructor. diff --git a/unison-src/transcripts/diff.output.md b/unison-src/transcripts/diff.output.md index 4889ad7406..b840863d49 100644 --- a/unison-src/transcripts/diff.output.md +++ b/unison-src/transcripts/diff.output.md @@ -43,10 +43,10 @@ fslkdjflskdjflksjdf = 663 New name conflicts: - 1. fslkdjflskdjflksjdf#4kipsv2tm6 : Nat + 1. fslkdjflskdjflksjdf#4vn50bh8pk : Nat ↓ - 2. ┌ fslkdjflskdjflksjdf#4kipsv2tm6 : Nat - 3. └ fslkdjflskdjflksjdf#s5tu4n7rlb : Nat + 2. ┌ fslkdjflskdjflksjdf#4vn50bh8pk : Nat + 3. └ fslkdjflskdjflksjdf#9mupj24g1n : Nat Tip: You can use `todo` to see if this generated any work to do in this namespace and `test` to run the tests. Or you @@ -57,33 +57,33 @@ fslkdjflskdjflksjdf = 663 Resolved name conflicts: - 1. ┌ fslkdjflskdjflksjdf#4kipsv2tm6 : Nat - 2. └ fslkdjflskdjflksjdf#s5tu4n7rlb : Nat + 1. ┌ fslkdjflskdjflksjdf#4vn50bh8pk : Nat + 2. └ fslkdjflskdjflksjdf#9mupj24g1n : Nat ↓ - 3. fslkdjflskdjflksjdf#4kipsv2tm6 : Nat + 3. fslkdjflskdjflksjdf#4vn50bh8pk : Nat Name changes: Original Changes 4. x ┐ 5. abc (added) - 6. fslkdjflskdjflksjdf#4kipsv2tm6 ┘ 7. fslkdjflskdjflksjdf (added) - 8. fslkdjflskdjflksjdf#4kipsv2tm6 (removed) + 6. fslkdjflskdjflksjdf#4vn50bh8pk ┘ 7. fslkdjflskdjflksjdf (added) + 8. fslkdjflskdjflksjdf#4vn50bh8pk (removed) .b2> diff.namespace .b1 Resolved name conflicts: - 1. ┌ fslkdjflskdjflksjdf#4kipsv2tm6 : Nat - 2. └ fslkdjflskdjflksjdf#s5tu4n7rlb : Nat + 1. ┌ fslkdjflskdjflksjdf#4vn50bh8pk : Nat + 2. └ fslkdjflskdjflksjdf#9mupj24g1n : Nat ↓ - 3. fslkdjflskdjflksjdf#4kipsv2tm6 : Nat + 3. fslkdjflskdjflksjdf#4vn50bh8pk : Nat Name changes: Original Changes 4. x ┐ 5. abc (added) - 6. fslkdjflskdjflksjdf#4kipsv2tm6 ┘ 7. fslkdjflskdjflksjdf (added) - 8. fslkdjflskdjflksjdf#4kipsv2tm6 (removed) + 6. fslkdjflskdjflksjdf#4vn50bh8pk ┘ 7. fslkdjflskdjflksjdf (added) + 8. fslkdjflskdjflksjdf#4vn50bh8pk (removed) ``` Things we want to test: @@ -184,10 +184,10 @@ fromJust = "asldkfjasldkfj" New name conflicts: - 1. fromJust#jk19sm5bf8 : Nat + 1. fromJust#rrsqv1ogaq : Nat ↓ - 2. ┌ fromJust#hs2i9lcgkd : Text - 3. └ fromJust#jk19sm5bf8 : Nat + 2. ┌ fromJust#8vv2a5jnig : Text + 3. └ fromJust#rrsqv1ogaq : Nat Tip: You can use `todo` to see if this generated any work to do in this namespace and `test` to run the tests. Or you @@ -232,10 +232,10 @@ unique type Y a b = Y a b Resolved name conflicts: - 1. ┌ fromJust#hs2i9lcgkd : Text - 2. └ fromJust#jk19sm5bf8 : Nat + 1. ┌ fromJust#8vv2a5jnig : Text + 2. └ fromJust#rrsqv1ogaq : Nat ↓ - 3. fromJust#1o1iq26cq7 : Nat + 3. fromJust#mkj3tehhkv : Nat - 4. ns1.b : Nat + 5. ns2.b : Text @@ -251,8 +251,6 @@ unique type Y a b = Y a b - 10. ns1.b : Nat + 11. ns2.b : Text - There were 1 auto-propagated updates. - Added definitions: 12. unique type Y a b @@ -279,10 +277,10 @@ unique type Y a b = Y a b Resolved name conflicts: - 1. ┌ fromJust#hs2i9lcgkd : Text - 2. └ fromJust#jk19sm5bf8 : Nat + 1. ┌ fromJust#8vv2a5jnig : Text + 2. └ fromJust#rrsqv1ogaq : Nat ↓ - 3. fromJust#1o1iq26cq7 : Nat + 3. fromJust#mkj3tehhkv : Nat - 4. ns1.b : Nat + 5. ns2.b : Text @@ -298,8 +296,6 @@ unique type Y a b = Y a b - 10. ns1.b : Nat + 11. ns2.b : Text - There were 1 auto-propagated updates. - Added definitions: 12. unique type Y a b @@ -336,10 +332,10 @@ unique type Y a b = Y a b Resolved name conflicts: - 1. ┌ fromJust#hs2i9lcgkd : Text - 2. └ fromJust#jk19sm5bf8 : Nat + 1. ┌ fromJust#8vv2a5jnig : Text + 2. └ fromJust#rrsqv1ogaq : Nat ↓ - 3. fromJust#1o1iq26cq7 : Nat + 3. fromJust#mkj3tehhkv : Nat - 4. ns1.b : Nat + 5. ns2.b : Text @@ -358,8 +354,6 @@ unique type Y a b = Y a b - 12. ns1.b : Nat + 13. ns2.b : Text - There were 1 auto-propagated updates. - Added definitions: 14. unique type Y a b @@ -386,10 +380,10 @@ unique type Y a b = Y a b Resolved name conflicts: - 1. ┌ fromJust#hs2i9lcgkd : Text - 2. └ fromJust#jk19sm5bf8 : Nat + 1. ┌ fromJust#8vv2a5jnig : Text + 2. └ fromJust#rrsqv1ogaq : Nat ↓ - 3. fromJust#1o1iq26cq7 : Nat + 3. fromJust#mkj3tehhkv : Nat - 4. ns1.b : Nat + 5. ns2.b : Text @@ -408,8 +402,6 @@ unique type Y a b = Y a b - 12. ns1.b : Nat + 13. ns2.b : Text - There were 1 auto-propagated updates. - Added definitions: 14. unique type Y a b @@ -491,7 +483,7 @@ bdependent = "banana" Updates: - 1. bdependent : Text + 1. bdependent : Nat ↓ 2. bdependent : Text @@ -578,14 +570,14 @@ a = 555 New name conflicts: - 1. a#ekguc9h648 : Nat + 1. a#er8q007v02 : Nat ↓ - 2. ┌ a#5f8uodgrtf : Nat - 3. └ a#ekguc9h648 : Nat + 2. ┌ a#75jrr0qj1c : Nat + 3. └ a#er8q007v02 : Nat Updates: - 4. b#be9a2abbbg : Nat + 4. b#j6aq9c9k9v : Nat There were 1 auto-propagated updates. @@ -605,10 +597,10 @@ a = 555 New name conflicts: - 1. a#8ss2r9gqe7 : Nat + 1. a#nfb8lmj46i : Nat ↓ - 2. ┌ a#5f8uodgrtf : Nat - 3. └ a#ekguc9h648 : Nat + 2. ┌ a#75jrr0qj1c : Nat + 3. └ a#er8q007v02 : Nat Updates: @@ -620,21 +612,21 @@ a = 555 .nsw> view a b - a#5f8uodgrtf : Nat - a#5f8uodgrtf = 555 + a#75jrr0qj1c : Nat + a#75jrr0qj1c = 555 - a#ekguc9h648 : Nat - a#ekguc9h648 = 444 + a#er8q007v02 : Nat + a#er8q007v02 = 444 - b#be9a2abbbg : Nat - b#be9a2abbbg = + b#j6aq9c9k9v : Nat + b#j6aq9c9k9v = use Nat + - a#ekguc9h648 + 1 + a#er8q007v02 + 1 - b#kut4vstim7 : Nat - b#kut4vstim7 = + b#p50ctv8q17 : Nat + b#p50ctv8q17 = use Nat + - a#5f8uodgrtf + 1 + a#75jrr0qj1c + 1 ``` ```unison @@ -670,21 +662,21 @@ a = 777 .nsw> view a b - a#5f8uodgrtf : Nat - a#5f8uodgrtf = 555 + a#75jrr0qj1c : Nat + a#75jrr0qj1c = 555 - a#ekguc9h648 : Nat - a#ekguc9h648 = 444 + a#er8q007v02 : Nat + a#er8q007v02 = 444 - b#be9a2abbbg : Nat - b#be9a2abbbg = + b#j6aq9c9k9v : Nat + b#j6aq9c9k9v = use Nat + - a#ekguc9h648 + 1 + a#er8q007v02 + 1 - b#kut4vstim7 : Nat - b#kut4vstim7 = + b#p50ctv8q17 : Nat + b#p50ctv8q17 = use Nat + - a#5f8uodgrtf + 1 + a#75jrr0qj1c + 1 ``` ## diff --git a/unison-src/transcripts/doc-formatting.output.md b/unison-src/transcripts/doc-formatting.output.md index a0da802f71..a0dd067cbe 100644 --- a/unison-src/transcripts/doc-formatting.output.md +++ b/unison-src/transcripts/doc-formatting.output.md @@ -497,6 +497,7 @@ But note it's not obvious how display should best be handling this. At the mome .> display test2 Take a look at this: + foo : Nat -> Nat foo n = use Nat + [: do the thing :] diff --git a/unison-src/transcripts/docs.output.md b/unison-src/transcripts/docs.output.md index 3b09535dac..ab85993cfd 100644 --- a/unison-src/transcripts/docs.output.md +++ b/unison-src/transcripts/docs.output.md @@ -7,11 +7,11 @@ Unison documentation is written in Unison. Documentation is a value of the follo unique type builtin.Doc = Join [builtin.Doc] - | Link Link - | Source Link - | Blob Text | Signature Term | Evaluate Term + | Blob Text + | Link Link + | Source Link ``` You can create these `Doc` values with ordinary code, or you can use the special syntax. A value of structural type `Doc` can be created via syntax like: @@ -147,12 +147,14 @@ Now that documentation is linked to the definition. We can view it if we like: ## Examples: - List.take.ex1 = builtin.List.take 0 [1, 2, 3, 4, 5] + List.take.ex1 : [Nat] + List.take.ex1 = builtin.List.take 0 [1, 2, 3, 4, 5] 🔽 ex1 = [] - List.take.ex2 = builtin.List.take 2 [1, 2, 3, 4, 5] + List.take.ex2 : [Nat] + List.take.ex2 = builtin.List.take 2 [1, 2, 3, 4, 5] 🔽 ex2 = [1, 2] @@ -170,12 +172,14 @@ Or there's also a convenient function, `docs`, which shows the `Doc` values that ## Examples: - List.take.ex1 = builtin.List.take 0 [1, 2, 3, 4, 5] + List.take.ex1 : [Nat] + List.take.ex1 = builtin.List.take 0 [1, 2, 3, 4, 5] 🔽 ex1 = [] - List.take.ex2 = builtin.List.take 2 [1, 2, 3, 4, 5] + List.take.ex2 : [Nat] + List.take.ex2 = builtin.List.take 2 [1, 2, 3, 4, 5] 🔽 ex2 = [1, 2] diff --git a/unison-src/transcripts/find-patch.output.md b/unison-src/transcripts/find-patch.output.md index d021f0cc6d..396766f288 100644 --- a/unison-src/transcripts/find-patch.output.md +++ b/unison-src/transcripts/find-patch.output.md @@ -64,7 +64,7 @@ Update .> view.patch patch - Edited Terms: hey#8e79ctircj -> hey + Edited Terms: hey#ao70f78mjt -> hey Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as @@ -72,7 +72,7 @@ Update .> view.patch 1 - Edited Terms: hey#8e79ctircj -> hey + Edited Terms: hey#ao70f78mjt -> hey Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as diff --git a/unison-src/transcripts/fix1334.output.md b/unison-src/transcripts/fix1334.output.md index 9fdc30def6..bb2a783414 100644 --- a/unison-src/transcripts/fix1334.output.md +++ b/unison-src/transcripts/fix1334.output.md @@ -60,7 +60,7 @@ We used to have to know the full hash for a definition to be able to use the `re .> names g Term - Hash: #52addbrohu + Hash: #7pm9gkpflp Names: g .> replace f g @@ -70,12 +70,12 @@ We used to have to know the full hash for a definition to be able to use the `re .> names g Term - Hash: #52addbrohu + Hash: #7pm9gkpflp Names: f g .> view.patch - Edited Terms: f#msp7bv40rv -> f + Edited Terms: f#fs7la111vn -> f Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as diff --git a/unison-src/transcripts/fix2000.output.md b/unison-src/transcripts/fix2000.output.md index 3e46ccc3c3..802a635048 100644 --- a/unison-src/transcripts/fix2000.output.md +++ b/unison-src/transcripts/fix2000.output.md @@ -104,10 +104,10 @@ Merge back into the ancestor. New name conflicts: - 1. p#a3ef1630bu : Text + 1. p#ku52sq6ta7 : Text ↓ - 2. ┌ p#a3ef1630bu : Text - 3. └ p#fjqpdmdeqi : Text + 2. ┌ p#ku52sq6ta7 : Text + 3. └ p#p6cuhi6hb7 : Text Tip: You can use `todo` to see if this generated any work to do in this namespace and `test` to run the tests. Or you @@ -118,10 +118,10 @@ Merge back into the ancestor. Resolved name conflicts: - 1. ┌ y.a.p#a3ef1630bu : Text - 2. └ y.a.p#fjqpdmdeqi : Text + 1. ┌ y.a.p#ku52sq6ta7 : Text + 2. └ y.a.p#p6cuhi6hb7 : Text ↓ - 3. y.a.p#fjqpdmdeqi : Text + 3. y.a.p#p6cuhi6hb7 : Text Tip: You can use `undo` or `reflog` to undo this change. diff --git a/unison-src/transcripts/fix2053.output.md b/unison-src/transcripts/fix2053.output.md index 02ccfc0ac4..60fe87aa41 100644 --- a/unison-src/transcripts/fix2053.output.md +++ b/unison-src/transcripts/fix2053.output.md @@ -4,10 +4,10 @@ go f i as acc = _pattern = List.at i as match _pattern with + None -> acc Some _pattern1 -> use Nat + go f (i + 1) as (acc :+ f _pattern) - None -> acc f a -> go f 0 a [] ``` diff --git a/unison-src/transcripts/fix2254.output.md b/unison-src/transcripts/fix2254.output.md index 78ba1d91ee..f58e139909 100644 --- a/unison-src/transcripts/fix2254.output.md +++ b/unison-src/transcripts/fix2254.output.md @@ -76,10 +76,10 @@ Let's do the update now, and verify that the definitions all look good and there unique type A a b c d = D d - | A a + | E a d | B b | C c - | E a d + | A a structural type NeedsA a b = NeedsA (A a b Nat Nat) diff --git a/unison-src/transcripts/isPropagated-exists.output.md b/unison-src/transcripts/isPropagated-exists.output.md index 845609246a..940ba5b595 100644 --- a/unison-src/transcripts/isPropagated-exists.output.md +++ b/unison-src/transcripts/isPropagated-exists.output.md @@ -32,15 +32,15 @@ x = 4 .> links y - 1. #kea5380m2n : #ffb7g9cull + 1. #kvjtpqi06m : #pi9o52ongq Tip: Try using `display 1` to display the first result or `view 1` to view its source. .> view 1 - #kea5380m2n : #ffb7g9cull - #kea5380m2n = #ffb7g9cull#0 + #kvjtpqi06m : #pi9o52ongq + #kvjtpqi06m = #pi9o52ongq#0 ``` Well, it's hard to tell from those hashes, but those are right. We can confirm diff --git a/unison-src/transcripts/link.output.md b/unison-src/transcripts/link.output.md index eb852121bb..8a4ac3b675 100644 --- a/unison-src/transcripts/link.output.md +++ b/unison-src/transcripts/link.output.md @@ -101,9 +101,9 @@ We can look at the links we have: ```ucm .> links coolFunction - 1. coolFunction.license : License + 1. coolFunction.doc : Doc 2. alice : Author - 3. coolFunction.doc : Doc + 3. coolFunction.license : License Tip: Try using `display 1` to display the first result or `view 1` to view its source. @@ -194,11 +194,11 @@ myLibrary.h x = x + 3 Note: The most recent namespace hash is immediately below this message. - ⊙ #l3kl2s1g8u + ⊙ #noq1the6kh - □ #hcaq4np5kg (start of history) + □ #793qedilos (start of history) .> unlink coolFunction.doc coolFunction diff --git a/unison-src/transcripts/merges.output.md b/unison-src/transcripts/merges.output.md index e814435fe0..525b73d787 100644 --- a/unison-src/transcripts/merges.output.md +++ b/unison-src/transcripts/merges.output.md @@ -112,13 +112,13 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Note: The most recent namespace hash is immediately below this message. - ⊙ #4dmogr46d2 + ⊙ #979i457hd7 - Deletes: feature1.y - ⊙ #fe6mqhfcun + ⊙ #kb0j8t02ue + Adds / updates: @@ -129,26 +129,26 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Original name New name(s) feature1.y master.y - ⊙ #qlrc4272pk + ⊙ #bdvh2otcut + Adds / updates: feature1.y - ⊙ #mu2ju8e2ic + ⊙ #kt38mg5bug > Moves: Original name New name x master.x - ⊙ #4adnodif8j + ⊙ #gd173qpcn0 + Adds / updates: x - □ #ucb56c3fgj (start of history) + □ #hlflbqjqf9 (start of history) ``` To resurrect an old version of a namespace, you can learn its hash via the `history` command, then use `fork #namespacehash .newname`. diff --git a/unison-src/transcripts/name-selection.output.md b/unison-src/transcripts/name-selection.output.md index 4cb92fe524..17fdd4bce4 100644 --- a/unison-src/transcripts/name-selection.output.md +++ b/unison-src/transcripts/name-selection.output.md @@ -80,19 +80,19 @@ d = c + 10 New name conflicts: - 1. c#0ja1qfpej6 : Nat + 1. c#0t5t522gs3 : Nat ↓ - 2. ┌ c#0ja1qfpej6 : Nat - 3. └ c#jk19sm5bf8 : Nat + 2. ┌ c#0t5t522gs3 : Nat + 3. └ c#rrsqv1ogaq : Nat - 4. d#gk1aqtfmh6 : Nat + 4. d#libk4oh073 : Nat ↓ - 5. ┌ d#gk1aqtfmh6 : Nat - 6. └ d#qk9ub6bngd : Nat + 5. ┌ d#libk4oh073 : Nat + 6. └ d#qc2aihi6pd : Nat Added definitions: - 7. ┌ c#jk19sm5bf8 : Nat + 7. ┌ c#rrsqv1ogaq : Nat 8. └ aaaa.tooManySegments : Nat Tip: You can use `todo` to see if this generated any work to @@ -118,10 +118,10 @@ At this point, `a3` is conflicted for symbols `c` and `d`, but the original `a2` a2.d : Nat a2.d = a2.c + 10 - a3.c#0ja1qfpej6 : Nat - a3.c#0ja1qfpej6 = 2 + a3.c#0t5t522gs3 : Nat + a3.c#0t5t522gs3 = 2 - a3.d#gk1aqtfmh6 : Nat - a3.d#gk1aqtfmh6 = c#0ja1qfpej6 + 10 + a3.d#libk4oh073 : Nat + a3.d#libk4oh073 = c#0t5t522gs3 + 10 ``` diff --git a/unison-src/transcripts/pattern-pretty-print-2345.output.md b/unison-src/transcripts/pattern-pretty-print-2345.output.md index a764580fa5..20e39ccdab 100644 --- a/unison-src/transcripts/pattern-pretty-print-2345.output.md +++ b/unison-src/transcripts/pattern-pretty-print-2345.output.md @@ -63,7 +63,7 @@ doc = cases demure : [Nat] -> () doc : Nat -> () dopey : Char -> () - grumpy : p4kl4dn7b41 -> () + grumpy : q5g5surm1d1 -> () happy : Boolean -> () mouthy : [t] -> () pokey : [t] -> () @@ -85,7 +85,7 @@ doc = cases demure : [Nat] -> () doc : Nat -> () dopey : Char -> () - grumpy : p4kl4dn7b41 -> () + grumpy : q5g5surm1d1 -> () happy : Boolean -> () mouthy : [t] -> () pokey : [t] -> () @@ -101,7 +101,7 @@ doc = cases .> view grumpy - grumpy : p4kl4dn7b41 -> () + grumpy : q5g5surm1d1 -> () grumpy = cases d -> () .> view happy diff --git a/unison-src/transcripts/propagate.output.md b/unison-src/transcripts/propagate.output.md index c279fe922d..0fd909c69c 100644 --- a/unison-src/transcripts/propagate.output.md +++ b/unison-src/transcripts/propagate.output.md @@ -37,13 +37,13 @@ And then we add it. .subpath> find.verbose - 1. -- #v4a90flt15t54qnjbvbdtj42ouqo8dktu5da8g6q30l4frc6l81ttjtov42r1nbj5jq3hh98snlb64tkbb1mc5dk8les96v71b4qr6g + 1. -- #ohtefitum4jai7d42hi6vc0vs8kupm9h81t1cspr9600r5grscm7jga452lnns29p7rkm9bq2u4o4ais0praq6r1uarbpfg4mf70758 unique type Foo - 2. -- #v4a90flt15t54qnjbvbdtj42ouqo8dktu5da8g6q30l4frc6l81ttjtov42r1nbj5jq3hh98snlb64tkbb1mc5dk8les96v71b4qr6g#0 + 2. -- #ohtefitum4jai7d42hi6vc0vs8kupm9h81t1cspr9600r5grscm7jga452lnns29p7rkm9bq2u4o4ais0praq6r1uarbpfg4mf70758#0 Foo.Foo : Foo - 3. -- #5k9rns49vrtujrpbiegajeja9qjjs77fju3usg1i1dpeo44kefkbce776u1kvqhvtutk6a6f178kovr422ocsd4fdsbsg7fprf4o0dg + 3. -- #d9h62ubdj054t16h1mv5gc838ji23q8g5kgf35upp8f9ed9rq95pu2jg0lkq4c8k58p2c652o7mc2b7dr21b9m9jng512cop27sdo8o fooToInt : Foo -> Int @@ -187,9 +187,9 @@ Cleaning up a bit... Removed definitions: 1. unique type Foo - 2. Foo.Bar : #i2nv821v0u - 3. Foo.Foo : #i2nv821v0u - 4. fooToInt : #i2nv821v0u -> Int + 2. Foo.Bar : #8ei3pu3p2p + 3. Foo.Foo : #8ei3pu3p2p + 4. fooToInt : #8ei3pu3p2p -> Int 5. preserve.otherTerm : Optional baz -> Optional baz 6. preserve.someTerm : Optional x -> Optional x 7. patch patch diff --git a/unison-src/transcripts/reflog.output.md b/unison-src/transcripts/reflog.output.md index af3d7b7a6b..a1a51ad2dc 100644 --- a/unison-src/transcripts/reflog.output.md +++ b/unison-src/transcripts/reflog.output.md @@ -59,17 +59,17 @@ y = 2 most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #o7ncp4f3j1 .old` to make an old namespace + `fork #b9ghmnv7v4 .old` to make an old namespace accessible again, - `reset-root #o7ncp4f3j1` to reset the root namespace and + `reset-root #b9ghmnv7v4` to reset the root namespace and its history to that of the specified namespace. - 1. #nc81qsj2br : add - 2. #o7ncp4f3j1 : add - 3. #ucb56c3fgj : builtins.merge - 4. #sjg2v58vn2 : (initial reflogged namespace) + 1. #rggkaa5ori : add + 2. #b9ghmnv7v4 : add + 3. #hlflbqjqf9 : builtins.merge + 4. #sg60bvjo91 : (initial reflogged namespace) ``` If we `reset-root` to its previous value, `y` disappears. diff --git a/unison-src/transcripts/squash.output.md b/unison-src/transcripts/squash.output.md index 4971a2e62f..06e35fa32e 100644 --- a/unison-src/transcripts/squash.output.md +++ b/unison-src/transcripts/squash.output.md @@ -13,7 +13,7 @@ Let's look at some examples. We'll start with a namespace with just the builtins - □ #edu0qq546n (start of history) + □ #hfao0gunaf (start of history) .> fork builtin builtin2 @@ -42,21 +42,21 @@ Now suppose we `fork` a copy of builtin, then rename `Nat.+` to `frobnicate`, th Note: The most recent namespace hash is immediately below this message. - ⊙ #u7d9er9k2f + ⊙ #jnnhsl20gn > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #4dcjftvejg + ⊙ #b61495giqo > Moves: Original name New name Nat.+ Nat.frobnicate - □ #edu0qq546n (start of history) + □ #hfao0gunaf (start of history) ``` If we merge that back into `builtin`, we get that same chain of history: @@ -71,21 +71,21 @@ If we merge that back into `builtin`, we get that same chain of history: Note: The most recent namespace hash is immediately below this message. - ⊙ #u7d9er9k2f + ⊙ #jnnhsl20gn > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #4dcjftvejg + ⊙ #b61495giqo > Moves: Original name New name Nat.+ Nat.frobnicate - □ #edu0qq546n (start of history) + □ #hfao0gunaf (start of history) ``` Let's try again, but using a `merge.squash` (or just `squash`) instead. The history will be unchanged: @@ -106,7 +106,7 @@ Let's try again, but using a `merge.squash` (or just `squash`) instead. The hist - □ #edu0qq546n (start of history) + □ #hfao0gunaf (start of history) ``` The churn that happened in `mybuiltin` namespace ended up back in the same spot, so the squash merge of that namespace with our original namespace had no effect. @@ -191,47 +191,47 @@ At this point, Alice and Bob both have some history beyond what's in trunk: - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) .> history alice Note: The most recent namespace hash is immediately below this message. - ⊙ #uollchacf2 + ⊙ #6q28310ueh > Moves: Original name New name neatoFun productionReadyId - ⊙ #7b6lii2lmc + ⊙ #hkd9e8udkk > Moves: Original name New name radNumber superRadNumber - ⊙ #1l7bsgu3om + ⊙ #oudl9oi8gt + Adds / updates: bodaciousNumero neatoFun radNumber - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) .> history bob Note: The most recent namespace hash is immediately below this message. - ⊙ #aicts31vr6 + ⊙ #9s1a1ak3fa + Adds / updates: babyDon'tHurtMe no whatIsLove - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) ``` Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob both made their changes in one single commit. @@ -257,13 +257,13 @@ Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob bot Note: The most recent namespace hash is immediately below this message. - ⊙ #gjfd096e1s + ⊙ #p7g0h14puu + Adds / updates: bodaciousNumero productionReadyId superRadNumber - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) .> merge.squash bob trunk @@ -285,19 +285,19 @@ Alice then squash merges into `trunk`, as does Bob. It's as if Alice and Bob bot Note: The most recent namespace hash is immediately below this message. - ⊙ #k7bfk3l7uv + ⊙ #1i2d300kla + Adds / updates: babyDon'tHurtMe no whatIsLove - ⊙ #gjfd096e1s + ⊙ #p7g0h14puu + Adds / updates: bodaciousNumero productionReadyId superRadNumber - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) ``` Since squash merges don't produce any merge nodes, we can `undo` a couple times to get back to our starting state: @@ -336,7 +336,7 @@ Since squash merges don't produce any merge nodes, we can `undo` a couple times - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) ``` This time, we'll first squash Alice and Bob's changes together before squashing their combined changes into `trunk`. The resulting `trunk` will have just a single entry in it, combining both Alice and Bob's changes: @@ -380,14 +380,14 @@ This time, we'll first squash Alice and Bob's changes together before squashing Note: The most recent namespace hash is immediately below this message. - ⊙ #ka70nifphh + ⊙ #744kq5lo40 + Adds / updates: babyDon'tHurtMe bodaciousNumero no productionReadyId superRadNumber whatIsLove - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) ``` So, there you have it. With squashing, you can control the granularity of your history. @@ -420,7 +420,7 @@ Another thing we can do is `squash` into an empty namespace. This effectively ma - □ #sui24env59 (start of history) + □ #qvt58h64so (start of history) ``` There's nothing really special here, `squash src dest` discards `src` history that comes after the LCA of `src` and `dest`, it's just that in the case of an empty namespace, that LCA is the beginning of time (the empty namespace), so all the history of `src` is discarded. @@ -485,13 +485,13 @@ This checks to see that squashing correctly preserves deletions: Note: The most recent namespace hash is immediately below this message. - ⊙ #0kh907mpqb + ⊙ #da2tmjvh5i - Deletes: Nat.* Nat.+ - □ #edu0qq546n (start of history) + □ #hfao0gunaf (start of history) ``` Notice that `Nat.+` and `Nat.*` are deleted by the squash, and we see them deleted in one atomic step in the history. From cd686b6452cbaa1027ed3a749452d2ba774f1fc0 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Mon, 6 Dec 2021 11:39:38 -1000 Subject: [PATCH 133/297] initialize new codebase with schema version 2 fixes #2720 --- codebase2/codebase-sqlite/sql/create.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/codebase2/codebase-sqlite/sql/create.sql b/codebase2/codebase-sqlite/sql/create.sql index ddf2fc8361..ef9ff5a498 100644 --- a/codebase2/codebase-sqlite/sql/create.sql +++ b/codebase2/codebase-sqlite/sql/create.sql @@ -3,7 +3,7 @@ CREATE TABLE schema_version ( version INTEGER NOT NULL ); -INSERT INTO schema_version (version) VALUES (1); +INSERT INTO schema_version (version) VALUES (2); -- actually stores the 512-byte hashes CREATE TABLE hash ( From 7f0984a17d7259e8e1b27d24a5c6348b8144a805 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Fri, 3 Dec 2021 13:35:55 -0500 Subject: [PATCH 134/297] move handling of update command to top level of HandleInput --- .../src/Unison/Codebase/Editor/HandleInput.hs | 482 ++++++++++-------- 1 file changed, 258 insertions(+), 224 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index bb5c10b691..770f8634e5 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -461,8 +461,6 @@ loop = do ps' = p' . Path.unsplit' stepAt = Unison.Codebase.Editor.HandleInput.stepAt inputDescription stepManyAt = Unison.Codebase.Editor.HandleInput.stepManyAt inputDescription - stepManyAtNoSync = - Unison.Codebase.Editor.HandleInput.stepManyAtNoSync updateRoot = flip Unison.Codebase.Editor.HandleInput.updateRoot inputDescription syncRoot = use LoopState.root >>= updateRoot updateAtM = Unison.Codebase.Editor.HandleInput.updateAtM inputDescription @@ -485,95 +483,6 @@ loop = do ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl names respond $ Typechecked (Text.pack sourceName) ppe sr uf - -- Add default metadata to all added types and terms in a slurp component. - -- - -- No-op if the slurp component is empty. - addDefaultMetadata :: - SlurpComponent v -> - Action m (Either Event Input) v () - addDefaultMetadata adds = - when (not (SC.isEmpty adds)) do - let addedVs = Set.toList $ SC.types adds <> SC.terms adds - addedNs = traverse (Path.hqSplitFromName' . Name.unsafeFromVar) addedVs - case addedNs of - Nothing -> - error $ - "I couldn't parse a name I just added to the codebase! " - <> "-- Added names: " - <> show addedVs - Just addedNames -> do - dm <- resolveDefaultMetadata currentPath' - case toList dm of - [] -> pure () - dm' -> do - let hqs = traverse InputPatterns.parseHashQualifiedName dm' - case hqs of - Left e -> - respond $ - ConfiguredMetadataParseError - (Path.absoluteToPath' currentPath') - (show dm') - e - Right defaultMeta -> - manageLinks True addedNames defaultMeta Metadata.insert - - -- Add/remove links between definitions and metadata. - -- `silent` controls whether this produces any output to the user. - -- `srcs` is (names of the) definitions to pass to `op` - -- `mdValues` is (names of the) metadata to pass to `op` - -- `op` is the operation to add/remove/alter metadata mappings. - -- e.g. `Metadata.insert` is passed to add metadata links. - manageLinks :: - Bool -> - [(Path', HQ'.HQSegment)] -> - [HQ.HashQualified Name] -> - ( forall r. - Ord r => - (r, Metadata.Type, Metadata.Value) -> - Branch.Star r NameSegment -> - Branch.Star r NameSegment - ) -> - Action m (Either Event Input) v () - manageLinks silent srcs mdValues op = do - runExceptT (for mdValues \val -> ExceptT (getMetadataFromName val)) >>= \case - Left output -> respond output - Right metadata -> do - before <- Branch.head <$> use LoopState.root - traverse_ go metadata - if silent - then respond DefaultMetadataNotification - else do - after <- Branch.head <$> use LoopState.root - (ppe, outputDiff) <- diffHelper before after - if OBranchDiff.isEmpty outputDiff - then respond NoOp - else - respondNumbered $ - ShowDiffNamespace - Path.absoluteEmpty - Path.absoluteEmpty - ppe - outputDiff - where - go :: (Metadata.Type, Metadata.Value) -> Action m (Either Event Input) v () - go (mdType, mdValue) = do - newRoot <- use LoopState.root - let r0 = Branch.head newRoot - getTerms p = BranchUtil.getTerm (resolveSplit' p) r0 - getTypes p = BranchUtil.getType (resolveSplit' p) r0 - !srcle = toList . getTerms =<< srcs - !srclt = toList . getTypes =<< srcs - let step b0 = - let tmUpdates terms = foldl' go terms srcle - where - go terms src = op (src, mdType, mdValue) terms - tyUpdates types = foldl' go types srclt - where - go types src = op (src, mdType, mdValue) types - in over Branch.terms tmUpdates . over Branch.types tyUpdates $ b0 - steps = srcs <&> \(path, _hq) -> (Path.unabsolute (resolveToAbsolute path), step) - stepManyAtNoSync steps - delete :: (Path.HQSplit' -> Set Referent) -> -- compute matching terms (Path.HQSplit' -> Set Reference) -> -- compute matching types @@ -1326,126 +1235,7 @@ loop = do <$> slurpResultNames previewResponse sourceName sr uf _ -> respond NoUnisonFile - UpdateI maybePatchPath hqs -> case uf of - Nothing -> respond NoUnisonFile - Just uf -> do - let patchPath = fromMaybe defaultPatchPath maybePatchPath - slurpCheckNames <- slurpResultNames - currentPathNames <- currentPathNames - let sr = - applySelection hqs uf - . toSlurpResult currentPath' uf - $ slurpCheckNames - addsAndUpdates = Slurp.updates sr <> Slurp.adds sr - fileNames = UF.typecheckedToNames uf - -- todo: display some error if typeEdits or termEdits itself contains a loop - typeEdits :: Map Name (Reference, Reference) - typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) - where - f v = case ( toList (Names.typesNamed slurpCheckNames n), - toList (Names.typesNamed fileNames n) - ) of - ([old], [new]) -> (n, (old, new)) - _ -> - error $ - "Expected unique matches for " - ++ Var.nameStr v - ++ " but got: " - ++ show otherwise - where - n = Name.unsafeFromVar v - hashTerms :: Map Reference (Type v Ann) - hashTerms = Map.fromList (toList hashTerms0) - where - hashTerms0 = (\(r, _wk, _tm, typ) -> (r, typ)) <$> UF.hashTerms uf - termEdits :: Map Name (Reference, Reference) - termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) - where - g v = case ( toList (Names.refTermsNamed slurpCheckNames n), - toList (Names.refTermsNamed fileNames n) - ) of - ([old], [new]) -> (n, (old, new)) - _ -> - error $ - "Expected unique matches for " - ++ Var.nameStr v - ++ " but got: " - ++ show otherwise - where - n = Name.unsafeFromVar v - termDeprecations :: [(Name, Referent)] - termDeprecations = - [ (n, r) | (oldTypeRef, _) <- Map.elems typeEdits, (n, r) <- Names.constructorsForType oldTypeRef currentPathNames - ] - - ye'ol'Patch <- getPatchAt patchPath - -- If `uf` updates a -> a', we want to replace all (a0 -> a) in patch - -- with (a0 -> a') in patch'. - -- So for all (a0 -> a) in patch, for all (a -> a') in `uf`, - -- we must know the type of a0, a, a'. - let -- we need: - -- all of the `old` references from the `new` edits, - -- plus all of the `old` references for edits from patch we're replacing - collectOldForTyping :: [(Reference, Reference)] -> Patch -> Set Reference - collectOldForTyping new old = foldl' f mempty (new ++ fromOld) - where - f acc (r, _r') = Set.insert r acc - newLHS = Set.fromList . fmap fst $ new - fromOld :: [(Reference, Reference)] - fromOld = - [ (r, r') | (r, TermEdit.Replace r' _) <- R.toList . Patch._termEdits $ old, Set.member r' newLHS - ] - neededTypes = collectOldForTyping (toList termEdits) ye'ol'Patch - - allTypes :: Map Reference (Type v Ann) <- - fmap Map.fromList . for (toList neededTypes) $ \r -> - (r,) . fromMaybe (Type.builtin External "unknown type") - <$> (eval . LoadTypeOfTerm) r - - let typing r1 r2 = case (Map.lookup r1 allTypes, Map.lookup r2 hashTerms) of - (Just t1, Just t2) - | Typechecker.isEqual t1 t2 -> TermEdit.Same - | Typechecker.isSubtype t1 t2 -> TermEdit.Subtype - | otherwise -> TermEdit.Different - e -> - error $ - "compiler bug: typing map not constructed properly\n" - <> "typing " - <> show r1 - <> " " - <> show r2 - <> " : " - <> show e - - let updatePatch :: Patch -> Patch - updatePatch p = foldl' step2 p' termEdits - where - p' = foldl' step1 p typeEdits - step1 p (r, r') = Patch.updateType r (TypeEdit.Replace r') p - step2 p (r, r') = Patch.updateTerm typing r (TermEdit.Replace r' (typing r r')) p - (p, seg) = Path.toAbsoluteSplit currentPath' patchPath - updatePatches :: Branch0 m -> m (Branch0 m) - updatePatches = Branch.modifyPatches seg updatePatch - - when (Slurp.isNonempty sr) $ do - -- take a look at the `updates` from the SlurpResult - -- and make a patch diff to record a replacement from the old to new references - stepManyAtMNoSync - [ ( Path.unabsolute currentPath', - pure . doSlurpUpdates typeEdits termEdits termDeprecations - ), - ( Path.unabsolute currentPath', - pure . doSlurpAdds addsAndUpdates uf - ), - (Path.unabsolute p, updatePatches) - ] - eval . AddDefsToCodebase . filterBySlurpResult sr $ uf - ppe <- prettyPrintEnvDecl =<< displayNames uf - respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr - -- propagatePatch prints TodoOutput - void $ propagatePatchNoSync (updatePatch ye'ol'Patch) currentPath' - addDefaultMetadata addsAndUpdates - syncRoot + UpdateI maybePatchPath hqs -> handleUpdate input maybePatchPath hqs PreviewUpdateI hqs -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do sr <- @@ -1767,19 +1557,6 @@ loop = do notImplemented = eval $ Notify NotImplemented success = respond Success - resolveDefaultMetadata :: Path.Absolute -> Action' m v [String] - resolveDefaultMetadata path = do - let superpaths = Path.ancestors path - xs <- - for - superpaths - ( \path -> do - mayNames <- - eval . ConfigLookup @[String] $ configKey "DefaultMetadata" path - pure . join $ toList mayNames - ) - pure . join $ toList xs - case e of Right input -> LoopState.lastInput .= Just input _ -> pure () @@ -1948,6 +1725,257 @@ handleShowDefinition outputLoc inputQuery = do Nothing -> Just "scratch.u" Just (path, _) -> Just path +-- | Handle an @update@ command. +handleUpdate :: forall m v. (Monad m, Var v) => Input -> Maybe PatchPath -> [HQ'.HashQualified Name] -> Action' m v () +handleUpdate input maybePatchPath hqs = do + use LoopState.latestTypecheckedFile >>= \case + Nothing -> respond NoUnisonFile + Just uf -> do + currentPath' <- use LoopState.currentPath + let defaultPatchPath :: PatchPath + defaultPatchPath = (Path' $ Left currentPath', defaultPatchNameSegment) + getPatchAt :: Path.Split' -> Action' m v Patch + getPatchAt patchPath' = do + let (p, seg) = Path.toAbsoluteSplit currentPath' patchPath' + b <- getAt p + eval . Eval $ Branch.getPatch seg (Branch.head b) + let patchPath = fromMaybe defaultPatchPath maybePatchPath + slurpCheckNames <- slurpResultNames + currentPathNames <- currentPathNames + let sr :: SlurpResult v + sr = + applySelection hqs uf + . toSlurpResult currentPath' uf + $ slurpCheckNames + addsAndUpdates :: SlurpComponent v + addsAndUpdates = Slurp.updates sr <> Slurp.adds sr + fileNames :: Names + fileNames = UF.typecheckedToNames uf + -- todo: display some error if typeEdits or termEdits itself contains a loop + typeEdits :: Map Name (Reference, Reference) + typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) + where + f v = case ( toList (Names.typesNamed slurpCheckNames n), + toList (Names.typesNamed fileNames n) + ) of + ([old], [new]) -> (n, (old, new)) + _ -> + error $ + "Expected unique matches for " + ++ Var.nameStr v + ++ " but got: " + ++ show otherwise + where + n = Name.unsafeFromVar v + hashTerms :: Map Reference (Type v Ann) + hashTerms = Map.fromList (toList hashTerms0) + where + hashTerms0 = (\(r, _wk, _tm, typ) -> (r, typ)) <$> UF.hashTerms uf + termEdits :: Map Name (Reference, Reference) + termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) + where + g v = case ( toList (Names.refTermsNamed slurpCheckNames n), + toList (Names.refTermsNamed fileNames n) + ) of + ([old], [new]) -> (n, (old, new)) + _ -> + error $ + "Expected unique matches for " + ++ Var.nameStr v + ++ " but got: " + ++ show otherwise + where + n = Name.unsafeFromVar v + termDeprecations :: [(Name, Referent)] + termDeprecations = + [ (n, r) + | (oldTypeRef, _) <- Map.elems typeEdits, + (n, r) <- Names.constructorsForType oldTypeRef currentPathNames + ] + + ye'ol'Patch <- getPatchAt patchPath + -- If `uf` updates a -> a', we want to replace all (a0 -> a) in patch + -- with (a0 -> a') in patch'. + -- So for all (a0 -> a) in patch, for all (a -> a') in `uf`, + -- we must know the type of a0, a, a'. + let -- we need: + -- all of the `old` references from the `new` edits, + -- plus all of the `old` references for edits from patch we're replacing + collectOldForTyping :: [(Reference, Reference)] -> Patch -> Set Reference + collectOldForTyping new old = foldl' f mempty (new ++ fromOld) + where + f acc (r, _r') = Set.insert r acc + newLHS = Set.fromList . fmap fst $ new + fromOld :: [(Reference, Reference)] + fromOld = + [ (r, r') | (r, TermEdit.Replace r' _) <- R.toList . Patch._termEdits $ old, Set.member r' newLHS + ] + neededTypes = collectOldForTyping (toList termEdits) ye'ol'Patch + + allTypes :: Map Reference (Type v Ann) <- + fmap Map.fromList . for (toList neededTypes) $ \r -> + (r,) . fromMaybe (Type.builtin External "unknown type") + <$> (eval . LoadTypeOfTerm) r + + let typing r1 r2 = case (Map.lookup r1 allTypes, Map.lookup r2 hashTerms) of + (Just t1, Just t2) + | Typechecker.isEqual t1 t2 -> TermEdit.Same + | Typechecker.isSubtype t1 t2 -> TermEdit.Subtype + | otherwise -> TermEdit.Different + e -> + error $ + "compiler bug: typing map not constructed properly\n" + <> "typing " + <> show r1 + <> " " + <> show r2 + <> " : " + <> show e + + let updatePatch :: Patch -> Patch + updatePatch p = foldl' step2 p' termEdits + where + p' = foldl' step1 p typeEdits + step1 p (r, r') = Patch.updateType r (TypeEdit.Replace r') p + step2 p (r, r') = Patch.updateTerm typing r (TermEdit.Replace r' (typing r r')) p + (p, seg) = Path.toAbsoluteSplit currentPath' patchPath + updatePatches :: Branch0 m -> m (Branch0 m) + updatePatches = Branch.modifyPatches seg updatePatch + + when (Slurp.isNonempty sr) $ do + -- take a look at the `updates` from the SlurpResult + -- and make a patch diff to record a replacement from the old to new references + stepManyAtMNoSync + [ ( Path.unabsolute currentPath', + pure . doSlurpUpdates typeEdits termEdits termDeprecations + ), + ( Path.unabsolute currentPath', + pure . doSlurpAdds addsAndUpdates uf + ), + (Path.unabsolute p, updatePatches) + ] + eval . AddDefsToCodebase . filterBySlurpResult sr $ uf + ppe <- prettyPrintEnvDecl =<< displayNames uf + respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr + -- propagatePatch prints TodoOutput + void $ propagatePatchNoSync (updatePatch ye'ol'Patch) currentPath' + addDefaultMetadata addsAndUpdates + let patchString :: Text + patchString = + patchPath + & Path.unsplit' + & Path.resolve @_ @_ @Path.Absolute currentPath' + & tShow + syncRoot ("update " <> patchString) + +-- Add default metadata to all added types and terms in a slurp component. +-- +-- No-op if the slurp component is empty. +addDefaultMetadata :: (Monad m, Var v) => SlurpComponent v -> Action m (Either Event Input) v () +addDefaultMetadata adds = + when (not (SC.isEmpty adds)) do + currentPath' <- use LoopState.currentPath + let addedVs = Set.toList $ SC.types adds <> SC.terms adds + addedNs = traverse (Path.hqSplitFromName' . Name.unsafeFromVar) addedVs + case addedNs of + Nothing -> + error $ + "I couldn't parse a name I just added to the codebase! " + <> "-- Added names: " + <> show addedVs + Just addedNames -> do + dm <- resolveDefaultMetadata currentPath' + case toList dm of + [] -> pure () + dm' -> do + let hqs = traverse InputPatterns.parseHashQualifiedName dm' + case hqs of + Left e -> + respond $ + ConfiguredMetadataParseError + (Path.absoluteToPath' currentPath') + (show dm') + e + Right defaultMeta -> + manageLinks True addedNames defaultMeta Metadata.insert + +resolveDefaultMetadata :: Path.Absolute -> Action' m v [String] +resolveDefaultMetadata path = do + let superpaths = Path.ancestors path + xs <- + for + superpaths + ( \path -> do + mayNames <- + eval . ConfigLookup @[String] $ configKey "DefaultMetadata" path + pure . join $ toList mayNames + ) + pure . join $ toList xs + +-- Add/remove links between definitions and metadata. +-- `silent` controls whether this produces any output to the user. +-- `srcs` is (names of the) definitions to pass to `op` +-- `mdValues` is (names of the) metadata to pass to `op` +-- `op` is the operation to add/remove/alter metadata mappings. +-- e.g. `Metadata.insert` is passed to add metadata links. +manageLinks :: + forall m v. + (Monad m, Var v) => + Bool -> + [(Path', HQ'.HQSegment)] -> + [HQ.HashQualified Name] -> + ( forall r. + Ord r => + (r, Metadata.Type, Metadata.Value) -> + Branch.Star r NameSegment -> + Branch.Star r NameSegment + ) -> + Action m (Either Event Input) v () +manageLinks silent srcs mdValues op = do + runExceptT (for mdValues \val -> ExceptT (getMetadataFromName val)) >>= \case + Left output -> respond output + Right metadata -> do + before <- Branch.head <$> use LoopState.root + traverse_ go metadata + if silent + then respond DefaultMetadataNotification + else do + after <- Branch.head <$> use LoopState.root + (ppe, outputDiff) <- diffHelper before after + if OBranchDiff.isEmpty outputDiff + then respond NoOp + else + respondNumbered $ + ShowDiffNamespace + Path.absoluteEmpty + Path.absoluteEmpty + ppe + outputDiff + where + go :: (Metadata.Type, Metadata.Value) -> Action m (Either Event Input) v () + go (mdType, mdValue) = do + newRoot <- use LoopState.root + currentPath' <- use LoopState.currentPath + let resolveToAbsolute :: Path' -> Path.Absolute + resolveToAbsolute = Path.resolve currentPath' + resolveSplit' :: (Path', a) -> (Path, a) + resolveSplit' = Path.fromAbsoluteSplit . Path.toAbsoluteSplit currentPath' + r0 = Branch.head newRoot + getTerms p = BranchUtil.getTerm (resolveSplit' p) r0 + getTypes p = BranchUtil.getType (resolveSplit' p) r0 + !srcle = toList . getTerms =<< srcs + !srclt = toList . getTypes =<< srcs + let step b0 = + let tmUpdates terms = foldl' go terms srcle + where + go terms src = op (src, mdType, mdValue) terms + tyUpdates types = foldl' go types srclt + where + go types src = op (src, mdType, mdValue) types + in over Branch.terms tmUpdates . over Branch.types tyUpdates $ b0 + steps = srcs <&> \(path, _hq) -> (Path.unabsolute (resolveToAbsolute path), step) + stepManyAtNoSync steps + -- Takes a maybe (namespace address triple); returns it as-is if `Just`; -- otherwise, tries to load a value from .unisonConfig, and complains -- if needed. @@ -2541,6 +2569,12 @@ stepManyAtMNoSync' actions = do LoopState.root .= b' pure (b /= b') +-- | Sync the in-memory root branch. +syncRoot :: LoopState.InputDescription -> Action m i v () +syncRoot description = do + root' <- use LoopState.root + Unison.Codebase.Editor.HandleInput.updateRoot root' description + updateRoot :: Branch m -> LoopState.InputDescription -> Action m i v () updateRoot new reason = do old <- use LoopState.lastSavedRoot From 5a7337a1a7ed4cf7c3fdfeaf8b02500d47acd9ad Mon Sep 17 00:00:00 2001 From: Arya Irani <538571+aryairani@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:16:50 -1000 Subject: [PATCH 135/297] Update parser-typechecker/src/Unison/Hashing/V2/Hashable.hs --- parser-typechecker/src/Unison/Hashing/V2/Hashable.hs | 1 + 1 file changed, 1 insertion(+) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs index 63e2920d4c..8bc66e2a0a 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs @@ -14,6 +14,7 @@ class Hashable t where instance BuildHashable.Hashable a => Hashable [a] where hash = BuildHashable.hash + instance BuildHashable.Hashable a => Hashable (Set a) where hash = BuildHashable.hash From 9c344abac071977fc746138a8cc453ee4b190d9c Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 9 Dec 2021 13:28:35 -1000 Subject: [PATCH 136/297] comments --- .../src/Unison/Codebase/Causal.hs | 29 +++++++++++-------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Causal.hs b/parser-typechecker/src/Unison/Codebase/Causal.hs index 397cf9e0e9..d793e80ab8 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal.hs @@ -59,15 +59,6 @@ import Unison.Hashing.V2.Hashable (Hashable) import Prelude hiding (head, read, tail) import qualified Data.List.Extra as List -fromList :: (Applicative m, Hashable e) => e -> [Causal m h e] -> Causal m h e -fromList e (List.nubOrdOn currentHash -> tails) = case tails of - [] -> UnsafeOne h e - t : [] -> UnsafeCons h e (tailPair t) - _ : _ : _ -> UnsafeMerge h e (Map.fromList $ map tailPair tails) - where - tailPair c = (currentHash c, pure c) - h = RawHash $ Hashing.hashCausal e (Set.fromList $ map currentHash tails) - -- A `squashMerge combine c1 c2` gives the same resulting `e` -- as a `threeWayMerge`, but doesn't introduce a merge node for the -- result. Instead, the resulting causal is a simple `Cons` onto `c2` @@ -145,11 +136,22 @@ stepDistinctM => (e -> n e) -> Causal m h e -> n (Causal m h e) stepDistinctM f c = (`consDistinct` c) <$> f (head c) +-- | Causal construction should go through here for uniformity; +-- with an exception for `one`, which avoids an Applicative constraint. +fromList :: (Applicative m, Hashable e) => e -> [Causal m h e] -> Causal m h e +fromList e (List.nubOrdOn currentHash -> tails) = case tails of + [] -> UnsafeOne h e + t : [] -> UnsafeCons h e (tailPair t) + _ : _ : _ -> UnsafeMerge h e (Map.fromList $ map tailPair tails) + where + tailPair c = (currentHash c, pure c) + h = RawHash $ Hashing.hashCausal e (Set.fromList $ map currentHash tails) + -- duplicated logic here instead of delegating to `fromList` to avoid `Applicative m` constraint. one :: Hashable e => e -> Causal m h e -one e = - let h = Hashing.hashCausal e mempty - in UnsafeOne (RawHash h) e +one e = UnsafeOne h e + where + h = RawHash $ Hashing.hashCausal e mempty cons :: (Applicative m, Hashable e) => e -> Causal m h e -> Causal m h e cons e tail = fromList e [tail] @@ -164,12 +166,15 @@ uncons c = case c of Cons _ e (_,tl) -> fmap (e,) . Just <$> tl _ -> pure Nothing +-- it's okay to call "Unsafe"* here with the existing hashes because `nt` can't +-- affect `e`. transform :: Functor m => (forall a . m a -> n a) -> Causal m h e -> Causal n h e transform nt c = case c of One h e -> UnsafeOne h e Cons h e (ht, tl) -> UnsafeCons h e (ht, nt (transform nt <$> tl)) Merge h e tls -> UnsafeMerge h e $ Map.map (\mc -> nt (transform nt <$> mc)) tls +-- "unsafe" because the hashes will be wrong if `f` affects aspects of `e` that impact hashing unsafeMapHashPreserving :: Functor m => (e -> e2) -> Causal m h e -> Causal m h e2 unsafeMapHashPreserving f c = case c of One h e -> UnsafeOne h (f e) From 90725f41319ab302c311fc3886845eb9134401f7 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 10 Dec 2021 12:26:12 -1000 Subject: [PATCH 137/297] monomorphised Patch.hashPatch and Causal.hashCausal --- parser-typechecker/src/Unison/Hashing/V2/Causal.hs | 2 +- parser-typechecker/src/Unison/Hashing/V2/Patch.hs | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index f2507ac208..ee02c6109f 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -13,7 +13,7 @@ import qualified Data.Set as Set import Unison.Hash (Hash) import qualified Unison.Hashing.V2.BuildHashable as H -hashCausal :: H.Accumulate h => Causal -> h +hashCausal :: Causal -> Hash hashCausal = H.accumulate' data Causal = Causal {branchHash :: Hash, parents :: Set Hash} diff --git a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs index 5b1e79afa6..d97062cf48 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs @@ -6,6 +6,7 @@ module Unison.Hashing.V2.Patch (Patch (..), hashPatch) where import Data.Map (Map) import Data.Set (Set) +import Unison.Hash (Hash) import Unison.Hashing.V2.BuildHashable (Hashable) import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) @@ -13,7 +14,7 @@ import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.TermEdit (TermEdit) import Unison.Hashing.V2.TypeEdit (TypeEdit) -hashPatch :: H.Accumulate h => Patch -> h +hashPatch :: Patch -> Hash hashPatch = H.accumulate' data Patch = Patch From 5b2bd91c027b369dcd39c65802d586a2bddb6ddf Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 14 Dec 2021 12:49:28 -0600 Subject: [PATCH 138/297] Fix bad merges from trunk --- .../src/Unison/Codebase/SqliteCodebase.hs | 19 ++++--------------- .../src/Unison/Codebase/Editor/HandleInput.hs | 4 ++-- 2 files changed, 6 insertions(+), 17 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 52d0980e38..6b93adf12f 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -1,4 +1,5 @@ {- ORMOLU_DISABLE -} -- Remove this when the file is ready to be auto-formatted + {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} @@ -13,12 +14,8 @@ module Unison.Codebase.SqliteCodebase where import qualified Control.Concurrent -import Control.Monad (filterM, unless, when, (>=>)) -import Control.Monad.Except (ExceptT, MonadError (throwError), runExceptT, withExceptT) -import qualified Control.Exception -import Control.Monad (filterM, unless, when, (>=>)) -import Control.Monad.Except (ExceptT, MonadError (throwError), runExceptT, withExceptT) -import Control.Monad.Except (ExceptT (ExceptT), runExceptT, withExceptT) +import Control.Monad.Except (ExceptT, runExceptT, withExceptT) +import Control.Monad.Except (ExceptT (ExceptT)) import qualified Control.Monad.Except as Except import qualified Control.Monad.Extra as Monad import Control.Monad.Reader (ReaderT (runReaderT)) @@ -30,8 +27,6 @@ import qualified Data.Char as Char import qualified Data.Either.Combinators as Either import qualified Data.List as List import Data.List.NonEmpty.Extra (NonEmpty ((:|)), maximum1) -import Data.Map (Map) -import Data.Map (Map) import qualified Data.Map as Map import Data.Maybe (fromJust) import qualified Data.Set as Set @@ -39,10 +34,6 @@ import qualified Data.Text as Text import qualified Data.Text.IO as TextIO import Data.Time (NominalDiffTime) import Data.Time.Clock.POSIX (getPOSIXTime) -import Data.Traversable (for) -import Data.Word (Word64) -import Data.Traversable (for) -import Data.Word (Word64) import qualified Database.SQLite.Simple as Sqlite import qualified System.Console.ANSI as ANSI import System.Directory (copyFile) @@ -110,9 +101,7 @@ import Unison.Type (Type) import qualified Unison.Type as Type import qualified Unison.Util.Set as Set import qualified Unison.WatchKind as UF -import UnliftIO (MonadIO, catchIO, finally, try, liftIO, MonadUnliftIO, throwIO) -import UnliftIO (MonadIO, catchIO, finally, liftIO, MonadUnliftIO, throwIO) -import UnliftIO (MonadUnliftIO, catchIO, finally, throwIO) +import UnliftIO (catchIO, finally, try, MonadUnliftIO, throwIO) import qualified UnliftIO import UnliftIO.Directory (createDirectoryIfMissing, doesDirectoryExist, doesFileExist) import UnliftIO.Exception (bracket, catch) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index dc5d386626..b47c099645 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -2007,8 +2007,8 @@ manageLinks silent srcs mdValues op = do else respondNumbered $ ShowDiffNamespace - Path.absoluteEmpty - Path.absoluteEmpty + (Right Path.absoluteEmpty) + (Right Path.absoluteEmpty) ppe outputDiff where From fb022ab855ee2d7300bcc1f63483ef8956258669 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 14 Dec 2021 14:34:46 -0500 Subject: [PATCH 139/297] fix gitsync22.sc.one-term --- unison-cli/tests/Unison/Test/GitSync.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unison-cli/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs index e9b40511b1..8f50719092 100644 --- a/unison-cli/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -314,7 +314,7 @@ test = scope "gitsync22" . tests $ .> alias.term ##Nat.+ + ``` ```unison - > #msp7bv40rv + 1 + > #fs7la111vn + 1 ``` |]) , From 3563a47d058a8a0908b5a3e2e28c39d82bbf602c Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 14 Dec 2021 14:41:12 -0500 Subject: [PATCH 140/297] fix gitsync22.sc.gist --- unison-cli/tests/Unison/Test/GitSync.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unison-cli/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs index 8f50719092..c8655d1ad3 100644 --- a/unison-cli/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -618,7 +618,7 @@ gistTest fmt = userScript repo = [i| ```ucm - .> pull ${repo}:#n611nnppp5 + .> pull ${repo}:#frj8ob9ugr .> find ``` ```unison From 209ced0fc07282b69f288ff30801aa480a963b1b Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 14 Dec 2021 14:47:05 -0500 Subject: [PATCH 141/297] fix gitsync22.sc.history --- unison-cli/tests/Unison/Test/GitSync.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unison-cli/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs index c8655d1ad3..48b3c7c664 100644 --- a/unison-cli/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -283,7 +283,7 @@ test = scope "gitsync22" . tests $ ```ucm .> pull ${repo} .> history - .> reset-root #97u + .> reset-root #0u7no051k7 .> history ``` |]) -- Not sure why this hash is here. From 35e5453be38cf06c585c4559817b678f82e41cfa Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 14 Dec 2021 14:47:42 -0600 Subject: [PATCH 142/297] update hashes in transcripts --- .../child-namespace-history-merge.output.md | 56 +++++++++---------- .../transcripts/empty-namespaces.output.md | 12 ++-- unison-src/transcripts/merges.output.md | 16 +++--- unison-src/transcripts/reflog.output.md | 12 ++-- unison-src/transcripts/squash.output.md | 20 +++---- 5 files changed, 58 insertions(+), 58 deletions(-) diff --git a/unison-src/transcripts/child-namespace-history-merge.output.md b/unison-src/transcripts/child-namespace-history-merge.output.md index ba7e1638f3..6bfed54df3 100644 --- a/unison-src/transcripts/child-namespace-history-merge.output.md +++ b/unison-src/transcripts/child-namespace-history-merge.output.md @@ -30,7 +30,7 @@ The child branch has a single history node representing the addition of `parent. - □ #0pu6u21kb4 (start of history) + □ #c9opo30t5h (start of history) ``` If we add another thing to the child namespace it should add another history node to both the child and parent. @@ -51,26 +51,26 @@ parent.child.thing2 = "parent.child.thing2" Note: The most recent namespace hash is immediately below this message. - ⊙ #9uakh0rhhe + ⊙ #go29r9r4ok + Adds / updates: child.thing2 - □ #gdahjt281d (start of history) + □ #rkui6ehfcq (start of history) .> history parent.child Note: The most recent namespace hash is immediately below this message. - ⊙ #o0ig5fooud + ⊙ #ab2160llo3 + Adds / updates: thing2 - □ #0pu6u21kb4 (start of history) + □ #c9opo30t5h (start of history) ``` ## Forking off some history on a separate branch @@ -101,19 +101,19 @@ The child should have a new history node after adding `thing3` Note: The most recent namespace hash is immediately below this message. - ⊙ #tppoolbkem + ⊙ #9jb3t68gad + Adds / updates: thing3 - ⊙ #o0ig5fooud + ⊙ #ab2160llo3 + Adds / updates: thing2 - □ #0pu6u21kb4 (start of history) + □ #c9opo30t5h (start of history) ``` ## Saving our parent state @@ -143,19 +143,19 @@ For a squash merge, when I squash-merge back into parent, we expect `parent_fork Note: The most recent namespace hash is immediately below this message. - ⊙ #0ddrpnkqfj + ⊙ #5f07ek0393 + Adds / updates: child.thing3 - ⊙ #9uakh0rhhe + ⊙ #go29r9r4ok + Adds / updates: child.thing2 - □ #gdahjt281d (start of history) + □ #rkui6ehfcq (start of history) ``` Notice that with the current behaviour, the history of `parent.child` is completely wiped out, containing nothing from the source OR destination. @@ -166,32 +166,32 @@ Notice that with the current behaviour, the history of `parent.child` is complet Note: The most recent namespace hash is immediately below this message. - ⊙ #o0ig5fooud + ⊙ #ab2160llo3 + Adds / updates: thing2 - □ #0pu6u21kb4 (start of history) + □ #c9opo30t5h (start of history) .> history parent_fork.child Note: The most recent namespace hash is immediately below this message. - ⊙ #tppoolbkem + ⊙ #9jb3t68gad + Adds / updates: thing3 - ⊙ #o0ig5fooud + ⊙ #ab2160llo3 + Adds / updates: thing2 - □ #0pu6u21kb4 (start of history) + □ #c9opo30t5h (start of history) .> history parent_squash_base.child @@ -200,7 +200,7 @@ Notice that with the current behaviour, the history of `parent.child` is complet - □ #mjdnabl5c5 (start of history) + □ #u3n5lkrtde (start of history) ``` ## Standard merge @@ -226,19 +226,19 @@ For a standard merge, if I merge back into parent, we expect `parent_fork.child. Note: The most recent namespace hash is immediately below this message. - ⊙ #ug6q7n5bos + ⊙ #tibitdoffj + Adds / updates: child.thing3 - ⊙ #9uakh0rhhe + ⊙ #go29r9r4ok + Adds / updates: child.thing2 - □ #gdahjt281d (start of history) + □ #rkui6ehfcq (start of history) ``` Child histories should also be *merged*. @@ -249,50 +249,50 @@ Child histories should also be *merged*. Note: The most recent namespace hash is immediately below this message. - ⊙ #o0ig5fooud + ⊙ #ab2160llo3 + Adds / updates: thing2 - □ #0pu6u21kb4 (start of history) + □ #c9opo30t5h (start of history) .> history parent_fork.child Note: The most recent namespace hash is immediately below this message. - ⊙ #tppoolbkem + ⊙ #9jb3t68gad + Adds / updates: thing3 - ⊙ #o0ig5fooud + ⊙ #ab2160llo3 + Adds / updates: thing2 - □ #0pu6u21kb4 (start of history) + □ #c9opo30t5h (start of history) .> history parent_merge_base.child Note: The most recent namespace hash is immediately below this message. - ⊙ #tppoolbkem + ⊙ #9jb3t68gad + Adds / updates: thing3 - ⊙ #o0ig5fooud + ⊙ #ab2160llo3 + Adds / updates: thing2 - □ #0pu6u21kb4 (start of history) + □ #c9opo30t5h (start of history) ``` diff --git a/unison-src/transcripts/empty-namespaces.output.md b/unison-src/transcripts/empty-namespaces.output.md index 68accf1a29..ca61ac40c3 100644 --- a/unison-src/transcripts/empty-namespaces.output.md +++ b/unison-src/transcripts/empty-namespaces.output.md @@ -39,13 +39,13 @@ The history of the namespace should still exist if requested explicitly. Note: The most recent namespace hash is immediately below this message. - ⊙ #qjc20aua9h + ⊙ #3fdue8bl8c - Deletes: x - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) ``` Merging an empty namespace should still copy its history if it has some. @@ -66,13 +66,13 @@ Merging an empty namespace should still copy its history if it has some. Note: The most recent namespace hash is immediately below this message. - ⊙ #qjc20aua9h + ⊙ #3fdue8bl8c - Deletes: x - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) ``` Add and then delete a term to add some history to a deleted namespace. @@ -100,7 +100,7 @@ The history from the `deleted` namespace should have been overwritten by the his - □ #3bm1524lb7 (start of history) + □ #ag66d092nt (start of history) .> history deleted @@ -109,6 +109,6 @@ The history from the `deleted` namespace should have been overwritten by the his - □ #3bm1524lb7 (start of history) + □ #ag66d092nt (start of history) ``` diff --git a/unison-src/transcripts/merges.output.md b/unison-src/transcripts/merges.output.md index 1e064fa341..1e15da50b2 100644 --- a/unison-src/transcripts/merges.output.md +++ b/unison-src/transcripts/merges.output.md @@ -112,26 +112,26 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Note: The most recent namespace hash is immediately below this message. - ⊙ #gj7j96t88n + ⊙ #d3taki96i4 - Deletes: y - □ #aqh5c7qsod (start of history) + □ #n9ujbbert4 (start of history) .> history Note: The most recent namespace hash is immediately below this message. - ⊙ #979i457hd7 + ⊙ #o9s7ds4o7a - Deletes: feature1.y - ⊙ #kb0j8t02ue + ⊙ #18ecfqhvrs + Adds / updates: @@ -142,26 +142,26 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Original name New name(s) feature1.y master.y - ⊙ #bdvh2otcut + ⊙ #hs3mmjjsm4 + Adds / updates: feature1.y - ⊙ #kt38mg5bug + ⊙ #ksv285nvf2 > Moves: Original name New name x master.x - ⊙ #gd173qpcn0 + ⊙ #tp06lqfa83 + Adds / updates: x - □ #hlflbqjqf9 (start of history) + □ #0nh7emi4gr (start of history) ``` To resurrect an old version of a namespace, you can learn its hash via the `history` command, then use `fork #namespacehash .newname`. diff --git a/unison-src/transcripts/reflog.output.md b/unison-src/transcripts/reflog.output.md index a1a51ad2dc..f8e8f687d4 100644 --- a/unison-src/transcripts/reflog.output.md +++ b/unison-src/transcripts/reflog.output.md @@ -59,17 +59,17 @@ y = 2 most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #b9ghmnv7v4 .old` to make an old namespace + `fork #vc4djot72t .old` to make an old namespace accessible again, - `reset-root #b9ghmnv7v4` to reset the root namespace and + `reset-root #vc4djot72t` to reset the root namespace and its history to that of the specified namespace. - 1. #rggkaa5ori : add - 2. #b9ghmnv7v4 : add - 3. #hlflbqjqf9 : builtins.merge - 4. #sg60bvjo91 : (initial reflogged namespace) + 1. #t7bqg5kp79 : add + 2. #vc4djot72t : add + 3. #0nh7emi4gr : builtins.merge + 4. #n6ulbn64d1 : (initial reflogged namespace) ``` If we `reset-root` to its previous value, `y` disappears. diff --git a/unison-src/transcripts/squash.output.md b/unison-src/transcripts/squash.output.md index 06e35fa32e..2107f309c8 100644 --- a/unison-src/transcripts/squash.output.md +++ b/unison-src/transcripts/squash.output.md @@ -13,7 +13,7 @@ Let's look at some examples. We'll start with a namespace with just the builtins - □ #hfao0gunaf (start of history) + □ #to0o8q3su1 (start of history) .> fork builtin builtin2 @@ -42,21 +42,21 @@ Now suppose we `fork` a copy of builtin, then rename `Nat.+` to `frobnicate`, th Note: The most recent namespace hash is immediately below this message. - ⊙ #jnnhsl20gn + ⊙ #bvd9nf4u37 > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #b61495giqo + ⊙ #ee0acgvh97 > Moves: Original name New name Nat.+ Nat.frobnicate - □ #hfao0gunaf (start of history) + □ #to0o8q3su1 (start of history) ``` If we merge that back into `builtin`, we get that same chain of history: @@ -71,21 +71,21 @@ If we merge that back into `builtin`, we get that same chain of history: Note: The most recent namespace hash is immediately below this message. - ⊙ #jnnhsl20gn + ⊙ #bvd9nf4u37 > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ #b61495giqo + ⊙ #ee0acgvh97 > Moves: Original name New name Nat.+ Nat.frobnicate - □ #hfao0gunaf (start of history) + □ #to0o8q3su1 (start of history) ``` Let's try again, but using a `merge.squash` (or just `squash`) instead. The history will be unchanged: @@ -106,7 +106,7 @@ Let's try again, but using a `merge.squash` (or just `squash`) instead. The hist - □ #hfao0gunaf (start of history) + □ #to0o8q3su1 (start of history) ``` The churn that happened in `mybuiltin` namespace ended up back in the same spot, so the squash merge of that namespace with our original namespace had no effect. @@ -485,13 +485,13 @@ This checks to see that squashing correctly preserves deletions: Note: The most recent namespace hash is immediately below this message. - ⊙ #da2tmjvh5i + ⊙ #26ang33omm - Deletes: Nat.* Nat.+ - □ #hfao0gunaf (start of history) + □ #to0o8q3su1 (start of history) ``` Notice that `Nat.+` and `Nat.*` are deleted by the squash, and we see them deleted in one atomic step in the history. From d20ab287992ad9d609ef2372a89c5ace28be2b7a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 14 Dec 2021 14:57:11 -0600 Subject: [PATCH 143/297] Fix hashes in resolve.md --- unison-src/transcripts/resolve.md | 8 +++---- unison-src/transcripts/resolve.output.md | 30 ++++++++++++------------ 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/unison-src/transcripts/resolve.md b/unison-src/transcripts/resolve.md index 29f5444794..ce09e5232b 100644 --- a/unison-src/transcripts/resolve.md +++ b/unison-src/transcripts/resolve.md @@ -86,15 +86,15 @@ The namespace `c` now has an edit conflict, since the term `foo` was edited in t .example.resolve.c> todo ``` -We see that `#44954ulpdf` (the original hash of `a.foo`) got replaced with _both_ the `#8e68dvpr0a` and `#jdqoenu794`. +We see that `#5cj58badlt` (the original hash of `a.foo`) got replaced with _both_ the `#39feiiunjf` and `#iqa41ufqol`. We can resolve this conflict by picking one of the terms as the "winner": ```ucm -.example.resolve.c> replace #44954ulpdf #8e68dvpr0a +.example.resolve.c> replace #5cj58badlt #39feiiunjf ``` -This changes the merged `c.patch` so that only the edit from #44954ulpdf to #8e68dvpr0a remains: +This changes the merged `c.patch` so that only the edit from #5cj58badlt to #39feiiunjf remains: ```ucm .example.resolve.c> view.patch @@ -109,7 +109,7 @@ We still have a remaining _name conflict_ since it just so happened that both of We can resolve the name conflict by deleting one of the names. ```ucm -.example.resolve.c> delete.term foo#jdqoenu794 +.example.resolve.c> delete.term foo#iqa41ufqol .example.resolve.c> todo ``` diff --git a/unison-src/transcripts/resolve.output.md b/unison-src/transcripts/resolve.output.md index 3ee8ea89ba..172c9d367e 100644 --- a/unison-src/transcripts/resolve.output.md +++ b/unison-src/transcripts/resolve.output.md @@ -163,10 +163,10 @@ Let's now merge these namespaces into `c`: New name conflicts: - 1. foo#jdqoenu794 : Nat + 1. foo#iqa41ufqol : Nat ↓ - 2. ┌ foo#8e68dvpr0a : Nat - 3. └ foo#jdqoenu794 : Nat + 2. ┌ foo#39feiiunjf : Nat + 3. └ foo#iqa41ufqol : Nat Updates: @@ -194,26 +194,26 @@ The namespace `c` now has an edit conflict, since the term `foo` was edited in t have been merged into this one. You'll have to tell me what to use as the new definition: - The term #44954ulpdf was replaced with foo#8e68dvpr0a and - foo#jdqoenu794 + The term #5cj58badlt was replaced with foo#39feiiunjf and + foo#iqa41ufqol ``` -We see that `#44954ulpdf` (the original hash of `a.foo`) got replaced with _both_ the `#8e68dvpr0a` and `#jdqoenu794`. +We see that `#5cj58badlt` (the original hash of `a.foo`) got replaced with _both_ the `#39feiiunjf` and `#iqa41ufqol`. We can resolve this conflict by picking one of the terms as the "winner": ```ucm -.example.resolve.c> replace #44954ulpdf #8e68dvpr0a +.example.resolve.c> replace #5cj58badlt #39feiiunjf Done. ``` -This changes the merged `c.patch` so that only the edit from #44954ulpdf to #8e68dvpr0a remains: +This changes the merged `c.patch` so that only the edit from #5cj58badlt to #39feiiunjf remains: ```ucm .example.resolve.c> view.patch - Edited Terms: #44954ulpdf -> foo#8e68dvpr0a + Edited Terms: #5cj58badlt -> foo#39feiiunjf Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as @@ -238,20 +238,20 @@ We still have a remaining _name conflict_ since it just so happened that both of We can resolve the name conflict by deleting one of the names. ```ucm -.example.resolve.c> delete.term foo#jdqoenu794 +.example.resolve.c> delete.term foo#iqa41ufqol Resolved name conflicts: - 1. ┌ example.resolve.c.foo#8e68dvpr0a : Nat - 2. └ example.resolve.c.foo#jdqoenu794 : Nat + 1. ┌ example.resolve.c.foo#39feiiunjf : Nat + 2. └ example.resolve.c.foo#iqa41ufqol : Nat ↓ - 3. example.resolve.c.foo#8e68dvpr0a : Nat + 3. example.resolve.c.foo#39feiiunjf : Nat Name changes: Original Changes - 4. example.resolve.a.foo ┐ 5. example.resolve.c.foo#jdqoenu794 (removed) - 6. example.resolve.c.foo#jdqoenu794 ┘ + 4. example.resolve.a.foo ┐ 5. example.resolve.c.foo#iqa41ufqol (removed) + 6. example.resolve.c.foo#iqa41ufqol ┘ Tip: You can use `undo` or `reflog` to undo this change. From 3985108b15ca84437356d525911851adc6f5d4fb Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 14 Dec 2021 15:02:51 -0600 Subject: [PATCH 144/297] Fix bad hashes in diff-namespace.md --- unison-src/transcripts/diff-namespace.md | 2 +- unison-src/transcripts/diff-namespace.output.md | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/unison-src/transcripts/diff-namespace.md b/unison-src/transcripts/diff-namespace.md index 3f2f4a7fce..6c4e5c5d63 100644 --- a/unison-src/transcripts/diff-namespace.md +++ b/unison-src/transcripts/diff-namespace.md @@ -169,7 +169,7 @@ y = 2 ```ucm .hashdiff> add .hashdiff> history -.hashdiff> diff.namespace #hkrqt3tm05 #is7tu6katt +.hashdiff> diff.namespace #r0471p61ch #q9cdigs0bo ``` ## diff --git a/unison-src/transcripts/diff-namespace.output.md b/unison-src/transcripts/diff-namespace.output.md index 28cb25b776..186a2d90c0 100644 --- a/unison-src/transcripts/diff-namespace.output.md +++ b/unison-src/transcripts/diff-namespace.output.md @@ -733,17 +733,17 @@ y = 2 Note: The most recent namespace hash is immediately below this message. - ⊙ #is7tu6katt + ⊙ #r0471p61ch + Adds / updates: y - □ #hkrqt3tm05 (start of history) + □ #q9cdigs0bo (start of history) -.hashdiff> diff.namespace #hkrqt3tm05 #is7tu6katt +.hashdiff> diff.namespace #r0471p61ch #q9cdigs0bo - Added definitions: + Removed definitions: 1. y : Nat From 1ed78f4fae3bdf81d7b012176b6d0d6f1a3ee536 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 14 Dec 2021 15:10:54 -0600 Subject: [PATCH 145/297] Fix hashes in deleteReplacements.md --- unison-src/transcripts/deleteReplacements.md | 6 +++--- unison-src/transcripts/deleteReplacements.output.md | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/unison-src/transcripts/deleteReplacements.md b/unison-src/transcripts/deleteReplacements.md index dd4520b00c..696d97de16 100644 --- a/unison-src/transcripts/deleteReplacements.md +++ b/unison-src/transcripts/deleteReplacements.md @@ -18,7 +18,7 @@ x = 2 ``` ```ucm -.> delete.term-replacement #jk19 +.> delete.term-replacement #rrsqv1ogaq .> view.patch ``` @@ -40,7 +40,7 @@ unique[b] type Foo = Foo | Bar ``` ```ucm -.> delete.type-replacement #hsk1l8232e +.> delete.type-replacement #tsgi4cbf2h .> view.patch ``` @@ -60,6 +60,6 @@ unique[bb] type bar = Foo | Bar ```ucm .> update .> view.patch -.> delete.type-replacement #b1ct5ub6du +.> delete.type-replacement #gcg6p503b0 .> view.patch ``` diff --git a/unison-src/transcripts/deleteReplacements.output.md b/unison-src/transcripts/deleteReplacements.output.md index 89f7d71d40..90925acb55 100644 --- a/unison-src/transcripts/deleteReplacements.output.md +++ b/unison-src/transcripts/deleteReplacements.output.md @@ -48,7 +48,7 @@ x = 2 .> view.patch - Edited Terms: x#jk19sm5bf8 -> x + Edited Terms: x#rrsqv1ogaq -> x Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as @@ -56,7 +56,7 @@ x = 2 ``` ```ucm -.> delete.term-replacement #jk19 +.> delete.term-replacement #rrsqv1ogaq Done. @@ -113,7 +113,7 @@ unique[b] type Foo = Foo | Bar .> view.patch - Edited Types: Foo#hsk1l8232e -> Foo + Edited Types: Foo#tsgi4cbf2h -> Foo Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as @@ -121,7 +121,7 @@ unique[b] type Foo = Foo | Bar ``` ```ucm -.> delete.type-replacement #hsk1l8232e +.> delete.type-replacement #tsgi4cbf2h Done. @@ -181,13 +181,13 @@ unique[bb] type bar = Foo | Bar .> view.patch - Edited Types: bar#b1ct5ub6du -> bar + Edited Types: bar#gcg6p503b0 -> bar Tip: To remove entries from a patch, use delete.term-replacement or delete.type-replacement, as appropriate. -.> delete.type-replacement #b1ct5ub6du +.> delete.type-replacement #gcg6p503b0 Done. From c70492653a60453e94c7b7ad4f4753d479f96426 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 14 Dec 2021 16:08:54 -0600 Subject: [PATCH 146/297] Update Author Datatype Hashes --- unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs b/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs index 6d050aba63..afb1587c50 100644 --- a/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs +++ b/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs @@ -69,6 +69,6 @@ createAuthorInfo a t = createAuthorInfo' . unpack <$> liftIO (getRandomBytes 32) (authorType, authorTypeRef) = (Type.ref a authorTypeRef, unsafeParse authorHash) (guidType, guidTypeRef) = (Type.ref a guidTypeRef, unsafeParse guidHash) unsafeParse = either error id . Reference.fromText - guidHash = "#rc29vdqe019p56kupcgkg07fkib86r3oooatbmsgfbdsgpmjhsh00l307iuts3r973q5etb61vbjkes42b6adb3mkorusvmudiuorno" - copyrightHolderHash = "#jeaknsbobmr6pdj9bga290pj1qckqsemiu1qkg7l9s6p88ot111218jkoe6l19hjpdqctpd0c87capaf3j5qlcim1uh1pq23pu0ebsg" - authorHash = "#i8f8ru3p8ijof9r26lskplmjj45rle8jdh31n62cef2r0tbj6fgjkcu2ljh4m44lo16if0fcdp7eb5fqo1iard47l4cllo7g244kmo0" + guidHash = "#fg4sicimattgf7p2u0i752v6rvf4b6kmrtf0ovja81uptfqmcpe9vdo4vhb2ts2o8somo8t1cvv52lhlv5isr0ovf7h93h3sslnkrlo" + copyrightHolderHash = "#fodu3l22att8et4eiqhtog2p45bh2njh8qfm7mbe1rt4eg2scbgebedagqlo3g1l7ofm3na45julc6174v9cuu62qvo4toaf776kfj8" + authorHash = "#9akbttgrmjs8tmmfe3m1e9vg2nc0ufok4vjqcs0lm16ut8j3bu0osejahgb87i05k92md983o8rsfb44715indh4oruqeq105hec8l8" From f3ba7becfdd76a794987f12923dc66f14f692881 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 14 Dec 2021 16:09:26 -0600 Subject: [PATCH 147/297] Add more progress information to migration --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 6304d59d56..f83ae70737 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -117,13 +117,19 @@ migrateSchema12 conn codebase = do `runReaderT` Env {db = conn, codebase} `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty let (_, newRootCausalHashId) = causalMapping migrationState ^?! ix rootCausalHashId + liftIO $ putStrLn $ "Updating Namespace Root..." runDB conn . liftQ $ Q.setNamespaceRoot newRootCausalHashId + liftIO $ putStrLn $ "Rewriting old object IDs..." ifor_ (objLookup migrationState) \oldObjId (newObjId, _, _, _) -> do (runDB conn . liftQ) do Q.recordObjectRehash oldObjId newObjId + liftIO $ putStrLn $ "Garbage collecting orphaned objects..." runDB conn (liftQ Q.garbageCollectObjectsWithoutHashes) + liftIO $ putStrLn $ "Garbage collecting orphaned watches..." runDB conn (liftQ Q.garbageCollectWatchesWithoutObjects) + liftIO $ putStrLn $ "Updating Schema Version..." runDB conn . liftQ $ Q.setSchemaVersion 2 + liftIO $ putStrLn $ "Cleaning up..." runDB conn (liftQ Q.vacuum) where withinSavepoint :: (String -> m c -> m c) From 55f395e253e5165df1256a468f6ce6eb36af206a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 14 Dec 2021 16:29:53 -0600 Subject: [PATCH 148/297] Update old doc hashes --- parser-typechecker/src/Unison/TermPrinter.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/parser-typechecker/src/Unison/TermPrinter.hs b/parser-typechecker/src/Unison/TermPrinter.hs index aef8bed7ac..1376be089a 100644 --- a/parser-typechecker/src/Unison/TermPrinter.hs +++ b/parser-typechecker/src/Unison/TermPrinter.hs @@ -1545,8 +1545,8 @@ toDocEval _ _ = Nothing -- -- See https://github.com/unisonweb/unison/issues/2238 _oldDocEval, _oldDocEvalInline :: Reference -_oldDocEval = Reference.unsafeFromText "#0ua7gqa7kqnj80ulhmtcqsfgalmh4g9kg198dt2uen0s0jeebbo4ljnj4133cn1kbm38i2q3joivodtfei3jfln5scof0r0381k8dm0" -_oldDocEvalInline = Reference.unsafeFromText "#maleg6fmu3j0k0vgm99lgrsnhio3ba750hcainuv5jdi9scdsg43hpicmf6lovsa0mnaija7bjebnr5nas3qsj4r087hur1jh0rsfso" +_oldDocEval = Reference.unsafeFromText "m2bmkdos2669tt46sh2gf6cmb4td5le8lcqnmsl9nfaqiv7s816q8bdtjdbt98tkk11ejlesepe7p7u8p0asu9758gdseffh0t78m2o" +_oldDocEvalInline = Reference.unsafeFromText "#7pjlvdu42gmfvfntja265dmi08afk08l54kpsuu55l9hq4l32fco2jlrm8mf2jbn61esfsi972b6e66d9on4i5bkmfchjdare1v5npg" toDocEvalInline :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) toDocEvalInline ppe (App' (Ref' r) (Delay' tm)) From 9117058f2cf551349d961f79c49cf9739760251b Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 17 Dec 2021 11:49:00 -0600 Subject: [PATCH 149/297] Temporarily remove failing test from `io.md`. getBuffering.impl is currently broken on trunk (https://github.com/unisonweb/unison/issues/2767) Add this back to the ucm block once that's fixed. --- unison-src/transcripts/io.md | 4 +++- unison-src/transcripts/io.output.md | 16 +++------------- 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/unison-src/transcripts/io.md b/unison-src/transcripts/io.md index 65170c1282..965c5fe229 100644 --- a/unison-src/transcripts/io.md +++ b/unison-src/transcripts/io.md @@ -107,8 +107,10 @@ testOpenClose _ = ```ucm .> add -.> io.test testOpenClose ``` +-- getBuffering.impl is currently broken on trunk (https://github.com/unisonweb/unison/issues/2767) +-- Add this back to the ucm block once that's fixed. +.> io.test testOpenClose ### Seeking in open files diff --git a/unison-src/transcripts/io.output.md b/unison-src/transcripts/io.output.md index ed3664a043..7c10234a2a 100644 --- a/unison-src/transcripts/io.output.md +++ b/unison-src/transcripts/io.output.md @@ -141,21 +141,11 @@ testOpenClose _ = testOpenClose : '{IO} [Result] +``` +-- getBuffering.impl is currently broken on trunk (https://github.com/unisonweb/unison/issues/2767) +-- Add this back to the ucm block once that's fixed. .> io.test testOpenClose - New test results: - - ◉ testOpenClose file should be open - ◉ testOpenClose file should be closed - ◉ testOpenClose bytes have been written - ◉ testOpenClose bytes have been written - ◉ testOpenClose file should be closed - - ✅ 5 test(s) passing - - Tip: Use view testOpenClose to view the source of a test. - -``` ### Seeking in open files Tests: openFile From 1925ccf0dd3bea8ff7a860245576ff27c234e3d4 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 17 Dec 2021 14:00:15 -0600 Subject: [PATCH 150/297] Fix bad reference in TermPrinter --- parser-typechecker/src/Unison/TermPrinter.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/TermPrinter.hs b/parser-typechecker/src/Unison/TermPrinter.hs index 1376be089a..f2b39ed57b 100644 --- a/parser-typechecker/src/Unison/TermPrinter.hs +++ b/parser-typechecker/src/Unison/TermPrinter.hs @@ -1545,7 +1545,7 @@ toDocEval _ _ = Nothing -- -- See https://github.com/unisonweb/unison/issues/2238 _oldDocEval, _oldDocEvalInline :: Reference -_oldDocEval = Reference.unsafeFromText "m2bmkdos2669tt46sh2gf6cmb4td5le8lcqnmsl9nfaqiv7s816q8bdtjdbt98tkk11ejlesepe7p7u8p0asu9758gdseffh0t78m2o" +_oldDocEval = Reference.unsafeFromText "#m2bmkdos2669tt46sh2gf6cmb4td5le8lcqnmsl9nfaqiv7s816q8bdtjdbt98tkk11ejlesepe7p7u8p0asu9758gdseffh0t78m2o" _oldDocEvalInline = Reference.unsafeFromText "#7pjlvdu42gmfvfntja265dmi08afk08l54kpsuu55l9hq4l32fco2jlrm8mf2jbn61esfsi972b6e66d9on4i5bkmfchjdare1v5npg" toDocEvalInline :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) From 7f1c42bb698da289a0f8bd608abc137fbaed1e42 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 17 Dec 2021 14:41:43 -0600 Subject: [PATCH 151/297] Tumble hashing version safely and move Hashable -> Tokenizable --- .../src/Unison/Hashing/V2/Branch.hs | 12 +- .../src/Unison/Hashing/V2/BuildHashable.hs | 128 ++++++++++-------- .../src/Unison/Hashing/V2/Causal.hs | 5 +- .../src/Unison/Hashing/V2/DataDeclaration.hs | 2 +- .../src/Unison/Hashing/V2/Hashable.hs | 21 +-- .../src/Unison/Hashing/V2/Kind.hs | 9 +- .../src/Unison/Hashing/V2/Patch.hs | 6 +- .../src/Unison/Hashing/V2/Pattern.hs | 4 +- .../src/Unison/Hashing/V2/Reference.hs | 4 +- .../src/Unison/Hashing/V2/Referent.hs | 4 +- .../src/Unison/Hashing/V2/TermEdit.hs | 4 +- .../src/Unison/Hashing/V2/TypeEdit.hs | 4 +- 12 files changed, 112 insertions(+), 91 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs index cb2b3be3c8..d3bc426c38 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs @@ -5,10 +5,10 @@ {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE ViewPatterns #-} -module Unison.Hashing.V2.Branch (NameSegment(..), Raw (..), MdValues (..), hashBranch) where +module Unison.Hashing.V2.Branch (NameSegment (..), Raw (..), MdValues (..), hashBranch) where import Unison.Hash (Hash) -import Unison.Hashing.V2.BuildHashable (Hashable) +import Unison.Hashing.V2.BuildHashable (Tokenizable) import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) @@ -18,12 +18,12 @@ type MetadataValue = Reference newtype MdValues = MdValues (Set MetadataValue) deriving (Eq, Ord, Show) - deriving (Hashable) via Set MetadataValue + deriving (Tokenizable) via Set MetadataValue newtype NameSegment = NameSegment Text deriving (Eq, Ord, Show) hashBranch :: Raw -> Hash -hashBranch = H.accumulate' +hashBranch = H.hashTokenizable data Raw = Raw { terms :: Map NameSegment (Map Referent MdValues), @@ -32,7 +32,7 @@ data Raw = Raw children :: Map NameSegment Hash -- the Causal Hash } -instance Hashable Raw where +instance Tokenizable Raw where tokens b = [ H.accumulateToken (terms b), H.accumulateToken (types b), @@ -40,5 +40,5 @@ instance Hashable Raw where H.accumulateToken (patches b) ] -instance H.Hashable NameSegment where +instance H.Tokenizable NameSegment where tokens (NameSegment t) = [H.Text t] diff --git a/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs b/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs index 737626e888..eb1ab2c09f 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs @@ -1,4 +1,12 @@ -module Unison.Hashing.V2.BuildHashable where +module Unison.Hashing.V2.BuildHashable + ( Tokenizable (..), + Accumulate (..), + Hashable1 (..), + Token (..), + hashTokenizable, + accumulateToken, + ) +where import qualified Crypto.Hash as CH import qualified Data.ByteArray as BA @@ -17,6 +25,18 @@ import qualified Unison.Util.Relation3 as Relation3 import Unison.Util.Relation4 (Relation4) import qualified Unison.Util.Relation4 as Relation4 +-- | The version of the current hashing function. +-- This should be incremented every time the hashing function is changed. +-- +-- The reasoning is that, if a change to the hashing function changes the hashes for _some_ +-- values, it should change it for _all_ values so that we don't have collisions between +-- different hashing function versions. If we don't do this, it's possible for the hashes of +-- simple types (like an Int for example) to keep the same hashes, which would lead to +-- collisions in the `hash` table, since each hash has a different hash version but the same +-- base32 representation. +hashingVersion :: Token h +hashingVersion = Tag 2 + data Token h = Tag !Word8 | Bytes !ByteString @@ -31,100 +51,69 @@ class Accumulate h where fromBytes :: ByteString -> h toBytes :: h -> ByteString -accumulateToken :: (Accumulate h, Hashable t) => t -> Token h -accumulateToken = Hashed . accumulate' +accumulateToken :: (Accumulate h, Tokenizable t) => t -> Token h +accumulateToken = Hashed . hashTokenizable -hash, accumulate' :: (Accumulate h, Hashable t) => t -> h -hash = accumulate' -accumulate' = accumulate . (hashVersion :) . tokens - where - hashVersion = Tag 2 +-- | Tokenize then accumulate a type into a Hash. +hashTokenizable :: (Tokenizable t, Accumulate h) => t -> h +hashTokenizable = accumulate . tokens -class Hashable t where +class Tokenizable t where tokens :: Accumulate h => t -> [Token h] -instance Hashable a => Hashable [a] where +instance Tokenizable a => Tokenizable [a] where tokens = map accumulateToken -instance (Hashable a, Hashable b) => Hashable (a, b) where +instance (Tokenizable a, Tokenizable b) => Tokenizable (a, b) where tokens (a, b) = [accumulateToken a, accumulateToken b] -instance (Hashable a) => Hashable (Set.Set a) where +instance (Tokenizable a) => Tokenizable (Set.Set a) where tokens = tokens . Set.toList -instance (Hashable k, Hashable v) => Hashable (Map.Map k v) where +instance (Tokenizable k, Tokenizable v) => Tokenizable (Map.Map k v) where tokens = tokens . Map.toList -instance (Hashable a, Hashable b) => Hashable (Relation a b) where +instance (Tokenizable a, Tokenizable b) => Tokenizable (Relation a b) where tokens = tokens . Relation.toList -instance (Hashable d1, Hashable d2, Hashable d3) => Hashable (Relation3 d1 d2 d3) where +instance (Tokenizable d1, Tokenizable d2, Tokenizable d3) => Tokenizable (Relation3 d1 d2 d3) where tokens s = [accumulateToken $ Relation3.toNestedList s] -instance (Hashable d1, Hashable d2, Hashable d3, Hashable d4) => Hashable (Relation4 d1 d2 d3 d4) where +instance (Tokenizable d1, Tokenizable d2, Tokenizable d3, Tokenizable d4) => Tokenizable (Relation4 d1 d2 d3 d4) where tokens s = [accumulateToken $ Relation4.toNestedList s] -class Hashable1 f where - -- | Produce a hash for an `f a`, given a hashing function for `a`. - -- If there is a notion of order-independence in some aspect of a subterm - -- of `f`, then the first argument (`hashUnordered :: [a] -> ([h], a -> h)`) - -- should be used to impose an order, and then apply that order in further hashing. - -- Otherwise the second argument (`hash :: a -> h`) should be used. - -- - -- Example 1: A simple functor with no unordered components. Hashable1 instance - -- just uses `hash`: - -- - -- data T a = One a | Two a a deriving Functor - -- - -- instance Hashable1 T where - -- hash1 _ hash t = case t of - -- One a -> accumulate [Tag 0, Hashed (hash a)] - -- Two a a2 -> accumulate [Tag 1, Hashed (hash a), Hashed (hash a2)] - -- - -- Example 2: A functor with unordered components. For hashing, we need to - -- pick a canonical ordering of the unordered components, so we - -- use `hashUnordered`: - -- - -- data U a = U { unordered :: [a], uno :: a, dos :: a } deriving Functor - -- - -- instance Hashable1 U where - -- hash1 hashUnordered _ (U unordered uno dos) = - -- let (hs, hash) = hashUnordered unordered - -- in accumulate $ map Hashed hs ++ [Hashed (hash uno), Hashed (hash dos)] - hash1 :: (Ord h, Accumulate h) => ([a] -> ([h], a -> h)) -> (a -> h) -> f a -> h - -instance Hashable () where +instance Tokenizable () where tokens _ = [] -instance Hashable Double where +instance Tokenizable Double where tokens d = [Double d] -instance Hashable Text where +instance Tokenizable Text where tokens s = [Text s] -instance Hashable Char where +instance Tokenizable Char where tokens c = [Nat $ fromIntegral $ fromEnum c] -instance Hashable ByteString where +instance Tokenizable ByteString where tokens bs = [Bytes bs] -instance Hashable Word64 where +instance Tokenizable Word64 where tokens w = [Nat w] -instance Hashable Int64 where +instance Tokenizable Int64 where tokens w = [Int w] -instance Hashable Bool where +instance Tokenizable Bool where tokens b = [Tag . fromIntegral $ fromEnum b] -instance Hashable Hash where +instance Tokenizable Hash where tokens h = [Bytes (Hash.toByteString h)] instance Accumulate Hash where accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit where go :: CH.Context CH.SHA3_512 -> [Token Hash] -> CH.Context CH.SHA3_512 - go acc tokens = CH.hashUpdates acc (tokens >>= toBS) + go acc tokens = CH.hashUpdates acc (hashingVersion : tokens >>= toBS) toBS (Tag b) = [B.singleton b] toBS (Bytes bs) = [encodeLength $ B.length bs, bs] toBS (Int i) = BL.toChunks . toLazyByteString . int64BE $ i @@ -138,3 +127,32 @@ instance Accumulate Hash where encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral fromBytes = Hash.fromByteString toBytes = Hash.toByteString + +class Hashable1 f where + -- | Produce a hash for an `f a`, given a hashing function for `a`. + -- If there is a notion of order-independence in some aspect of a subterm + -- of `f`, then the first argument (`hashUnordered :: [a] -> ([h], a -> h)`) + -- should be used to impose an order, and then apply that order in further hashing. + -- Otherwise the second argument (`hash :: a -> h`) should be used. + -- + -- Example 1: A simple functor with no unordered components. Hashable1 instance + -- just uses `hash`: + -- + -- data T a = One a | Two a a deriving Functor + -- + -- instance Hashable1 T where + -- hash1 _ hash t = case t of + -- One a -> accumulate [Tag 0, Hashed (hash a)] + -- Two a a2 -> accumulate [Tag 1, Hashed (hash a), Hashed (hash a2)] + -- + -- Example 2: A functor with unordered components. For hashing, we need to + -- pick a canonical ordering of the unordered components, so we + -- use `hashUnordered`: + -- + -- data U a = U { unordered :: [a], uno :: a, dos :: a } deriving Functor + -- + -- instance Hashable1 U where + -- hash1 hashUnordered _ (U unordered uno dos) = + -- let (hs, hash) = hashUnordered unordered + -- in accumulate $ map Hashed hs ++ [Hashed (hash uno), Hashed (hash dos)] + hash1 :: (Ord h, Accumulate h) => ([a] -> ([h], a -> h)) -> (a -> h) -> f a -> h diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index ee02c6109f..e3e4253471 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -11,12 +11,13 @@ where import Data.Set (Set) import qualified Data.Set as Set import Unison.Hash (Hash) +import qualified Unison.Hashing.V2.BuildHashable as BuildHashable import qualified Unison.Hashing.V2.BuildHashable as H hashCausal :: Causal -> Hash -hashCausal = H.accumulate' +hashCausal = BuildHashable.hashTokenizable data Causal = Causal {branchHash :: Hash, parents :: Set Hash} -instance H.Hashable Causal where +instance H.Tokenizable Causal where tokens c = H.tokens $ branchHash c : Set.toList (parents c) diff --git a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs index dffe348b68..aeb96fc544 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs @@ -138,6 +138,6 @@ instance Hashable1 F where Modified m t -> [tag 3, Hashable.accumulateToken m, hashed $ hash t] -instance Hashable.Hashable Modifier where +instance Hashable.Tokenizable Modifier where tokens Structural = [Hashable.Tag 0] tokens (Unique txt) = [Hashable.Tag 1, Hashable.Text txt] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs index 8bc66e2a0a..4ad8f24b1b 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs @@ -1,22 +1,25 @@ module Unison.Hashing.V2.Hashable - ( Hashable, - hash, + ( Hashable (..), ) where import Data.Int (Int64) -import Unison.Hash (Hash) -import qualified Unison.Hashing.V2.BuildHashable as BuildHashable import Data.Set (Set) +import Unison.Hash (Hash (..)) +import qualified Unison.Hashing.V2.BuildHashable as BuildHashable +-- | This typeclass provides a mechanism for obtaining a content-based hash for Unison types & +-- terms. +-- Be wary that Unison requires that these hashes be deterministic, any change to a Hashable +-- instance requires a full codebase migration and should not be taken lightly. class Hashable t where hash :: t -> Hash -instance BuildHashable.Hashable a => Hashable [a] where - hash = BuildHashable.hash +instance BuildHashable.Tokenizable a => Hashable [a] where + hash = BuildHashable.hashTokenizable -instance BuildHashable.Hashable a => Hashable (Set a) where - hash = BuildHashable.hash +instance BuildHashable.Tokenizable a => Hashable (Set a) where + hash = BuildHashable.hashTokenizable instance Hashable Int64 where - hash = BuildHashable.hash + hash = BuildHashable.hashTokenizable diff --git a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs index 062217cb28..5c9de43d4b 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs @@ -2,14 +2,13 @@ module Unison.Hashing.V2.Kind where -import Unison.Prelude - -import Unison.Hashing.V2.BuildHashable (Hashable) +import Unison.Hashing.V2.BuildHashable (Tokenizable) import qualified Unison.Hashing.V2.BuildHashable as Hashable +import Unison.Prelude -data Kind = Star | Arrow Kind Kind deriving (Eq,Ord,Read,Show,Generic) +data Kind = Star | Arrow Kind Kind deriving (Eq, Ord, Read, Show, Generic) -instance Hashable Kind where +instance Tokenizable Kind where tokens k = case k of Star -> [Hashable.Tag 0] Arrow k1 k2 -> (Hashable.Tag 1 : Hashable.tokens k1) ++ Hashable.tokens k2 diff --git a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs index d97062cf48..b31ec7d543 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs @@ -7,7 +7,7 @@ module Unison.Hashing.V2.Patch (Patch (..), hashPatch) where import Data.Map (Map) import Data.Set (Set) import Unison.Hash (Hash) -import Unison.Hashing.V2.BuildHashable (Hashable) +import Unison.Hashing.V2.BuildHashable (Tokenizable) import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) @@ -15,14 +15,14 @@ import Unison.Hashing.V2.TermEdit (TermEdit) import Unison.Hashing.V2.TypeEdit (TypeEdit) hashPatch :: Patch -> Hash -hashPatch = H.accumulate' +hashPatch = H.hashTokenizable data Patch = Patch { termEdits :: Map Referent (Set TermEdit), typeEdits :: Map Reference (Set TypeEdit) } -instance Hashable Patch where +instance Tokenizable Patch where tokens p = [ H.accumulateToken (termEdits p), H.accumulateToken (typeEdits p) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs index 7427d85df0..d5076f5610 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs @@ -38,7 +38,7 @@ data SeqOp | Concat deriving (Eq, Show, Ord, Generic) -instance H.Hashable SeqOp where +instance H.Tokenizable SeqOp where tokens Cons = [H.Tag 0] tokens Snoc = [H.Tag 1] tokens Concat = [H.Tag 2] @@ -78,7 +78,7 @@ setLoc p loc = case p of SequenceOp _ ph op pt -> SequenceOp loc ph op pt x -> fmap (const loc) x -instance H.Hashable (Pattern p) where +instance H.Tokenizable (Pattern p) where tokens (Unbound _) = [H.Tag 0] tokens (Var _) = [H.Tag 1] tokens (Boolean _ b) = H.Tag 2 : [H.Tag $ if b then 1 else 0] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs index d3d4770416..e6b2c7d346 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs @@ -18,7 +18,7 @@ import Unison.Prelude import qualified Data.Text as Text import qualified Unison.Hash as H -import Unison.Hashing.V2.BuildHashable (Hashable) +import Unison.Hashing.V2.BuildHashable (Tokenizable) import qualified Unison.Hashing.V2.BuildHashable as Hashable import Unison.ShortHash (ShortHash) import qualified Unison.ShortHash as SH @@ -65,6 +65,6 @@ components sccs = uncurry component =<< sccs instance Show Id where show = SH.toString . SH.take 5 . toShortHash . DerivedId instance Show Reference where show = SH.toString . SH.take 5 . toShortHash -instance Hashable Reference where +instance Tokenizable Reference where tokens (Builtin txt) = [Hashable.Tag 0, Hashable.Text txt] tokens (DerivedId (Id h i)) = [Hashable.Tag 1, Hashable.Bytes (H.toByteString h), Hashable.Nat i] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs index 1d9270b404..442c63969e 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs @@ -11,13 +11,13 @@ module Unison.Hashing.V2.Referent where import Unison.DataDeclaration.ConstructorId (ConstructorId) -import Unison.Hashing.V2.BuildHashable (Hashable) +import Unison.Hashing.V2.BuildHashable (Tokenizable) import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) data Referent = Ref Reference | Con Reference ConstructorId deriving (Show, Ord, Eq) -instance Hashable Referent where +instance Tokenizable Referent where tokens (Ref r) = [H.Tag 0] ++ H.tokens r tokens (Con r i) = [H.Tag 2] ++ H.tokens r ++ H.tokens i diff --git a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs index 88cccde40a..80c9d5fd7d 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs @@ -1,12 +1,12 @@ module Unison.Hashing.V2.TermEdit (TermEdit (..)) where -import Unison.Hashing.V2.BuildHashable (Hashable) +import Unison.Hashing.V2.BuildHashable (Tokenizable) import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Referent (Referent) data TermEdit = Replace Referent | Deprecate deriving (Eq, Ord, Show) -instance Hashable TermEdit where +instance Tokenizable TermEdit where tokens (Replace r) = [H.Tag 0] ++ H.tokens r tokens Deprecate = [H.Tag 1] diff --git a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs index e51fff891a..2bce2fc844 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs @@ -1,12 +1,12 @@ module Unison.Hashing.V2.TypeEdit (TypeEdit (..)) where -import Unison.Hashing.V2.BuildHashable (Hashable) +import Unison.Hashing.V2.BuildHashable (Tokenizable) import qualified Unison.Hashing.V2.BuildHashable as H import Unison.Hashing.V2.Reference (Reference) data TypeEdit = Replace Reference | Deprecate deriving (Eq, Ord, Show) -instance Hashable TypeEdit where +instance Tokenizable TypeEdit where tokens (Replace r) = H.Tag 0 : H.tokens r tokens Deprecate = [H.Tag 1] From 3e0e47e63289c15c44436bca680f3a0cd56497c4 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 17 Dec 2021 14:43:22 -0600 Subject: [PATCH 152/297] Rename BuildHashable -> Tokenizable --- parser-typechecker/src/Unison/Hashing/V2/ABT.hs | 4 ++-- parser-typechecker/src/Unison/Hashing/V2/Branch.hs | 4 ++-- parser-typechecker/src/Unison/Hashing/V2/Causal.hs | 6 +++--- .../src/Unison/Hashing/V2/DataDeclaration.hs | 4 ++-- parser-typechecker/src/Unison/Hashing/V2/Hashable.hs | 12 ++++++------ parser-typechecker/src/Unison/Hashing/V2/Kind.hs | 4 ++-- parser-typechecker/src/Unison/Hashing/V2/Patch.hs | 4 ++-- parser-typechecker/src/Unison/Hashing/V2/Pattern.hs | 2 +- .../src/Unison/Hashing/V2/Reference.hs | 4 ++-- .../src/Unison/Hashing/V2/Reference/Util.hs | 2 +- parser-typechecker/src/Unison/Hashing/V2/Referent.hs | 4 ++-- parser-typechecker/src/Unison/Hashing/V2/Term.hs | 4 ++-- parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs | 4 ++-- .../Hashing/V2/{BuildHashable.hs => Tokenizable.hs} | 2 +- parser-typechecker/src/Unison/Hashing/V2/Type.hs | 4 ++-- parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs | 4 ++-- parser-typechecker/unison-parser-typechecker.cabal | 2 +- 17 files changed, 35 insertions(+), 35 deletions(-) rename parser-typechecker/src/Unison/Hashing/V2/{BuildHashable.hs => Tokenizable.hs} (99%) diff --git a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs index bb02f0a1bb..eccb060c3c 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs @@ -14,8 +14,8 @@ import Unison.ABT import Data.List hiding (cycle, find) import Data.Vector ((!)) import Prelude hiding (abs, cycle) -import Unison.Hashing.V2.BuildHashable (Accumulate, Hashable1, hash1) -import qualified Unison.Hashing.V2.BuildHashable as Hashable +import Unison.Hashing.V2.Tokenizable (Accumulate, Hashable1, hash1) +import qualified Unison.Hashing.V2.Tokenizable as Hashable import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Vector as Vector diff --git a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs index d3bc426c38..dbc60602de 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Branch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Branch.hs @@ -8,8 +8,8 @@ module Unison.Hashing.V2.Branch (NameSegment (..), Raw (..), MdValues (..), hashBranch) where import Unison.Hash (Hash) -import Unison.Hashing.V2.BuildHashable (Tokenizable) -import qualified Unison.Hashing.V2.BuildHashable as H +import Unison.Hashing.V2.Tokenizable (Tokenizable) +import qualified Unison.Hashing.V2.Tokenizable as H import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) import Unison.Prelude diff --git a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs index e3e4253471..0463342a90 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Causal.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Causal.hs @@ -11,11 +11,11 @@ where import Data.Set (Set) import qualified Data.Set as Set import Unison.Hash (Hash) -import qualified Unison.Hashing.V2.BuildHashable as BuildHashable -import qualified Unison.Hashing.V2.BuildHashable as H +import qualified Unison.Hashing.V2.Tokenizable as Tokenizable +import qualified Unison.Hashing.V2.Tokenizable as H hashCausal :: Causal -> Hash -hashCausal = BuildHashable.hashTokenizable +hashCausal = Tokenizable.hashTokenizable data Causal = Causal {branchHash :: Hash, parents :: Set Hash} diff --git a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs index aeb96fc544..1cbc8e8bcf 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs @@ -22,8 +22,8 @@ import qualified Data.Map as Map import qualified Unison.ABT as ABT import Unison.Hash (Hash) import qualified Unison.Hashing.V2.ABT as ABT -import Unison.Hashing.V2.BuildHashable (Hashable1) -import qualified Unison.Hashing.V2.BuildHashable as Hashable +import Unison.Hashing.V2.Tokenizable (Hashable1) +import qualified Unison.Hashing.V2.Tokenizable as Hashable import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference import qualified Unison.Hashing.V2.Reference.Util as Reference.Util diff --git a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs index 4ad8f24b1b..13ed364b95 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Hashable.hs @@ -6,7 +6,7 @@ where import Data.Int (Int64) import Data.Set (Set) import Unison.Hash (Hash (..)) -import qualified Unison.Hashing.V2.BuildHashable as BuildHashable +import qualified Unison.Hashing.V2.Tokenizable as Tokenizable -- | This typeclass provides a mechanism for obtaining a content-based hash for Unison types & -- terms. @@ -15,11 +15,11 @@ import qualified Unison.Hashing.V2.BuildHashable as BuildHashable class Hashable t where hash :: t -> Hash -instance BuildHashable.Tokenizable a => Hashable [a] where - hash = BuildHashable.hashTokenizable +instance Tokenizable.Tokenizable a => Hashable [a] where + hash = Tokenizable.hashTokenizable -instance BuildHashable.Tokenizable a => Hashable (Set a) where - hash = BuildHashable.hashTokenizable +instance Tokenizable.Tokenizable a => Hashable (Set a) where + hash = Tokenizable.hashTokenizable instance Hashable Int64 where - hash = BuildHashable.hashTokenizable + hash = Tokenizable.hashTokenizable diff --git a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs index 5c9de43d4b..98d6791a6f 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Kind.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Kind.hs @@ -2,8 +2,8 @@ module Unison.Hashing.V2.Kind where -import Unison.Hashing.V2.BuildHashable (Tokenizable) -import qualified Unison.Hashing.V2.BuildHashable as Hashable +import Unison.Hashing.V2.Tokenizable (Tokenizable) +import qualified Unison.Hashing.V2.Tokenizable as Hashable import Unison.Prelude data Kind = Star | Arrow Kind Kind deriving (Eq, Ord, Read, Show, Generic) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs index b31ec7d543..a200f35013 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Patch.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Patch.hs @@ -7,8 +7,8 @@ module Unison.Hashing.V2.Patch (Patch (..), hashPatch) where import Data.Map (Map) import Data.Set (Set) import Unison.Hash (Hash) -import Unison.Hashing.V2.BuildHashable (Tokenizable) -import qualified Unison.Hashing.V2.BuildHashable as H +import Unison.Hashing.V2.Tokenizable (Tokenizable) +import qualified Unison.Hashing.V2.Tokenizable as H import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.TermEdit (TermEdit) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs index d5076f5610..200df8ffaa 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Pattern.hs @@ -10,7 +10,7 @@ import Data.Foldable as Foldable hiding (foldMap') import Data.List (intercalate) import qualified Data.Set as Set import Unison.DataDeclaration.ConstructorId (ConstructorId) -import qualified Unison.Hashing.V2.BuildHashable as H +import qualified Unison.Hashing.V2.Tokenizable as H import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Type as Type import Unison.Prelude diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs index e6b2c7d346..7a06ac2c4c 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference.hs @@ -18,8 +18,8 @@ import Unison.Prelude import qualified Data.Text as Text import qualified Unison.Hash as H -import Unison.Hashing.V2.BuildHashable (Tokenizable) -import qualified Unison.Hashing.V2.BuildHashable as Hashable +import Unison.Hashing.V2.Tokenizable (Tokenizable) +import qualified Unison.Hashing.V2.Tokenizable as Hashable import Unison.ShortHash (ShortHash) import qualified Unison.ShortHash as SH diff --git a/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs b/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs index 8317a16b2a..a7d4e4fc0b 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Reference/Util.hs @@ -4,7 +4,7 @@ module Unison.Hashing.V2.Reference.Util where import Unison.Prelude import qualified Unison.Hashing.V2.Reference as Reference -import Unison.Hashing.V2.BuildHashable (Hashable1) +import Unison.Hashing.V2.Tokenizable (Hashable1) import Unison.ABT (Var) import qualified Unison.Hashing.V2.ABT as ABT import qualified Data.Map as Map diff --git a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs index 442c63969e..6f34c74795 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Referent.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Referent.hs @@ -11,8 +11,8 @@ module Unison.Hashing.V2.Referent where import Unison.DataDeclaration.ConstructorId (ConstructorId) -import Unison.Hashing.V2.BuildHashable (Tokenizable) -import qualified Unison.Hashing.V2.BuildHashable as H +import Unison.Hashing.V2.Tokenizable (Tokenizable) +import qualified Unison.Hashing.V2.Tokenizable as H import Unison.Hashing.V2.Reference (Reference) data Referent = Ref Reference | Con Reference ConstructorId diff --git a/parser-typechecker/src/Unison/Hashing/V2/Term.hs b/parser-typechecker/src/Unison/Hashing/V2/Term.hs index 04c4914f2b..6c28090178 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Term.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Term.hs @@ -26,8 +26,8 @@ import qualified Unison.ABT as ABT import qualified Unison.Blank as B import Unison.DataDeclaration.ConstructorId (ConstructorId) import qualified Unison.Hash as Hash -import Unison.Hashing.V2.BuildHashable (Hashable1, accumulateToken) -import qualified Unison.Hashing.V2.BuildHashable as Hashable +import Unison.Hashing.V2.Tokenizable (Hashable1, accumulateToken) +import qualified Unison.Hashing.V2.Tokenizable as Hashable import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Pattern (Pattern) import Unison.Hashing.V2.Reference (Reference) diff --git a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs index 80c9d5fd7d..9164d170a8 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/TermEdit.hs @@ -1,7 +1,7 @@ module Unison.Hashing.V2.TermEdit (TermEdit (..)) where -import Unison.Hashing.V2.BuildHashable (Tokenizable) -import qualified Unison.Hashing.V2.BuildHashable as H +import Unison.Hashing.V2.Tokenizable (Tokenizable) +import qualified Unison.Hashing.V2.Tokenizable as H import Unison.Hashing.V2.Referent (Referent) data TermEdit = Replace Referent | Deprecate diff --git a/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs b/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs similarity index 99% rename from parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs rename to parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs index eb1ab2c09f..ea13c3e323 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/BuildHashable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs @@ -1,4 +1,4 @@ -module Unison.Hashing.V2.BuildHashable +module Unison.Hashing.V2.Tokenizable ( Tokenizable (..), Accumulate (..), Hashable1 (..), diff --git a/parser-typechecker/src/Unison/Hashing/V2/Type.hs b/parser-typechecker/src/Unison/Hashing/V2/Type.hs index 5de33f8a6c..35ab1e605f 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Type.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Type.hs @@ -28,8 +28,8 @@ module Unison.Hashing.V2.Type ( import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.ABT as ABT -import Unison.Hashing.V2.BuildHashable (Hashable1) -import qualified Unison.Hashing.V2.BuildHashable as Hashable +import Unison.Hashing.V2.Tokenizable (Hashable1) +import qualified Unison.Hashing.V2.Tokenizable as Hashable import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Reference as Reference diff --git a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs index 2bce2fc844..c5629868ea 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/TypeEdit.hs @@ -1,7 +1,7 @@ module Unison.Hashing.V2.TypeEdit (TypeEdit (..)) where -import Unison.Hashing.V2.BuildHashable (Tokenizable) -import qualified Unison.Hashing.V2.BuildHashable as H +import Unison.Hashing.V2.Tokenizable (Tokenizable) +import qualified Unison.Hashing.V2.Tokenizable as H import Unison.Hashing.V2.Reference (Reference) data TypeEdit = Replace Reference | Deprecate diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index f804abfde1..1ce06e678b 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -81,7 +81,6 @@ library Unison.FileParsers Unison.Hashing.V2.ABT Unison.Hashing.V2.Branch - Unison.Hashing.V2.BuildHashable Unison.Hashing.V2.Causal Unison.Hashing.V2.Convert Unison.Hashing.V2.DataDeclaration @@ -94,6 +93,7 @@ library Unison.Hashing.V2.Referent Unison.Hashing.V2.Term Unison.Hashing.V2.TermEdit + Unison.Hashing.V2.Tokenizable Unison.Hashing.V2.Type Unison.Hashing.V2.TypeEdit Unison.Lexer From a438f84f1a58f48197832219aa9cfcff72904f9d Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 17 Dec 2021 14:51:44 -0600 Subject: [PATCH 153/297] More docs --- .../src/Unison/Hashing/V2/Tokenizable.hs | 18 ++++++++++++++++++ unison-core/src/Unison/Hashable.hs | 6 ++++++ 2 files changed, 24 insertions(+) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs b/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs index ea13c3e323..eca0902717 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs @@ -58,6 +58,21 @@ accumulateToken = Hashed . hashTokenizable hashTokenizable :: (Tokenizable t, Accumulate h) => t -> h hashTokenizable = accumulate . tokens +-- | Tokenizable converts a value into a set of hashing tokens which will later be accumulated +-- into a Hash. Be very careful when adding or altering instances of this typeclass, changing +-- the hash of a value is a major breaking change and requires a complete codebase migration. +-- +-- If you simply want to provide a convenience instance for a type which wraps some Hashable +-- type, write an instance of 'Hashable' which calls through to the inner instance instead. +-- +-- E.g. If I want to be able to hash a @TaggedBranch@ using its Branch0 hashable instance: +-- +-- @@ +-- data TaggedBranch = TaggedBranch String Branch +-- +-- instance Hashable TaggedBranch where +-- hash (TaggedBranch _ b) = hash b +-- @@ class Tokenizable t where tokens :: Accumulate h => t -> [Token h] @@ -109,6 +124,9 @@ instance Tokenizable Bool where instance Tokenizable Hash where tokens h = [Bytes (Hash.toByteString h)] +-- | A class for all types which can accumulate tokens into a hash. +-- If you want to provide an instance for hashing a Unison value, see 'Tokenizable' +-- and 'Hashable' instead. instance Accumulate Hash where accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit where diff --git a/unison-core/src/Unison/Hashable.hs b/unison-core/src/Unison/Hashable.hs index d846736f4c..68dc4fcf97 100644 --- a/unison-core/src/Unison/Hashable.hs +++ b/unison-core/src/Unison/Hashable.hs @@ -41,6 +41,12 @@ hash, accumulate' :: (Accumulate h, Hashable t) => t -> h accumulate' = accumulate . tokens hash = accumulate' +-- | NOTE: This typeclass is distinct from 'Unison.Hashing.V2.Hashable', which is the +-- content-based hashish class used for Unison types & terms. +-- +-- This class however, is meant only to be used as a utility when hash-based identities are +-- useful in algorithms, the runtime, etc. +-- Consider carefully which class you want in each use-case. class Hashable t where tokens :: Accumulate h => t -> [Token h] From 1edf417863e16f6eb9d6690ebb72f37c6de28462 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Wed, 22 Dec 2021 11:27:04 -0800 Subject: [PATCH 154/297] Bugfix: make hashing effects order-independent Previously, terms that are identical other than the order a set of effects is listed hashed differently. The following two terms should have the same hash, and now they do. foo : Nat ->{Exception, IO} Nat bar : Nat ->{IO, Exception} Nat --- .../src/Unison/Hashing/V2/DataDeclaration.hs | 10 +++------- parser-typechecker/src/Unison/Hashing/V2/Type.hs | 7 +++---- unison-core/src/Unison/ABT.hs | 6 ++++-- unison-core/src/Unison/Term.hs | 2 +- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs index 02b2601f85..8f2c6bcf2c 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs @@ -18,6 +18,7 @@ where import Control.Lens (over, _3) import Data.Bifunctor (first, second) +import qualified Data.List as List import qualified Data.Map as Map import qualified Unison.ABT as ABT import Unison.Hash (Hash) @@ -61,10 +62,7 @@ constructors (DataDeclaration _ _ _ ctors) = [(v, t) | (_, v, t) <- ctors] toABT :: ABT.Var v => DataDeclaration v () -> ABT.Term F v () toABT dd = ABT.tm $ Modified (modifier dd) dd' where - dd' = ABT.absChain (bound dd) $ ABT.cycle - (ABT.absChain - (fst <$> constructors dd) - (ABT.tm . Constructors $ ABT.transform Type <$> constructorTypes dd)) + dd' = ABT.absChain (bound dd) (ABT.tm (Constructors (ABT.transform Type <$> constructorTypes dd))) -- Implementation detail of `hashDecls`, works with unannotated data decls hashDecls0 :: (Eq v, ABT.Var v, Show v) => Map v (DataDeclaration v ()) -> [(v, Reference.Id)] @@ -131,9 +129,7 @@ instance Hashable1 F where LetRec bindings body -> let (hashes, hash') = hashCycle bindings in [tag 1] ++ map hashed hashes ++ [hashed $ hash' body] - Constructors cs -> - let (hashes, _) = hashCycle cs - in tag 2 : map hashed hashes + Constructors cs -> tag 2 : map hashed (List.sort (map hash cs)) Modified m t -> [tag 3, Hashable.accumulateToken m, hashed $ hash t] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Type.hs b/parser-typechecker/src/Unison/Hashing/V2/Type.hs index 938fc8b010..fd29d39399 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Type.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Type.hs @@ -25,6 +25,7 @@ module Unison.Hashing.V2.Type ( textRef, ) where +import qualified Data.List as List (sort) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.ABT as ABT @@ -129,7 +130,7 @@ toReferenceMentions ty = in Set.fromList $ toReference . gen <$> ABT.subterms ty instance Hashable1 F where - hash1 hashCycle hash e = + hash1 _ hash e = let (tag, hashed) = (Hashable.Tag, Hashable.Hashed) -- Note: start each layer with leading `0` byte, to avoid collisions with @@ -143,9 +144,7 @@ instance Hashable1 F where -- a) {Remote, Abort} (() -> {Remote} ()) should hash the same as -- b) {Abort, Remote} (() -> {Remote} ()) but should hash differently from -- c) {Remote, Abort} (() -> {Abort} ()) - Effects es -> let - (hs, _) = hashCycle es - in tag 4 : map hashed hs + Effects es -> tag 4 : map hashed (List.sort (map hash es)) Effect e t -> [tag 5, hashed (hash e), hashed (hash t)] Forall a -> [tag 6, hashed (hash a)] IntroOuter a -> [tag 7, hashed (hash a)] diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index 9436387a29..6b02416e89 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -78,6 +78,7 @@ module Unison.ABT , cycle' , cycler , pattern Abs' + , pattern Abs'' , pattern AbsN' , pattern Var' , pattern Cycle' @@ -214,14 +215,15 @@ extraMap p (Term fvs a sub) = Term fvs a (go p sub) where Tm x -> Tm (fmap (extraMap p) (p x)) pattern Var' v <- Term _ _ (Var v) -pattern Cycle' vs t <- Term _ _ (Cycle (AbsN' vs t)) --- pattern Abs' v body <- Term _ _ (Abs v body) +pattern Cycle' vs t <- Term _ _ (Cycle (AbsN' vs (Tm' t))) +pattern Abs'' v body <- Term _ _ (Abs v body) pattern Abs' subst <- (unabs1 -> Just subst) pattern AbsN' vs body <- (unabs -> (vs, body)) {-# COMPLETE AbsN' #-} pattern Tm' f <- Term _ _ (Tm f) pattern CycleA' a avs t <- Term _ a (Cycle (AbsNA' avs t)) pattern AbsNA' avs body <- (unabsA -> (avs, body)) +{-# COMPLETE Var', Cycle', Abs'', Tm' #-} unabsA :: Term f v a -> ([(a,v)], Term f v a) unabsA (Term _ a (Abs hd body)) = diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index 59e20b34ce..d3875b34c0 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -771,7 +771,7 @@ unLetRecNamed , [(v, Term2 vt at ap v a)] , Term2 vt at ap v a ) -unLetRecNamed (ABT.Cycle' vs (ABT.Tm' (LetRec isTop bs e))) +unLetRecNamed (ABT.Cycle' vs (LetRec isTop bs e)) | length vs == length bs = Just (isTop, zip vs bs, e) unLetRecNamed _ = Nothing From cae412b859a91043f4026a29a0309151d45ae470 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Fri, 10 Dec 2021 15:03:57 -0500 Subject: [PATCH 155/297] use hashCycle for unordered things --- .../src/Unison/Hashing/V2/ABT.hs | 23 +++++++++---------- .../src/Unison/Hashing/V2/DataDeclaration.hs | 5 ++-- .../src/Unison/Hashing/V2/Type.hs | 7 +++--- 3 files changed, 18 insertions(+), 17 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs index 54bba0b4f0..8fa096e373 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs @@ -15,6 +15,7 @@ import Data.List hiding (cycle, find) import Data.Vector ((!)) import Prelude hiding (abs,cycle) import Unison.Hashable (Accumulate,Hashable1,hash1) +import qualified Data.List as List (sort) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Vector as Vector @@ -78,8 +79,8 @@ hash :: forall f v a h . (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulat => Term f v a -> h hash = hash' [] where hash' :: [Either [v] v] -> Term f v a -> h - hash' env (Term _ _ t) = case t of - Var v -> maybe die hashInt ind + hash' env = \case + Var' v -> maybe die hashInt ind where lookup (Left cycle) = v `elem` cycle lookup (Right v') = v == v' ind = findIndex lookup env @@ -87,18 +88,16 @@ hash = hash' [] where hashInt i = Hashable.accumulate [Hashable.Nat $ fromIntegral i] die = error $ "unknown var in environment: " ++ show v ++ " environment = " ++ show env - Cycle (AbsN' vs t) -> hash' (Left vs : env) t - -- Cycle t -> hash' env t - Abs v t -> hash' (Right v : env) t - Tm t -> Hashable.hash1 (hashCycle env) (hash' env) t + Cycle' vs t -> Hashable.hash1 (hashCycle vs env) undefined t + Abs'' v t -> hash' (Right v : env) t + Tm' t -> Hashable.hash1 (\ts -> (List.sort (map (hash' env) ts), hash' env)) (hash' env) t - hashCycle :: [Either [v] v] -> [Term f v a] -> ([h], Term f v a -> h) - hashCycle env@(Left cycle : envTl) ts | length cycle == length ts = + hashCycle :: [v] -> [Either [v] v] -> [Term f v a] -> ([h], Term f v a -> h) + hashCycle cycle env ts = let permute p xs = case Vector.fromList xs of xs -> map (xs !) p - hashed = map (\(i,t) -> ((i,t), hash' env t)) (zip [0..] ts) + hashed = map (\(i,t) -> ((i,t), hash' (Left cycle : env) t)) (zip [0..] ts) pt = fst <$> sortOn snd hashed (p,ts') = unzip pt - in case map Right (permute p cycle) ++ envTl of - env -> (map (hash' env) ts', hash' env) - hashCycle env ts = (map (hash' env) ts, hash' env) + in case map Right (permute p cycle) ++ env of + env -> (map (hash' env) ts', hash' env) \ No newline at end of file diff --git a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs index 8f2c6bcf2c..50cdd7dd5b 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/DataDeclaration.hs @@ -18,7 +18,6 @@ where import Control.Lens (over, _3) import Data.Bifunctor (first, second) -import qualified Data.List as List import qualified Data.Map as Map import qualified Unison.ABT as ABT import Unison.Hash (Hash) @@ -129,7 +128,9 @@ instance Hashable1 F where LetRec bindings body -> let (hashes, hash') = hashCycle bindings in [tag 1] ++ map hashed hashes ++ [hashed $ hash' body] - Constructors cs -> tag 2 : map hashed (List.sort (map hash cs)) + Constructors cs -> + let (hashes, _) = hashCycle cs + in tag 2 : map hashed hashes Modified m t -> [tag 3, Hashable.accumulateToken m, hashed $ hash t] diff --git a/parser-typechecker/src/Unison/Hashing/V2/Type.hs b/parser-typechecker/src/Unison/Hashing/V2/Type.hs index fd29d39399..938fc8b010 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Type.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Type.hs @@ -25,7 +25,6 @@ module Unison.Hashing.V2.Type ( textRef, ) where -import qualified Data.List as List (sort) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.ABT as ABT @@ -130,7 +129,7 @@ toReferenceMentions ty = in Set.fromList $ toReference . gen <$> ABT.subterms ty instance Hashable1 F where - hash1 _ hash e = + hash1 hashCycle hash e = let (tag, hashed) = (Hashable.Tag, Hashable.Hashed) -- Note: start each layer with leading `0` byte, to avoid collisions with @@ -144,7 +143,9 @@ instance Hashable1 F where -- a) {Remote, Abort} (() -> {Remote} ()) should hash the same as -- b) {Abort, Remote} (() -> {Remote} ()) but should hash differently from -- c) {Remote, Abort} (() -> {Abort} ()) - Effects es -> tag 4 : map hashed (List.sort (map hash es)) + Effects es -> let + (hs, _) = hashCycle es + in tag 4 : map hashed hs Effect e t -> [tag 5, hashed (hash e), hashed (hash t)] Forall a -> [tag 6, hashed (hash a)] IntroOuter a -> [tag 7, hashed (hash a)] From 15697f818f6ecb288f231d4150a4790b8aeb4f50 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Tue, 4 Jan 2022 16:58:15 -0800 Subject: [PATCH 156/297] add transcript that demonstrates ability order doesn't affect hash --- .../ability-order-doesnt-affect-hash.md | 20 ++++++++ ...ability-order-doesnt-affect-hash.output.md | 47 +++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 unison-src/transcripts/ability-order-doesnt-affect-hash.md create mode 100644 unison-src/transcripts/ability-order-doesnt-affect-hash.output.md diff --git a/unison-src/transcripts/ability-order-doesnt-affect-hash.md b/unison-src/transcripts/ability-order-doesnt-affect-hash.md new file mode 100644 index 0000000000..4a0606a4bd --- /dev/null +++ b/unison-src/transcripts/ability-order-doesnt-affect-hash.md @@ -0,0 +1,20 @@ +The order of a set of abilities is normalized before hashing. + +```unison +unique ability Foo where + foo : () + +unique ability Bar where + bar : () + +term1 : () ->{Foo, Bar} () +term1 _ = () + +term2 : () ->{Bar, Foo} () +term2 _ = () +``` + +```ucm +.> add +.> names term1 +``` diff --git a/unison-src/transcripts/ability-order-doesnt-affect-hash.output.md b/unison-src/transcripts/ability-order-doesnt-affect-hash.output.md new file mode 100644 index 0000000000..751d63b0c2 --- /dev/null +++ b/unison-src/transcripts/ability-order-doesnt-affect-hash.output.md @@ -0,0 +1,47 @@ +The order of a set of abilities is normalized before hashing. + +```unison +unique ability Foo where + foo : () + +unique ability Bar where + bar : () + +term1 : () ->{Foo, Bar} () +term1 _ = () + +term2 : () ->{Bar, Foo} () +term2 _ = () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique ability Bar + unique ability Foo + term1 : '{Foo, Bar} () + term2 : '{Foo, Bar} () + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + unique ability Bar + unique ability Foo + term1 : '{Foo, Bar} () + term2 : '{Foo, Bar} () + +.> names term1 + + Term + Hash: #3g90c9l64s + Names: term1 term2 + +``` From 35d22623f657eaae2f174088b10b9beb66a61f08 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 5 Jan 2022 16:58:52 -0600 Subject: [PATCH 157/297] Update changed history hash (expected due to a change in trunk to how empty namespaces are handled.) --- unison-src/transcripts/empty-namespaces.output.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/unison-src/transcripts/empty-namespaces.output.md b/unison-src/transcripts/empty-namespaces.output.md index 487655f056..0532c30674 100644 --- a/unison-src/transcripts/empty-namespaces.output.md +++ b/unison-src/transcripts/empty-namespaces.output.md @@ -140,7 +140,7 @@ The history should be that of the moved namespace. - □ #ldl7o5e9i5 (start of history) + □ #j5moq3uqa9 (start of history) .> move.namespace moveme moveoverme @@ -153,6 +153,6 @@ The history should be that of the moved namespace. - □ #ldl7o5e9i5 (start of history) + □ #j5moq3uqa9 (start of history) ``` From bfd4669d0233c339ccfb0c7c96c5cf78317f7ed4 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 6 Jan 2022 14:36:40 -0600 Subject: [PATCH 158/297] Remove one-term test It depends on a hash, but doesn't clarify what it's testing, and the test after it seems to be accomplishing a similar thing in a more robust way. --- unison-cli/tests/Unison/Test/GitSync.hs | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/unison-cli/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs index 48b3c7c664..9515ca0978 100644 --- a/unison-cli/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -296,29 +296,6 @@ test = scope "gitsync22" . tests $ , pushPullTest "one-term" fmt --- simplest-author - (\repo -> [i| - ```unison - c = 3 - ``` - ```ucm - .> debug.file - .> add - .> push.create ${repo} - ``` - |]) --- simplest-user - (\repo -> [i| - ```ucm - .> pull ${repo} - .> alias.term ##Nat.+ + - ``` - ```unison - > #fs7la111vn + 1 - ``` - |]) - , - pushPullTest "one-term2" fmt -- simplest-author (\repo -> [i| ```unison From 1e6c6590e87e932cdaf7c4c150c462e458d8e810 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 12 Jan 2022 14:22:29 -0600 Subject: [PATCH 159/297] WIP --- unison-cli/package.yaml | 1 + .../src/Unison/Codebase/Editor/HandleInput.hs | 3 +- .../src/Unison/Codebase/Editor/Slurp.hs | 70 +++++++++++++++++++ unison-cli/unison-cli.cabal | 6 ++ 4 files changed, 79 insertions(+), 1 deletion(-) create mode 100644 unison-cli/src/Unison/Codebase/Editor/Slurp.hs diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 7adf4da3a1..9228fea898 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -10,6 +10,7 @@ flags: ghc-options: -Wall dependencies: + - pretty-simple - ListLike - async - base diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 56d1b3b26b..927be2113e 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -151,6 +151,7 @@ import qualified Data.Set.NonEmpty as NESet import Data.Set.NonEmpty (NESet) import Unison.Symbol (Symbol) import qualified Unison.Codebase.Editor.Input as Input +import Debug.Pretty.Simple defaultPatchNameSegment :: NameSegment defaultPatchNameSegment = "patch" @@ -2782,7 +2783,7 @@ toSlurpResult :: UF.TypecheckedUnisonFile v Ann -> Names -> SlurpResult v -toSlurpResult curPath uf existingNames = +toSlurpResult curPath uf existingNames = pTraceShowId $ Slurp.subtractComponent (conflicts <> ctorCollisions) $ SlurpResult uf diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs new file mode 100644 index 0000000000..389e82e881 --- /dev/null +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -0,0 +1,70 @@ +{-# OPTIONS_GHC -Wno-unused-local-binds #-} + +module Unison.Codebase.Editor.Slurp where + +import Unison.Hash (Hash) +import qualified Unison.HashQualified' as HQ' +import Unison.Name (Name) +import Unison.Names (Names) +import Unison.Parser.Ann (Ann) +import Unison.Prelude +import qualified Unison.UnisonFile as UF +import qualified Unison.UnisonFile.Names as UF +import Unison.Var (Var) + +data SlurpStatus = New | Updated | Duplicate | Alias + +data SlurpOp = Add | Update + +data SlurpErr + = TermCtorCollision + | CtorTermCollision + | RequiresUpdate + +data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v} + deriving (Eq, Ord, Show) + +data DefinitionNotes v = DefinitionNotes + { status :: SlurpStatus, + errs :: Set SlurpErr + } + +data ComponentNotes v = ComponentNotes + { deps :: Set ComponentHash, + definitions :: Map v (DefinitionNotes v) + } + +data SlurpResult v = SlurpResult + { componentNotes :: Map ComponentHash (Map v (DefinitionNotes v)), + varToComponent :: Map v ComponentHash + } + +type ComponentHash = Hash + +data Components v = Components + { termComponents :: Map Hash (Set v), + typeComponents :: Map Hash (Set v) + } + +collectComponents :: UF.TypecheckedUnisonFile v Ann -> Components v +collectComponents _uf = Components {termComponents, typeComponents} + where + termComponents = undefined + typeComponents = undefined + +computeComponentDependencies :: Components v -> Map Hash (Set Hash) +computeComponentDependencies = undefined + +analyzeTypecheckedUnisonFile :: + forall v. + Var v => + UF.TypecheckedUnisonFile v Ann -> + Names -> + SlurpResult v +analyzeTypecheckedUnisonFile uf _codebaseNames = undefined + where + fileNames :: Names + fileNames = UF.typecheckedToNames uf + +slurpOp :: SlurpOp -> Maybe (Set v) -> SlurpResult v -> Either (Set SlurpErr) (SlurpComponent v) +slurpOp = undefined diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index 9eb1bbc4a7..76fac607a3 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -35,6 +35,7 @@ library Unison.Codebase.Editor.Output.BranchDiff Unison.Codebase.Editor.Output.DumpNamespace Unison.Codebase.Editor.Propagate + Unison.Codebase.Editor.Slurp Unison.Codebase.Editor.SlurpComponent Unison.Codebase.Editor.SlurpResult Unison.Codebase.Editor.TodoOutput @@ -96,6 +97,7 @@ library , mtl , nonempty-containers , open-browser + , pretty-simple , random >=1.2.0 , regex-tdfa , stm @@ -165,6 +167,7 @@ executable integration-tests , mtl , nonempty-containers , open-browser + , pretty-simple , process , random >=1.2.0 , regex-tdfa @@ -235,6 +238,7 @@ executable transcripts , mtl , nonempty-containers , open-browser + , pretty-simple , process , random >=1.2.0 , regex-tdfa @@ -305,6 +309,7 @@ executable unison , nonempty-containers , open-browser , optparse-applicative >=0.16.1.0 + , pretty-simple , random >=1.2.0 , regex-tdfa , shellmet @@ -382,6 +387,7 @@ test-suite tests , mtl , nonempty-containers , open-browser + , pretty-simple , random >=1.2.0 , regex-tdfa , shellmet From bb522031de70c91e9873989d6041a7c4c4bd7a16 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 12 Jan 2022 17:05:08 -0600 Subject: [PATCH 160/297] WIP --- .../src/Unison/UnisonFile/Type.hs | 14 ++ .../src/Unison/Codebase/Editor/HandleInput.hs | 137 ++++++++-------- .../src/Unison/Codebase/Editor/Slurp.hs | 146 ++++++++++++++++-- 3 files changed, 224 insertions(+), 73 deletions(-) diff --git a/parser-typechecker/src/Unison/UnisonFile/Type.hs b/parser-typechecker/src/Unison/UnisonFile/Type.hs index f4c595a237..efc06b8f2a 100644 --- a/parser-typechecker/src/Unison/UnisonFile/Type.hs +++ b/parser-typechecker/src/Unison/UnisonFile/Type.hs @@ -16,6 +16,9 @@ import Unison.Term (Term) import qualified Unison.Term as Term import Unison.Type (Type) import Unison.WatchKind (WatchKind) +import Unison.Hash (Hash) +import qualified Unison.Referent as Referent +import qualified Unison.LabeledDependency as LD data UnisonFile v a = UnisonFileId { dataDeclarationsId :: Map v (Reference.Id, DataDeclaration v a), @@ -42,6 +45,17 @@ data TypecheckedUnisonFile v a = hashTermsId :: Map v (Reference.Id, Maybe WatchKind, Term v a, Type v a) } deriving Show +-- Produce a mapping which includes all component hashes and the variables contained +-- within them. +-- This includes all kinds of definitions: types, terms, abilities, constructors +componentMap :: TypecheckedUnisonFile v ann -> Map Hash (Set v) +componentMap _uf = undefined + -- TODO: watch components? + +-- Produce a mapping which includes all variables their reference. +referencesMap :: TypecheckedUnisonFile v ann -> Map v LD.LabeledDependency +referencesMap _uf = undefined + {-# COMPLETE TypecheckedUnisonFile #-} pattern TypecheckedUnisonFile ds es tlcs wcs hts <- TypecheckedUnisonFileId (fmap (first Reference.DerivedId) -> ds) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 927be2113e..09fa026841 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -55,8 +55,9 @@ import qualified Unison.Codebase.Editor.Propagate as Propagate import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRemotePath, WriteRepo, printNamespace, writePathToRead, writeToRead) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC +import qualified Unison.Codebase.Editor.Slurp as Slurp import Unison.Codebase.Editor.SlurpResult (SlurpResult (..)) -import qualified Unison.Codebase.Editor.SlurpResult as Slurp +-- import qualified Unison.Codebase.Editor.SlurpResult as Slurp import qualified Unison.Codebase.Editor.TodoOutput as TO import qualified Unison.Codebase.Editor.UriParser as UriParser import qualified Unison.Codebase.MainTerm as MainTerm @@ -152,6 +153,7 @@ import Data.Set.NonEmpty (NESet) import Unison.Symbol (Symbol) import qualified Unison.Codebase.Editor.Input as Input import Debug.Pretty.Simple +import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp defaultPatchNameSegment :: NameSegment defaultPatchNameSegment = "patch" @@ -260,11 +262,12 @@ loop = do loadUnisonFile sourceName text = do let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) withFile [] sourceName (text, lexed) $ \unisonFile -> do - sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames + sr <- Slurp.analyzeTypecheckedUnisonFile unisonFile <$> currentPathNames + -- sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames names <- displayNames unisonFile pped <- prettyPrintEnvDecl names let ppe = PPE.suffixifiedPPE pped - eval . Notify $ Typechecked sourceName ppe sr unisonFile + eval . Notify $ Typechecked sourceName ppe (undefined $ Slurp.toSlurpPrintout sr) unisonFile unlessError' EvaluationFailure do (bindings, e) <- ExceptT . eval . Evaluate ppe $ unisonFile lift do @@ -1256,11 +1259,11 @@ loop = do Nothing -> respond NoUnisonFile Just uf -> do sr <- - Slurp.disallowUpdates + OldSlurp.disallowUpdates . applySelection hqs uf . toSlurpResult currentPath' uf <$> slurpResultNames - let adds = Slurp.adds sr + let adds = OldSlurp.adds sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . filterBySlurpResult sr $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf @@ -1270,7 +1273,7 @@ loop = do PreviewAddI hqs -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do sr <- - Slurp.disallowUpdates + OldSlurp.disallowUpdates . applySelection hqs uf . toSlurpResult currentPath' uf <$> slurpResultNames @@ -1820,50 +1823,49 @@ handleUpdate input maybePatchPath hqs = do let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames - let sr :: SlurpResult v - sr = - applySelection hqs uf - . toSlurpResult currentPath' uf - $ slurpCheckNames - addsAndUpdates :: SlurpComponent v - addsAndUpdates = Slurp.updates sr <> Slurp.adds sr + let sr = Slurp.analyzeTypecheckedUnisonFile uf currentPathNames + -- let sr :: SlurpResult v + -- sr = + -- applySelection hqs uf + -- . toSlurpResult currentPath' uf + -- $ slurpCheckNames fileNames :: Names fileNames = UF.typecheckedToNames uf -- todo: display some error if typeEdits or termEdits itself contains a loop - typeEdits :: Map Name (Reference, Reference) - typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) - where - f v = case ( toList (Names.typesNamed slurpCheckNames n), - toList (Names.typesNamed fileNames n) - ) of - ([old], [new]) -> (n, (old, new)) - _ -> - error $ - "Expected unique matches for " - ++ Var.nameStr v - ++ " but got: " - ++ show otherwise - where - n = Name.unsafeFromVar v + -- typeEdits :: Map Name (Reference, Reference) + -- typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) + -- where + -- f v = case ( toList (Names.typesNamed slurpCheckNames n), + -- toList (Names.typesNamed fileNames n) + -- ) of + -- ([old], [new]) -> (n, (old, new)) + -- _ -> + -- error $ + -- "Expected unique matches for " + -- ++ Var.nameStr v + -- ++ " but got: " + -- ++ show otherwise + -- where + -- n = Name.unsafeFromVar v hashTerms :: Map Reference (Type v Ann) hashTerms = Map.fromList (toList hashTerms0) where hashTerms0 = (\(r, _wk, _tm, typ) -> (r, typ)) <$> UF.hashTerms uf - termEdits :: Map Name (Reference, Reference) - termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) - where - g v = case ( toList (Names.refTermsNamed slurpCheckNames n), - toList (Names.refTermsNamed fileNames n) - ) of - ([old], [new]) -> (n, (old, new)) - _ -> - error $ - "Expected unique matches for " - ++ Var.nameStr v - ++ " but got: " - ++ show otherwise - where - n = Name.unsafeFromVar v + -- termEdits :: Map Name (Reference, Reference) + -- termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) + -- where + -- g v = case ( toList (Names.refTermsNamed slurpCheckNames n), + -- toList (Names.refTermsNamed fileNames n) + -- ) of + -- ([old], [new]) -> (n, (old, new)) + -- _ -> + -- error $ + -- "Expected unique matches for " + -- ++ Var.nameStr v + -- ++ " but got: " + -- ++ show otherwise + -- where + -- n = Name.unsafeFromVar v termDeprecations :: [(Name, Referent)] termDeprecations = [ (n, r) @@ -1920,19 +1922,28 @@ handleUpdate input maybePatchPath hqs = do updatePatches :: Branch0 m -> m (Branch0 m) updatePatches = Branch.modifyPatches seg updatePatch - when (Slurp.isNonempty sr) $ do - -- take a look at the `updates` from the SlurpResult - -- and make a patch diff to record a replacement from the old to new references - stepManyAtMNoSync Branch.CompressHistory - [ ( Path.unabsolute currentPath', - pure . doSlurpUpdates typeEdits termEdits termDeprecations - ), - ( Path.unabsolute currentPath', - pure . doSlurpAdds addsAndUpdates uf - ), - (Path.unabsolute p, updatePatches) - ] - eval . AddDefsToCodebase . filterBySlurpResult sr $ uf + case Slurp.slurpOp Update (undefined hqs) sr of + Left errs -> undefined + Right (adds, updates) -> + -- when nonEmpty + -- doSlurpUpdates updates + -- doSlurpAdds adds + undefined adds updates + + -- take a look at the `updates` from the SlurpResult + -- and make a patch diff to record a replacement from the old to new references + -- stepManyAtMNoSync Branch.CompressHistory + -- [ ( Path.unabsolute currentPath', + -- pure . doSlurpUpdates typeEdits termEdits termDeprecations + -- ), + -- ( Path.unabsolute currentPath', + -- pure . doSlurpAdds addsAndUpdates uf + -- ), + -- (Path.unabsolute p, updatePatches) + -- ] + -- eval . AddDefsToCodebase . filterBySlurpResult sr $ uf + + -- when (Slurp.isNonempty sr) $ do ppe <- prettyPrintEnvDecl =<< displayNames uf respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr -- propagatePatch prints TodoOutput @@ -2784,7 +2795,7 @@ toSlurpResult :: Names -> SlurpResult v toSlurpResult curPath uf existingNames = pTraceShowId $ - Slurp.subtractComponent (conflicts <> ctorCollisions) $ + OldSlurp.subtractComponent (conflicts <> ctorCollisions) $ SlurpResult uf mempty @@ -2890,13 +2901,13 @@ toSlurpResult curPath uf existingNames = pTraceShowId $ R.Relation Name Referent -> R.Relation Name Referent -> Set v -> - Map v Slurp.Aliases + Map v OldSlurp.Aliases buildAliases existingNames namesFromFile duplicates = Map.fromList [ ( var n, if null aliasesOfOld - then Slurp.AddAliases aliasesOfNew - else Slurp.UpdateAliases aliasesOfOld aliasesOfNew + then OldSlurp.AddAliases aliasesOfNew + else OldSlurp.UpdateAliases aliasesOfOld aliasesOfNew ) | (n, r@Referent.Ref {}) <- R.toList namesFromFile, -- All the refs whose names include `n`, and are not `r` @@ -2911,14 +2922,14 @@ toSlurpResult curPath uf existingNames = pTraceShowId $ Set.notMember (var n) duplicates ] - termAliases :: Map v Slurp.Aliases + termAliases :: Map v OldSlurp.Aliases termAliases = buildAliases (Names.terms existingNames) (Names.terms fileNames) (SC.terms dups) - typeAliases :: Map v Slurp.Aliases + typeAliases :: Map v OldSlurp.Aliases typeAliases = buildAliases (R.mapRan Referent.Ref $ Names.types existingNames) @@ -3070,7 +3081,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions) typeActions = map doType . toList $ SC.types slurp termActions = map doTerm . toList $ - SC.terms slurp <> Slurp.constructorsFor (SC.types slurp) uf + SC.terms slurp <> OldSlurp.constructorsFor (SC.types slurp) uf names = UF.typecheckedToNames uf tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf) (isTestType, isTestValue) = isTest diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 389e82e881..66ef7bfba6 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -2,32 +2,54 @@ module Unison.Codebase.Editor.Slurp where +import qualified Data.Foldable as Foldable +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Unison.ABT as ABT +import qualified Unison.DataDeclaration as DD import Unison.Hash (Hash) -import qualified Unison.HashQualified' as HQ' +import qualified Unison.LabeledDependency as LD import Unison.Name (Name) +import qualified Unison.Name as Name import Unison.Names (Names) +import qualified Unison.Names as Names import Unison.Parser.Ann (Ann) import Unison.Prelude +import qualified Unison.Reference as Ref +import Unison.Referent (Referent) +import qualified Unison.Referent as Referent import qualified Unison.UnisonFile as UF import qualified Unison.UnisonFile.Names as UF +import qualified Unison.UnisonFile.Type as UF import Unison.Var (Var) -data SlurpStatus = New | Updated | Duplicate | Alias +data SlurpStatus = New | Updated | Duplicate data SlurpOp = Add | Update -data SlurpErr +data TypeOrTermVar v = TypeVar v | TermVar v + +untypedVar :: TypeOrTermVar v -> v +untypedVar = \case + TypeVar v -> v + TermVar v -> v + +data SlurpPrintout v = SlurpPrintout + { notOk :: Map v (SlurpErr v), + ok :: Map v SlurpStatus + } + +data SlurpErr v = TermCtorCollision | CtorTermCollision - | RequiresUpdate + | RequiresUpdateOf v data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v} deriving (Eq, Ord, Show) -data DefinitionNotes v = DefinitionNotes - { status :: SlurpStatus, - errs :: Set SlurpErr - } +data DefinitionNotes v + = DefStatus SlurpStatus + | DefErr (Set (SlurpErr v)) data ComponentNotes v = ComponentNotes { deps :: Set ComponentHash, @@ -46,6 +68,9 @@ data Components v = Components typeComponents :: Map Hash (Set v) } +toSlurpPrintout :: SlurpResult v -> SlurpPrintout v +toSlurpPrintout = undefined + collectComponents :: UF.TypecheckedUnisonFile v Ann -> Components v collectComponents _uf = Components {termComponents, typeComponents} where @@ -60,11 +85,112 @@ analyzeTypecheckedUnisonFile :: Var v => UF.TypecheckedUnisonFile v Ann -> Names -> + Maybe (Set v) -> SlurpResult v -analyzeTypecheckedUnisonFile uf _codebaseNames = undefined +analyzeTypecheckedUnisonFile uf unalteredCodebaseNames _defsToConsider = + SlurpResult _varToComponents _componentNotes' where fileNames :: Names fileNames = UF.typecheckedToNames uf + componentMapping :: Map ComponentHash (Set v) + componentMapping = UF.componentMap uf + -- codebaseNames with deprecated constructors removed. + + allDefinitions :: Set v + allDefinitions = fold componentMapping + + componentNotes' :: Map ComponentHash (Map v (DefinitionNotes v)) + componentNotes' = undefined + + definitionStatus :: TypeOrTermVar v -> DefinitionNotes v + definitionStatus tv = + let v = untypedVar tv + existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) + existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) + existingTermsMatchingReference = Names.termsNamed codebaseNames (Name.unsafeFromVar v) + varRef = varReferences Map.! v + in case tv of + TermVar {} -> + case Set.toList existingTermsAtName of + [] -> DefStatus New + [r] | LD.referent r == varRef -> DefStatus Duplicate + [Referent.Con {}] | LD.ConReference {} <- varRef -> DefErr TermCtorCollision + [Referent.Ref {}] | LD.ConReference {} <- varRef -> DefErr CtorTermCollision + -- This allows us to resolve conflicts with an update. + _ -> DefStatus Updated + -- [r] -> DefStatus Updated + -- _ -> DefStatus Conflicted + TypeVar {} -> _ + varReferences :: Map v LD.LabeledDependency + varReferences = UF.referencesMap uf + + -- Get the set of all DIRECT definitions in the file which a definition depends on. + varDeps :: v -> Set v + varDeps v = do + let varComponentHash = varToComponentHash Map.! v + componentPeers = componentMapping Map.! varComponentHash + directDeps = case UF.hashTermsId uf Map.!? v of + Nothing -> mempty + Just (_, _, term, _) -> ABT.freeVars term + in Set.delete v (componentPeers <> directDeps) + + transitiveVarDeps :: Set v -> v -> Set v + transitiveVarDeps resolved v = + let directDeps = varDeps v + in Foldable.foldl' transitiveVarDeps (Set.insert v resolved) directDeps + where + go resolved nextV = + if Set.member nextV resolved + then resolved + else resolved <> transitiveVarDeps resolved nextV + + varToComponentHash :: Map v ComponentHash + varToComponentHash = Map.fromList $ do + -- List monad + (hash, vars) <- Map.toList componentMapping + v <- Set.toList vars + pure (v, hash) + + codebaseNames :: Names + codebaseNames = + -- TODO: make faster + -- TODO: how does defsToConsider affect deprecations? + Names.filter (`Set.notMember` deprecatedConstructors) unalteredCodebaseNames + constructorNamesInFile :: Set Name + constructorNamesInFile = + Map.elems (UF.dataDeclarationsId' uf) + <> (fmap . fmap) DD.toDataDecl (Map.elems (UF.effectDeclarationsId' uf)) + & fmap snd + & concatMap + ( \decl -> + DD.constructors' decl <&> \(_ann, v, _typ) -> + Name.unsafeFromVar v + ) + & Set.fromList + + deprecatedConstructors :: Set Name + deprecatedConstructors = + let allRefIds = + fmap fst (Map.elems (UF.dataDeclarationsId' uf)) + <> fmap fst (Map.elems (UF.effectDeclarationsId' uf)) + existingConstructorsFromEditedTypes = Set.fromList $ do + -- List Monad + refId <- allRefIds + (name, _ref) <- Names.constructorsForType (Ref.DerivedId refId) unalteredCodebaseNames + pure name + in -- Compute any constructors which were deleted + existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile + +-- [ (n, r) +-- | (oldTypeRef, _) <- Map.elems typeEdits, +-- (n, r) <- Names.constructorsForType oldTypeRef codebaseNames +-- ] -slurpOp :: SlurpOp -> Maybe (Set v) -> SlurpResult v -> Either (Set SlurpErr) (SlurpComponent v) +slurpOp :: + SlurpOp -> + SlurpResult v -> + Either + (Set (SlurpErr v)) + -- adds, updates + (SlurpComponent v, SlurpComponent v) slurpOp = undefined From dfd8cdf1e961d0fe84b89f132bfd429f97ef4284 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Thu, 13 Jan 2022 11:36:28 -0500 Subject: [PATCH 161/297] fix quadratic hashing in hashComponent --- .../src/Unison/Hashing/V2/ABT.hs | 105 +++++++++++------- unison-core/src/Unison/ABT.hs | 4 - 2 files changed, 63 insertions(+), 46 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs index f048cb4a2d..72ff646a34 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/ABT.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/ABT.hs @@ -1,3 +1,4 @@ +{- ORMOLU_DISABLE -} -- Based on: http://semantic-domain.blogspot.com/2015/03/abstract-binding-trees.html {-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE DeriveTraversable #-} @@ -13,27 +14,39 @@ import Unison.ABT import Data.List hiding (cycle, find) import qualified Data.List as List (sort) -import Data.Vector ((!)) import Prelude hiding (abs, cycle) import Unison.Hashing.V2.Tokenizable (Accumulate, Hashable1, hash1) import qualified Unison.Hashing.V2.Tokenizable as Hashable import qualified Data.Map as Map import qualified Data.Set as Set -import qualified Data.Vector as Vector -- Hash a strongly connected component and sort its definitions into a canonical order. hashComponent :: + forall a f h v. (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Ord v, Ord h, Accumulate h) => Map.Map v (Term f v a) -> (h, [(v, Term f v a)]) hashComponent byName = let ts = Map.toList byName - embeds = [ (v, void (transform Embed t)) | (v,t) <- ts ] - vs = fst <$> ts - tms = [ (v, absCycle vs (tm $ Component (snd <$> embeds) (var v))) | v <- vs ] - hashed = [ ((v,t), hash t) | (v,t) <- tms ] - sortedHashed = sortOn snd hashed - overallHash = Hashable.accumulate (Hashable.Hashed . snd <$> sortedHashed) - in (overallHash, [ (v, t) | ((v, _),_) <- sortedHashed, Just t <- [Map.lookup v byName] ]) + -- First, compute a canonical hash ordering of the component, as well as an environment in which we can hash + -- individual names. + (hashes, env) = doHashCycle [] ts + -- Construct a list of tokens that is shared by all members of the component. They are disambiguated only by their + -- name that gets tumbled into the hash. + commonTokens :: [Hashable.Token h] + commonTokens = Hashable.Tag 1 : map Hashable.Hashed hashes + -- Use a helper function that hashes a single term given its name, now that we have an environment in which we can + -- look the name up, as well as the common tokens. + hashName :: v -> h + hashName v = Hashable.accumulate (commonTokens ++ [Hashable.Hashed (hash' env (var v :: Term f v ()))]) + (hashes', permutedTerms) = + ts + -- Pair each term with its hash + & map (\t -> (hashName (fst t), t)) + -- Sort again to get the final canonical ordering + & sortOn fst + & unzip + overallHash = Hashable.accumulate (map Hashable.Hashed hashes') + in (overallHash, permutedTerms) -- Group the definitions into strongly connected components and hash -- each component. Substitute the hash of each component into subsequent @@ -62,42 +75,50 @@ hashComponents termFromHash termsByName = let ++ show (map show (Set.toList escapedVars)) ++ "\n " ++ show (map show (Map.keys termsByName)) --- Implementation detail of hashComponent -data Component f a = Component [a] a | Embed (f a) deriving (Functor, Traversable, Foldable) - -instance (Hashable1 f, Functor f) => Hashable1 (Component f) where - hash1 hashCycle hash c = case c of - Component as a -> let - (hs, hash) = hashCycle as - toks = Hashable.Hashed <$> hs - in Hashable.accumulate $ (Hashable.Tag 1 : toks) ++ [Hashable.Hashed (hash a)] - Embed fa -> Hashable.hash1 hashCycle hash fa - -- | We ignore annotations in the `Term`, as these should never affect the -- meaning of the term. hash :: forall f v a h . (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) => Term f v a -> h hash = hash' [] where - hash' :: [Either [v] v] -> Term f v a -> h - hash' env = \case - Var' v -> maybe die hashInt ind - where lookup (Left cycle) = v `elem` cycle - lookup (Right v') = v == v' - ind = findIndex lookup env - hashInt :: Int -> h - hashInt i = Hashable.accumulate [Hashable.Nat $ fromIntegral i] - die = error $ "unknown var in environment: " ++ show v - ++ " environment = " ++ show env - Cycle' vs t -> Hashable.hash1 (hashCycle vs env) undefined t - Abs'' v t -> hash' (Right v : env) t - Tm' t -> Hashable.hash1 (\ts -> (List.sort (map (hash' env) ts), hash' env)) (hash' env) t - hashCycle :: [v] -> [Either [v] v] -> [Term f v a] -> ([h], Term f v a -> h) - hashCycle cycle env ts = - let - permute p xs = case Vector.fromList xs of xs -> map (xs !) p - hashed = map (\(i,t) -> ((i,t), hash' (Left cycle : env) t)) (zip [0..] ts) - pt = fst <$> sortOn snd hashed - (p,ts') = unzip pt - in case map Right (permute p cycle) ++ env of - env -> (map (hash' env) ts', hash' env) \ No newline at end of file +hash' :: forall f v a h . (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) + => [Either [v] v] -> Term f v a -> h +hash' env = \case + Var' v -> maybe die hashInt ind + where lookup (Left cycle) = v `elem` cycle + lookup (Right v') = v == v' + ind = findIndex lookup env + hashInt :: Int -> h + hashInt i = Hashable.accumulate [Hashable.Nat $ fromIntegral i] + die = error $ "unknown var in environment: " ++ show v + ++ " environment = " ++ show env + Cycle' vs t -> hash1 (hashCycle vs env) undefined t + Abs'' v t -> hash' (Right v : env) t + Tm' t -> hash1 (\ts -> (List.sort (map (hash' env) ts), hash' env)) (hash' env) t + where + hashCycle :: [v] -> [Either [v] v] -> [Term f v a] -> ([h], Term f v a -> h) + hashCycle cycle env ts = + let (ts', env') = doHashCycle env (zip cycle ts) + in (ts', hash' env') + +-- | @doHashCycle env terms@ hashes cycle @terms@ in environment @env@, and returns the canonical ordering of the hashes +-- of those terms, as well as an updated environment with each of the terms' bindings in the canonical ordering. +doHashCycle :: + forall a f h v. + (Accumulate h, Eq v, Functor f, Hashable1 f, Ord h, Show v) => + [Either [v] v] -> + [(v, Term f v a)] -> + ([h], [Either [v] v]) +doHashCycle env namedTerms = + (map (hash' newEnv) permutedTerms, newEnv) + where + names = map fst namedTerms + -- The environment in which we compute the canonical permutation of terms + permutationEnv = Left names : env + (permutedNames, permutedTerms) = + namedTerms + & sortOn @h (hash' permutationEnv . snd) + & unzip + -- The new environment, which includes the names of all of the terms in the cycle, now that we have computed their + -- canonical ordering + newEnv = map Right permutedNames ++ env diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index 6b02416e89..c01617f481 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -70,7 +70,6 @@ module Unison.ABT , abs , absChain , absChain' - , absCycle , abs' , absr , unabs @@ -252,9 +251,6 @@ absr' a v body = wrap' v body $ \v body -> abs' a v body absChain :: Ord v => [v] -> Term f v () -> Term f v () absChain vs t = foldr abs t vs -absCycle :: Ord v => [v] -> Term f v () -> Term f v () -absCycle vs t = cycle $ absChain vs t - absChain' :: Ord v => [(a, v)] -> Term f v a -> Term f v a absChain' vs t = foldr (\(a,v) t -> abs' a v t) t vs From d957c0f575450dd05f805f27a4518a0de42ab99f Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 13 Jan 2022 11:23:14 -0600 Subject: [PATCH 162/297] WIP --- .../src/Unison/UnisonFile/Type.hs | 7 +- .../src/Unison/Codebase/Editor/Slurp.hs | 117 ++++++++++++------ 2 files changed, 85 insertions(+), 39 deletions(-) diff --git a/parser-typechecker/src/Unison/UnisonFile/Type.hs b/parser-typechecker/src/Unison/UnisonFile/Type.hs index efc06b8f2a..13851cbdf7 100644 --- a/parser-typechecker/src/Unison/UnisonFile/Type.hs +++ b/parser-typechecker/src/Unison/UnisonFile/Type.hs @@ -17,7 +17,6 @@ import qualified Unison.Term as Term import Unison.Type (Type) import Unison.WatchKind (WatchKind) import Unison.Hash (Hash) -import qualified Unison.Referent as Referent import qualified Unison.LabeledDependency as LD data UnisonFile v a = UnisonFileId { @@ -48,7 +47,11 @@ data TypecheckedUnisonFile v a = -- Produce a mapping which includes all component hashes and the variables contained -- within them. -- This includes all kinds of definitions: types, terms, abilities, constructors -componentMap :: TypecheckedUnisonFile v ann -> Map Hash (Set v) +componentMap :: + TypecheckedUnisonFile v ann + -- Left is a variable for a type + -- Right is a variable for a term or constructor + -> Map Hash (Set (Either v v)) componentMap _uf = undefined -- TODO: watch components? diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 66ef7bfba6..19c536c04f 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -2,6 +2,8 @@ module Unison.Codebase.Editor.Slurp where +import Control.Lens +import Control.Monad.State import qualified Data.Foldable as Foldable import qualified Data.Map as Map import qualified Data.Set as Set @@ -16,13 +18,14 @@ import qualified Unison.Names as Names import Unison.Parser.Ann (Ann) import Unison.Prelude import qualified Unison.Reference as Ref -import Unison.Referent (Referent) import qualified Unison.Referent as Referent import qualified Unison.UnisonFile as UF import qualified Unison.UnisonFile.Names as UF import qualified Unison.UnisonFile.Type as UF import Unison.Var (Var) +data LabeledVar v = LabeledVar v LD.LabeledDependency + data SlurpStatus = New | Updated | Duplicate data SlurpOp = Add | Update @@ -35,21 +38,20 @@ untypedVar = \case TermVar v -> v data SlurpPrintout v = SlurpPrintout - { notOk :: Map v (SlurpErr v), + { notOk :: Map v SlurpErr, ok :: Map v SlurpStatus } -data SlurpErr v +data SlurpErr = TermCtorCollision | CtorTermCollision - | RequiresUpdateOf v data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v} deriving (Eq, Ord, Show) data DefinitionNotes v = DefStatus SlurpStatus - | DefErr (Set (SlurpErr v)) + | DefErr SlurpErr data ComponentNotes v = ComponentNotes { deps :: Set ComponentHash, @@ -57,8 +59,7 @@ data ComponentNotes v = ComponentNotes } data SlurpResult v = SlurpResult - { componentNotes :: Map ComponentHash (Map v (DefinitionNotes v)), - varToComponent :: Map v ComponentHash + { componentNotes :: Map (LabeledVar v) (DefinitionNotes v, Set (LabeledVar v)) } type ComponentHash = Hash @@ -87,23 +88,49 @@ analyzeTypecheckedUnisonFile :: Names -> Maybe (Set v) -> SlurpResult v -analyzeTypecheckedUnisonFile uf unalteredCodebaseNames _defsToConsider = - SlurpResult _varToComponents _componentNotes' +analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = + let allInvolvedVars :: Set (LabeledVar v) + allInvolvedVars = + Foldable.foldl' transitiveVarDeps mempty defsToConsider + & Set.map (\v -> labeledVars Map.! v) + allDefStatuses :: Map (LabeledVar v) (DefinitionNotes v, Set (LabeledVar v)) + allDefStatuses = + allInvolvedVars + & Set.toList + & fmap + ( \v -> + -- TODO, save time by memoizing transitiveVarDeps? + (v, (definitionStatus v, transitiveVarDeps mempty v)) + ) + & Map.fromList + in SlurpResult allDefStatuses where fileNames :: Names fileNames = UF.typecheckedToNames uf - componentMapping :: Map ComponentHash (Set v) - componentMapping = UF.componentMap uf + + defsToConsider :: Set v + defsToConsider = case maybeDefsToConsider of + Nothing -> Set.map untypedVar allDefinitions + Just vs -> vs + + labeledVars :: Map v (LabeledVar v) + labeledVars = undefined + + componentMapping :: Map ComponentHash (Set (TypeOrTermVar v)) + componentMapping = undefined UF.componentMap uf -- codebaseNames with deprecated constructors removed. - allDefinitions :: Set v + allDefinitions :: Set (TypeOrTermVar v) allDefinitions = fold componentMapping componentNotes' :: Map ComponentHash (Map v (DefinitionNotes v)) componentNotes' = undefined - definitionStatus :: TypeOrTermVar v -> DefinitionNotes v - definitionStatus tv = + varToLabeledDependency :: v -> LD.LabeledDependency + varToLabeledDependency = undefined + + definitionStatus :: LabeledVar v -> DefinitionNotes v + definitionStatus (undefined -> tv) = let v = untypedVar tv existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) @@ -120,19 +147,20 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames _defsToConsider = _ -> DefStatus Updated -- [r] -> DefStatus Updated -- _ -> DefStatus Conflicted - TypeVar {} -> _ + TypeVar {} -> undefined varReferences :: Map v LD.LabeledDependency varReferences = UF.referencesMap uf -- Get the set of all DIRECT definitions in the file which a definition depends on. varDeps :: v -> Set v varDeps v = do - let varComponentHash = varToComponentHash Map.! v - componentPeers = componentMapping Map.! varComponentHash - directDeps = case UF.hashTermsId uf Map.!? v of - Nothing -> mempty - Just (_, _, term, _) -> ABT.freeVars term - in Set.delete v (componentPeers <> directDeps) + undefined + -- let varComponentHash = varToComponentHash Map.! v + -- componentPeers = componentMapping Map.! varComponentHash + -- directDeps = case UF.hashTermsId uf Map.!? v of + -- Nothing -> mempty + -- Just (_, _, term, _) -> ABT.freeVars term + -- in Set.delete v (componentPeers <> directDeps) transitiveVarDeps :: Set v -> v -> Set v transitiveVarDeps resolved v = @@ -144,12 +172,12 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames _defsToConsider = then resolved else resolved <> transitiveVarDeps resolved nextV - varToComponentHash :: Map v ComponentHash - varToComponentHash = Map.fromList $ do - -- List monad - (hash, vars) <- Map.toList componentMapping - v <- Set.toList vars - pure (v, hash) + -- varToComponentHash :: Map v ComponentHash + -- varToComponentHash = Map.fromList $ do + -- -- List monad + -- (hash, vars) <- Map.toList componentMapping + -- v <- Set.toList vars + -- pure (v, hash) codebaseNames :: Names codebaseNames = @@ -181,16 +209,31 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames _defsToConsider = in -- Compute any constructors which were deleted existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile --- [ (n, r) --- | (oldTypeRef, _) <- Map.elems typeEdits, --- (n, r) <- Names.constructorsForType oldTypeRef codebaseNames --- ] +slurpErrs :: SlurpResult v -> Map (LabeledVar v) SlurpErr +slurpErrs (SlurpResult defs) = + defs + & Map.mapMaybe + ( \case + (DefErr err, _) -> Just err + _ -> Nothing + ) slurpOp :: - SlurpOp -> + forall v. SlurpResult v -> - Either - (Set (SlurpErr v)) - -- adds, updates - (SlurpComponent v, SlurpComponent v) -slurpOp = undefined + (SlurpComponent v, SlurpComponent v, Map (LabeledVar v) SlurpErr) +slurpOp (SlurpResult sr) = do + let (adds, updates, errs) = + flip execState mempty $ + for (Map.toList sr) $ \(v, (dn, _)) -> do + case dn of + DefStatus New -> _1 %= Set.insert v + DefStatus Updated -> _2 %= Set.insert v + DefStatus Duplicate -> pure () + DefErr err -> _3 . at v ?= err + adds' = partitionTypesAndTerms adds + updates' = partitionTypesAndTerms updates + in (adds', updates', errs) + +partitionTypesAndTerms :: Set (LabeledVar v) -> SlurpComponent v +partitionTypesAndTerms = undefined From 4e3359682a7d320273c441b967df69cd43575683 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 13 Jan 2022 15:03:57 -0600 Subject: [PATCH 163/297] WIP --- .../src/Unison/Codebase/Editor/Slurp.hs | 261 ++++++++++++++---- 1 file changed, 207 insertions(+), 54 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 19c536c04f..47446b2109 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -4,7 +4,6 @@ module Unison.Codebase.Editor.Slurp where import Control.Lens import Control.Monad.State -import qualified Data.Foldable as Foldable import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.ABT as ABT @@ -19,12 +18,31 @@ import Unison.Parser.Ann (Ann) import Unison.Prelude import qualified Unison.Reference as Ref import qualified Unison.Referent as Referent +import qualified Unison.Referent' as Referent +import Unison.Term (Term) +import qualified Unison.Term as Term import qualified Unison.UnisonFile as UF import qualified Unison.UnisonFile.Names as UF import qualified Unison.UnisonFile.Type as UF +import qualified Unison.Util.Relation as Rel +import qualified Unison.Util.Relation3 as Rel3 +import qualified Unison.Util.Set as Set import Unison.Var (Var) +-- Determine which components we're considering, i.e. find the components of all provided +-- vars, then include any components they depend on. +-- +-- Then, compute any deprecations and build the env +-- Then, consider all vars in each component and get status (collision, add, or update) +-- Collect and collapse the statuses of each component. +-- I.e., if any definition has an error, the whole component is an error +-- if any piece needs an update +-- +-- +-- Does depending on a type also mean depending on all its constructors + data LabeledVar v = LabeledVar v LD.LabeledDependency + deriving (Eq, Ord) data SlurpStatus = New | Updated | Duplicate @@ -32,8 +50,26 @@ data SlurpOp = Add | Update data TypeOrTermVar v = TypeVar v | TermVar v -untypedVar :: TypeOrTermVar v -> v -untypedVar = \case +-- componentHashForLabeledVar :: LabeledVar v -> ComponentHash +-- componentHashForLabeledVar (LabeledVar _ ld) = +-- labeledDepToComponentHash ld + +labeledDepToComponentHash :: LD.LabeledDependency -> ComponentHash +labeledDepToComponentHash ld = + LD.fold unsafeComponentHashForReference (unsafeComponentHashForReference . Referent.toReference') ld + where + unsafeComponentHashForReference = + fromMaybe (error "Builtin encountered when var was expected") + . componentHashForReference + +componentHashForReference :: Ref.Reference -> Maybe Hash +componentHashForReference = + \case + Ref.Builtin {} -> Nothing + Ref.DerivedId (Ref.Id componentHash _ _) -> Just componentHash + +unlabeled :: TypeOrTermVar v -> v +unlabeled = \case TypeVar v -> v TermVar v -> v @@ -49,17 +85,13 @@ data SlurpErr data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v} deriving (Eq, Ord, Show) -data DefinitionNotes v +data DefinitionNotes = DefStatus SlurpStatus | DefErr SlurpErr -data ComponentNotes v = ComponentNotes - { deps :: Set ComponentHash, - definitions :: Map v (DefinitionNotes v) - } - data SlurpResult v = SlurpResult - { componentNotes :: Map (LabeledVar v) (DefinitionNotes v, Set (LabeledVar v)) + { termNotes :: Map v (DefinitionNotes, Set (LabeledVar v)), + typeNotes :: Map v (DefinitionNotes, Set (LabeledVar v)) } type ComponentHash = Hash @@ -90,48 +122,71 @@ analyzeTypecheckedUnisonFile :: SlurpResult v analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = let allInvolvedVars :: Set (LabeledVar v) - allInvolvedVars = - Foldable.foldl' transitiveVarDeps mempty defsToConsider - & Set.map (\v -> labeledVars Map.! v) - allDefStatuses :: Map (LabeledVar v) (DefinitionNotes v, Set (LabeledVar v)) - allDefStatuses = + allInvolvedVars = foldMap transitiveVarDeps defsToConsider + + termStatuses, typeStatuses :: Map v (DefinitionNotes, Set (LabeledVar v)) + (termStatuses, typeStatuses) = allInvolvedVars & Set.toList & fmap - ( \v -> - -- TODO, save time by memoizing transitiveVarDeps? - (v, (definitionStatus v, transitiveVarDeps mempty v)) + ( \lv -> + (lv, (definitionStatus lv, transitiveLabeledVarDeps lv)) ) & Map.fromList - in SlurpResult allDefStatuses + & Map.mapEitherWithKey + ( \lv x -> case lv of + LabeledVar v ld -> _ + ) + & over both (Map.mapKeys (\(LabeledVar v _) -> v)) + in -- & Map.mapEitherWithKey _ + SlurpResult termStatuses typeStatuses where fileNames :: Names fileNames = UF.typecheckedToNames uf + transitiveCHDeps :: Map ComponentHash (Set ComponentHash) + transitiveCHDeps = + componentTransitiveDeps uf + + -- Find all other file-local vars that a var depends on. + -- This version is for when you don't know whether a var is a type or term + -- E.g., if the user types 'add x', we don't know whether x is a term, type, or + -- constructor, so we add all of them. + transitiveVarDeps :: v -> Set (LabeledVar v) + transitiveVarDeps v = + Rel3.lookupD1 v varRelation + & Rel.ran + -- Find all transitive components we rely on + & ( \chs -> + chs <> foldMap (\ch -> fold $ Map.lookup ch transitiveCHDeps) chs + ) + -- Find all variables within all considered components + & foldMap (\ch -> Rel.ran $ Rel3.lookupD3 ch varRelation) + + transitiveLabeledVarDeps :: LabeledVar v -> Set (LabeledVar v) + transitiveLabeledVarDeps lv = + Rel3.lookupD2 lv varRelation + & Rel.ran + -- Find all transitive components we rely on + & ( \chs -> + chs <> foldMap (\ch -> fold $ Map.lookup ch transitiveCHDeps) chs + ) + -- Find all variables within all considered components + & foldMap (\ch -> Rel.ran $ Rel3.lookupD3 ch varRelation) + defsToConsider :: Set v defsToConsider = case maybeDefsToConsider of - Nothing -> Set.map untypedVar allDefinitions + Nothing -> + varRelation + & Rel3.d1s Just vs -> vs - labeledVars :: Map v (LabeledVar v) - labeledVars = undefined - - componentMapping :: Map ComponentHash (Set (TypeOrTermVar v)) - componentMapping = undefined UF.componentMap uf - -- codebaseNames with deprecated constructors removed. + -- labeledVars :: Map v (LabeledVar v) + -- labeledVars = undefined - allDefinitions :: Set (TypeOrTermVar v) - allDefinitions = fold componentMapping - - componentNotes' :: Map ComponentHash (Map v (DefinitionNotes v)) - componentNotes' = undefined - - varToLabeledDependency :: v -> LD.LabeledDependency - varToLabeledDependency = undefined - - definitionStatus :: LabeledVar v -> DefinitionNotes v + definitionStatus :: LabeledVar v -> DefinitionNotes definitionStatus (undefined -> tv) = - let v = untypedVar tv + let v = unlabeled tv existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) existingTermsMatchingReference = Names.termsNamed codebaseNames (Name.unsafeFromVar v) @@ -151,10 +206,14 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = varReferences :: Map v LD.LabeledDependency varReferences = UF.referencesMap uf + varRelation :: Rel3.Relation3 v (LabeledVar v) ComponentHash + varRelation = labelling uf + -- Get the set of all DIRECT definitions in the file which a definition depends on. - varDeps :: v -> Set v - varDeps v = do - undefined + -- directDeps lv = do + -- let vComponentHash = componentHashForLabeledVar lv + -- componentPeers = Rel.ran $ Rel3.lookupD3 vComponentHash varRelation + -- undefined -- let varComponentHash = varToComponentHash Map.! v -- componentPeers = componentMapping Map.! varComponentHash -- directDeps = case UF.hashTermsId uf Map.!? v of @@ -162,15 +221,25 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = -- Just (_, _, term, _) -> ABT.freeVars term -- in Set.delete v (componentPeers <> directDeps) - transitiveVarDeps :: Set v -> v -> Set v - transitiveVarDeps resolved v = - let directDeps = varDeps v - in Foldable.foldl' transitiveVarDeps (Set.insert v resolved) directDeps - where - go resolved nextV = - if Set.member nextV resolved - then resolved - else resolved <> transitiveVarDeps resolved nextV + -- transitiveVarDeps :: Set v -> v -> Set v + -- transitiveVarDeps resolved v = + -- let directDeps = varDeps v + -- in Foldable.foldl' transitiveVarDeps (Set.insert v resolved) directDeps + -- where + -- go resolved nextV = + -- if Set.member nextV resolved + -- then resolved + -- else resolved <> transitiveVarDeps resolved nextV + + -- transitiveVarDeps :: Set (LabeledVar v) -> LabeledVar v -> Set (LabeledVar v) + -- transitiveVarDeps resolved v = + -- let directDeps = varDeps v + -- in Foldable.foldl' transitiveVarDeps (Set.insert v resolved) directDeps + -- where + -- go resolved nextV = + -- if Set.member nextV resolved + -- then resolved + -- else resolved <> transitiveVarDeps resolved nextV -- varToComponentHash :: Map v ComponentHash -- varToComponentHash = Map.fromList $ do @@ -209,8 +278,8 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = in -- Compute any constructors which were deleted existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile -slurpErrs :: SlurpResult v -> Map (LabeledVar v) SlurpErr -slurpErrs (SlurpResult defs) = +slurpErrs :: SlurpResult v -> Map v SlurpErr +slurpErrs (SlurpResult defs _) = defs & Map.mapMaybe ( \case @@ -221,8 +290,8 @@ slurpErrs (SlurpResult defs) = slurpOp :: forall v. SlurpResult v -> - (SlurpComponent v, SlurpComponent v, Map (LabeledVar v) SlurpErr) -slurpOp (SlurpResult sr) = do + (SlurpComponent v, SlurpComponent v, Map v SlurpErr) +slurpOp (SlurpResult sr _) = do let (adds, updates, errs) = flip execState mempty $ for (Map.toList sr) $ \(v, (dn, _)) -> do @@ -235,5 +304,89 @@ slurpOp (SlurpResult sr) = do updates' = partitionTypesAndTerms updates in (adds', updates', errs) -partitionTypesAndTerms :: Set (LabeledVar v) -> SlurpComponent v +partitionTypesAndTerms :: Set v -> SlurpComponent v partitionTypesAndTerms = undefined + +componentTransitiveDeps :: UF.TypecheckedUnisonFile v a -> Map ComponentHash (Set ComponentHash) +componentTransitiveDeps uf = + let deps = Map.unionsWith (<>) [termDeps, dataDeps, effectDeps] + filteredDeps :: Map ComponentHash (Set ComponentHash) + filteredDeps = + deps + & Map.mapWithKey + ( \k d -> + d + -- Don't track the component as one of its own deps + & Set.delete k + -- Filter out any references to components which aren't defined in this file. + & Set.filter (\ch -> Map.member ch deps) + ) + -- Find the fixed point of our dependencies, which will always terminate because + -- component dependencies are acyclic. + transitiveDeps = + filteredDeps + <&> ( \directDeps -> + directDeps + <> foldMap (\ch -> fold $ Map.lookup ch transitiveDeps) directDeps + ) + in transitiveDeps + where + termDeps :: Map ComponentHash (Set ComponentHash) + termDeps = + UF.hashTermsId uf + & Map.elems + & fmap (\(refId, _watchKind, trm, _typ) -> (idToComponentHash refId, termComponentRefs trm)) + & Map.fromListWith (<>) + dataDeps :: Map ComponentHash (Set ComponentHash) + dataDeps = + UF.dataDeclarationsId' uf + & Map.elems + & fmap (\(refId, decl) -> (idToComponentHash refId, dataDeclRefs decl)) + & Map.fromListWith (<>) + effectDeps :: Map ComponentHash (Set ComponentHash) + effectDeps = + UF.effectDeclarationsId' uf + & Map.elems + & fmap (\(refId, effect) -> (idToComponentHash refId, dataDeclRefs (DD.toDataDecl effect))) + & Map.fromListWith (<>) + +termComponentRefs :: Ord v => Term v a -> Set ComponentHash +termComponentRefs trm = + Term.dependencies trm + -- Ignore builtins + & Set.mapMaybe componentHashForReference + +dataDeclRefs :: Ord v => DD.DataDeclaration v a -> Set ComponentHash +dataDeclRefs decl = + DD.dependencies decl + -- Ignore builtins + & Set.mapMaybe componentHashForReference + +-- Does not include constructors +labelling :: forall v a. UF.TypecheckedUnisonFile v a -> Rel3.Relation3 v (LabeledVar v) ComponentHash +labelling uf = _ $ decls <> effects <> terms + where + terms :: Rel3.Relation3 v LD.LabeledDependency ComponentHash + terms = + UF.hashTermsId uf + & Map.toList + & fmap (\(v, (refId, _, _, _)) -> (v, LD.derivedTerm refId, idToComponentHash refId)) + & Rel3.fromList + decls :: Rel3.Relation3 v LD.LabeledDependency ComponentHash + decls = + UF.dataDeclarationsId' uf + & Map.toList + & fmap (\(v, (refId, _)) -> (v, LD.derivedType refId, idToComponentHash refId)) + & Rel3.fromList + + effects :: Rel3.Relation3 v LD.LabeledDependency ComponentHash + effects = + UF.effectDeclarationsId' uf + & Map.toList + & fmap (\(v, (refId, _)) -> (v, LD.derivedType refId, idToComponentHash refId)) + & Rel3.fromList + +idToComponentHash :: Ref.Id -> ComponentHash +idToComponentHash (Ref.Id componentHash _ _) = componentHash + +-- dependencyMap :: UF.TypecheckedUnisonFile -> Map From 191ad5be40855201f6d385889c8b18c249678536 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 13 Jan 2022 15:38:24 -0600 Subject: [PATCH 164/297] WIP --- .../src/Unison/Codebase/Editor/Slurp.hs | 144 ++++++------------ 1 file changed, 45 insertions(+), 99 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 47446b2109..91aca838d5 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -6,7 +6,6 @@ import Control.Lens import Control.Monad.State import qualified Data.Map as Map import qualified Data.Set as Set -import qualified Unison.ABT as ABT import qualified Unison.DataDeclaration as DD import Unison.Hash (Hash) import qualified Unison.LabeledDependency as LD @@ -23,7 +22,6 @@ import Unison.Term (Term) import qualified Unison.Term as Term import qualified Unison.UnisonFile as UF import qualified Unison.UnisonFile.Names as UF -import qualified Unison.UnisonFile.Type as UF import qualified Unison.Util.Relation as Rel import qualified Unison.Util.Relation3 as Rel3 import qualified Unison.Util.Set as Set @@ -45,14 +43,13 @@ data LabeledVar v = LabeledVar v LD.LabeledDependency deriving (Eq, Ord) data SlurpStatus = New | Updated | Duplicate + deriving (Eq, Ord, Show) data SlurpOp = Add | Update + deriving (Eq, Ord, Show) data TypeOrTermVar v = TypeVar v | TermVar v - --- componentHashForLabeledVar :: LabeledVar v -> ComponentHash --- componentHashForLabeledVar (LabeledVar _ ld) = --- labeledDepToComponentHash ld + deriving (Eq, Ord, Show) labeledDepToComponentHash :: LD.LabeledDependency -> ComponentHash labeledDepToComponentHash ld = @@ -77,12 +74,14 @@ data SlurpPrintout v = SlurpPrintout { notOk :: Map v SlurpErr, ok :: Map v SlurpStatus } + deriving (Eq, Ord, Show) data SlurpErr = TermCtorCollision | CtorTermCollision + deriving (Eq, Ord, Show) -data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v} +data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v, errs :: Map v SlurpErr} deriving (Eq, Ord, Show) data DefinitionNotes @@ -101,18 +100,6 @@ data Components v = Components typeComponents :: Map Hash (Set v) } -toSlurpPrintout :: SlurpResult v -> SlurpPrintout v -toSlurpPrintout = undefined - -collectComponents :: UF.TypecheckedUnisonFile v Ann -> Components v -collectComponents _uf = Components {termComponents, typeComponents} - where - termComponents = undefined - typeComponents = undefined - -computeComponentDependencies :: Components v -> Map Hash (Set Hash) -computeComponentDependencies = undefined - analyzeTypecheckedUnisonFile :: forall v. Var v => @@ -135,7 +122,8 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = & Map.fromList & Map.mapEitherWithKey ( \lv x -> case lv of - LabeledVar v ld -> _ + LabeledVar _ (LD.TypeReference {}) -> Left x + LabeledVar _ (LD.TermReferent {}) -> Right x ) & over both (Map.mapKeys (\(LabeledVar v _) -> v)) in -- & Map.mapEitherWithKey _ @@ -181,73 +169,29 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = & Rel3.d1s Just vs -> vs - -- labeledVars :: Map v (LabeledVar v) - -- labeledVars = undefined - definitionStatus :: LabeledVar v -> DefinitionNotes - definitionStatus (undefined -> tv) = - let v = unlabeled tv - existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) + definitionStatus (LabeledVar v ld) = + let existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) existingTermsMatchingReference = Names.termsNamed codebaseNames (Name.unsafeFromVar v) - varRef = varReferences Map.! v - in case tv of - TermVar {} -> + in case ld of + LD.TypeReference {} -> case Set.toList existingTermsAtName of [] -> DefStatus New - [r] | LD.referent r == varRef -> DefStatus Duplicate - [Referent.Con {}] | LD.ConReference {} <- varRef -> DefErr TermCtorCollision - [Referent.Ref {}] | LD.ConReference {} <- varRef -> DefErr CtorTermCollision + [r] | LD.referent r == ld -> DefStatus Duplicate + [Referent.Con {}] | LD.ConReference {} <- ld -> DefErr TermCtorCollision + [Referent.Ref {}] | LD.ConReference {} <- ld -> DefErr CtorTermCollision -- This allows us to resolve conflicts with an update. _ -> DefStatus Updated -- [r] -> DefStatus Updated -- _ -> DefStatus Conflicted - TypeVar {} -> undefined - varReferences :: Map v LD.LabeledDependency - varReferences = UF.referencesMap uf + LD.TermReference {} -> undefined + LD.ConReference {} -> undefined varRelation :: Rel3.Relation3 v (LabeledVar v) ComponentHash varRelation = labelling uf -- Get the set of all DIRECT definitions in the file which a definition depends on. - -- directDeps lv = do - -- let vComponentHash = componentHashForLabeledVar lv - -- componentPeers = Rel.ran $ Rel3.lookupD3 vComponentHash varRelation - -- undefined - -- let varComponentHash = varToComponentHash Map.! v - -- componentPeers = componentMapping Map.! varComponentHash - -- directDeps = case UF.hashTermsId uf Map.!? v of - -- Nothing -> mempty - -- Just (_, _, term, _) -> ABT.freeVars term - -- in Set.delete v (componentPeers <> directDeps) - - -- transitiveVarDeps :: Set v -> v -> Set v - -- transitiveVarDeps resolved v = - -- let directDeps = varDeps v - -- in Foldable.foldl' transitiveVarDeps (Set.insert v resolved) directDeps - -- where - -- go resolved nextV = - -- if Set.member nextV resolved - -- then resolved - -- else resolved <> transitiveVarDeps resolved nextV - - -- transitiveVarDeps :: Set (LabeledVar v) -> LabeledVar v -> Set (LabeledVar v) - -- transitiveVarDeps resolved v = - -- let directDeps = varDeps v - -- in Foldable.foldl' transitiveVarDeps (Set.insert v resolved) directDeps - -- where - -- go resolved nextV = - -- if Set.member nextV resolved - -- then resolved - -- else resolved <> transitiveVarDeps resolved nextV - - -- varToComponentHash :: Map v ComponentHash - -- varToComponentHash = Map.fromList $ do - -- -- List monad - -- (hash, vars) <- Map.toList componentMapping - -- v <- Set.toList vars - -- pure (v, hash) - codebaseNames :: Names codebaseNames = -- TODO: make faster @@ -289,25 +233,27 @@ slurpErrs (SlurpResult defs _) = slurpOp :: forall v. + Ord v => SlurpResult v -> - (SlurpComponent v, SlurpComponent v, Map v SlurpErr) -slurpOp (SlurpResult sr _) = do - let (adds, updates, errs) = - flip execState mempty $ - for (Map.toList sr) $ \(v, (dn, _)) -> do - case dn of - DefStatus New -> _1 %= Set.insert v - DefStatus Updated -> _2 %= Set.insert v - DefStatus Duplicate -> pure () - DefErr err -> _3 . at v ?= err - adds' = partitionTypesAndTerms adds - updates' = partitionTypesAndTerms updates - in (adds', updates', errs) - -partitionTypesAndTerms :: Set v -> SlurpComponent v -partitionTypesAndTerms = undefined - -componentTransitiveDeps :: UF.TypecheckedUnisonFile v a -> Map ComponentHash (Set ComponentHash) + (SlurpComponent v, SlurpComponent v) +slurpOp (SlurpResult terms types) = + let (termAdds, termUpdates, termErrs) = partition terms + (typeAdds, typeUpdates, typeErrs) = partition types + in (SlurpComponent termAdds termUpdates termErrs, SlurpComponent typeAdds typeUpdates typeErrs) + where + partition :: (Map v (DefinitionNotes, Set (LabeledVar v))) -> (Set v, Set v, Map v SlurpErr) + partition sr = + let (adds, updates, errs) = + flip execState mempty $ + for (Map.toList sr) $ \(v, (dn, _)) -> do + case dn of + DefStatus New -> _1 %= Set.insert v + DefStatus Updated -> _2 %= Set.insert v + DefStatus Duplicate -> pure () + DefErr err -> _3 . at v ?= err + in (adds, updates, errs) + +componentTransitiveDeps :: Ord v => UF.TypecheckedUnisonFile v a -> Map ComponentHash (Set ComponentHash) componentTransitiveDeps uf = let deps = Map.unionsWith (<>) [termDeps, dataDeps, effectDeps] filteredDeps :: Map ComponentHash (Set ComponentHash) @@ -363,27 +309,27 @@ dataDeclRefs decl = & Set.mapMaybe componentHashForReference -- Does not include constructors -labelling :: forall v a. UF.TypecheckedUnisonFile v a -> Rel3.Relation3 v (LabeledVar v) ComponentHash -labelling uf = _ $ decls <> effects <> terms +labelling :: forall v a. Ord v => UF.TypecheckedUnisonFile v a -> Rel3.Relation3 v (LabeledVar v) ComponentHash +labelling uf = decls <> effects <> terms where - terms :: Rel3.Relation3 v LD.LabeledDependency ComponentHash + terms :: Rel3.Relation3 v (LabeledVar v) ComponentHash terms = UF.hashTermsId uf & Map.toList - & fmap (\(v, (refId, _, _, _)) -> (v, LD.derivedTerm refId, idToComponentHash refId)) + & fmap (\(v, (refId, _, _, _)) -> (v, LabeledVar v (LD.derivedTerm refId), idToComponentHash refId)) & Rel3.fromList - decls :: Rel3.Relation3 v LD.LabeledDependency ComponentHash + decls :: Rel3.Relation3 v (LabeledVar v) ComponentHash decls = UF.dataDeclarationsId' uf & Map.toList - & fmap (\(v, (refId, _)) -> (v, LD.derivedType refId, idToComponentHash refId)) + & fmap (\(v, (refId, _)) -> (v, LabeledVar v (LD.derivedType refId), idToComponentHash refId)) & Rel3.fromList - effects :: Rel3.Relation3 v LD.LabeledDependency ComponentHash + effects :: Rel3.Relation3 v (LabeledVar v) ComponentHash effects = UF.effectDeclarationsId' uf & Map.toList - & fmap (\(v, (refId, _)) -> (v, LD.derivedType refId, idToComponentHash refId)) + & fmap (\(v, (refId, _)) -> (v, LabeledVar v (LD.derivedType refId), idToComponentHash refId)) & Rel3.fromList idToComponentHash :: Ref.Id -> ComponentHash From 0a960c454dadcc221d95d5a6d0600d6e11191d31 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 14 Jan 2022 11:52:45 -0600 Subject: [PATCH 165/297] WIP --- .../src/Unison/Codebase/Editor/HandleInput.hs | 140 +++++------ .../src/Unison/Codebase/Editor/Slurp.hs | 232 ++++++++++++++---- 2 files changed, 247 insertions(+), 125 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 09fa026841..56d1b3b26b 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -55,9 +55,8 @@ import qualified Unison.Codebase.Editor.Propagate as Propagate import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRemotePath, WriteRepo, printNamespace, writePathToRead, writeToRead) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC -import qualified Unison.Codebase.Editor.Slurp as Slurp import Unison.Codebase.Editor.SlurpResult (SlurpResult (..)) --- import qualified Unison.Codebase.Editor.SlurpResult as Slurp +import qualified Unison.Codebase.Editor.SlurpResult as Slurp import qualified Unison.Codebase.Editor.TodoOutput as TO import qualified Unison.Codebase.Editor.UriParser as UriParser import qualified Unison.Codebase.MainTerm as MainTerm @@ -152,8 +151,6 @@ import qualified Data.Set.NonEmpty as NESet import Data.Set.NonEmpty (NESet) import Unison.Symbol (Symbol) import qualified Unison.Codebase.Editor.Input as Input -import Debug.Pretty.Simple -import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp defaultPatchNameSegment :: NameSegment defaultPatchNameSegment = "patch" @@ -262,12 +259,11 @@ loop = do loadUnisonFile sourceName text = do let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) withFile [] sourceName (text, lexed) $ \unisonFile -> do - sr <- Slurp.analyzeTypecheckedUnisonFile unisonFile <$> currentPathNames - -- sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames + sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames names <- displayNames unisonFile pped <- prettyPrintEnvDecl names let ppe = PPE.suffixifiedPPE pped - eval . Notify $ Typechecked sourceName ppe (undefined $ Slurp.toSlurpPrintout sr) unisonFile + eval . Notify $ Typechecked sourceName ppe sr unisonFile unlessError' EvaluationFailure do (bindings, e) <- ExceptT . eval . Evaluate ppe $ unisonFile lift do @@ -1259,11 +1255,11 @@ loop = do Nothing -> respond NoUnisonFile Just uf -> do sr <- - OldSlurp.disallowUpdates + Slurp.disallowUpdates . applySelection hqs uf . toSlurpResult currentPath' uf <$> slurpResultNames - let adds = OldSlurp.adds sr + let adds = Slurp.adds sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . filterBySlurpResult sr $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf @@ -1273,7 +1269,7 @@ loop = do PreviewAddI hqs -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do sr <- - OldSlurp.disallowUpdates + Slurp.disallowUpdates . applySelection hqs uf . toSlurpResult currentPath' uf <$> slurpResultNames @@ -1823,49 +1819,50 @@ handleUpdate input maybePatchPath hqs = do let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames - let sr = Slurp.analyzeTypecheckedUnisonFile uf currentPathNames - -- let sr :: SlurpResult v - -- sr = - -- applySelection hqs uf - -- . toSlurpResult currentPath' uf - -- $ slurpCheckNames + let sr :: SlurpResult v + sr = + applySelection hqs uf + . toSlurpResult currentPath' uf + $ slurpCheckNames + addsAndUpdates :: SlurpComponent v + addsAndUpdates = Slurp.updates sr <> Slurp.adds sr fileNames :: Names fileNames = UF.typecheckedToNames uf -- todo: display some error if typeEdits or termEdits itself contains a loop - -- typeEdits :: Map Name (Reference, Reference) - -- typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) - -- where - -- f v = case ( toList (Names.typesNamed slurpCheckNames n), - -- toList (Names.typesNamed fileNames n) - -- ) of - -- ([old], [new]) -> (n, (old, new)) - -- _ -> - -- error $ - -- "Expected unique matches for " - -- ++ Var.nameStr v - -- ++ " but got: " - -- ++ show otherwise - -- where - -- n = Name.unsafeFromVar v + typeEdits :: Map Name (Reference, Reference) + typeEdits = Map.fromList $ map f (toList $ SC.types (updates sr)) + where + f v = case ( toList (Names.typesNamed slurpCheckNames n), + toList (Names.typesNamed fileNames n) + ) of + ([old], [new]) -> (n, (old, new)) + _ -> + error $ + "Expected unique matches for " + ++ Var.nameStr v + ++ " but got: " + ++ show otherwise + where + n = Name.unsafeFromVar v hashTerms :: Map Reference (Type v Ann) hashTerms = Map.fromList (toList hashTerms0) where hashTerms0 = (\(r, _wk, _tm, typ) -> (r, typ)) <$> UF.hashTerms uf - -- termEdits :: Map Name (Reference, Reference) - -- termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) - -- where - -- g v = case ( toList (Names.refTermsNamed slurpCheckNames n), - -- toList (Names.refTermsNamed fileNames n) - -- ) of - -- ([old], [new]) -> (n, (old, new)) - -- _ -> - -- error $ - -- "Expected unique matches for " - -- ++ Var.nameStr v - -- ++ " but got: " - -- ++ show otherwise - -- where - -- n = Name.unsafeFromVar v + termEdits :: Map Name (Reference, Reference) + termEdits = Map.fromList $ map g (toList $ SC.terms (updates sr)) + where + g v = case ( toList (Names.refTermsNamed slurpCheckNames n), + toList (Names.refTermsNamed fileNames n) + ) of + ([old], [new]) -> (n, (old, new)) + _ -> + error $ + "Expected unique matches for " + ++ Var.nameStr v + ++ " but got: " + ++ show otherwise + where + n = Name.unsafeFromVar v termDeprecations :: [(Name, Referent)] termDeprecations = [ (n, r) @@ -1922,28 +1919,19 @@ handleUpdate input maybePatchPath hqs = do updatePatches :: Branch0 m -> m (Branch0 m) updatePatches = Branch.modifyPatches seg updatePatch - case Slurp.slurpOp Update (undefined hqs) sr of - Left errs -> undefined - Right (adds, updates) -> - -- when nonEmpty - -- doSlurpUpdates updates - -- doSlurpAdds adds - undefined adds updates - - -- take a look at the `updates` from the SlurpResult - -- and make a patch diff to record a replacement from the old to new references - -- stepManyAtMNoSync Branch.CompressHistory - -- [ ( Path.unabsolute currentPath', - -- pure . doSlurpUpdates typeEdits termEdits termDeprecations - -- ), - -- ( Path.unabsolute currentPath', - -- pure . doSlurpAdds addsAndUpdates uf - -- ), - -- (Path.unabsolute p, updatePatches) - -- ] - -- eval . AddDefsToCodebase . filterBySlurpResult sr $ uf - - -- when (Slurp.isNonempty sr) $ do + when (Slurp.isNonempty sr) $ do + -- take a look at the `updates` from the SlurpResult + -- and make a patch diff to record a replacement from the old to new references + stepManyAtMNoSync Branch.CompressHistory + [ ( Path.unabsolute currentPath', + pure . doSlurpUpdates typeEdits termEdits termDeprecations + ), + ( Path.unabsolute currentPath', + pure . doSlurpAdds addsAndUpdates uf + ), + (Path.unabsolute p, updatePatches) + ] + eval . AddDefsToCodebase . filterBySlurpResult sr $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr -- propagatePatch prints TodoOutput @@ -2794,8 +2782,8 @@ toSlurpResult :: UF.TypecheckedUnisonFile v Ann -> Names -> SlurpResult v -toSlurpResult curPath uf existingNames = pTraceShowId $ - OldSlurp.subtractComponent (conflicts <> ctorCollisions) $ +toSlurpResult curPath uf existingNames = + Slurp.subtractComponent (conflicts <> ctorCollisions) $ SlurpResult uf mempty @@ -2901,13 +2889,13 @@ toSlurpResult curPath uf existingNames = pTraceShowId $ R.Relation Name Referent -> R.Relation Name Referent -> Set v -> - Map v OldSlurp.Aliases + Map v Slurp.Aliases buildAliases existingNames namesFromFile duplicates = Map.fromList [ ( var n, if null aliasesOfOld - then OldSlurp.AddAliases aliasesOfNew - else OldSlurp.UpdateAliases aliasesOfOld aliasesOfNew + then Slurp.AddAliases aliasesOfNew + else Slurp.UpdateAliases aliasesOfOld aliasesOfNew ) | (n, r@Referent.Ref {}) <- R.toList namesFromFile, -- All the refs whose names include `n`, and are not `r` @@ -2922,14 +2910,14 @@ toSlurpResult curPath uf existingNames = pTraceShowId $ Set.notMember (var n) duplicates ] - termAliases :: Map v OldSlurp.Aliases + termAliases :: Map v Slurp.Aliases termAliases = buildAliases (Names.terms existingNames) (Names.terms fileNames) (SC.terms dups) - typeAliases :: Map v OldSlurp.Aliases + typeAliases :: Map v Slurp.Aliases typeAliases = buildAliases (R.mapRan Referent.Ref $ Names.types existingNames) @@ -3081,7 +3069,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions) typeActions = map doType . toList $ SC.types slurp termActions = map doTerm . toList $ - SC.terms slurp <> OldSlurp.constructorsFor (SC.types slurp) uf + SC.terms slurp <> Slurp.constructorsFor (SC.types slurp) uf names = UF.typecheckedToNames uf tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf) (isTestType, isTestValue) = isTest diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 91aca838d5..3448f8c71b 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -1,10 +1,9 @@ -{-# OPTIONS_GHC -Wno-unused-local-binds #-} - module Unison.Codebase.Editor.Slurp where import Control.Lens -import Control.Monad.State +import qualified Data.List.NonEmpty as NEList import qualified Data.Map as Map +import qualified Data.Semigroup.Foldable as Semigroup import qualified Data.Set as Set import qualified Unison.DataDeclaration as DD import Unison.Hash (Hash) @@ -16,12 +15,10 @@ import qualified Unison.Names as Names import Unison.Parser.Ann (Ann) import Unison.Prelude import qualified Unison.Reference as Ref -import qualified Unison.Referent as Referent import qualified Unison.Referent' as Referent import Unison.Term (Term) import qualified Unison.Term as Term import qualified Unison.UnisonFile as UF -import qualified Unison.UnisonFile.Names as UF import qualified Unison.Util.Relation as Rel import qualified Unison.Util.Relation3 as Rel3 import qualified Unison.Util.Set as Set @@ -45,8 +42,24 @@ data LabeledVar v = LabeledVar v LD.LabeledDependency data SlurpStatus = New | Updated | Duplicate deriving (Eq, Ord, Show) -data SlurpOp = Add | Update - deriving (Eq, Ord, Show) +data BlockStatus v + = Add + | Duplicated + | NeedsUpdate v + | ErrFrom v SlurpErr + | SelfErr SlurpErr + deriving (Eq, Ord) + +instance Semigroup (BlockStatus v) where + SelfErr err <> _ = SelfErr err + _ <> SelfErr err = SelfErr err + ErrFrom v err <> _ = ErrFrom v err + _ <> ErrFrom v err = ErrFrom v err + NeedsUpdate v <> _ = NeedsUpdate v + _ <> NeedsUpdate v = NeedsUpdate v + Add <> _ = Add + _ <> Add = Add + Duplicated <> Duplicated = Duplicated data TypeOrTermVar v = TypeVar v | TermVar v deriving (Eq, Ord, Show) @@ -81,18 +94,87 @@ data SlurpErr | CtorTermCollision deriving (Eq, Ord, Show) -data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v, errs :: Map v SlurpErr} +data SlurpComponent v = SlurpComponent {scTypes :: Set v, scTerms :: Set v} deriving (Eq, Ord, Show) +instance Ord v => Semigroup (SlurpComponent v) where + SlurpComponent typeL termL <> SlurpComponent typeR termR = + SlurpComponent (typeL <> typeR) (termL <> termR) + +instance Ord v => Monoid (SlurpComponent v) where + mempty = SlurpComponent mempty mempty + data DefinitionNotes - = DefStatus SlurpStatus + = DefOk SlurpStatus | DefErr SlurpErr data SlurpResult v = SlurpResult - { termNotes :: Map v (DefinitionNotes, Set (LabeledVar v)), - typeNotes :: Map v (DefinitionNotes, Set (LabeledVar v)) + { termNotes :: Map v (DefinitionNotes, Set (TypeOrTermVar v)), + typeNotes :: Map v (DefinitionNotes, Set (TypeOrTermVar v)) } +type Result v = Map (BlockStatus v) (SlurpComponent v) + +-- data Result v = Result +-- { addable :: SlurpComponent v, +-- needUpdate :: SlurpComponent v, +-- duplicate :: SlurpComponent v, +-- blockedTerms :: Map (SlurpErr v) (Set v) +-- } + +-- instance Semigroup (Result v) where +-- Result adds1 updates1 duplicates1 tcColl1 ctColl1 <> Result adds2 updates2 duplicates2 tcColl2 ctColl2 = +-- Result (adds1 <> adds2) (updates1 <> updates2) (duplicates1 <> duplicates2) (tcColl1 <> tcColl2) (ctColl1 <> ctColl2) + +-- instance Monoid (Result v) where +-- mempty = Result mempty mempty mempty mempty mempty + +-- Compute all definitions which can be added, or the reasons why a def can't be added. +results :: forall v. Ord v => SlurpResult v -> Result v +results sr@(SlurpResult terms types) = + Map.unionWith (<>) analyzedTerms analyzedTypes + where + analyzedTerms :: Map (BlockStatus v) (SlurpComponent v) + analyzedTerms = + terms + & Map.toList + & fmap + ( \(v, (_, deps)) -> + ( Semigroup.foldMap1 (getBlockStatus sr) (TermVar v NEList.:| Set.toList deps), + mempty {scTerms = Set.singleton v} + ) + ) + & Map.fromListWith (<>) + analyzedTypes :: Map (BlockStatus v) (SlurpComponent v) + analyzedTypes = + types + & Map.toList + & fmap + ( \(v, (_, deps)) -> + ( Semigroup.foldMap1 (getBlockStatus sr) (TypeVar v NEList.:| Set.toList deps), + mempty {scTypes = Set.singleton v} + ) + ) + & Map.fromListWith (<>) + +getBlockStatus :: Ord v => SlurpResult v -> TypeOrTermVar v -> BlockStatus v +getBlockStatus (SlurpResult {termNotes, typeNotes}) tv = + let v = unlabeled tv + defNotes = case tv of + TypeVar v -> typeNotes Map.! v + TermVar v -> termNotes Map.! v + in case fst defNotes of + DefOk Updated -> NeedsUpdate v + DefErr err -> ErrFrom v err + DefOk New -> Add + DefOk Duplicate -> Duplicated + +-- Need to know: +-- What can be added without errors? +-- What can be updated without errors? +-- What has errors? +-- What is blocked? + type ComponentHash = Hash data Components v = Components @@ -100,18 +182,25 @@ data Components v = Components typeComponents :: Map Hash (Set v) } +-- groupByOp :: SlurpResult v -> (SlurpComponent v, SlurpComponent v) +-- groupByOp (SlurpResult terms types) = +-- terms +-- & Map.mapEither (\(notes, deps) -> +-- any (== ) +-- ) + analyzeTypecheckedUnisonFile :: forall v. Var v => UF.TypecheckedUnisonFile v Ann -> - Names -> Maybe (Set v) -> + Names -> SlurpResult v -analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = +analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = let allInvolvedVars :: Set (LabeledVar v) allInvolvedVars = foldMap transitiveVarDeps defsToConsider - termStatuses, typeStatuses :: Map v (DefinitionNotes, Set (LabeledVar v)) + termStatuses, typeStatuses :: Map v (DefinitionNotes, Set (TypeOrTermVar v)) (termStatuses, typeStatuses) = allInvolvedVars & Set.toList @@ -126,12 +215,21 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = LabeledVar _ (LD.TermReferent {}) -> Right x ) & over both (Map.mapKeys (\(LabeledVar v _) -> v)) + & over + both + ( Map.map + ( fmap + ( Set.map + ( \case + LabeledVar v (LD.TermReferent {}) -> TermVar v + LabeledVar v (LD.TypeReference {}) -> TypeVar v + ) + ) + ) + ) in -- & Map.mapEitherWithKey _ SlurpResult termStatuses typeStatuses where - fileNames :: Names - fileNames = UF.typecheckedToNames uf - transitiveCHDeps :: Map ComponentHash (Set ComponentHash) transitiveCHDeps = componentTransitiveDeps uf @@ -173,20 +271,36 @@ analyzeTypecheckedUnisonFile uf unalteredCodebaseNames maybeDefsToConsider = definitionStatus (LabeledVar v ld) = let existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) - existingTermsMatchingReference = Names.termsNamed codebaseNames (Name.unsafeFromVar v) in case ld of LD.TypeReference {} -> + case Set.toList existingTypesAtName of + [] -> DefOk New + [r] + | LD.typeRef r == ld -> DefOk Duplicate + | otherwise -> DefOk Updated + -- If there are many existing terms, they must be in conflict, we can update + -- to resolve the conflict. + _ -> DefOk Updated + LD.TermReference {} -> + case Set.toList existingTermsAtName of + [] -> DefOk New + rs | any Referent.isConstructor rs -> DefErr TermCtorCollision + [r] + | LD.referent r == ld -> DefOk Duplicate + | otherwise -> DefOk Updated + -- If there are many existing terms, they must be in conflict, we can update + -- to resolve the conflict. + _ -> DefOk Updated + LD.ConReference {} -> case Set.toList existingTermsAtName of - [] -> DefStatus New - [r] | LD.referent r == ld -> DefStatus Duplicate - [Referent.Con {}] | LD.ConReference {} <- ld -> DefErr TermCtorCollision - [Referent.Ref {}] | LD.ConReference {} <- ld -> DefErr CtorTermCollision - -- This allows us to resolve conflicts with an update. - _ -> DefStatus Updated - -- [r] -> DefStatus Updated - -- _ -> DefStatus Conflicted - LD.TermReference {} -> undefined - LD.ConReference {} -> undefined + [] -> DefOk New + rs | any (not . Referent.isConstructor) rs -> DefErr CtorTermCollision + [r] + | LD.referent r == ld -> DefOk Duplicate + | otherwise -> DefOk Updated + -- If there are many existing terms, they must be in conflict, we can update + -- to resolve the conflict. + _ -> DefOk Updated varRelation :: Rel3.Relation3 v (LabeledVar v) ComponentHash varRelation = labelling uf @@ -231,27 +345,27 @@ slurpErrs (SlurpResult defs _) = _ -> Nothing ) -slurpOp :: - forall v. - Ord v => - SlurpResult v -> - (SlurpComponent v, SlurpComponent v) -slurpOp (SlurpResult terms types) = - let (termAdds, termUpdates, termErrs) = partition terms - (typeAdds, typeUpdates, typeErrs) = partition types - in (SlurpComponent termAdds termUpdates termErrs, SlurpComponent typeAdds typeUpdates typeErrs) - where - partition :: (Map v (DefinitionNotes, Set (LabeledVar v))) -> (Set v, Set v, Map v SlurpErr) - partition sr = - let (adds, updates, errs) = - flip execState mempty $ - for (Map.toList sr) $ \(v, (dn, _)) -> do - case dn of - DefStatus New -> _1 %= Set.insert v - DefStatus Updated -> _2 %= Set.insert v - DefStatus Duplicate -> pure () - DefErr err -> _3 . at v ?= err - in (adds, updates, errs) +-- slurpOp :: +-- forall v. +-- Ord v => +-- SlurpResult v -> +-- (SlurpComponent v, SlurpComponent v) +-- slurpOp (SlurpResult terms types) = +-- let (termAdds, termUpdates, termErrs) = partition terms +-- (typeAdds, typeUpdates, typeErrs) = partition types +-- in (SlurpComponent termAdds termUpdates termErrs, SlurpComponent typeAdds typeUpdates typeErrs) +-- where +-- partition :: (Map v (DefinitionNotes, Set (LabeledVar v))) -> (Set v, Set v, Map v SlurpErr) +-- partition sr = +-- let (adds, updates, errs) = +-- flip execState mempty $ +-- for (Map.toList sr) $ \(v, (dn, _)) -> do +-- case dn of +-- DefOk New -> _1 %= Set.insert v +-- DefOk Updated -> _2 %= Set.insert v +-- DefOk Duplicate -> pure () +-- DefErr err -> _3 . at v ?= err +-- in (adds, updates, errs) componentTransitiveDeps :: Ord v => UF.TypecheckedUnisonFile v a -> Map ComponentHash (Set ComponentHash) componentTransitiveDeps uf = @@ -335,4 +449,24 @@ labelling uf = decls <> effects <> terms idToComponentHash :: Ref.Id -> ComponentHash idToComponentHash (Ref.Id componentHash _ _) = componentHash +-- selectVars :: SlurpComponent (LabeledVar v) -> UF.TypecheckedUnisonFile v a -> UF.TypecheckedUnisonFile v a +-- selectVars +-- vs +-- ( UF.TypecheckedUnisonFileId +-- dataDeclarations' +-- effectDeclarations' +-- topLevelComponents' +-- watchComponents +-- hashTerms +-- ) = +-- UF.TypecheckedUnisonFileId datas effects tlcs watches hashTerms' +-- where +-- keepTypes = SC.types keep +-- hashTerms' = Map.restrictKeys hashTerms keepTerms +-- datas = Map.restrictKeys dataDeclarations' keepTypes +-- effects = Map.restrictKeys effectDeclarations' keepTypes +-- tlcs = filter (not . null) $ fmap (List.filter filterTLC) topLevelComponents' +-- watches = filter (not . null . snd) $ fmap (second (List.filter filterTLC)) watchComponents +-- filterTLC (v, _, _) = Set.member v keepTerms + -- dependencyMap :: UF.TypecheckedUnisonFile -> Map From e94448b5d6dfe668b3c2a880e3ce5e9f8d4d9905 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 14 Jan 2022 12:56:23 -0600 Subject: [PATCH 166/297] WIP --- unison-cli/package.yaml | 3 + .../src/Unison/Codebase/Editor/HandleInput.hs | 25 +- .../src/Unison/Codebase/Editor/Input.hs | 2 +- .../src/Unison/Codebase/Editor/Output.hs | 3 + .../src/Unison/Codebase/Editor/Slurp.hs | 231 +++++++++++------- .../Unison/Codebase/Editor/TermsAndTypes.hs | 24 ++ .../src/Unison/CommandLine/InputPatterns.hs | 22 +- .../src/Unison/CommandLine/OutputMessages.hs | 8 +- unison-cli/unison-cli.cabal | 16 ++ 9 files changed, 226 insertions(+), 108 deletions(-) create mode 100644 unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 9228fea898..33d5b5b202 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -10,6 +10,9 @@ flags: ghc-options: -Wall dependencies: + - semigroupoids + - distributive + - adjunctions - pretty-simple - ListLike - async diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 56d1b3b26b..b050b7c0cf 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -151,6 +151,7 @@ import qualified Data.Set.NonEmpty as NESet import Data.Set.NonEmpty (NESet) import Unison.Symbol (Symbol) import qualified Unison.Codebase.Editor.Input as Input +import qualified Unison.Codebase.Editor.Slurp as NewSlurp defaultPatchNameSegment :: NameSegment defaultPatchNameSegment = "patch" @@ -1250,20 +1251,26 @@ loop = do InvalidSourceNameError -> respond $ InvalidSourceName path LoadError -> respond $ SourceLoadFailed path LoadSuccess contents -> loadUnisonFile (Text.pack path) contents - AddI hqs -> + AddI names -> do + let vars = Set.map Name.toVar names case uf of Nothing -> respond NoUnisonFile Just uf -> do - sr <- - Slurp.disallowUpdates - . applySelection hqs uf - . toSlurpResult currentPath' uf - <$> slurpResultNames - let adds = Slurp.adds sr + sr <- NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf + (if null vars then Nothing else Just vars) + <$> currentPathNames + -- sr <- + -- Slurp.disallowUpdates + -- . applySelection hqs uf + -- . toSlurpResult currentPath' uf + -- <$> slurpResultNames + let adds = fromMaybe mempty . Map.lookup NewSlurp.Add $ sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) - eval . AddDefsToCodebase . filterBySlurpResult sr $ uf + eval . AddDefsToCodebase . NewSlurp.selectDefinitions adds $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf - respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr + let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.AddOp vars sr + respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) oldSlurpResult + -- respond $ NewSlurpOutput input (PPE.suffixifiedPPE ppe) NewSlurp.AddOp sr addDefaultMetadata adds syncRoot PreviewAddI hqs -> case (latestFile', uf) of diff --git a/unison-cli/src/Unison/Codebase/Editor/Input.hs b/unison-cli/src/Unison/Codebase/Editor/Input.hs index 4e1f23d2b9..374b90c491 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Input.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Input.hs @@ -112,7 +112,7 @@ data Input | ResolveTypeNameI Path.HQSplit' -- edits stuff: | LoadI (Maybe FilePath) - | AddI [HQ'.HashQualified Name] + | AddI (Set Name) | PreviewAddI [HQ'.HashQualified Name] | UpdateI (Maybe PatchPath) [HQ'.HashQualified Name] | PreviewUpdateI [HQ'.HashQualified Name] diff --git a/unison-cli/src/Unison/Codebase/Editor/Output.hs b/unison-cli/src/Unison/Codebase/Editor/Output.hs index e1a3e297eb..473531096b 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Output.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Output.hs @@ -64,6 +64,7 @@ import qualified Unison.WatchKind as WK import Data.Set.NonEmpty (NESet) import qualified Unison.CommandLine.InputPattern as Input import Data.List.NonEmpty (NonEmpty) +import qualified Unison.Codebase.Editor.Slurp as NewSlurp type ListDetailed = Bool @@ -159,6 +160,7 @@ data Output v | ListOfPatches (Set Name) | -- show the result of add/update SlurpOutput Input PPE.PrettyPrintEnv (SlurpResult v) + | NewSlurpOutput Input PPE.PrettyPrintEnv NewSlurp.SlurpOp (NewSlurp.Result v) | -- Original source, followed by the errors: ParseErrors Text [Parser.Err v] | TypeErrors Text PPE.PrettyPrintEnv [Context.ErrorNote v Ann] @@ -314,6 +316,7 @@ isFailure o = case o of ListOfDefinitions _ _ ds -> null ds ListOfPatches s -> Set.null s SlurpOutput _ _ sr -> not $ SR.isOk sr + NewSlurpOutput _ _ slurpOp sr -> NewSlurp.anyErrors slurpOp sr ParseErrors {} -> True TypeErrors {} -> True CompilerBugs {} -> True diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 3448f8c71b..98f98d4d44 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -1,10 +1,16 @@ module Unison.Codebase.Editor.Slurp where import Control.Lens +import Data.Bifunctor (second) +import qualified Data.List as List import qualified Data.List.NonEmpty as NEList import qualified Data.Map as Map import qualified Data.Semigroup.Foldable as Semigroup import qualified Data.Set as Set +import Debug.Pretty.Simple (pTraceShowId) +import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) +import qualified Unison.Codebase.Editor.SlurpComponent as SC +import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp import qualified Unison.DataDeclaration as DD import Unison.Hash (Hash) import qualified Unison.LabeledDependency as LD @@ -36,6 +42,9 @@ import Unison.Var (Var) -- -- Does depending on a type also mean depending on all its constructors +data SlurpOp = AddOp | UpdateOp + deriving (Eq, Show) + data LabeledVar v = LabeledVar v LD.LabeledDependency deriving (Eq, Ord) @@ -45,8 +54,8 @@ data SlurpStatus = New | Updated | Duplicate data BlockStatus v = Add | Duplicated - | NeedsUpdate v - | ErrFrom v SlurpErr + | NeedsUpdate (TypeOrTermVar v) + | ErrFrom (TypeOrTermVar v) SlurpErr | SelfErr SlurpErr deriving (Eq, Ord) @@ -94,24 +103,16 @@ data SlurpErr | CtorTermCollision deriving (Eq, Ord, Show) -data SlurpComponent v = SlurpComponent {scTypes :: Set v, scTerms :: Set v} - deriving (Eq, Ord, Show) - -instance Ord v => Semigroup (SlurpComponent v) where - SlurpComponent typeL termL <> SlurpComponent typeR termR = - SlurpComponent (typeL <> typeR) (termL <> termR) - -instance Ord v => Monoid (SlurpComponent v) where - mempty = SlurpComponent mempty mempty - data DefinitionNotes = DefOk SlurpStatus | DefErr SlurpErr + deriving (Show) data SlurpResult v = SlurpResult { termNotes :: Map v (DefinitionNotes, Set (TypeOrTermVar v)), typeNotes :: Map v (DefinitionNotes, Set (TypeOrTermVar v)) } + deriving (Show) type Result v = Map (BlockStatus v) (SlurpComponent v) @@ -130,42 +131,41 @@ type Result v = Map (BlockStatus v) (SlurpComponent v) -- mempty = Result mempty mempty mempty mempty mempty -- Compute all definitions which can be added, or the reasons why a def can't be added. -results :: forall v. Ord v => SlurpResult v -> Result v -results sr@(SlurpResult terms types) = +results :: forall v. (Ord v, Show v) => SlurpResult v -> Result v +results sr@(SlurpResult {termNotes, typeNotes}) = Map.unionWith (<>) analyzedTerms analyzedTypes where analyzedTerms :: Map (BlockStatus v) (SlurpComponent v) analyzedTerms = - terms + termNotes & Map.toList & fmap ( \(v, (_, deps)) -> ( Semigroup.foldMap1 (getBlockStatus sr) (TermVar v NEList.:| Set.toList deps), - mempty {scTerms = Set.singleton v} + mempty {SC.terms = Set.singleton v} ) ) & Map.fromListWith (<>) analyzedTypes :: Map (BlockStatus v) (SlurpComponent v) analyzedTypes = - types + typeNotes & Map.toList & fmap ( \(v, (_, deps)) -> ( Semigroup.foldMap1 (getBlockStatus sr) (TypeVar v NEList.:| Set.toList deps), - mempty {scTypes = Set.singleton v} + mempty {SC.types = Set.singleton v} ) ) & Map.fromListWith (<>) -getBlockStatus :: Ord v => SlurpResult v -> TypeOrTermVar v -> BlockStatus v +getBlockStatus :: (Ord v, Show v) => SlurpResult v -> TypeOrTermVar v -> BlockStatus v getBlockStatus (SlurpResult {termNotes, typeNotes}) tv = - let v = unlabeled tv - defNotes = case tv of - TypeVar v -> typeNotes Map.! v - TermVar v -> termNotes Map.! v + let defNotes = case tv of + TypeVar v -> fromMaybe (error $ "Expected " <> show v <> " in typeNotes") $ Map.lookup v typeNotes + TermVar v -> fromMaybe (error $ "Expected " <> show v <> " in termNotes") $ Map.lookup v termNotes in case fst defNotes of - DefOk Updated -> NeedsUpdate v - DefErr err -> ErrFrom v err + DefOk Updated -> NeedsUpdate tv + DefErr err -> ErrFrom tv err DefOk New -> Add DefOk Duplicate -> Duplicated @@ -197,38 +197,38 @@ analyzeTypecheckedUnisonFile :: Names -> SlurpResult v analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = - let allInvolvedVars :: Set (LabeledVar v) - allInvolvedVars = foldMap transitiveVarDeps defsToConsider - - termStatuses, typeStatuses :: Map v (DefinitionNotes, Set (TypeOrTermVar v)) - (termStatuses, typeStatuses) = - allInvolvedVars - & Set.toList - & fmap - ( \lv -> - (lv, (definitionStatus lv, transitiveLabeledVarDeps lv)) - ) - & Map.fromList - & Map.mapEitherWithKey - ( \lv x -> case lv of - LabeledVar _ (LD.TypeReference {}) -> Left x - LabeledVar _ (LD.TermReferent {}) -> Right x - ) - & over both (Map.mapKeys (\(LabeledVar v _) -> v)) - & over - both - ( Map.map - ( fmap - ( Set.map - ( \case - LabeledVar v (LD.TermReferent {}) -> TermVar v - LabeledVar v (LD.TypeReference {}) -> TypeVar v - ) - ) - ) - ) - in -- & Map.mapEitherWithKey _ - SlurpResult termStatuses typeStatuses + pTraceShowId $ + let allInvolvedVars :: Set (LabeledVar v) + allInvolvedVars = foldMap transitiveVarDeps defsToConsider + + termStatuses, typeStatuses :: Map v (DefinitionNotes, Set (TypeOrTermVar v)) + (termStatuses, typeStatuses) = + allInvolvedVars + & Set.toList + & fmap + ( \lv -> + (lv, (definitionStatus lv, transitiveLabeledVarDeps lv)) + ) + & Map.fromList + & Map.mapEitherWithKey + ( \lv x -> case lv of + LabeledVar _ (LD.TypeReference {}) -> Right x + LabeledVar _ (LD.TermReferent {}) -> Left x + ) + & over both (Map.mapKeys (\(LabeledVar v _) -> v)) + & over + both + ( Map.map + ( fmap + ( Set.map + ( \case + LabeledVar v (LD.TermReferent {}) -> TermVar v + LabeledVar v (LD.TypeReference {}) -> TypeVar v + ) + ) + ) + ) + in SlurpResult {termNotes = termStatuses, typeNotes = typeStatuses} where transitiveCHDeps :: Map ComponentHash (Set ComponentHash) transitiveCHDeps = @@ -336,14 +336,14 @@ analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = in -- Compute any constructors which were deleted existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile -slurpErrs :: SlurpResult v -> Map v SlurpErr -slurpErrs (SlurpResult defs _) = - defs - & Map.mapMaybe - ( \case - (DefErr err, _) -> Just err - _ -> Nothing - ) +-- slurpErrs :: SlurpResult v -> Map v SlurpErr +-- slurpErrs (SlurpResult defs _) = +-- defs +-- & Map.mapMaybe +-- ( \case +-- (DefErr err, _) -> Just err +-- _ -> Nothing +-- ) -- slurpOp :: -- forall v. @@ -422,6 +422,7 @@ dataDeclRefs decl = -- Ignore builtins & Set.mapMaybe componentHashForReference +-- TODO: Does this need to contain constructors? Probably. -- Does not include constructors labelling :: forall v a. Ord v => UF.TypecheckedUnisonFile v a -> Rel3.Relation3 v (LabeledVar v) ComponentHash labelling uf = decls <> effects <> terms @@ -449,24 +450,82 @@ labelling uf = decls <> effects <> terms idToComponentHash :: Ref.Id -> ComponentHash idToComponentHash (Ref.Id componentHash _ _) = componentHash --- selectVars :: SlurpComponent (LabeledVar v) -> UF.TypecheckedUnisonFile v a -> UF.TypecheckedUnisonFile v a --- selectVars --- vs --- ( UF.TypecheckedUnisonFileId --- dataDeclarations' --- effectDeclarations' --- topLevelComponents' --- watchComponents --- hashTerms --- ) = --- UF.TypecheckedUnisonFileId datas effects tlcs watches hashTerms' --- where --- keepTypes = SC.types keep --- hashTerms' = Map.restrictKeys hashTerms keepTerms --- datas = Map.restrictKeys dataDeclarations' keepTypes --- effects = Map.restrictKeys effectDeclarations' keepTypes --- tlcs = filter (not . null) $ fmap (List.filter filterTLC) topLevelComponents' --- watches = filter (not . null . snd) $ fmap (second (List.filter filterTLC)) watchComponents --- filterTLC (v, _, _) = Set.member v keepTerms - --- dependencyMap :: UF.TypecheckedUnisonFile -> Map +selectDefinitions :: Ord v => SlurpComponent v -> UF.TypecheckedUnisonFile v a -> UF.TypecheckedUnisonFile v a +selectDefinitions + (SlurpComponent {terms, types}) + ( UF.TypecheckedUnisonFileId + dataDeclarations' + effectDeclarations' + topLevelComponents' + watchComponents + hashTerms + ) = + UF.TypecheckedUnisonFileId datas effects tlcs watches hashTerms' + where + hashTerms' = Map.restrictKeys hashTerms terms + datas = Map.restrictKeys dataDeclarations' types + effects = Map.restrictKeys effectDeclarations' types + tlcs = filter (not . null) $ fmap (List.filter filterTLC) topLevelComponents' + watches = filter (not . null . snd) $ fmap (second (List.filter filterTLC)) watchComponents + filterTLC (v, _, _) = Set.member v terms + +toSlurpResult :: + forall v. + Ord v => + UF.TypecheckedUnisonFile v Ann -> + SlurpOp -> + Set v -> + Result v -> + OldSlurp.SlurpResult v +toSlurpResult uf op vs r = + -- TODO: Do a proper partition to speed this up. + OldSlurp.SlurpResult + { OldSlurp.originalFile = uf, + OldSlurp.extraDefinitions = SC.difference (fold r) (SlurpComponent vs vs), + OldSlurp.adds = adds, + OldSlurp.duplicates = duplicates, + OldSlurp.collisions = if op == AddOp then updates else mempty, + OldSlurp.conflicts = mempty, + OldSlurp.updates = if op == UpdateOp then updates else mempty, + OldSlurp.termExistingConstructorCollisions = + let SlurpComponent types terms = termCtorColl + in types <> terms, + OldSlurp.constructorExistingTermCollisions = + let SlurpComponent types terms = ctorTermColl + in types <> terms, + OldSlurp.termAlias = mempty, + OldSlurp.typeAlias = mempty, + OldSlurp.defsWithBlockedDependencies = blocked + } + where + adds, duplicates, updates, termCtorColl, ctorTermColl, blocked :: SlurpComponent v + (adds, duplicates, updates, termCtorColl, (ctorTermColl, blocked)) = + r + & ifoldMap + ( \k sc -> + case k of + Add -> (sc, mempty, mempty, mempty, (mempty, mempty)) + Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty)) + NeedsUpdate v -> + (mempty, mempty, singletonSC v, mempty, (mempty, sc `SC.difference` singletonSC v)) + ErrFrom v TermCtorCollision -> (mempty, mempty, mempty, singletonSC v, (mempty, sc `SC.difference` singletonSC v)) + ErrFrom v CtorTermCollision -> (mempty, mempty, mempty, mempty, (singletonSC v, sc `SC.difference` singletonSC v)) + SelfErr TermCtorCollision -> (mempty, mempty, mempty, sc, (mempty, mempty)) + SelfErr CtorTermCollision -> (mempty, mempty, mempty, mempty, (sc, mempty)) + ) + singletonSC = \case + TypeVar v -> SlurpComponent {terms = mempty, types = Set.singleton v} + TermVar v -> SlurpComponent {terms = Set.singleton v, types = mempty} + +anyErrors :: SlurpOp -> Result v -> Bool +anyErrors op r = + any isError . Map.keys $ Map.filter (not . SC.isEmpty) r + where + isError :: BlockStatus v -> Bool + isError = \case + Add -> False + Duplicated -> False + -- NeedsUpdate is an error only if we're trying to Add + NeedsUpdate {} -> op == AddOp + ErrFrom {} -> True + SelfErr {} -> True diff --git a/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs b/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs new file mode 100644 index 0000000000..d53f3cc47f --- /dev/null +++ b/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs @@ -0,0 +1,24 @@ +{-# LANGUAGE TypeFamilies #-} +module Unison.Codebase.Editor.TermsAndTypes where + +import Data.Distributive +import Data.Functor.Rep + +data TermOrType = TypeTag | TermTag + +data TermsAndTypes a = TermsAndTypes {terms :: a, types :: a} + deriving (Functor) + +instance Applicative TermsAndTypes where + pure a = TermsAndTypes a a + TermsAndTypes f g <*> TermsAndTypes a b = TermsAndTypes (f a) (g b) + +instance Distributive TermsAndTypes where + distribute = distributeRep + +instance Representable TermsAndTypes where + type Rep TermsAndTypes = TermOrType + index tt = \case + TermTag -> terms tt + TypeTag -> types tt + tabulate f = TermsAndTypes {terms = f TermTag, types = f TypeTag} diff --git a/unison-cli/src/Unison/CommandLine/InputPatterns.hs b/unison-cli/src/Unison/CommandLine/InputPatterns.hs index 73421db884..53138b7c4c 100644 --- a/unison-cli/src/Unison/CommandLine/InputPatterns.hs +++ b/unison-cli/src/Unison/CommandLine/InputPatterns.hs @@ -174,15 +174,15 @@ add = ( "`add` adds to the codebase all the definitions from the most recently " <> "typechecked file." ) - $ \ws -> case traverse HQ'.fromString ws of - Just ws -> pure $ Input.AddI ws - Nothing -> - Left - . warn - . P.lines - . fmap fromString - . ("I don't know what these refer to:\n" :) - $ collectNothings HQ'.fromString ws + $ \ws -> pure $ Input.AddI (Set.fromList $ map Name.unsafeFromString ws) + -- Just ws -> pure $ Input.AddI ws + -- Nothing -> + -- Left + -- . warn + -- . P.lines + -- . fmap fromString + -- . ("I don't know what these refer to:\n" :) + -- $ collectNothings HQ'.fromString ws previewAdd :: InputPattern previewAdd = @@ -1203,9 +1203,9 @@ prettyPrintParseError input = \case ] printTrivial :: (Maybe (P.ErrorItem Char)) -> (Set (P.ErrorItem Char)) -> P.Pretty P.ColorText - printTrivial ue ee = + printTrivial ue ee = let expected = "I expected " <> foldMap (P.singleQuoted . P.string . P.showErrorComponent) ee - found = P.string . mappend "I found " . P.showErrorComponent <$> ue + found = P.string . mappend "I found " . P.showErrorComponent <$> ue message = [expected] <> catMaybes [found] in P.oxfordCommasWith "." message diff --git a/unison-cli/src/Unison/CommandLine/OutputMessages.hs b/unison-cli/src/Unison/CommandLine/OutputMessages.hs index 8746d706c9..47af6c083a 100644 --- a/unison-cli/src/Unison/CommandLine/OutputMessages.hs +++ b/unison-cli/src/Unison/CommandLine/OutputMessages.hs @@ -578,7 +578,7 @@ notifyUser dir o = case o of pure . P.warnCallout $ P.lines [ "The current namespace '" <> prettyPath' path <> "' is not empty. `pull-request.load` downloads the PR into the current namespace which would clutter it.", - "Please switch to an empty namespace and try again." + "Please switch to an empty namespace and try again." ] CantUndo reason -> case reason of CantUndoPastStart -> pure . P.warnCallout $ "Nothing more to undo." @@ -750,6 +750,12 @@ notifyUser dir o = case o of Input.UpdateI {} -> True _ -> False in pure $ SlurpResult.pretty isPast ppe s + NewSlurpOutput input ppe slurpOp result -> + let isPast = case input of + Input.AddI {} -> True + Input.UpdateI {} -> True + _ -> False + in pure $ undefined isPast ppe slurpOp result NoExactTypeMatches -> pure . P.callout "☝️" $ P.wrap "I couldn't find exact type matches, resorting to fuzzy matching..." TypeParseError src e -> diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index 76fac607a3..2eaba1efe0 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -38,6 +38,7 @@ library Unison.Codebase.Editor.Slurp Unison.Codebase.Editor.SlurpComponent Unison.Codebase.Editor.SlurpResult + Unison.Codebase.Editor.TermsAndTypes Unison.Codebase.Editor.TodoOutput Unison.Codebase.Editor.UriParser Unison.Codebase.Editor.VersionParser @@ -81,6 +82,7 @@ library ghc-options: -Wall build-depends: ListLike + , adjunctions , async , base , bytestring @@ -88,6 +90,7 @@ library , containers >=0.6.3 , cryptonite , directory + , distributive , errors , extra , filepath @@ -100,6 +103,7 @@ library , pretty-simple , random >=1.2.0 , regex-tdfa + , semigroupoids , stm , text , transformers @@ -150,6 +154,7 @@ executable integration-tests ghc-options: -Wall -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0 build-depends: ListLike + , adjunctions , async , base , bytestring @@ -157,6 +162,7 @@ executable integration-tests , containers >=0.6.3 , cryptonite , directory + , distributive , easytest , errors , extra @@ -171,6 +177,7 @@ executable integration-tests , process , random >=1.2.0 , regex-tdfa + , semigroupoids , shellmet , stm , text @@ -221,6 +228,7 @@ executable transcripts unison build-depends: ListLike + , adjunctions , async , base , bytestring @@ -228,6 +236,7 @@ executable transcripts , containers >=0.6.3 , cryptonite , directory + , distributive , easytest , errors , extra @@ -242,6 +251,7 @@ executable transcripts , process , random >=1.2.0 , regex-tdfa + , semigroupoids , shellmet , stm , text @@ -292,6 +302,7 @@ executable unison ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-I0 -optP-Wno-nonportable-include-path build-depends: ListLike + , adjunctions , async , base , bytestring @@ -299,6 +310,7 @@ executable unison , containers >=0.6.3 , cryptonite , directory + , distributive , errors , extra , filepath @@ -312,6 +324,7 @@ executable unison , pretty-simple , random >=1.2.0 , regex-tdfa + , semigroupoids , shellmet , stm , template-haskell @@ -369,6 +382,7 @@ test-suite tests ghc-options: -Wall build-depends: ListLike + , adjunctions , async , base , bytestring @@ -376,6 +390,7 @@ test-suite tests , containers >=0.6.3 , cryptonite , directory + , distributive , easytest , errors , extra @@ -390,6 +405,7 @@ test-suite tests , pretty-simple , random >=1.2.0 , regex-tdfa + , semigroupoids , shellmet , stm , temporary From 5038744c76029d7b052cfef82f6c6e1d25005d3f Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 14 Jan 2022 13:15:23 -0600 Subject: [PATCH 167/297] Seems to be working --- .../src/Unison/Codebase/Editor/HandleInput.hs | 12 ++++--- .../src/Unison/Codebase/Editor/Output.hs | 2 +- .../src/Unison/Codebase/Editor/Slurp.hs | 36 ++++++++++++------- 3 files changed, 33 insertions(+), 17 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index b050b7c0cf..07d195d346 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -260,18 +260,22 @@ loop = do loadUnisonFile sourceName text = do let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) withFile [] sourceName (text, lexed) $ \unisonFile -> do - sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames + sr <- NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile unisonFile + Nothing + <$> currentPathNames + let oldSlurpResult = NewSlurp.toSlurpResult unisonFile NewSlurp.UpdateOp Nothing sr + -- sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames names <- displayNames unisonFile pped <- prettyPrintEnvDecl names let ppe = PPE.suffixifiedPPE pped - eval . Notify $ Typechecked sourceName ppe sr unisonFile + respond $ Typechecked sourceName ppe oldSlurpResult unisonFile unlessError' EvaluationFailure do (bindings, e) <- ExceptT . eval . Evaluate ppe $ unisonFile lift do let e' = Map.map go e go (ann, kind, _hash, _uneval, eval, isHit) = (ann, kind, eval, isHit) unless (null e') $ - eval . Notify $ Evaluated text ppe bindings e' + respond $ Evaluated text ppe bindings e' LoopState.latestTypecheckedFile .= Just unisonFile case e of @@ -1268,7 +1272,7 @@ loop = do stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . NewSlurp.selectDefinitions adds $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf - let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.AddOp vars sr + let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.AddOp (Just vars) sr respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) oldSlurpResult -- respond $ NewSlurpOutput input (PPE.suffixifiedPPE ppe) NewSlurp.AddOp sr addDefaultMetadata adds diff --git a/unison-cli/src/Unison/Codebase/Editor/Output.hs b/unison-cli/src/Unison/Codebase/Editor/Output.hs index 473531096b..b4b6120c14 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Output.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Output.hs @@ -316,7 +316,7 @@ isFailure o = case o of ListOfDefinitions _ _ ds -> null ds ListOfPatches s -> Set.null s SlurpOutput _ _ sr -> not $ SR.isOk sr - NewSlurpOutput _ _ slurpOp sr -> NewSlurp.anyErrors slurpOp sr + NewSlurpOutput _ _ _ sr -> NewSlurp.anyErrors sr ParseErrors {} -> True TypeErrors {} -> True CompilerBugs {} -> True diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 98f98d4d44..ca24fc55b3 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -55,6 +55,7 @@ data BlockStatus v = Add | Duplicated | NeedsUpdate (TypeOrTermVar v) + | Update | ErrFrom (TypeOrTermVar v) SlurpErr | SelfErr SlurpErr deriving (Eq, Ord) @@ -64,6 +65,8 @@ instance Semigroup (BlockStatus v) where _ <> SelfErr err = SelfErr err ErrFrom v err <> _ = ErrFrom v err _ <> ErrFrom v err = ErrFrom v err + Update <> _ = Update + _ <> Update = Update NeedsUpdate v <> _ = NeedsUpdate v _ <> NeedsUpdate v = NeedsUpdate v Add <> _ = Add @@ -141,7 +144,7 @@ results sr@(SlurpResult {termNotes, typeNotes}) = & Map.toList & fmap ( \(v, (_, deps)) -> - ( Semigroup.foldMap1 (getBlockStatus sr) (TermVar v NEList.:| Set.toList deps), + ( Semigroup.fold1 (getBlockStatus False sr (TermVar v) NEList.:| fmap (getBlockStatus True sr) (Set.toList deps)), mempty {SC.terms = Set.singleton v} ) ) @@ -152,19 +155,19 @@ results sr@(SlurpResult {termNotes, typeNotes}) = & Map.toList & fmap ( \(v, (_, deps)) -> - ( Semigroup.foldMap1 (getBlockStatus sr) (TypeVar v NEList.:| Set.toList deps), + ( Semigroup.fold1 (getBlockStatus False sr (TypeVar v) NEList.:| fmap (getBlockStatus True sr) (Set.toList deps)), mempty {SC.types = Set.singleton v} ) ) & Map.fromListWith (<>) -getBlockStatus :: (Ord v, Show v) => SlurpResult v -> TypeOrTermVar v -> BlockStatus v -getBlockStatus (SlurpResult {termNotes, typeNotes}) tv = +getBlockStatus :: (Ord v, Show v) => Bool -> SlurpResult v -> TypeOrTermVar v -> BlockStatus v +getBlockStatus isDep (SlurpResult {termNotes, typeNotes}) tv = let defNotes = case tv of TypeVar v -> fromMaybe (error $ "Expected " <> show v <> " in typeNotes") $ Map.lookup v typeNotes TermVar v -> fromMaybe (error $ "Expected " <> show v <> " in termNotes") $ Map.lookup v termNotes in case fst defNotes of - DefOk Updated -> NeedsUpdate tv + DefOk Updated -> if isDep then NeedsUpdate tv else Update DefErr err -> ErrFrom tv err DefOk New -> Add DefOk Duplicate -> Duplicated @@ -474,14 +477,17 @@ toSlurpResult :: Ord v => UF.TypecheckedUnisonFile v Ann -> SlurpOp -> - Set v -> + Maybe (Set v) -> Result v -> OldSlurp.SlurpResult v -toSlurpResult uf op vs r = +toSlurpResult uf op mvs r = -- TODO: Do a proper partition to speed this up. OldSlurp.SlurpResult { OldSlurp.originalFile = uf, - OldSlurp.extraDefinitions = SC.difference (fold r) (SlurpComponent vs vs), + OldSlurp.extraDefinitions = + case mvs of + Nothing -> mempty + Just vs -> SC.difference (fold r) (SlurpComponent vs vs), OldSlurp.adds = adds, OldSlurp.duplicates = duplicates, OldSlurp.collisions = if op == AddOp then updates else mempty, @@ -506,8 +512,13 @@ toSlurpResult uf op vs r = case k of Add -> (sc, mempty, mempty, mempty, (mempty, mempty)) Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty)) + Update -> (mempty, mempty, sc, mempty, (mempty, mempty)) NeedsUpdate v -> - (mempty, mempty, singletonSC v, mempty, (mempty, sc `SC.difference` singletonSC v)) + case op of + AddOp -> + (mempty, mempty, singletonSC v, mempty, (mempty, sc `SC.difference` singletonSC v)) + UpdateOp -> + (sc, mempty, mempty, mempty, (mempty, mempty)) ErrFrom v TermCtorCollision -> (mempty, mempty, mempty, singletonSC v, (mempty, sc `SC.difference` singletonSC v)) ErrFrom v CtorTermCollision -> (mempty, mempty, mempty, mempty, (singletonSC v, sc `SC.difference` singletonSC v)) SelfErr TermCtorCollision -> (mempty, mempty, mempty, sc, (mempty, mempty)) @@ -517,15 +528,16 @@ toSlurpResult uf op vs r = TypeVar v -> SlurpComponent {terms = mempty, types = Set.singleton v} TermVar v -> SlurpComponent {terms = Set.singleton v, types = mempty} -anyErrors :: SlurpOp -> Result v -> Bool -anyErrors op r = +anyErrors :: Result v -> Bool +anyErrors r = any isError . Map.keys $ Map.filter (not . SC.isEmpty) r where isError :: BlockStatus v -> Bool isError = \case Add -> False Duplicated -> False + Update {} -> False -- NeedsUpdate is an error only if we're trying to Add - NeedsUpdate {} -> op == AddOp + NeedsUpdate {} -> True ErrFrom {} -> True SelfErr {} -> True From 9f85ff5e5611218e19a8b04c7babc26b2790f985 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 14 Jan 2022 13:23:58 -0600 Subject: [PATCH 168/297] Checkpoint --- .../src/Unison/Codebase/Editor/HandleInput.hs | 58 ++++++++++--------- .../src/Unison/Codebase/Editor/Input.hs | 7 +-- .../src/Unison/CommandLine/InputPatterns.hs | 30 ++-------- 3 files changed, 39 insertions(+), 56 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 07d195d346..1178ec716d 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1277,23 +1277,24 @@ loop = do -- respond $ NewSlurpOutput input (PPE.suffixifiedPPE ppe) NewSlurp.AddOp sr addDefaultMetadata adds syncRoot - PreviewAddI hqs -> case (latestFile', uf) of + PreviewAddI names -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do - sr <- - Slurp.disallowUpdates - . applySelection hqs uf - . toSlurpResult currentPath' uf - <$> slurpResultNames - previewResponse sourceName sr uf + let vars = Set.map Name.toVar names + sr <- NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf + (Just vars) + <$> currentPathNames + let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) sr + previewResponse sourceName oldSlurpResult uf _ -> respond NoUnisonFile - UpdateI maybePatchPath hqs -> handleUpdate input maybePatchPath hqs - PreviewUpdateI hqs -> case (latestFile', uf) of + UpdateI maybePatchPath names -> handleUpdate input maybePatchPath names + PreviewUpdateI names -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do - sr <- - applySelection hqs uf - . toSlurpResult currentPath' uf - <$> slurpResultNames - previewResponse sourceName sr uf + let vars = Set.map Name.toVar names + sr <- NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf + (Just vars) + <$> currentPathNames + let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) sr + previewResponse sourceName oldSlurpResult uf _ -> respond NoUnisonFile TodoI patchPath branchPath' -> do patch <- getPatchAt (fromMaybe defaultPatchPath patchPath) @@ -1814,8 +1815,9 @@ handleShowDefinition outputLoc inputQuery = do Just (path, _) -> Just path -- | Handle an @update@ command. -handleUpdate :: forall m v. (Monad m, Var v) => Input -> Maybe PatchPath -> [HQ'.HashQualified Name] -> Action' m v () -handleUpdate input maybePatchPath hqs = do +handleUpdate :: forall m v. (Monad m, Var v) => Input -> Maybe PatchPath -> Set Name -> Action' m v () +handleUpdate input maybePatchPath names = do + let vars = Set.map Name.toVar names use LoopState.latestTypecheckedFile >>= \case Nothing -> respond NoUnisonFile Just uf -> do @@ -1830,11 +1832,15 @@ handleUpdate input maybePatchPath hqs = do let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames - let sr :: SlurpResult v - sr = - applySelection hqs uf - . toSlurpResult currentPath' uf - $ slurpCheckNames + let newSR = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf + (Just vars) + $ slurpCheckNames + let sr = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) newSR + -- let sr :: SlurpResult v + -- sr = + -- applySelection hqs uf + -- . toSlurpResult currentPath' uf + -- $ slurpCheckNames addsAndUpdates :: SlurpComponent v addsAndUpdates = Slurp.updates sr <> Slurp.adds sr fileNames :: Names @@ -2759,15 +2765,15 @@ getEndangeredDependents getDependents namesToDelete rootNames = do -- meaning that adds/updates should only contain the selection or its transitive -- dependencies, any unselected transitive dependencies of the selection will -- be added to `extraDefinitions`. -applySelection :: +_applySelection :: forall v a. Var v => [HQ'.HashQualified Name] -> UF.TypecheckedUnisonFile v a -> SlurpResult v -> SlurpResult v -applySelection [] _ = id -applySelection hqs file = \sr@SlurpResult {adds, updates} -> +_applySelection [] _ = id +_applySelection hqs file = \sr@SlurpResult {adds, updates} -> sr { adds = adds `SC.intersection` closed, updates = updates `SC.intersection` closed, @@ -2786,14 +2792,14 @@ applySelection hqs file = \sr@SlurpResult {adds, updates} -> var :: Var v => Name -> v var name = Var.named (Name.toText name) -toSlurpResult :: +_toSlurpResult :: forall v. Var v => Path.Absolute -> UF.TypecheckedUnisonFile v Ann -> Names -> SlurpResult v -toSlurpResult curPath uf existingNames = +_toSlurpResult curPath uf existingNames = Slurp.subtractComponent (conflicts <> ctorCollisions) $ SlurpResult uf diff --git a/unison-cli/src/Unison/Codebase/Editor/Input.hs b/unison-cli/src/Unison/Codebase/Editor/Input.hs index 374b90c491..ac55110b97 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Input.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Input.hs @@ -26,7 +26,6 @@ import qualified Unison.Codebase.ShortBranchHash as SBH import Unison.Codebase.SyncMode (SyncMode) import Unison.Codebase.Verbosity import qualified Unison.HashQualified as HQ -import qualified Unison.HashQualified' as HQ' import Unison.Name (Name) import Unison.NameSegment (NameSegment) import Unison.Prelude @@ -113,9 +112,9 @@ data Input -- edits stuff: | LoadI (Maybe FilePath) | AddI (Set Name) - | PreviewAddI [HQ'.HashQualified Name] - | UpdateI (Maybe PatchPath) [HQ'.HashQualified Name] - | PreviewUpdateI [HQ'.HashQualified Name] + | PreviewAddI (Set Name) + | UpdateI (Maybe PatchPath) (Set Name) + | PreviewUpdateI (Set Name) | TodoI (Maybe PatchPath) Path' | PropagatePatchI PatchPath Path' | ListEditsI (Maybe PatchPath) diff --git a/unison-cli/src/Unison/CommandLine/InputPatterns.hs b/unison-cli/src/Unison/CommandLine/InputPatterns.hs index 53138b7c4c..24c7198ce5 100644 --- a/unison-cli/src/Unison/CommandLine/InputPatterns.hs +++ b/unison-cli/src/Unison/CommandLine/InputPatterns.hs @@ -40,7 +40,6 @@ import Unison.CommandLine.InputPattern ) import qualified Unison.CommandLine.InputPattern as I import qualified Unison.HashQualified as HQ -import qualified Unison.HashQualified' as HQ' import Unison.Name (Name) import qualified Unison.Name as Name import Unison.NameSegment (NameSegment (NameSegment)) @@ -195,15 +194,7 @@ previewAdd = <> "results. Use `load` to reparse & typecheck the file if the context " <> "has changed." ) - $ \ws -> case traverse HQ'.fromString ws of - Just ws -> pure $ Input.PreviewAddI ws - Nothing -> - Left - . warn - . P.lines - . fmap fromString - . ("I don't know what these refer to:\n" :) - $ collectNothings HQ'.fromString ws + $ \ws -> pure $ Input.PreviewAddI (Set.fromList $ map Name.unsafeFromString ws) update :: InputPattern update = @@ -240,13 +231,8 @@ update = ( \case patchStr : ws -> do patch <- first fromString $ Path.parseSplit' Path.definitionNameSegment patchStr - case traverse HQ'.fromString ws of - Just ws -> Right $ Input.UpdateI (Just patch) ws - Nothing -> - Left . warn . P.lines . fmap fromString - . ("I don't know what these refer to:\n" :) - $ collectNothings HQ'.fromString ws - [] -> Right $ Input.UpdateI Nothing [] + pure $ Input.UpdateI (Just patch) (Set.fromList $ map Name.unsafeFromString ws) + [] -> Right $ Input.UpdateI Nothing mempty ) previewUpdate :: InputPattern @@ -260,15 +246,7 @@ previewUpdate = <> "typechecking results. Use `load` to reparse & typecheck the file if " <> "the context has changed." ) - $ \ws -> case traverse HQ'.fromString ws of - Just ws -> pure $ Input.PreviewUpdateI ws - Nothing -> - Left - . warn - . P.lines - . fmap fromString - . ("I don't know what these refer to:\n" :) - $ collectNothings HQ'.fromString ws + $ \ws -> pure $ Input.PreviewUpdateI (Set.fromList $ map Name.unsafeFromString ws) patch :: InputPattern patch = From 1360594b6122b7087195f45506c54283c1d757dd Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 14 Jan 2022 15:47:06 -0600 Subject: [PATCH 169/297] Wire up aliasing --- .../src/Unison/Codebase/Editor/HandleInput.hs | 314 +++++++++--------- .../src/Unison/Codebase/Editor/Slurp.hs | 68 ++-- 2 files changed, 205 insertions(+), 177 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 1178ec716d..4168b138e1 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -260,10 +260,12 @@ loop = do loadUnisonFile sourceName text = do let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) withFile [] sourceName (text, lexed) $ \unisonFile -> do - sr <- NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile unisonFile + currentNames <- currentPathNames + let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile unisonFile Nothing - <$> currentPathNames + $ currentNames let oldSlurpResult = NewSlurp.toSlurpResult unisonFile NewSlurp.UpdateOp Nothing sr + & addAliases currentNames unisonFile currentPath' -- sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames names <- displayNames unisonFile pped <- prettyPrintEnvDecl names @@ -1260,9 +1262,10 @@ loop = do case uf of Nothing -> respond NoUnisonFile Just uf -> do - sr <- NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf + currentNames <- currentPathNames + let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf (if null vars then Nothing else Just vars) - <$> currentPathNames + $ currentNames -- sr <- -- Slurp.disallowUpdates -- . applySelection hqs uf @@ -1273,6 +1276,7 @@ loop = do eval . AddDefsToCodebase . NewSlurp.selectDefinitions adds $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.AddOp (Just vars) sr + & addAliases currentNames uf currentPath' respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) oldSlurpResult -- respond $ NewSlurpOutput input (PPE.suffixifiedPPE ppe) NewSlurp.AddOp sr addDefaultMetadata adds @@ -1280,20 +1284,24 @@ loop = do PreviewAddI names -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names - sr <- NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf + currentNames <- currentPathNames + let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf (Just vars) - <$> currentPathNames + $ currentNames let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) sr + & addAliases currentNames uf currentPath' previewResponse sourceName oldSlurpResult uf _ -> respond NoUnisonFile UpdateI maybePatchPath names -> handleUpdate input maybePatchPath names PreviewUpdateI names -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names - sr <- NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf + currentNames <- currentPathNames + let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf (Just vars) - <$> currentPathNames + $ currentNames let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) sr + & addAliases currentNames uf currentPath' previewResponse sourceName oldSlurpResult uf _ -> respond NoUnisonFile TodoI patchPath branchPath' -> do @@ -2792,167 +2800,173 @@ _applySelection hqs file = \sr@SlurpResult {adds, updates} -> var :: Var v => Name -> v var name = Var.named (Name.toText name) -_toSlurpResult :: - forall v. - Var v => - Path.Absolute -> - UF.TypecheckedUnisonFile v Ann -> - Names -> - SlurpResult v -_toSlurpResult curPath uf existingNames = - Slurp.subtractComponent (conflicts <> ctorCollisions) $ - SlurpResult - uf - mempty - adds - dups - mempty - conflicts - updates - termCtorCollisions - ctorTermCollisions - termAliases - typeAliases - mempty +-- _toSlurpResult :: +-- forall v. +-- Var v => +-- Path.Absolute -> +-- UF.TypecheckedUnisonFile v Ann -> +-- Names -> +-- SlurpResult v +-- _toSlurpResult curPath uf existingNames = +-- Slurp.subtractComponent (conflicts <> ctorCollisions) $ +-- SlurpResult +-- uf +-- mempty +-- adds +-- dups +-- mempty +-- conflicts +-- updates +-- termCtorCollisions +-- ctorTermCollisions +-- termAliases +-- typeAliases +-- mempty +-- where +-- fileNames = UF.typecheckedToNames uf + +-- sc :: R.Relation Name Referent -> R.Relation Name Reference -> SlurpComponent v +-- sc terms types = +-- SlurpComponent +-- { terms = Set.map var (R.dom terms), +-- types = Set.map var (R.dom types) +-- } + +-- -- conflict (n,r) if n is conflicted in names0 +-- conflicts :: SlurpComponent v +-- conflicts = sc terms types +-- where +-- terms = +-- R.filterDom +-- (conflicted . Names.termsNamed existingNames) +-- (Names.terms fileNames) +-- types = +-- R.filterDom +-- (conflicted . Names.typesNamed existingNames) +-- (Names.types fileNames) +-- conflicted s = Set.size s > 1 + +-- ctorCollisions :: SlurpComponent v +-- ctorCollisions = +-- mempty {SC.terms = termCtorCollisions <> ctorTermCollisions} + +-- -- termCtorCollision (n,r) if (n, r' /= r) exists in existingNames and +-- -- r is Ref and r' is Con +-- termCtorCollisions :: Set v +-- termCtorCollisions = +-- Set.fromList +-- [ var n +-- | (n, Referent.Ref {}) <- R.toList (Names.terms fileNames), +-- [r@Referent.Con {}] <- [toList $ Names.termsNamed existingNames n], +-- -- ignore collisions w/ ctors of types being updated +-- Set.notMember (Referent.toReference r) typesToUpdate +-- ] + +-- -- the set of typerefs that are being updated by this file +-- typesToUpdate :: Set Reference +-- typesToUpdate = +-- Set.fromList +-- [ r +-- | (n, r') <- R.toList (Names.types fileNames), +-- r <- toList (Names.typesNamed existingNames n), +-- r /= r' +-- ] + +-- -- ctorTermCollisions (n,r) if (n, r' /= r) exists in names0 and r is Con +-- -- and r' is Ref except we relaxed it to where r' can be Con or Ref +-- -- what if (n,r) and (n,r' /= r) exists in names and r, r' are Con +-- ctorTermCollisions :: Set v +-- ctorTermCollisions = +-- Set.fromList +-- [ var n +-- | (n, Referent.Con {}) <- R.toList (Names.terms fileNames), +-- r <- toList $ Names.termsNamed existingNames n, +-- -- ignore collisions w/ ctors of types being updated +-- Set.notMember (Referent.toReference r) typesToUpdate, +-- Set.notMember (var n) (terms dups) +-- ] + +-- -- duplicate (n,r) if (n,r) exists in names0 +-- dups :: SlurpComponent v +-- dups = sc terms types +-- where +-- terms = R.intersection (Names.terms existingNames) (Names.terms fileNames) +-- types = R.intersection (Names.types existingNames) (Names.types fileNames) + +-- -- update (n,r) if (n,r' /= r) exists in existingNames and r, r' are Ref +-- updates :: SlurpComponent v +-- updates = SlurpComponent (Set.fromList types) (Set.fromList terms) +-- where +-- terms = +-- [ var n +-- | (n, r'@Referent.Ref {}) <- R.toList (Names.terms fileNames), +-- [r@Referent.Ref {}] <- [toList $ Names.termsNamed existingNames n], +-- r' /= r +-- ] +-- types = +-- [ var n +-- | (n, r') <- R.toList (Names.types fileNames), +-- [r] <- [toList $ Names.typesNamed existingNames n], +-- r' /= r +-- ] + + +-- -- (n,r) is in `adds` if n isn't in existingNames +-- adds = sc terms types +-- where +-- terms = addTerms (Names.terms existingNames) (Names.terms fileNames) +-- types = addTypes (Names.types existingNames) (Names.types fileNames) +-- addTerms existingNames = R.filter go +-- where +-- go (n, Referent.Ref {}) = (not . R.memberDom n) existingNames +-- go _ = False +-- addTypes existingNames = R.filter go +-- where +-- go (n, _) = (not . R.memberDom n) existingNames + + +addAliases :: forall v a. (Ord v, Var v) => Names -> UF.TypecheckedUnisonFile v a -> Path.Absolute -> SlurpResult v -> SlurpResult v +addAliases existingNames uf curPath sr = sr{ termAlias=termAliases, typeAlias=typeAliases } where fileNames = UF.typecheckedToNames uf - - sc :: R.Relation Name Referent -> R.Relation Name Reference -> SlurpComponent v - sc terms types = - SlurpComponent - { terms = Set.map var (R.dom terms), - types = Set.map var (R.dom types) - } - - -- conflict (n,r) if n is conflicted in names0 - conflicts :: SlurpComponent v - conflicts = sc terms types - where - terms = - R.filterDom - (conflicted . Names.termsNamed existingNames) - (Names.terms fileNames) - types = - R.filterDom - (conflicted . Names.typesNamed existingNames) - (Names.types fileNames) - conflicted s = Set.size s > 1 - - ctorCollisions :: SlurpComponent v - ctorCollisions = - mempty {SC.terms = termCtorCollisions <> ctorTermCollisions} - - -- termCtorCollision (n,r) if (n, r' /= r) exists in existingNames and - -- r is Ref and r' is Con - termCtorCollisions :: Set v - termCtorCollisions = - Set.fromList - [ var n - | (n, Referent.Ref {}) <- R.toList (Names.terms fileNames), - [r@Referent.Con {}] <- [toList $ Names.termsNamed existingNames n], - -- ignore collisions w/ ctors of types being updated - Set.notMember (Referent.toReference r) typesToUpdate - ] - - -- the set of typerefs that are being updated by this file - typesToUpdate :: Set Reference - typesToUpdate = - Set.fromList - [ r - | (n, r') <- R.toList (Names.types fileNames), - r <- toList (Names.typesNamed existingNames n), - r /= r' - ] - - -- ctorTermCollisions (n,r) if (n, r' /= r) exists in names0 and r is Con - -- and r' is Ref except we relaxed it to where r' can be Con or Ref - -- what if (n,r) and (n,r' /= r) exists in names and r, r' are Con - ctorTermCollisions :: Set v - ctorTermCollisions = - Set.fromList - [ var n - | (n, Referent.Con {}) <- R.toList (Names.terms fileNames), - r <- toList $ Names.termsNamed existingNames n, - -- ignore collisions w/ ctors of types being updated - Set.notMember (Referent.toReference r) typesToUpdate, - Set.notMember (var n) (terms dups) - ] - - -- duplicate (n,r) if (n,r) exists in names0 - dups :: SlurpComponent v - dups = sc terms types - where - terms = R.intersection (Names.terms existingNames) (Names.terms fileNames) - types = R.intersection (Names.types existingNames) (Names.types fileNames) - - -- update (n,r) if (n,r' /= r) exists in existingNames and r, r' are Ref - updates :: SlurpComponent v - updates = SlurpComponent (Set.fromList types) (Set.fromList terms) - where - terms = - [ var n - | (n, r'@Referent.Ref {}) <- R.toList (Names.terms fileNames), - [r@Referent.Ref {}] <- [toList $ Names.termsNamed existingNames n], - r' /= r - ] - types = - [ var n - | (n, r') <- R.toList (Names.types fileNames), - [r] <- [toList $ Names.typesNamed existingNames n], - r' /= r - ] - buildAliases :: R.Relation Name Referent -> R.Relation Name Referent -> Set v -> Map v Slurp.Aliases - buildAliases existingNames namesFromFile duplicates = - Map.fromList - [ ( var n, - if null aliasesOfOld - then Slurp.AddAliases aliasesOfNew - else Slurp.UpdateAliases aliasesOfOld aliasesOfNew - ) - | (n, r@Referent.Ref {}) <- R.toList namesFromFile, - -- All the refs whose names include `n`, and are not `r` - let refs = Set.delete r $ R.lookupDom n existingNames - aliasesOfNew = - Set.map (Path.unprefixName curPath) . Set.delete n $ - R.lookupRan r existingNames - aliasesOfOld = - Set.map (Path.unprefixName curPath) . Set.delete n . R.dom $ - R.restrictRan existingNames refs, - not (null aliasesOfNew && null aliasesOfOld), - Set.notMember (var n) duplicates - ] + buildAliases existingNames namesFromFile dups = + Map.fromList + [ ( var n, + if null aliasesOfOld + then Slurp.AddAliases aliasesOfNew + else Slurp.UpdateAliases aliasesOfOld aliasesOfNew + ) + | (n, r@Referent.Ref {}) <- R.toList namesFromFile, + -- All the refs whose names include `n`, and are not `r` + let refs = Set.delete r $ R.lookupDom n existingNames + aliasesOfNew = + Set.map (Path.unprefixName curPath) . Set.delete n $ + R.lookupRan r existingNames + aliasesOfOld = + Set.map (Path.unprefixName curPath) . Set.delete n . R.dom $ + R.restrictRan existingNames refs, + not (null aliasesOfNew && null aliasesOfOld), + Set.notMember (var n) dups + ] termAliases :: Map v Slurp.Aliases termAliases = buildAliases (Names.terms existingNames) (Names.terms fileNames) - (SC.terms dups) + (SC.terms (duplicates sr)) typeAliases :: Map v Slurp.Aliases typeAliases = buildAliases (R.mapRan Referent.Ref $ Names.types existingNames) (R.mapRan Referent.Ref $ Names.types fileNames) - (SC.types dups) - - -- (n,r) is in `adds` if n isn't in existingNames - adds = sc terms types - where - terms = addTerms (Names.terms existingNames) (Names.terms fileNames) - types = addTypes (Names.types existingNames) (Names.types fileNames) - addTerms existingNames = R.filter go - where - go (n, Referent.Ref {}) = (not . R.memberDom n) existingNames - go _ = False - addTypes existingNames = R.filter go - where - go (n, _) = (not . R.memberDom n) existingNames + (SC.types (duplicates sr)) displayI :: Monad m => diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index ca24fc55b3..a9ddbe6dde 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -48,7 +48,10 @@ data SlurpOp = AddOp | UpdateOp data LabeledVar v = LabeledVar v LD.LabeledDependency deriving (Eq, Ord) -data SlurpStatus = New | Updated | Duplicate +data SlurpStatus + = New + | Updated + | Duplicate deriving (Eq, Ord, Show) data BlockStatus v @@ -58,7 +61,7 @@ data BlockStatus v | Update | ErrFrom (TypeOrTermVar v) SlurpErr | SelfErr SlurpErr - deriving (Eq, Ord) + deriving (Eq, Ord, Show) instance Semigroup (BlockStatus v) where SelfErr err <> _ = SelfErr err @@ -71,7 +74,7 @@ instance Semigroup (BlockStatus v) where _ <> NeedsUpdate v = NeedsUpdate v Add <> _ = Add _ <> Add = Add - Duplicated <> Duplicated = Duplicated + Duplicated <> _ = Duplicated data TypeOrTermVar v = TypeVar v | TermVar v deriving (Eq, Ord, Show) @@ -136,7 +139,8 @@ type Result v = Map (BlockStatus v) (SlurpComponent v) -- Compute all definitions which can be added, or the reasons why a def can't be added. results :: forall v. (Ord v, Show v) => SlurpResult v -> Result v results sr@(SlurpResult {termNotes, typeNotes}) = - Map.unionWith (<>) analyzedTerms analyzedTypes + pTraceShowId $ + Map.unionWith (<>) analyzedTerms analyzedTypes where analyzedTerms :: Map (BlockStatus v) (SlurpComponent v) analyzedTerms = @@ -233,6 +237,12 @@ analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = ) in SlurpResult {termNotes = termStatuses, typeNotes = typeStatuses} where + -- TODO: types and terms which have the same hash are currently treated as dependencies of + -- one another even if they don't actually reference each other. + -- E.g. + -- structural type X = x + -- structural type Y = y + -- will say that X and Y depend on each other since they have the same component hash. transitiveCHDeps :: Map ComponentHash (Set ComponentHash) transitiveCHDeps = componentTransitiveDeps uf @@ -241,6 +251,7 @@ analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = -- This version is for when you don't know whether a var is a type or term -- E.g., if the user types 'add x', we don't know whether x is a term, type, or -- constructor, so we add all of them. + -- Includes self transitiveVarDeps :: v -> Set (LabeledVar v) transitiveVarDeps v = Rel3.lookupD1 v varRelation @@ -252,6 +263,7 @@ analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = -- Find all variables within all considered components & foldMap (\ch -> Rel.ran $ Rel3.lookupD3 ch varRelation) + -- Doesn't include self transitiveLabeledVarDeps :: LabeledVar v -> Set (LabeledVar v) transitiveLabeledVarDeps lv = Rel3.lookupD2 lv varRelation @@ -262,6 +274,7 @@ analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = ) -- Find all variables within all considered components & foldMap (\ch -> Rel.ran $ Rel3.lookupD3 ch varRelation) + & Set.delete lv defsToConsider :: Set v defsToConsider = case maybeDefsToConsider of @@ -474,35 +487,36 @@ selectDefinitions toSlurpResult :: forall v. - Ord v => + (Ord v, Show v) => UF.TypecheckedUnisonFile v Ann -> SlurpOp -> Maybe (Set v) -> Result v -> OldSlurp.SlurpResult v toSlurpResult uf op mvs r = - -- TODO: Do a proper partition to speed this up. - OldSlurp.SlurpResult - { OldSlurp.originalFile = uf, - OldSlurp.extraDefinitions = - case mvs of - Nothing -> mempty - Just vs -> SC.difference (fold r) (SlurpComponent vs vs), - OldSlurp.adds = adds, - OldSlurp.duplicates = duplicates, - OldSlurp.collisions = if op == AddOp then updates else mempty, - OldSlurp.conflicts = mempty, - OldSlurp.updates = if op == UpdateOp then updates else mempty, - OldSlurp.termExistingConstructorCollisions = - let SlurpComponent types terms = termCtorColl - in types <> terms, - OldSlurp.constructorExistingTermCollisions = - let SlurpComponent types terms = ctorTermColl - in types <> terms, - OldSlurp.termAlias = mempty, - OldSlurp.typeAlias = mempty, - OldSlurp.defsWithBlockedDependencies = blocked - } + pTraceShowId $ + -- TODO: Do a proper partition to speed this up. + OldSlurp.SlurpResult + { OldSlurp.originalFile = uf, + OldSlurp.extraDefinitions = + case mvs of + Nothing -> mempty + Just vs -> SC.difference (fold r) (SlurpComponent vs vs), + OldSlurp.adds = adds, + OldSlurp.duplicates = duplicates, + OldSlurp.collisions = if op == AddOp then updates else mempty, + OldSlurp.conflicts = mempty, + OldSlurp.updates = if op == UpdateOp then updates else mempty, + OldSlurp.termExistingConstructorCollisions = + let SlurpComponent types terms = termCtorColl + in types <> terms, + OldSlurp.constructorExistingTermCollisions = + let SlurpComponent types terms = ctorTermColl + in types <> terms, + OldSlurp.termAlias = mempty, + OldSlurp.typeAlias = mempty, + OldSlurp.defsWithBlockedDependencies = blocked + } where adds, duplicates, updates, termCtorColl, ctorTermColl, blocked :: SlurpComponent v (adds, duplicates, updates, termCtorColl, (ctorTermColl, blocked)) = From fb139d62839033dbde41f64af52b1d8d9190b573 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 14 Jan 2022 16:23:54 -0600 Subject: [PATCH 170/297] Feature parity?? --- .../src/Unison/Codebase/Editor/Slurp.hs | 85 +++++++++++-------- 1 file changed, 49 insertions(+), 36 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index a9ddbe6dde..9f260bfa45 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -48,6 +48,13 @@ data SlurpOp = AddOp | UpdateOp data LabeledVar v = LabeledVar v LD.LabeledDependency deriving (Eq, Ord) +isTypeVar :: LabeledVar v -> Bool +isTypeVar (LabeledVar _ LD.TypeReference {}) = True +isTypeVar _ = False + +isTermVar :: LabeledVar v -> Bool +isTermVar = not . isTypeVar + data SlurpStatus = New | Updated @@ -206,7 +213,18 @@ analyzeTypecheckedUnisonFile :: analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = pTraceShowId $ let allInvolvedVars :: Set (LabeledVar v) - allInvolvedVars = foldMap transitiveVarDeps defsToConsider + allInvolvedVars = + case maybeDefsToConsider of + Nothing -> + allFileVars + Just vs -> + let lvs :: Set (LabeledVar v) = Set.unions $ do + v <- Set.toList vs + pure . Rel.dom $ Rel3.lookupD1 v varRelation + in foldMap transitiveLabeledVarDeps lvs + + allFileVars :: Set (LabeledVar v) + allFileVars = Rel3.d2s varRelation termStatuses, typeStatuses :: Map v (DefinitionNotes, Set (TypeOrTermVar v)) (termStatuses, typeStatuses) = @@ -243,45 +261,40 @@ analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = -- structural type X = x -- structural type Y = y -- will say that X and Y depend on each other since they have the same component hash. - transitiveCHDeps :: Map ComponentHash (Set ComponentHash) - transitiveCHDeps = + _transitiveCHDeps :: Map ComponentHash (Set ComponentHash) + _transitiveCHDeps = componentTransitiveDeps uf - -- Find all other file-local vars that a var depends on. - -- This version is for when you don't know whether a var is a type or term - -- E.g., if the user types 'add x', we don't know whether x is a term, type, or - -- constructor, so we add all of them. - -- Includes self - transitiveVarDeps :: v -> Set (LabeledVar v) - transitiveVarDeps v = - Rel3.lookupD1 v varRelation - & Rel.ran - -- Find all transitive components we rely on - & ( \chs -> - chs <> foldMap (\ch -> fold $ Map.lookup ch transitiveCHDeps) chs - ) - -- Find all variables within all considered components - & foldMap (\ch -> Rel.ran $ Rel3.lookupD3 ch varRelation) - - -- Doesn't include self + -- TODO: make this faster! transitiveLabeledVarDeps :: LabeledVar v -> Set (LabeledVar v) transitiveLabeledVarDeps lv = - Rel3.lookupD2 lv varRelation - & Rel.ran - -- Find all transitive components we rely on - & ( \chs -> - chs <> foldMap (\ch -> fold $ Map.lookup ch transitiveCHDeps) chs - ) - -- Find all variables within all considered components - & foldMap (\ch -> Rel.ran $ Rel3.lookupD3 ch varRelation) - & Set.delete lv - - defsToConsider :: Set v - defsToConsider = case maybeDefsToConsider of - Nothing -> - varRelation - & Rel3.d1s - Just vs -> vs + let SlurpComponent {terms, types} = SC.closeWithDependencies uf $ + case lv of + LabeledVar v (LD.TypeReference {}) -> mempty {types = Set.singleton v} + LabeledVar v _ -> mempty {terms = Set.singleton v} + labeledTerms = Set.fromList $ do + v <- Set.toList terms + lv <- Set.toList . Rel.dom $ Rel3.lookupD1 v varRelation + guard (isTermVar lv) + pure lv + labeledTypes = Set.fromList $ do + v <- Set.toList types + lv <- Set.toList . Rel.dom $ Rel3.lookupD1 v varRelation + guard (isTypeVar lv) + pure lv + in labeledTerms <> labeledTypes + + -- Rel3.lookupD2 + -- lv + -- varRelation + -- & Rel.ran + -- -- Find all transitive components we rely on + -- & ( \chs -> + -- chs <> foldMap (\ch -> fold $ Map.lookup ch transitiveCHDeps) chs + -- ) + -- -- Find all variables within all considered components + -- & foldMap (\ch -> Rel.ran $ Rel3.lookupD3 ch varRelation) + -- & Set.delete lv definitionStatus :: LabeledVar v -> DefinitionNotes definitionStatus (LabeledVar v ld) = From b24a20cd979adf1752058d8255add0a15b1a7427 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sat, 15 Jan 2022 19:25:01 -0600 Subject: [PATCH 171/297] WIP --- .../src/Unison/UnisonFile/Type.hs | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/parser-typechecker/src/Unison/UnisonFile/Type.hs b/parser-typechecker/src/Unison/UnisonFile/Type.hs index 13851cbdf7..bf91ff02b6 100644 --- a/parser-typechecker/src/Unison/UnisonFile/Type.hs +++ b/parser-typechecker/src/Unison/UnisonFile/Type.hs @@ -16,8 +16,7 @@ import Unison.Term (Term) import qualified Unison.Term as Term import Unison.Type (Type) import Unison.WatchKind (WatchKind) -import Unison.Hash (Hash) -import qualified Unison.LabeledDependency as LD +import qualified Unison.Util.Relation3 as Rel3 data UnisonFile v a = UnisonFileId { dataDeclarationsId :: Map v (Reference.Id, DataDeclaration v a), @@ -44,21 +43,6 @@ data TypecheckedUnisonFile v a = hashTermsId :: Map v (Reference.Id, Maybe WatchKind, Term v a, Type v a) } deriving Show --- Produce a mapping which includes all component hashes and the variables contained --- within them. --- This includes all kinds of definitions: types, terms, abilities, constructors -componentMap :: - TypecheckedUnisonFile v ann - -- Left is a variable for a type - -- Right is a variable for a term or constructor - -> Map Hash (Set (Either v v)) -componentMap _uf = undefined - -- TODO: watch components? - --- Produce a mapping which includes all variables their reference. -referencesMap :: TypecheckedUnisonFile v ann -> Map v LD.LabeledDependency -referencesMap _uf = undefined - {-# COMPLETE TypecheckedUnisonFile #-} pattern TypecheckedUnisonFile ds es tlcs wcs hts <- TypecheckedUnisonFileId (fmap (first Reference.DerivedId) -> ds) From a88dd94f3b6788ee4d7f8739c01504be1a045267 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sat, 15 Jan 2022 20:05:39 -0600 Subject: [PATCH 172/297] WIP --- lib/unison-prelude/src/Unison/Util/Set.hs | 4 + .../src/Unison/UnisonFile/Type.hs | 1 - .../src/Unison/Codebase/Editor/HandleInput.hs | 2 +- .../src/Unison/Codebase/Editor/Slurp.hs | 522 +++++++----------- .../Unison/Codebase/Editor/TermsAndTypes.hs | 43 +- 5 files changed, 257 insertions(+), 315 deletions(-) diff --git a/lib/unison-prelude/src/Unison/Util/Set.hs b/lib/unison-prelude/src/Unison/Util/Set.hs index 636d0322e3..838299dcaf 100644 --- a/lib/unison-prelude/src/Unison/Util/Set.hs +++ b/lib/unison-prelude/src/Unison/Util/Set.hs @@ -3,6 +3,7 @@ module Unison.Util.Set mapMaybe, symmetricDifference, Unison.Util.Set.traverse, + flatMap, ) where @@ -25,3 +26,6 @@ mapMaybe f = Set.fromList . Maybe.mapMaybe f . Set.toList traverse :: (Applicative f, Ord b) => (a -> f b) -> Set a -> f (Set b) traverse f = fmap Set.fromList . Prelude.traverse f . Set.toList + +flatMap :: Ord b => (a -> Set b) -> Set a -> Set b +flatMap f = Set.unions . fmap f . Set.toList diff --git a/parser-typechecker/src/Unison/UnisonFile/Type.hs b/parser-typechecker/src/Unison/UnisonFile/Type.hs index bf91ff02b6..f4c595a237 100644 --- a/parser-typechecker/src/Unison/UnisonFile/Type.hs +++ b/parser-typechecker/src/Unison/UnisonFile/Type.hs @@ -16,7 +16,6 @@ import Unison.Term (Term) import qualified Unison.Term as Term import Unison.Type (Type) import Unison.WatchKind (WatchKind) -import qualified Unison.Util.Relation3 as Rel3 data UnisonFile v a = UnisonFileId { dataDeclarationsId :: Map v (Reference.Id, DataDeclaration v a), diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 4168b138e1..53575d9844 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1271,7 +1271,7 @@ loop = do -- . applySelection hqs uf -- . toSlurpResult currentPath' uf -- <$> slurpResultNames - let adds = fromMaybe mempty . Map.lookup NewSlurp.Add $ sr + let adds = NewSlurp.toSlurpComponent . NewSlurp.sortVars . fromMaybe mempty . Map.lookup NewSlurp.Add $ sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . NewSlurp.selectDefinitions adds $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 9f260bfa45..470718493e 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -11,8 +11,9 @@ import Debug.Pretty.Simple (pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp +import Unison.Codebase.Editor.TermsAndTypes (TermedOrTyped (Termed, Typed), TermsAndTypes (TermsAndTypes)) +import qualified Unison.Codebase.Editor.TermsAndTypes as TT import qualified Unison.DataDeclaration as DD -import Unison.Hash (Hash) import qualified Unison.LabeledDependency as LD import Unison.Name (Name) import qualified Unison.Name as Name @@ -22,13 +23,11 @@ import Unison.Parser.Ann (Ann) import Unison.Prelude import qualified Unison.Reference as Ref import qualified Unison.Referent' as Referent -import Unison.Term (Term) -import qualified Unison.Term as Term import qualified Unison.UnisonFile as UF import qualified Unison.Util.Relation as Rel -import qualified Unison.Util.Relation3 as Rel3 import qualified Unison.Util.Set as Set import Unison.Var (Var) +import Unison.WatchKind (pattern TestWatch) -- Determine which components we're considering, i.e. find the components of all provided -- vars, then include any components they depend on. @@ -64,9 +63,9 @@ data SlurpStatus data BlockStatus v = Add | Duplicated - | NeedsUpdate (TypeOrTermVar v) + | NeedsUpdate (TermedOrTyped v) | Update - | ErrFrom (TypeOrTermVar v) SlurpErr + | ErrFrom (TermedOrTyped v) SlurpErr | SelfErr SlurpErr deriving (Eq, Ord, Show) @@ -83,28 +82,6 @@ instance Semigroup (BlockStatus v) where _ <> Add = Add Duplicated <> _ = Duplicated -data TypeOrTermVar v = TypeVar v | TermVar v - deriving (Eq, Ord, Show) - -labeledDepToComponentHash :: LD.LabeledDependency -> ComponentHash -labeledDepToComponentHash ld = - LD.fold unsafeComponentHashForReference (unsafeComponentHashForReference . Referent.toReference') ld - where - unsafeComponentHashForReference = - fromMaybe (error "Builtin encountered when var was expected") - . componentHashForReference - -componentHashForReference :: Ref.Reference -> Maybe Hash -componentHashForReference = - \case - Ref.Builtin {} -> Nothing - Ref.DerivedId (Ref.Id componentHash _ _) -> Just componentHash - -unlabeled :: TypeOrTermVar v -> v -unlabeled = \case - TypeVar v -> v - TermVar v -> v - data SlurpPrintout v = SlurpPrintout { notOk :: Map v SlurpErr, ok :: Map v SlurpStatus @@ -121,13 +98,9 @@ data DefinitionNotes | DefErr SlurpErr deriving (Show) -data SlurpResult v = SlurpResult - { termNotes :: Map v (DefinitionNotes, Set (TypeOrTermVar v)), - typeNotes :: Map v (DefinitionNotes, Set (TypeOrTermVar v)) - } - deriving (Show) +type SlurpResult v = Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) -type Result v = Map (BlockStatus v) (SlurpComponent v) +type Result v = Map (BlockStatus v) (Set (TermedOrTyped v)) -- data Result v = Result -- { addable :: SlurpComponent v, @@ -145,43 +118,28 @@ type Result v = Map (BlockStatus v) (SlurpComponent v) -- Compute all definitions which can be added, or the reasons why a def can't be added. results :: forall v. (Ord v, Show v) => SlurpResult v -> Result v -results sr@(SlurpResult {termNotes, typeNotes}) = - pTraceShowId $ - Map.unionWith (<>) analyzedTerms analyzedTypes +results sr = + pTraceShowId $ analyzed where - analyzedTerms :: Map (BlockStatus v) (SlurpComponent v) - analyzedTerms = - termNotes - & Map.toList - & fmap - ( \(v, (_, deps)) -> - ( Semigroup.fold1 (getBlockStatus False sr (TermVar v) NEList.:| fmap (getBlockStatus True sr) (Set.toList deps)), - mempty {SC.terms = Set.singleton v} - ) - ) - & Map.fromListWith (<>) - analyzedTypes :: Map (BlockStatus v) (SlurpComponent v) - analyzedTypes = - typeNotes + analyzed :: Map (BlockStatus v) (Set (TermedOrTyped v)) + analyzed = + sr & Map.toList & fmap - ( \(v, (_, deps)) -> - ( Semigroup.fold1 (getBlockStatus False sr (TypeVar v) NEList.:| fmap (getBlockStatus True sr) (Set.toList deps)), - mempty {SC.types = Set.singleton v} + ( \(tv, (defNotes, deps)) -> + ( Semigroup.fold1 (getBlockStatus False defNotes tv NEList.:| (Map.toList deps <&> \(depTV, depDefNotes) -> getBlockStatus True depDefNotes depTV)), + Set.singleton tv ) ) & Map.fromListWith (<>) -getBlockStatus :: (Ord v, Show v) => Bool -> SlurpResult v -> TypeOrTermVar v -> BlockStatus v -getBlockStatus isDep (SlurpResult {termNotes, typeNotes}) tv = - let defNotes = case tv of - TypeVar v -> fromMaybe (error $ "Expected " <> show v <> " in typeNotes") $ Map.lookup v typeNotes - TermVar v -> fromMaybe (error $ "Expected " <> show v <> " in termNotes") $ Map.lookup v termNotes - in case fst defNotes of - DefOk Updated -> if isDep then NeedsUpdate tv else Update - DefErr err -> ErrFrom tv err - DefOk New -> Add - DefOk Duplicate -> Duplicated +getBlockStatus :: (Ord v, Show v) => Bool -> DefinitionNotes -> TermedOrTyped v -> BlockStatus v +getBlockStatus isDep defNotes tv = + case defNotes of + DefOk Updated -> if isDep then NeedsUpdate tv else Update + DefErr err -> ErrFrom tv err + DefOk New -> Add + DefOk Duplicate -> Duplicated -- Need to know: -- What can be added without errors? @@ -189,116 +147,106 @@ getBlockStatus isDep (SlurpResult {termNotes, typeNotes}) tv = -- What has errors? -- What is blocked? -type ComponentHash = Hash - -data Components v = Components - { termComponents :: Map Hash (Set v), - typeComponents :: Map Hash (Set v) - } - --- groupByOp :: SlurpResult v -> (SlurpComponent v, SlurpComponent v) --- groupByOp (SlurpResult terms types) = --- terms --- & Map.mapEither (\(notes, deps) -> --- any (== ) --- ) - analyzeTypecheckedUnisonFile :: forall v. Var v => UF.TypecheckedUnisonFile v Ann -> Maybe (Set v) -> Names -> - SlurpResult v + (Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes))) analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = - pTraceShowId $ - let allInvolvedVars :: Set (LabeledVar v) - allInvolvedVars = - case maybeDefsToConsider of - Nothing -> - allFileVars - Just vs -> - let lvs :: Set (LabeledVar v) = Set.unions $ do - v <- Set.toList vs - pure . Rel.dom $ Rel3.lookupD1 v varRelation - in foldMap transitiveLabeledVarDeps lvs + let codebaseNames :: Names + codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames + varDeps :: Map (TermedOrTyped v) (Set (TermedOrTyped v)) + varDeps = computeVarDeps uf maybeDefsToConsider varRelation + statusMap :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) + statusMap = computeVarStatuses varDeps varRelation codebaseNames + in pTraceShowId statusMap + where + varRelation :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency + varRelation = undefined $ labelling uf - allFileVars :: Set (LabeledVar v) - allFileVars = Rel3.d2s varRelation +computeNamesWithDeprecations :: + Var v => + UF.TypecheckedUnisonFile v Ann -> + Names -> + Names +computeNamesWithDeprecations uf unalteredCodebaseNames = codebaseNames + where + -- Get the set of all DIRECT definitions in the file which a definition depends on. + codebaseNames :: Names + codebaseNames = + -- TODO: make faster + -- TODO: how does defsToConsider affect deprecations? + Names.filter (`Set.notMember` deprecatedConstructors) unalteredCodebaseNames + constructorNamesInFile :: Set Name + constructorNamesInFile = + Map.elems (UF.dataDeclarationsId' uf) + <> (fmap . fmap) DD.toDataDecl (Map.elems (UF.effectDeclarationsId' uf)) + & fmap snd + & concatMap + ( \decl -> + DD.constructors' decl <&> \(_ann, v, _typ) -> + Name.unsafeFromVar v + ) + & Set.fromList - termStatuses, typeStatuses :: Map v (DefinitionNotes, Set (TypeOrTermVar v)) - (termStatuses, typeStatuses) = - allInvolvedVars - & Set.toList - & fmap - ( \lv -> - (lv, (definitionStatus lv, transitiveLabeledVarDeps lv)) - ) - & Map.fromList - & Map.mapEitherWithKey - ( \lv x -> case lv of - LabeledVar _ (LD.TypeReference {}) -> Right x - LabeledVar _ (LD.TermReferent {}) -> Left x - ) - & over both (Map.mapKeys (\(LabeledVar v _) -> v)) - & over - both - ( Map.map - ( fmap - ( Set.map - ( \case - LabeledVar v (LD.TermReferent {}) -> TermVar v - LabeledVar v (LD.TypeReference {}) -> TypeVar v - ) - ) - ) - ) - in SlurpResult {termNotes = termStatuses, typeNotes = typeStatuses} + deprecatedConstructors :: Set Name + deprecatedConstructors = + let allRefIds = + fmap fst (Map.elems (UF.dataDeclarationsId' uf)) + <> fmap fst (Map.elems (UF.effectDeclarationsId' uf)) + existingConstructorsFromEditedTypes = Set.fromList $ do + -- List Monad + refId <- allRefIds + (name, _ref) <- Names.constructorsForType (Ref.DerivedId refId) unalteredCodebaseNames + pure name + in -- Compute any constructors which were deleted + existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile + +computeVarStatuses :: + forall v. + (Ord v, Var v) => + Map (TermedOrTyped v) (Set (TermedOrTyped v)) -> + Rel.Relation (TermedOrTyped v) LD.LabeledDependency -> + Names -> + ( Map + (TermedOrTyped v) + (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) + ) +computeVarStatuses depMap varRelation codebaseNames = statuses where - -- TODO: types and terms which have the same hash are currently treated as dependencies of - -- one another even if they don't actually reference each other. - -- E.g. - -- structural type X = x - -- structural type Y = y - -- will say that X and Y depend on each other since they have the same component hash. - _transitiveCHDeps :: Map ComponentHash (Set ComponentHash) - _transitiveCHDeps = - componentTransitiveDeps uf - - -- TODO: make this faster! - transitiveLabeledVarDeps :: LabeledVar v -> Set (LabeledVar v) - transitiveLabeledVarDeps lv = - let SlurpComponent {terms, types} = SC.closeWithDependencies uf $ - case lv of - LabeledVar v (LD.TypeReference {}) -> mempty {types = Set.singleton v} - LabeledVar v _ -> mempty {terms = Set.singleton v} - labeledTerms = Set.fromList $ do - v <- Set.toList terms - lv <- Set.toList . Rel.dom $ Rel3.lookupD1 v varRelation - guard (isTermVar lv) - pure lv - labeledTypes = Set.fromList $ do - v <- Set.toList types - lv <- Set.toList . Rel.dom $ Rel3.lookupD1 v varRelation - guard (isTypeVar lv) - pure lv - in labeledTerms <> labeledTypes - - -- Rel3.lookupD2 - -- lv - -- varRelation - -- & Rel.ran - -- -- Find all transitive components we rely on - -- & ( \chs -> - -- chs <> foldMap (\ch -> fold $ Map.lookup ch transitiveCHDeps) chs - -- ) - -- -- Find all variables within all considered components - -- & foldMap (\ch -> Rel.ran $ Rel3.lookupD3 ch varRelation) - -- & Set.delete lv - - definitionStatus :: LabeledVar v -> DefinitionNotes - definitionStatus (LabeledVar v ld) = - let existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) + statuses :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes)) + statuses = + let withNotes = + depMap + & Map.mapWithKey + ( \tv deps -> + (definitionStatus tv, deps) + ) + withTransitiveNotes :: + ( Map + (TermedOrTyped v) + ( DefinitionNotes, + (Map (TermedOrTyped v) DefinitionNotes) + ) + ) + withTransitiveNotes = + withNotes + & (fmap . fmap) + ( \deps -> Map.fromList $ do + tv <- Set.toList deps + (notes, _) <- maybeToList (Map.lookup tv withNotes) + pure (tv, notes) + ) + in withTransitiveNotes + definitionStatus :: TermedOrTyped v -> DefinitionNotes + definitionStatus tv = + let ld = case Set.toList (Rel.lookupDom tv varRelation) of + [r] -> r + _ -> error $ "Expected exactly one LabeledDependency in relation for var: " <> show tv + v = TT.unTermedOrTyped tv + existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) in case ld of LD.TypeReference {} -> @@ -331,153 +279,78 @@ analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = -- to resolve the conflict. _ -> DefOk Updated - varRelation :: Rel3.Relation3 v (LabeledVar v) ComponentHash - varRelation = labelling uf - - -- Get the set of all DIRECT definitions in the file which a definition depends on. - codebaseNames :: Names - codebaseNames = - -- TODO: make faster - -- TODO: how does defsToConsider affect deprecations? - Names.filter (`Set.notMember` deprecatedConstructors) unalteredCodebaseNames - constructorNamesInFile :: Set Name - constructorNamesInFile = - Map.elems (UF.dataDeclarationsId' uf) - <> (fmap . fmap) DD.toDataDecl (Map.elems (UF.effectDeclarationsId' uf)) - & fmap snd - & concatMap - ( \decl -> - DD.constructors' decl <&> \(_ann, v, _typ) -> - Name.unsafeFromVar v - ) - & Set.fromList - - deprecatedConstructors :: Set Name - deprecatedConstructors = - let allRefIds = - fmap fst (Map.elems (UF.dataDeclarationsId' uf)) - <> fmap fst (Map.elems (UF.effectDeclarationsId' uf)) - existingConstructorsFromEditedTypes = Set.fromList $ do - -- List Monad - refId <- allRefIds - (name, _ref) <- Names.constructorsForType (Ref.DerivedId refId) unalteredCodebaseNames - pure name - in -- Compute any constructors which were deleted - existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile +computeVarDeps :: + forall v. + Var v => + UF.TypecheckedUnisonFile v Ann -> + Maybe (Set v) -> + Rel.Relation (TermedOrTyped v) LD.LabeledDependency -> + Map (TermedOrTyped v) (Set (TermedOrTyped v)) +computeVarDeps uf maybeDefsToConsider varRelation = + pTraceShowId $ + let allFileVars :: Set (TermedOrTyped v) + allFileVars = Rel.dom varRelation --- slurpErrs :: SlurpResult v -> Map v SlurpErr --- slurpErrs (SlurpResult defs _) = --- defs --- & Map.mapMaybe --- ( \case --- (DefErr err, _) -> Just err --- _ -> Nothing --- ) - --- slurpOp :: --- forall v. --- Ord v => --- SlurpResult v -> --- (SlurpComponent v, SlurpComponent v) --- slurpOp (SlurpResult terms types) = --- let (termAdds, termUpdates, termErrs) = partition terms --- (typeAdds, typeUpdates, typeErrs) = partition types --- in (SlurpComponent termAdds termUpdates termErrs, SlurpComponent typeAdds typeUpdates typeErrs) --- where --- partition :: (Map v (DefinitionNotes, Set (LabeledVar v))) -> (Set v, Set v, Map v SlurpErr) --- partition sr = --- let (adds, updates, errs) = --- flip execState mempty $ --- for (Map.toList sr) $ \(v, (dn, _)) -> do --- case dn of --- DefOk New -> _1 %= Set.insert v --- DefOk Updated -> _2 %= Set.insert v --- DefOk Duplicate -> pure () --- DefErr err -> _3 . at v ?= err --- in (adds, updates, errs) - -componentTransitiveDeps :: Ord v => UF.TypecheckedUnisonFile v a -> Map ComponentHash (Set ComponentHash) -componentTransitiveDeps uf = - let deps = Map.unionsWith (<>) [termDeps, dataDeps, effectDeps] - filteredDeps :: Map ComponentHash (Set ComponentHash) - filteredDeps = - deps - & Map.mapWithKey - ( \k d -> - d - -- Don't track the component as one of its own deps - & Set.delete k - -- Filter out any references to components which aren't defined in this file. - & Set.filter (\ch -> Map.member ch deps) - ) - -- Find the fixed point of our dependencies, which will always terminate because - -- component dependencies are acyclic. - transitiveDeps = - filteredDeps - <&> ( \directDeps -> - directDeps - <> foldMap (\ch -> fold $ Map.lookup ch transitiveDeps) directDeps + allInvolvedVars :: Set (TermedOrTyped v) + allInvolvedVars = + case maybeDefsToConsider of + Nothing -> allFileVars + Just vs -> + let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do + v <- Set.toList vs + -- We don't know whether each var is a type or term, so we try both. + tv <- [Typed v, Termed v] + guard (Rel.memberDom tv varRelation) + pure tv + in varClosure existingVars + + depMap :: (Map (TermedOrTyped v) (Set (TermedOrTyped v))) + depMap = + allInvolvedVars + & Set.toList + & fmap + ( \tv -> (tv, varClosure (Set.singleton tv)) ) - in transitiveDeps + & Map.fromListWith (<>) + in depMap where - termDeps :: Map ComponentHash (Set ComponentHash) - termDeps = - UF.hashTermsId uf - & Map.elems - & fmap (\(refId, _watchKind, trm, _typ) -> (idToComponentHash refId, termComponentRefs trm)) - & Map.fromListWith (<>) - dataDeps :: Map ComponentHash (Set ComponentHash) - dataDeps = - UF.dataDeclarationsId' uf - & Map.elems - & fmap (\(refId, decl) -> (idToComponentHash refId, dataDeclRefs decl)) - & Map.fromListWith (<>) - effectDeps :: Map ComponentHash (Set ComponentHash) - effectDeps = - UF.effectDeclarationsId' uf - & Map.elems - & fmap (\(refId, effect) -> (idToComponentHash refId, dataDeclRefs (DD.toDataDecl effect))) - & Map.fromListWith (<>) - -termComponentRefs :: Ord v => Term v a -> Set ComponentHash -termComponentRefs trm = - Term.dependencies trm - -- Ignore builtins - & Set.mapMaybe componentHashForReference - -dataDeclRefs :: Ord v => DD.DataDeclaration v a -> Set ComponentHash -dataDeclRefs decl = - DD.dependencies decl - -- Ignore builtins - & Set.mapMaybe componentHashForReference + -- Compute the closure of all vars which the provided vars depend on. + varClosure :: Set (TermedOrTyped v) -> Set (TermedOrTyped v) + varClosure (sortVars -> toSlurpComponent -> sc) = + mingleVars . fromSlurpComponent $ SC.closeWithDependencies uf sc -- TODO: Does this need to contain constructors? Probably. -- Does not include constructors -labelling :: forall v a. Ord v => UF.TypecheckedUnisonFile v a -> Rel3.Relation3 v (LabeledVar v) ComponentHash +labelling :: forall v a. Ord v => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermedOrTyped v) LD.LabeledDependency labelling uf = decls <> effects <> terms where - terms :: Rel3.Relation3 v (LabeledVar v) ComponentHash + terms :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency terms = UF.hashTermsId uf & Map.toList - & fmap (\(v, (refId, _, _, _)) -> (v, LabeledVar v (LD.derivedTerm refId), idToComponentHash refId)) - & Rel3.fromList - decls :: Rel3.Relation3 v (LabeledVar v) ComponentHash + -- TODO: ensure we handle watches with assignments correctly. + -- Filter out watches + & mapMaybe + ( \case + (v, (refId, w, _, _)) + | w == Just TestWatch || w == Nothing -> + Just (Termed v, LD.derivedTerm refId) + _ -> Nothing + ) + & Rel.fromList + decls :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency decls = UF.dataDeclarationsId' uf & Map.toList - & fmap (\(v, (refId, _)) -> (v, LabeledVar v (LD.derivedType refId), idToComponentHash refId)) - & Rel3.fromList + & fmap (\(v, (refId, _)) -> (Typed v, LD.derivedType refId)) + & Rel.fromList - effects :: Rel3.Relation3 v (LabeledVar v) ComponentHash + effects :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency effects = UF.effectDeclarationsId' uf & Map.toList - & fmap (\(v, (refId, _)) -> (v, LabeledVar v (LD.derivedType refId), idToComponentHash refId)) - & Rel3.fromList - -idToComponentHash :: Ref.Id -> ComponentHash -idToComponentHash (Ref.Id componentHash _ _) = componentHash + & fmap (\(v, (refId, _)) -> (Typed v, (LD.derivedType refId))) + & Rel.fromList selectDefinitions :: Ord v => SlurpComponent v -> UF.TypecheckedUnisonFile v a -> UF.TypecheckedUnisonFile v a selectDefinitions @@ -514,7 +387,12 @@ toSlurpResult uf op mvs r = OldSlurp.extraDefinitions = case mvs of Nothing -> mempty - Just vs -> SC.difference (fold r) (SlurpComponent vs vs), + Just vs -> + let allVars = fold r + desired = + vs + & Set.flatMap (\v -> Set.fromList [Typed v, Termed v]) + in toSlurpComponent . sortVars $ Set.difference allVars desired, OldSlurp.adds = adds, OldSlurp.duplicates = duplicates, OldSlurp.collisions = if op == AddOp then updates else mempty, @@ -535,29 +413,30 @@ toSlurpResult uf op mvs r = (adds, duplicates, updates, termCtorColl, (ctorTermColl, blocked)) = r & ifoldMap - ( \k sc -> - case k of - Add -> (sc, mempty, mempty, mempty, (mempty, mempty)) - Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty)) - Update -> (mempty, mempty, sc, mempty, (mempty, mempty)) - NeedsUpdate v -> - case op of - AddOp -> - (mempty, mempty, singletonSC v, mempty, (mempty, sc `SC.difference` singletonSC v)) - UpdateOp -> - (sc, mempty, mempty, mempty, (mempty, mempty)) - ErrFrom v TermCtorCollision -> (mempty, mempty, mempty, singletonSC v, (mempty, sc `SC.difference` singletonSC v)) - ErrFrom v CtorTermCollision -> (mempty, mempty, mempty, mempty, (singletonSC v, sc `SC.difference` singletonSC v)) - SelfErr TermCtorCollision -> (mempty, mempty, mempty, sc, (mempty, mempty)) - SelfErr CtorTermCollision -> (mempty, mempty, mempty, mempty, (sc, mempty)) + ( \k tvs -> + let sc = toSlurpComponent . sortVars $ tvs + in case k of + Add -> (sc, mempty, mempty, mempty, (mempty, mempty)) + Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty)) + Update -> (mempty, mempty, sc, mempty, (mempty, mempty)) + NeedsUpdate v -> + case op of + AddOp -> + (mempty, mempty, singletonSC v, mempty, (mempty, sc `SC.difference` singletonSC v)) + UpdateOp -> + (sc, mempty, mempty, mempty, (mempty, mempty)) + ErrFrom v TermCtorCollision -> (mempty, mempty, mempty, singletonSC v, (mempty, sc `SC.difference` singletonSC v)) + ErrFrom v CtorTermCollision -> (mempty, mempty, mempty, mempty, (singletonSC v, sc `SC.difference` singletonSC v)) + SelfErr TermCtorCollision -> (mempty, mempty, mempty, sc, (mempty, mempty)) + SelfErr CtorTermCollision -> (mempty, mempty, mempty, mempty, (sc, mempty)) ) singletonSC = \case - TypeVar v -> SlurpComponent {terms = mempty, types = Set.singleton v} - TermVar v -> SlurpComponent {terms = Set.singleton v, types = mempty} + Typed v -> SlurpComponent {terms = mempty, types = Set.singleton v} + Termed v -> SlurpComponent {terms = Set.singleton v, types = mempty} anyErrors :: Result v -> Bool anyErrors r = - any isError . Map.keys $ Map.filter (not . SC.isEmpty) r + any isError . Map.keys $ Map.filter (not . null) r where isError :: BlockStatus v -> Bool isError = \case @@ -568,3 +447,22 @@ anyErrors r = NeedsUpdate {} -> True ErrFrom {} -> True SelfErr {} -> True + +toSlurpComponent :: TermsAndTypes (Set v) -> SlurpComponent v +toSlurpComponent TermsAndTypes {TT.terms = terms, TT.types = types} = + SlurpComponent {terms = terms, types = types} + +fromSlurpComponent :: SlurpComponent v -> TermsAndTypes (Set v) +fromSlurpComponent SlurpComponent {terms = terms, types = types} = + TermsAndTypes {TT.terms = terms, TT.types = types} + +sortVars :: (Foldable f, Ord v) => f (TermedOrTyped v) -> TermsAndTypes (Set v) +sortVars = + foldMap + ( \case + Typed v -> TT.fromTypes (Set.singleton v) + Termed v -> TT.fromTerms (Set.singleton v) + ) + +mingleVars :: Ord v => TermsAndTypes (Set v) -> Set (TermedOrTyped v) +mingleVars = Set.fromList . fold . TT.labeledF . fmap Set.toList diff --git a/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs b/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs index d53f3cc47f..1c1dd1c6b5 100644 --- a/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs +++ b/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs @@ -1,13 +1,38 @@ {-# LANGUAGE TypeFamilies #-} + module Unison.Codebase.Editor.TermsAndTypes where import Data.Distributive +import Data.Functor.Adjunction import Data.Functor.Rep data TermOrType = TypeTag | TermTag + deriving stock (Show, Eq, Ord) + +data TermedOrTyped a = Typed a | Termed a + deriving stock (Functor, Foldable, Traversable) + deriving stock (Show, Eq, Ord) + +unTermedOrTyped :: TermedOrTyped a -> a +unTermedOrTyped = \case + Typed a -> a + Termed a -> a data TermsAndTypes a = TermsAndTypes {terms :: a, types :: a} - deriving (Functor) + deriving stock (Functor, Foldable, Traversable) + +fromTypes :: Monoid a => a -> TermsAndTypes a +fromTypes a = TermsAndTypes {terms = mempty, types = a} + +fromTerms :: Monoid a => a -> TermsAndTypes a +fromTerms a = TermsAndTypes {terms = a, types = mempty} + +instance Semigroup a => Semigroup (TermsAndTypes a) where + TermsAndTypes terms1 types1 <> TermsAndTypes terms2 types2 = + TermsAndTypes (terms1 <> terms2) (types1 <> types2) + +instance Monoid a => Monoid (TermsAndTypes a) where + mempty = TermsAndTypes mempty mempty instance Applicative TermsAndTypes where pure a = TermsAndTypes a a @@ -22,3 +47,19 @@ instance Representable TermsAndTypes where TermTag -> terms tt TypeTag -> types tt tabulate f = TermsAndTypes {terms = f TermTag, types = f TypeTag} + +instance Adjunction TermedOrTyped TermsAndTypes where + unit a = TermsAndTypes {terms = Termed a, types = Typed a} + counit (Termed (TermsAndTypes {terms})) = terms + counit (Typed (TermsAndTypes {types})) = types + +labeled :: TermsAndTypes a -> TermsAndTypes (TermedOrTyped a) +labeled (TermsAndTypes {terms, types}) = + TermsAndTypes {terms = Termed terms, types = Typed types} + +labeledF :: Functor f => TermsAndTypes (f a) -> TermsAndTypes (f (TermedOrTyped a)) +labeledF (TermsAndTypes {terms, types}) = + TermsAndTypes {terms = fmap Termed terms, types = fmap Typed types} + +mapWithTag :: (TermOrType -> a -> b) -> TermsAndTypes a -> TermsAndTypes b +mapWithTag f (TermsAndTypes {terms, types}) = TermsAndTypes (f TermTag terms) (f TypeTag types) From efb41b51a86501bf5b6242c911aa573b0f56d734 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sat, 15 Jan 2022 20:10:30 -0600 Subject: [PATCH 173/297] Use shared SlurpComponent --- .../src/Unison/Codebase/Editor/HandleInput.hs | 2 +- .../src/Unison/Codebase/Editor/Slurp.hs | 30 ++++++++----------- .../Unison/Codebase/Editor/SlurpComponent.hs | 6 ++++ 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 53575d9844..cddc253eb2 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1271,7 +1271,7 @@ loop = do -- . applySelection hqs uf -- . toSlurpResult currentPath' uf -- <$> slurpResultNames - let adds = NewSlurp.toSlurpComponent . NewSlurp.sortVars . fromMaybe mempty . Map.lookup NewSlurp.Add $ sr + let adds = NewSlurp.sortVars . fromMaybe mempty . Map.lookup NewSlurp.Add $ sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . NewSlurp.selectDefinitions adds $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 470718493e..34728caf51 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -11,7 +11,7 @@ import Debug.Pretty.Simple (pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp -import Unison.Codebase.Editor.TermsAndTypes (TermedOrTyped (Termed, Typed), TermsAndTypes (TermsAndTypes)) +import Unison.Codebase.Editor.TermsAndTypes (TermedOrTyped (Termed, Typed)) import qualified Unison.Codebase.Editor.TermsAndTypes as TT import qualified Unison.DataDeclaration as DD import qualified Unison.LabeledDependency as LD @@ -316,8 +316,8 @@ computeVarDeps uf maybeDefsToConsider varRelation = where -- Compute the closure of all vars which the provided vars depend on. varClosure :: Set (TermedOrTyped v) -> Set (TermedOrTyped v) - varClosure (sortVars -> toSlurpComponent -> sc) = - mingleVars . fromSlurpComponent $ SC.closeWithDependencies uf sc + varClosure (sortVars -> sc) = + mingleVars $ SC.closeWithDependencies uf sc -- TODO: Does this need to contain constructors? Probably. -- Does not include constructors @@ -392,7 +392,7 @@ toSlurpResult uf op mvs r = desired = vs & Set.flatMap (\v -> Set.fromList [Typed v, Termed v]) - in toSlurpComponent . sortVars $ Set.difference allVars desired, + in sortVars $ Set.difference allVars desired, OldSlurp.adds = adds, OldSlurp.duplicates = duplicates, OldSlurp.collisions = if op == AddOp then updates else mempty, @@ -414,7 +414,7 @@ toSlurpResult uf op mvs r = r & ifoldMap ( \k tvs -> - let sc = toSlurpComponent . sortVars $ tvs + let sc = sortVars $ tvs in case k of Add -> (sc, mempty, mempty, mempty, (mempty, mempty)) Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty)) @@ -448,21 +448,15 @@ anyErrors r = ErrFrom {} -> True SelfErr {} -> True -toSlurpComponent :: TermsAndTypes (Set v) -> SlurpComponent v -toSlurpComponent TermsAndTypes {TT.terms = terms, TT.types = types} = - SlurpComponent {terms = terms, types = types} - -fromSlurpComponent :: SlurpComponent v -> TermsAndTypes (Set v) -fromSlurpComponent SlurpComponent {terms = terms, types = types} = - TermsAndTypes {TT.terms = terms, TT.types = types} - -sortVars :: (Foldable f, Ord v) => f (TermedOrTyped v) -> TermsAndTypes (Set v) +sortVars :: (Foldable f, Ord v) => f (TermedOrTyped v) -> SlurpComponent v sortVars = foldMap ( \case - Typed v -> TT.fromTypes (Set.singleton v) - Termed v -> TT.fromTerms (Set.singleton v) + Typed v -> SC.fromTypes (Set.singleton v) + Termed v -> SC.fromTerms (Set.singleton v) ) -mingleVars :: Ord v => TermsAndTypes (Set v) -> Set (TermedOrTyped v) -mingleVars = Set.fromList . fold . TT.labeledF . fmap Set.toList +mingleVars :: Ord v => SlurpComponent v -> Set (TermedOrTyped v) +mingleVars SlurpComponent {terms, types} = + Set.map Termed types + <> Set.map Typed terms diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs index 04139bf4e4..eb877ed1ae 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs @@ -86,3 +86,9 @@ closeWithDependencies uf inputs = seenDefns where invert :: forall k v . Ord k => Ord v => Map k v -> Map v k invert m = Map.fromList (swap <$> Map.toList m) + +fromTypes :: Ord v => Set v -> SlurpComponent v +fromTypes vs = SlurpComponent {terms = mempty, types = vs} + +fromTerms :: Ord v => Set v -> SlurpComponent v +fromTerms vs = SlurpComponent {terms = vs, types = mempty} From 8932523ed49b8a486aedf4c2579544a988238790 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sat, 15 Jan 2022 20:16:13 -0600 Subject: [PATCH 174/297] Ensure we add adds on update --- .../src/Unison/Codebase/Editor/Slurp.hs | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 34728caf51..1fb263ceb0 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -1,4 +1,15 @@ -module Unison.Codebase.Editor.Slurp where +module Unison.Codebase.Editor.Slurp + ( SlurpOp (..), + Result, + BlockStatus (..), + anyErrors, + results, + analyzeTypecheckedUnisonFile, + selectDefinitions, + toSlurpResult, + sortVars, + ) +where import Control.Lens import Data.Bifunctor (second) @@ -44,16 +55,6 @@ import Unison.WatchKind (pattern TestWatch) data SlurpOp = AddOp | UpdateOp deriving (Eq, Show) -data LabeledVar v = LabeledVar v LD.LabeledDependency - deriving (Eq, Ord) - -isTypeVar :: LabeledVar v -> Bool -isTypeVar (LabeledVar _ LD.TypeReference {}) = True -isTypeVar _ = False - -isTermVar :: LabeledVar v -> Bool -isTermVar = not . isTypeVar - data SlurpStatus = New | Updated @@ -319,7 +320,7 @@ computeVarDeps uf maybeDefsToConsider varRelation = varClosure (sortVars -> sc) = mingleVars $ SC.closeWithDependencies uf sc --- TODO: Does this need to contain constructors? Probably. +-- TODO: Does this need to contain constructors? Maybe. -- Does not include constructors labelling :: forall v a. Ord v => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermedOrTyped v) LD.LabeledDependency labelling uf = decls <> effects <> terms @@ -397,7 +398,7 @@ toSlurpResult uf op mvs r = OldSlurp.duplicates = duplicates, OldSlurp.collisions = if op == AddOp then updates else mempty, OldSlurp.conflicts = mempty, - OldSlurp.updates = if op == UpdateOp then updates else mempty, + OldSlurp.updates = if op == UpdateOp then adds <> updates else mempty, OldSlurp.termExistingConstructorCollisions = let SlurpComponent types terms = termCtorColl in types <> terms, From b3ffe72b75e5bd586ca709423721418389e180bd Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sat, 15 Jan 2022 20:27:54 -0600 Subject: [PATCH 175/297] Actually handle passed vars correctly --- .../src/Unison/Codebase/Editor/HandleInput.hs | 10 ++++----- .../src/Unison/Codebase/Editor/Slurp.hs | 22 +++++++++---------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index cddc253eb2..e2a4b1dbf1 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -262,7 +262,7 @@ loop = do withFile [] sourceName (text, lexed) $ \unisonFile -> do currentNames <- currentPathNames let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile unisonFile - Nothing + mempty $ currentNames let oldSlurpResult = NewSlurp.toSlurpResult unisonFile NewSlurp.UpdateOp Nothing sr & addAliases currentNames unisonFile currentPath' @@ -1264,7 +1264,7 @@ loop = do Just uf -> do currentNames <- currentPathNames let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf - (if null vars then Nothing else Just vars) + vars $ currentNames -- sr <- -- Slurp.disallowUpdates @@ -1286,7 +1286,7 @@ loop = do let vars = Set.map Name.toVar names currentNames <- currentPathNames let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf - (Just vars) + vars $ currentNames let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) sr & addAliases currentNames uf currentPath' @@ -1298,7 +1298,7 @@ loop = do let vars = Set.map Name.toVar names currentNames <- currentPathNames let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf - (Just vars) + vars $ currentNames let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) sr & addAliases currentNames uf currentPath' @@ -1841,7 +1841,7 @@ handleUpdate input maybePatchPath names = do slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames let newSR = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf - (Just vars) + vars $ slurpCheckNames let sr = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) newSR -- let sr :: SlurpResult v diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 1fb263ceb0..896aa1be1c 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -152,20 +152,20 @@ analyzeTypecheckedUnisonFile :: forall v. Var v => UF.TypecheckedUnisonFile v Ann -> - Maybe (Set v) -> + Set v -> Names -> (Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes))) -analyzeTypecheckedUnisonFile uf maybeDefsToConsider unalteredCodebaseNames = +analyzeTypecheckedUnisonFile uf defsToConsider unalteredCodebaseNames = let codebaseNames :: Names codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames varDeps :: Map (TermedOrTyped v) (Set (TermedOrTyped v)) - varDeps = computeVarDeps uf maybeDefsToConsider varRelation + varDeps = computeVarDeps uf defsToConsider varRelation statusMap :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) statusMap = computeVarStatuses varDeps varRelation codebaseNames in pTraceShowId statusMap where varRelation :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency - varRelation = undefined $ labelling uf + varRelation = labelling uf computeNamesWithDeprecations :: Var v => @@ -284,21 +284,21 @@ computeVarDeps :: forall v. Var v => UF.TypecheckedUnisonFile v Ann -> - Maybe (Set v) -> + Set v -> Rel.Relation (TermedOrTyped v) LD.LabeledDependency -> Map (TermedOrTyped v) (Set (TermedOrTyped v)) -computeVarDeps uf maybeDefsToConsider varRelation = +computeVarDeps uf defsToConsider varRelation = pTraceShowId $ let allFileVars :: Set (TermedOrTyped v) allFileVars = Rel.dom varRelation allInvolvedVars :: Set (TermedOrTyped v) allInvolvedVars = - case maybeDefsToConsider of - Nothing -> allFileVars - Just vs -> + if Set.null defsToConsider + then allFileVars + else let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do - v <- Set.toList vs + v <- Set.toList defsToConsider -- We don't know whether each var is a type or term, so we try both. tv <- [Typed v, Termed v] guard (Rel.memberDom tv varRelation) @@ -398,7 +398,7 @@ toSlurpResult uf op mvs r = OldSlurp.duplicates = duplicates, OldSlurp.collisions = if op == AddOp then updates else mempty, OldSlurp.conflicts = mempty, - OldSlurp.updates = if op == UpdateOp then adds <> updates else mempty, + OldSlurp.updates = if op == UpdateOp then updates else mempty, OldSlurp.termExistingConstructorCollisions = let SlurpComponent types terms = termCtorColl in types <> terms, From fcea773b8b23ceb8024b1b709cebfd7a7a5df7d2 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 16 Jan 2022 17:56:45 -0600 Subject: [PATCH 176/297] Handle aliases the same --- .../src/Unison/Codebase/Editor/HandleInput.hs | 1 + .../src/Unison/Codebase/Editor/Slurp.hs | 102 +++++++++--------- 2 files changed, 54 insertions(+), 49 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index e2a4b1dbf1..1168ba14bd 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1844,6 +1844,7 @@ handleUpdate input maybePatchPath names = do vars $ slurpCheckNames let sr = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) newSR + & addAliases slurpCheckNames uf currentPath' -- let sr :: SlurpResult v -- sr = -- applySelection hqs uf diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 896aa1be1c..ca37d4d980 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -18,7 +18,7 @@ import qualified Data.List.NonEmpty as NEList import qualified Data.Map as Map import qualified Data.Semigroup.Foldable as Semigroup import qualified Data.Set as Set -import Debug.Pretty.Simple (pTraceShowId) +import Debug.Pretty.Simple (pTrace, pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp @@ -194,16 +194,17 @@ computeNamesWithDeprecations uf unalteredCodebaseNames = codebaseNames deprecatedConstructors :: Set Name deprecatedConstructors = - let allRefIds = - fmap fst (Map.elems (UF.dataDeclarationsId' uf)) - <> fmap fst (Map.elems (UF.effectDeclarationsId' uf)) - existingConstructorsFromEditedTypes = Set.fromList $ do - -- List Monad - refId <- allRefIds - (name, _ref) <- Names.constructorsForType (Ref.DerivedId refId) unalteredCodebaseNames - pure name - in -- Compute any constructors which were deleted - existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile + pTraceShowId . pTrace "Deprecated Constructors" $ + let allRefIds = + fmap fst (Map.elems (UF.dataDeclarationsId' uf)) + <> fmap fst (Map.elems (UF.effectDeclarationsId' uf)) + existingConstructorsFromEditedTypes = Set.fromList $ do + -- List Monad + refId <- allRefIds + (name, _ref) <- Names.constructorsForType (Ref.DerivedId refId) unalteredCodebaseNames + pure name + in -- Compute any constructors which were deleted + existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile computeVarStatuses :: forall v. @@ -219,28 +220,29 @@ computeVarStatuses depMap varRelation codebaseNames = statuses where statuses :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes)) statuses = - let withNotes = - depMap - & Map.mapWithKey - ( \tv deps -> - (definitionStatus tv, deps) - ) - withTransitiveNotes :: - ( Map - (TermedOrTyped v) - ( DefinitionNotes, - (Map (TermedOrTyped v) DefinitionNotes) - ) - ) - withTransitiveNotes = - withNotes - & (fmap . fmap) - ( \deps -> Map.fromList $ do - tv <- Set.toList deps - (notes, _) <- maybeToList (Map.lookup tv withNotes) - pure (tv, notes) - ) - in withTransitiveNotes + pTraceShowId . pTrace "Statuses" $ + let withNotes = + depMap + & Map.mapWithKey + ( \tv deps -> + (definitionStatus tv, deps) + ) + withTransitiveNotes :: + ( Map + (TermedOrTyped v) + ( DefinitionNotes, + (Map (TermedOrTyped v) DefinitionNotes) + ) + ) + withTransitiveNotes = + withNotes + & (fmap . fmap) + ( \deps -> Map.fromList $ do + tv <- Set.toList deps + (notes, _) <- maybeToList (Map.lookup tv withNotes) + pure (tv, notes) + ) + in withTransitiveNotes definitionStatus :: TermedOrTyped v -> DefinitionNotes definitionStatus tv = let ld = case Set.toList (Rel.lookupDom tv varRelation) of @@ -294,25 +296,27 @@ computeVarDeps uf defsToConsider varRelation = allInvolvedVars :: Set (TermedOrTyped v) allInvolvedVars = - if Set.null defsToConsider - then allFileVars - else - let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do - v <- Set.toList defsToConsider - -- We don't know whether each var is a type or term, so we try both. - tv <- [Typed v, Termed v] - guard (Rel.memberDom tv varRelation) - pure tv - in varClosure existingVars + pTraceShowId . pTrace "All involved vars" $ + if Set.null defsToConsider + then allFileVars + else + let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do + v <- Set.toList defsToConsider + -- We don't know whether each var is a type or term, so we try both. + tv <- [Typed v, Termed v] + guard (Rel.memberDom tv varRelation) + pure tv + in varClosure existingVars depMap :: (Map (TermedOrTyped v) (Set (TermedOrTyped v))) depMap = - allInvolvedVars - & Set.toList - & fmap - ( \tv -> (tv, varClosure (Set.singleton tv)) - ) - & Map.fromListWith (<>) + pTraceShowId . pTrace "Dep Map" $ + allInvolvedVars + & Set.toList + & fmap + ( \tv -> (tv, Set.delete tv $ varClosure (Set.singleton tv)) + ) + & Map.fromListWith (<>) in depMap where -- Compute the closure of all vars which the provided vars depend on. From 1409f2b176eb132559d63e18cb1e2e073dd0ac06 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 16 Jan 2022 18:07:02 -0600 Subject: [PATCH 177/297] Fix error message for conflicts --- unison-cli/src/Unison/Codebase/Editor/HandleInput.hs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 1168ba14bd..f6dd0c44fa 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1862,12 +1862,12 @@ handleUpdate input maybePatchPath names = do toList (Names.typesNamed fileNames n) ) of ([old], [new]) -> (n, (old, new)) - _ -> + actual -> error $ - "Expected unique matches for " + "Expected unique matches for var \"" ++ Var.nameStr v - ++ " but got: " - ++ show otherwise + ++ "\" but got: " + ++ show actual where n = Name.unsafeFromVar v hashTerms :: Map Reference (Type v Ann) @@ -1881,12 +1881,12 @@ handleUpdate input maybePatchPath names = do toList (Names.refTermsNamed fileNames n) ) of ([old], [new]) -> (n, (old, new)) - _ -> + actual -> error $ "Expected unique matches for " ++ Var.nameStr v ++ " but got: " - ++ show otherwise + ++ show actual where n = Name.unsafeFromVar v termDeprecations :: [(Name, Referent)] From 3ae9867e41d504860b5b3899bc9f30088faad22c Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 16 Jan 2022 18:13:48 -0600 Subject: [PATCH 178/297] Keep old conflict behaviour for the time being. --- .../src/Unison/Codebase/Editor/Slurp.hs | 45 ++++++++++--------- .../src/Unison/Codebase/Editor/SlurpResult.hs | 1 + 2 files changed, 25 insertions(+), 21 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index ca37d4d980..e8ec33dfe3 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -92,6 +92,7 @@ data SlurpPrintout v = SlurpPrintout data SlurpErr = TermCtorCollision | CtorTermCollision + | Conflict deriving (Eq, Ord, Show) data DefinitionNotes @@ -258,9 +259,9 @@ computeVarStatuses depMap varRelation codebaseNames = statuses [r] | LD.typeRef r == ld -> DefOk Duplicate | otherwise -> DefOk Updated - -- If there are many existing terms, they must be in conflict, we can update - -- to resolve the conflict. - _ -> DefOk Updated + -- If there are many existing terms, they must be in conflict. + -- Currently we treat conflicts as errors rather than resolving them. + _ -> DefErr Conflict LD.TermReference {} -> case Set.toList existingTermsAtName of [] -> DefOk New @@ -268,9 +269,9 @@ computeVarStatuses depMap varRelation codebaseNames = statuses [r] | LD.referent r == ld -> DefOk Duplicate | otherwise -> DefOk Updated - -- If there are many existing terms, they must be in conflict, we can update - -- to resolve the conflict. - _ -> DefOk Updated + -- If there are many existing terms, they must be in conflict. + -- Currently we treat conflicts as errors rather than resolving them. + _ -> DefErr Conflict LD.ConReference {} -> case Set.toList existingTermsAtName of [] -> DefOk New @@ -278,9 +279,9 @@ computeVarStatuses depMap varRelation codebaseNames = statuses [r] | LD.referent r == ld -> DefOk Duplicate | otherwise -> DefOk Updated - -- If there are many existing terms, they must be in conflict, we can update - -- to resolve the conflict. - _ -> DefOk Updated + -- If there are many existing terms, they must be in conflict. + -- Currently we treat conflicts as errors rather than resolving them. + _ -> DefErr Conflict computeVarDeps :: forall v. @@ -401,7 +402,7 @@ toSlurpResult uf op mvs r = OldSlurp.adds = adds, OldSlurp.duplicates = duplicates, OldSlurp.collisions = if op == AddOp then updates else mempty, - OldSlurp.conflicts = mempty, + OldSlurp.conflicts = conflicts, OldSlurp.updates = if op == UpdateOp then updates else mempty, OldSlurp.termExistingConstructorCollisions = let SlurpComponent types terms = termCtorColl @@ -414,26 +415,28 @@ toSlurpResult uf op mvs r = OldSlurp.defsWithBlockedDependencies = blocked } where - adds, duplicates, updates, termCtorColl, ctorTermColl, blocked :: SlurpComponent v - (adds, duplicates, updates, termCtorColl, (ctorTermColl, blocked)) = + adds, duplicates, updates, termCtorColl, ctorTermColl, blocked, conflicts :: SlurpComponent v + (adds, duplicates, updates, termCtorColl, (ctorTermColl, blocked, conflicts)) = r & ifoldMap ( \k tvs -> let sc = sortVars $ tvs in case k of - Add -> (sc, mempty, mempty, mempty, (mempty, mempty)) - Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty)) - Update -> (mempty, mempty, sc, mempty, (mempty, mempty)) + Add -> (sc, mempty, mempty, mempty, (mempty, mempty, mempty)) + Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty, mempty)) + Update -> (mempty, mempty, sc, mempty, (mempty, mempty, mempty)) NeedsUpdate v -> case op of AddOp -> - (mempty, mempty, singletonSC v, mempty, (mempty, sc `SC.difference` singletonSC v)) + (mempty, mempty, singletonSC v, mempty, (mempty, sc `SC.difference` singletonSC v, mempty)) UpdateOp -> - (sc, mempty, mempty, mempty, (mempty, mempty)) - ErrFrom v TermCtorCollision -> (mempty, mempty, mempty, singletonSC v, (mempty, sc `SC.difference` singletonSC v)) - ErrFrom v CtorTermCollision -> (mempty, mempty, mempty, mempty, (singletonSC v, sc `SC.difference` singletonSC v)) - SelfErr TermCtorCollision -> (mempty, mempty, mempty, sc, (mempty, mempty)) - SelfErr CtorTermCollision -> (mempty, mempty, mempty, mempty, (sc, mempty)) + (sc, mempty, mempty, mempty, (mempty, mempty, mempty)) + ErrFrom v TermCtorCollision -> (mempty, mempty, mempty, singletonSC v, (mempty, sc `SC.difference` singletonSC v, mempty)) + ErrFrom v CtorTermCollision -> (mempty, mempty, mempty, mempty, (singletonSC v, sc `SC.difference` singletonSC v, mempty)) + ErrFrom v Conflict -> (mempty, mempty, mempty, mempty, (mempty, sc `SC.difference` singletonSC v, singletonSC v)) + SelfErr TermCtorCollision -> (mempty, mempty, mempty, sc, (mempty, mempty, mempty)) + SelfErr CtorTermCollision -> (mempty, mempty, mempty, mempty, (sc, mempty, mempty)) + SelfErr Conflict -> (mempty, mempty, mempty, mempty, (mempty, mempty, sc)) ) singletonSC = \case Typed v -> SlurpComponent {terms = mempty, types = Set.singleton v} diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs index 4cfc49c81e..955dc5c950 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs @@ -54,6 +54,7 @@ data SlurpResult v = SlurpResult { , duplicates :: SlurpComponent v -- Not added to codebase due to the name already existing -- in the branch with a different definition. + -- I.e. an update is required but we're performing an add. , collisions :: SlurpComponent v -- Not added to codebase due to the name existing -- in the branch with a conflict (two or more definitions). From f9279d281b33d9df04adb1346940aa5a68d1d89d Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 16 Jan 2022 18:25:50 -0600 Subject: [PATCH 179/297] Add additional test transcripts from #2786 --- .../ability-term-conflicts-on-update.md | 41 +++++++ ...ability-term-conflicts-on-update.output.md | 81 +++++++++++++ unison-src/transcripts/alias-clobbering.md | 34 ++++++ .../transcripts/alias-clobbering.output.md | 107 ++++++++++++++++++ .../transcripts/block-on-required-update.md | 28 +++++ .../block-on-required-update.output.md | 64 +++++++++++ .../transcripts/sum-type-update-conflicts.md | 36 ++++++ unison-src/transcripts/update-on-conflict.md | 30 +++++ .../transcripts/update-on-conflict.output.md | 96 ++++++++++++++++ ...e-with-conflicting-constructor-and-term.md | 17 +++ 10 files changed, 534 insertions(+) create mode 100644 unison-src/transcripts/ability-term-conflicts-on-update.md create mode 100644 unison-src/transcripts/ability-term-conflicts-on-update.output.md create mode 100644 unison-src/transcripts/alias-clobbering.md create mode 100644 unison-src/transcripts/alias-clobbering.output.md create mode 100644 unison-src/transcripts/block-on-required-update.md create mode 100644 unison-src/transcripts/block-on-required-update.output.md create mode 100644 unison-src/transcripts/sum-type-update-conflicts.md create mode 100644 unison-src/transcripts/update-on-conflict.md create mode 100644 unison-src/transcripts/update-on-conflict.output.md create mode 100644 unison-src/transcripts/update-with-conflicting-constructor-and-term.md diff --git a/unison-src/transcripts/ability-term-conflicts-on-update.md b/unison-src/transcripts/ability-term-conflicts-on-update.md new file mode 100644 index 0000000000..8b7eed2923 --- /dev/null +++ b/unison-src/transcripts/ability-term-conflicts-on-update.md @@ -0,0 +1,41 @@ +# Regression test for updates which conflict with an existing ability constructor + +https://github.com/unisonweb/unison/issues/2786 + +```ucm:hide +.builtins> builtins.mergeio +``` + +First we add an ability to the codebase. +Note that this will create the name `Channels.send` as an ability constructor. + +```unison +unique ability Channels where + send : a -> {Channels} () +``` + +```ucm +.ns> add +``` + +Now we update the ability, changing the name of the constructor, _but_, we simultaneously +add a new top-level term with the same name as the constructor which is being +removed from Channels. + +```unison +unique ability Channels where + sends : [a] -> {Channels} () + +Channels.send : a -> {Channels} () +Channels.send a = sends [a] + +thing : '{Channels} () +thing _ = send 1 +``` + +The 'update' will succeed up until it tries to resolve the reference to `send` +within `thing`; because the old send is deleted and the new `send` isn't ever added. + +```ucm +.ns> update +``` diff --git a/unison-src/transcripts/ability-term-conflicts-on-update.output.md b/unison-src/transcripts/ability-term-conflicts-on-update.output.md new file mode 100644 index 0000000000..a9f0c1fcaf --- /dev/null +++ b/unison-src/transcripts/ability-term-conflicts-on-update.output.md @@ -0,0 +1,81 @@ +# Regression test for updates which conflict with an existing ability constructor + +https://github.com/unisonweb/unison/issues/2786 + +First we add an ability to the codebase. +Note that this will create the name `Channels.send` as an ability constructor. + +```unison +unique ability Channels where + send : a -> {Channels} () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique ability Channels + +``` +```ucm + ☝️ The namespace .ns is empty. + +.ns> add + + ⍟ I've added these definitions: + + unique ability Channels + +``` +Now we update the ability, changing the name of the constructor, _but_, we simultaneously +add a new top-level term with the same name as the constructor which is being +removed from Channels. + +```unison +unique ability Channels where + sends : [a] -> {Channels} () + +Channels.send : a -> {Channels} () +Channels.send a = sends [a] + +thing : '{Channels} () +thing _ = send 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + Channels.send : a ->{Channels} () + thing : '{Channels} () + + ⍟ These names already exist. You can `update` them to your + new definition: + + unique ability Channels + +``` +The 'update' will succeed up until it tries to resolve the reference to `send` +within `thing`; because the old send is deleted and the new `send` isn't ever added. + +```ucm +.ns> update + + ⍟ I've added these definitions: + + Channels.send : a ->{Channels} () + thing : '{Channels} () + + ⍟ I've updated these names to your new definition: + + unique ability Channels + +``` diff --git a/unison-src/transcripts/alias-clobbering.md b/unison-src/transcripts/alias-clobbering.md new file mode 100644 index 0000000000..cd020b91f2 --- /dev/null +++ b/unison-src/transcripts/alias-clobbering.md @@ -0,0 +1,34 @@ +# Aliasing takes priority over provided definition + +```ucm:hide +.> builtins.merge +``` + +```unison +x = 1 +``` + +```ucm +.> add +``` + +```unison +-- Overwrite the existing alias +x = 2 +-- But add a _new_ definition that's an alias of the _old_ x +y = 1 +``` + +We see that `y` ends up as an alias for the new value of `x`, i.e. `y = 2`, +even though we explicitly said `y = 1`! + +Note how `update` isn't idempotent, it ping-pongs back and forth between aliases: + +```ucm +.> update +.> view x +.> view y +.> update +.> update +.> update +``` diff --git a/unison-src/transcripts/alias-clobbering.output.md b/unison-src/transcripts/alias-clobbering.output.md new file mode 100644 index 0000000000..4f1bc9dd7d --- /dev/null +++ b/unison-src/transcripts/alias-clobbering.output.md @@ -0,0 +1,107 @@ +# Aliasing takes priority over provided definition + +```unison +x = 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + x : Nat + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + x : Nat + +``` +```unison +-- Overwrite the existing alias +x = 2 +-- But add a _new_ definition that's an alias of the _old_ x +y = 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + y : Nat + (also named x) + + ⍟ These names already exist. You can `update` them to your + new definition: + + x : Nat + +``` +We see that `y` ends up as an alias for the new value of `x`, i.e. `y = 2`, +even though we explicitly said `y = 1`! + +Note how `update` isn't idempotent, it ping-pongs back and forth between aliases: + +```ucm +.> update + + ⍟ I've added these definitions: + + y : Nat + (also named x) + + ⍟ I've updated these names to your new definition: + + x : Nat + +.> view x + + x : Nat + x = 2 + +.> view y + + x : Nat + x = 2 + +.> update + + ⊡ Ignored previously added definitions: x + + ⍟ I've updated these names to your new definition: + + y : Nat + (The old definition was also named x. I updated this name + too.) + +.> update + + ⊡ Ignored previously added definitions: y + + ⍟ I've updated these names to your new definition: + + x : Nat + (The old definition was also named y. I updated this name + too.) + +.> update + + ⊡ Ignored previously added definitions: x + + ⍟ I've updated these names to your new definition: + + y : Nat + (The old definition was also named x. I updated this name + too.) + +``` diff --git a/unison-src/transcripts/block-on-required-update.md b/unison-src/transcripts/block-on-required-update.md new file mode 100644 index 0000000000..1027188b06 --- /dev/null +++ b/unison-src/transcripts/block-on-required-update.md @@ -0,0 +1,28 @@ +# Block on required update + +Should block an `add` if it requires an update on an in-file dependency. + +```ucm:hide +.> builtins.merge +``` + +```unison +x = 1 +``` + +```ucm +.> add +``` + +Update `x`, and add a new `y` which depends on the update + +```unison +x = 10 +y = x + 1 +``` + +Try to add only the new `y`. This should fail because it requires an update to `x`, but we only ran an 'add'. + +```ucm:error +.> add y +``` diff --git a/unison-src/transcripts/block-on-required-update.output.md b/unison-src/transcripts/block-on-required-update.output.md new file mode 100644 index 0000000000..b23135afd9 --- /dev/null +++ b/unison-src/transcripts/block-on-required-update.output.md @@ -0,0 +1,64 @@ +# Block on required update + +Should block an `add` if it requires an update on an in-file dependency. + +```unison +x = 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + x : Nat + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + x : Nat + +``` +Update `x`, and add a new `y` which depends on the update + +```unison +x = 10 +y = x + 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + y : Nat + + ⍟ These names already exist. You can `update` them to your + new definition: + + x : Nat + +``` +Try to add only the new `y`. This should fail because it requires an update to `x`, but we only ran an 'add'. + +```ucm +.> add y + + x These definitions failed: + + Reason + needs update x : Nat + blocked y : Nat + + Tip: Use `help filestatus` to learn more. + +``` diff --git a/unison-src/transcripts/sum-type-update-conflicts.md b/unison-src/transcripts/sum-type-update-conflicts.md new file mode 100644 index 0000000000..992c5726ea --- /dev/null +++ b/unison-src/transcripts/sum-type-update-conflicts.md @@ -0,0 +1,36 @@ +# Regression test for updates which conflict with an existing data constructor + +https://github.com/unisonweb/unison/issues/2786 + +```ucm:hide +.builtins> builtins.mergeio +``` + +First we add a sum-type to the codebase. + +```unison +structural type X = x +``` + +```ucm +.ns> add +``` + +Now we update the type, changing the name of the constructors, _but_, we simultaneously +add a new top-level term with the same name as the old constructor. + +```unison +structural type X = y | z + +X.x : Text +X.x = "some text that's not in the codebase" + +dependsOnX = Text.size X.x +``` + +The 'update' will succeed up until it tries to resolve the reference to `X.x` +within `depondsOnX`, but `X.x` is actually not in the codebase! + +```ucm:error +.ns> update +``` diff --git a/unison-src/transcripts/update-on-conflict.md b/unison-src/transcripts/update-on-conflict.md new file mode 100644 index 0000000000..30e1afe84d --- /dev/null +++ b/unison-src/transcripts/update-on-conflict.md @@ -0,0 +1,30 @@ +# Update on conflict + +```ucm:hide +.> builtins.merge +``` + +```unison +a.x = 1 +b.x = 2 +``` + +Cause a conflict: +```ucm +.> add +.merged> merge .a +.merged> merge .b +``` + +Ideally we could just define the canonical `x` that we want, and update +to accept it, but we can't: + +```unison +x = 1 + 2 +``` + +Update fails on conflicted `x`: + +```ucm:error +.merged> update +``` diff --git a/unison-src/transcripts/update-on-conflict.output.md b/unison-src/transcripts/update-on-conflict.output.md new file mode 100644 index 0000000000..2a5948c801 --- /dev/null +++ b/unison-src/transcripts/update-on-conflict.output.md @@ -0,0 +1,96 @@ +# Update on conflict + +```unison +a.x = 1 +b.x = 2 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + a.x : Nat + b.x : Nat + +``` +Cause a conflict: +```ucm +.> add + + ⍟ I've added these definitions: + + a.x : Nat + b.x : Nat + + ☝️ The namespace .merged is empty. + +.merged> merge .a + + Here's what's changed in the current namespace after the + merge: + + Added definitions: + + 1. x : Nat + + Tip: You can use `todo` to see if this generated any work to + do in this namespace and `test` to run the tests. Or you + can use `undo` or `reflog` to undo the results of this + merge. + +.merged> merge .b + + Here's what's changed in the current namespace after the + merge: + + New name conflicts: + + 1. x#jk19sm5bf8 : Nat + ↓ + 2. ┌ x#0ja1qfpej6 : Nat + 3. └ x#jk19sm5bf8 : Nat + + Tip: You can use `todo` to see if this generated any work to + do in this namespace and `test` to run the tests. Or you + can use `undo` or `reflog` to undo the results of this + merge. + +``` +Ideally we could just define the canonical `x` that we want, and update +to accept it, but we can't: + +```unison +x = 1 + 2 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + x These definitions would fail on `add` or `update`: + + Reason + conflicted x : Nat + + Tip: Use `help filestatus` to learn more. + +``` +Update fails on conflicted `x`: + +```ucm +.merged> update + + x These definitions failed: + + Reason + conflicted x : Nat + + Tip: Use `help filestatus` to learn more. + +``` diff --git a/unison-src/transcripts/update-with-conflicting-constructor-and-term.md b/unison-src/transcripts/update-with-conflicting-constructor-and-term.md new file mode 100644 index 0000000000..e3284c6f97 --- /dev/null +++ b/unison-src/transcripts/update-with-conflicting-constructor-and-term.md @@ -0,0 +1,17 @@ +# Update with conflicting ability constructor and term + +```ucm:hide +.> builtins.merge +``` + +```unison +unique ability Stream where + send : a -> () + +Stream.send : a -> () +Stream.send _ = () +``` + +```ucm +.> add +``` From dac624331008f86e0d6b37b6c367f3e94d0046b2 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 16 Jan 2022 18:46:12 -0600 Subject: [PATCH 180/297] Fix deprecation detection --- .../src/Unison/Codebase/Editor/Slurp.hs | 134 +++++++++--------- 1 file changed, 70 insertions(+), 64 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index e8ec33dfe3..1da83f7606 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -18,7 +18,7 @@ import qualified Data.List.NonEmpty as NEList import qualified Data.Map as Map import qualified Data.Semigroup.Foldable as Semigroup import qualified Data.Set as Set -import Debug.Pretty.Simple (pTrace, pTraceShowId) +import Debug.Pretty.Simple (pTraceShow, pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp @@ -32,7 +32,6 @@ import Unison.Names (Names) import qualified Unison.Names as Names import Unison.Parser.Ann (Ann) import Unison.Prelude -import qualified Unison.Reference as Ref import qualified Unison.Referent' as Referent import qualified Unison.UnisonFile as UF import qualified Unison.Util.Relation as Rel @@ -173,7 +172,10 @@ computeNamesWithDeprecations :: UF.TypecheckedUnisonFile v Ann -> Names -> Names -computeNamesWithDeprecations uf unalteredCodebaseNames = codebaseNames +computeNamesWithDeprecations uf unalteredCodebaseNames = + pTraceShow ("Deprecated constructors", deprecatedConstructors) + . pTraceShow ("constructorNamesInFile", constructorNamesInFile) + $ codebaseNames where -- Get the set of all DIRECT definitions in the file which a definition depends on. codebaseNames :: Names @@ -195,17 +197,21 @@ computeNamesWithDeprecations uf unalteredCodebaseNames = codebaseNames deprecatedConstructors :: Set Name deprecatedConstructors = - pTraceShowId . pTrace "Deprecated Constructors" $ - let allRefIds = - fmap fst (Map.elems (UF.dataDeclarationsId' uf)) - <> fmap fst (Map.elems (UF.effectDeclarationsId' uf)) - existingConstructorsFromEditedTypes = Set.fromList $ do - -- List Monad - refId <- allRefIds - (name, _ref) <- Names.constructorsForType (Ref.DerivedId refId) unalteredCodebaseNames - pure name - in -- Compute any constructors which were deleted - existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile + let oldRefsForEditedTypes = Set.unions $ do + let declNames = Map.keys (UF.dataDeclarationsId' uf) + let effectNames = Map.keys (UF.effectDeclarationsId' uf) + typeName <- declNames <> effectNames + pure $ Names.typesNamed unalteredCodebaseNames (Name.unsafeFromVar typeName) + existingConstructorsFromEditedTypes = Set.fromList $ do + -- List Monad + ref <- Set.toList oldRefsForEditedTypes + (name, _ref) <- Names.constructorsForType ref unalteredCodebaseNames + pure name + in -- Compute any constructors which were deleted + pTraceShow ("codebaseNames", unalteredCodebaseNames) $ + pTraceShow ("allRefIds", oldRefsForEditedTypes) $ + pTraceShow ("existingConstructorsFromEditedTypes", existingConstructorsFromEditedTypes) $ + existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile computeVarStatuses :: forall v. @@ -217,33 +223,34 @@ computeVarStatuses :: (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) ) -computeVarStatuses depMap varRelation codebaseNames = statuses +computeVarStatuses depMap varRelation codebaseNames = + pTraceShow ("Statuses", statuses) $ + statuses where statuses :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes)) statuses = - pTraceShowId . pTrace "Statuses" $ - let withNotes = - depMap - & Map.mapWithKey - ( \tv deps -> - (definitionStatus tv, deps) - ) - withTransitiveNotes :: - ( Map - (TermedOrTyped v) - ( DefinitionNotes, - (Map (TermedOrTyped v) DefinitionNotes) - ) - ) - withTransitiveNotes = - withNotes - & (fmap . fmap) - ( \deps -> Map.fromList $ do - tv <- Set.toList deps - (notes, _) <- maybeToList (Map.lookup tv withNotes) - pure (tv, notes) - ) - in withTransitiveNotes + let withNotes = + depMap + & Map.mapWithKey + ( \tv deps -> + (definitionStatus tv, deps) + ) + withTransitiveNotes :: + ( Map + (TermedOrTyped v) + ( DefinitionNotes, + (Map (TermedOrTyped v) DefinitionNotes) + ) + ) + withTransitiveNotes = + withNotes + & (fmap . fmap) + ( \deps -> Map.fromList $ do + tv <- Set.toList deps + (notes, _) <- maybeToList (Map.lookup tv withNotes) + pure (tv, notes) + ) + in withTransitiveNotes definitionStatus :: TermedOrTyped v -> DefinitionNotes definitionStatus tv = let ld = case Set.toList (Rel.lookupDom tv varRelation) of @@ -291,34 +298,33 @@ computeVarDeps :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency -> Map (TermedOrTyped v) (Set (TermedOrTyped v)) computeVarDeps uf defsToConsider varRelation = - pTraceShowId $ - let allFileVars :: Set (TermedOrTyped v) - allFileVars = Rel.dom varRelation + let allFileVars :: Set (TermedOrTyped v) + allFileVars = Rel.dom varRelation - allInvolvedVars :: Set (TermedOrTyped v) - allInvolvedVars = - pTraceShowId . pTrace "All involved vars" $ - if Set.null defsToConsider - then allFileVars - else - let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do - v <- Set.toList defsToConsider - -- We don't know whether each var is a type or term, so we try both. - tv <- [Typed v, Termed v] - guard (Rel.memberDom tv varRelation) - pure tv - in varClosure existingVars + allInvolvedVars :: Set (TermedOrTyped v) + allInvolvedVars = + if Set.null defsToConsider + then allFileVars + else + let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do + v <- Set.toList defsToConsider + -- We don't know whether each var is a type or term, so we try both. + tv <- [Typed v, Termed v] + guard (Rel.memberDom tv varRelation) + pure tv + in varClosure existingVars - depMap :: (Map (TermedOrTyped v) (Set (TermedOrTyped v))) - depMap = - pTraceShowId . pTrace "Dep Map" $ - allInvolvedVars - & Set.toList - & fmap - ( \tv -> (tv, Set.delete tv $ varClosure (Set.singleton tv)) - ) - & Map.fromListWith (<>) - in depMap + depMap :: (Map (TermedOrTyped v) (Set (TermedOrTyped v))) + depMap = + allInvolvedVars + & Set.toList + & fmap + ( \tv -> (tv, Set.delete tv $ varClosure (Set.singleton tv)) + ) + & Map.fromListWith (<>) + in pTraceShow ("all involved variables", allInvolvedVars) + . pTraceShow ("depmap", depMap) + $ depMap where -- Compute the closure of all vars which the provided vars depend on. varClosure :: Set (TermedOrTyped v) -> Set (TermedOrTyped v) From b13620a2b419f337504573edce12cc37a7fa5cad Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 16 Jan 2022 19:14:11 -0600 Subject: [PATCH 181/297] Check defsToConsider in deprecation checks --- .../src/Unison/Codebase/Editor/Slurp.hs | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 1da83f7606..1ef7a9a9a6 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -157,7 +157,7 @@ analyzeTypecheckedUnisonFile :: (Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes))) analyzeTypecheckedUnisonFile uf defsToConsider unalteredCodebaseNames = let codebaseNames :: Names - codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames + codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames defsToConsider varDeps :: Map (TermedOrTyped v) (Set (TermedOrTyped v)) varDeps = computeVarDeps uf defsToConsider varRelation statusMap :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) @@ -171,8 +171,9 @@ computeNamesWithDeprecations :: Var v => UF.TypecheckedUnisonFile v Ann -> Names -> + Set v -> Names -computeNamesWithDeprecations uf unalteredCodebaseNames = +computeNamesWithDeprecations uf unalteredCodebaseNames defsToConsider = pTraceShow ("Deprecated constructors", deprecatedConstructors) . pTraceShow ("constructorNamesInFile", constructorNamesInFile) $ codebaseNames @@ -180,7 +181,6 @@ computeNamesWithDeprecations uf unalteredCodebaseNames = -- Get the set of all DIRECT definitions in the file which a definition depends on. codebaseNames :: Names codebaseNames = - -- TODO: make faster -- TODO: how does defsToConsider affect deprecations? Names.filter (`Set.notMember` deprecatedConstructors) unalteredCodebaseNames constructorNamesInFile :: Set Name @@ -201,6 +201,7 @@ computeNamesWithDeprecations uf unalteredCodebaseNames = let declNames = Map.keys (UF.dataDeclarationsId' uf) let effectNames = Map.keys (UF.effectDeclarationsId' uf) typeName <- declNames <> effectNames + when (not . null $ defsToConsider) (guard (typeName `Set.member` defsToConsider)) pure $ Names.typesNamed unalteredCodebaseNames (Name.unsafeFromVar typeName) existingConstructorsFromEditedTypes = Set.fromList $ do -- List Monad @@ -208,10 +209,11 @@ computeNamesWithDeprecations uf unalteredCodebaseNames = (name, _ref) <- Names.constructorsForType ref unalteredCodebaseNames pure name in -- Compute any constructors which were deleted - pTraceShow ("codebaseNames", unalteredCodebaseNames) $ - pTraceShow ("allRefIds", oldRefsForEditedTypes) $ - pTraceShow ("existingConstructorsFromEditedTypes", existingConstructorsFromEditedTypes) $ - existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile + pTraceShow ("defsToConsider", defsToConsider) $ + pTraceShow ("codebaseNames", unalteredCodebaseNames) $ + pTraceShow ("allRefIds", oldRefsForEditedTypes) $ + pTraceShow ("existingConstructorsFromEditedTypes", existingConstructorsFromEditedTypes) $ + existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile computeVarStatuses :: forall v. @@ -255,7 +257,7 @@ computeVarStatuses depMap varRelation codebaseNames = definitionStatus tv = let ld = case Set.toList (Rel.lookupDom tv varRelation) of [r] -> r - _ -> error $ "Expected exactly one LabeledDependency in relation for var: " <> show tv + actual -> error $ "Expected exactly one LabeledDependency in relation for var: " <> show tv <> " but got: " <> show actual v = TT.unTermedOrTyped tv existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) @@ -333,8 +335,10 @@ computeVarDeps uf defsToConsider varRelation = -- TODO: Does this need to contain constructors? Maybe. -- Does not include constructors -labelling :: forall v a. Ord v => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermedOrTyped v) LD.LabeledDependency -labelling uf = decls <> effects <> terms +labelling :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermedOrTyped v) LD.LabeledDependency +labelling uf = + let result = decls <> effects <> terms + in pTraceShow ("varRelation", result) $ result where terms :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency terms = @@ -472,5 +476,5 @@ sortVars = mingleVars :: Ord v => SlurpComponent v -> Set (TermedOrTyped v) mingleVars SlurpComponent {terms, types} = - Set.map Termed types - <> Set.map Typed terms + Set.map Typed types + <> Set.map Termed terms From d33c2009a9ef7490226de9e6cfc8a01f9729695a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Sun, 16 Jan 2022 19:31:01 -0600 Subject: [PATCH 182/297] Make deprecated constructors more robust --- .../src/Unison/Codebase/Editor/Slurp.hs | 103 +++++++++--------- 1 file changed, 53 insertions(+), 50 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 1ef7a9a9a6..4a2d56101d 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -156,42 +156,41 @@ analyzeTypecheckedUnisonFile :: Names -> (Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes))) analyzeTypecheckedUnisonFile uf defsToConsider unalteredCodebaseNames = - let codebaseNames :: Names - codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames defsToConsider + let varRelation :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency + varRelation = labelling uf + involvedVars :: Set (TermedOrTyped v) + involvedVars = computeInvolvedVars uf defsToConsider varRelation + codebaseNames :: Names + codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars varDeps :: Map (TermedOrTyped v) (Set (TermedOrTyped v)) - varDeps = computeVarDeps uf defsToConsider varRelation + varDeps = computeVarDeps uf involvedVars statusMap :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) statusMap = computeVarStatuses varDeps varRelation codebaseNames in pTraceShowId statusMap - where - varRelation :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency - varRelation = labelling uf computeNamesWithDeprecations :: Var v => UF.TypecheckedUnisonFile v Ann -> Names -> - Set v -> + Set (TermedOrTyped v) -> Names -computeNamesWithDeprecations uf unalteredCodebaseNames defsToConsider = +computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = pTraceShow ("Deprecated constructors", deprecatedConstructors) - . pTraceShow ("constructorNamesInFile", constructorNamesInFile) + . pTraceShow ("constructorNamesInFile", constructorsUnderConsideration) $ codebaseNames where -- Get the set of all DIRECT definitions in the file which a definition depends on. codebaseNames :: Names codebaseNames = - -- TODO: how does defsToConsider affect deprecations? Names.filter (`Set.notMember` deprecatedConstructors) unalteredCodebaseNames - constructorNamesInFile :: Set Name - constructorNamesInFile = - Map.elems (UF.dataDeclarationsId' uf) - <> (fmap . fmap) DD.toDataDecl (Map.elems (UF.effectDeclarationsId' uf)) - & fmap snd - & concatMap - ( \decl -> - DD.constructors' decl <&> \(_ann, v, _typ) -> - Name.unsafeFromVar v + constructorsUnderConsideration :: Set Name + constructorsUnderConsideration = + Map.toList (UF.dataDeclarationsId' uf) + <> (fmap . fmap . fmap) DD.toDataDecl (Map.toList (UF.effectDeclarationsId' uf)) + & filter (\(typeV, _) -> Set.member (Typed typeV) involvedVars) + & concatMap (\(_typeV, (_refId, decl)) -> DD.constructors' decl) + & fmap + ( \(_ann, v, _typ) -> Name.unsafeFromVar v ) & Set.fromList @@ -201,7 +200,7 @@ computeNamesWithDeprecations uf unalteredCodebaseNames defsToConsider = let declNames = Map.keys (UF.dataDeclarationsId' uf) let effectNames = Map.keys (UF.effectDeclarationsId' uf) typeName <- declNames <> effectNames - when (not . null $ defsToConsider) (guard (typeName `Set.member` defsToConsider)) + when (not . null $ involvedVars) (guard (Typed typeName `Set.member` involvedVars)) pure $ Names.typesNamed unalteredCodebaseNames (Name.unsafeFromVar typeName) existingConstructorsFromEditedTypes = Set.fromList $ do -- List Monad @@ -209,11 +208,11 @@ computeNamesWithDeprecations uf unalteredCodebaseNames defsToConsider = (name, _ref) <- Names.constructorsForType ref unalteredCodebaseNames pure name in -- Compute any constructors which were deleted - pTraceShow ("defsToConsider", defsToConsider) $ - pTraceShow ("codebaseNames", unalteredCodebaseNames) $ - pTraceShow ("allRefIds", oldRefsForEditedTypes) $ - pTraceShow ("existingConstructorsFromEditedTypes", existingConstructorsFromEditedTypes) $ - existingConstructorsFromEditedTypes `Set.difference` constructorNamesInFile + pTraceShow ("defsToConsider", involvedVars) + . pTraceShow ("codebaseNames", unalteredCodebaseNames) + . pTraceShow ("allRefIds", oldRefsForEditedTypes) + . pTraceShow ("existingConstructorsFromEditedTypes", existingConstructorsFromEditedTypes) + $ existingConstructorsFromEditedTypes `Set.difference` constructorsUnderConsideration computeVarStatuses :: forall v. @@ -292,46 +291,50 @@ computeVarStatuses depMap varRelation codebaseNames = -- Currently we treat conflicts as errors rather than resolving them. _ -> DefErr Conflict -computeVarDeps :: +computeInvolvedVars :: forall v. Var v => UF.TypecheckedUnisonFile v Ann -> Set v -> Rel.Relation (TermedOrTyped v) LD.LabeledDependency -> - Map (TermedOrTyped v) (Set (TermedOrTyped v)) -computeVarDeps uf defsToConsider varRelation = - let allFileVars :: Set (TermedOrTyped v) - allFileVars = Rel.dom varRelation - - allInvolvedVars :: Set (TermedOrTyped v) - allInvolvedVars = - if Set.null defsToConsider - then allFileVars - else - let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do - v <- Set.toList defsToConsider - -- We don't know whether each var is a type or term, so we try both. - tv <- [Typed v, Termed v] - guard (Rel.memberDom tv varRelation) - pure tv - in varClosure existingVars + Set (TermedOrTyped v) +computeInvolvedVars uf defsToConsider varRelation + | Set.null defsToConsider = Rel.dom varRelation + | otherwise = allInvolvedVars + where + allInvolvedVars :: Set (TermedOrTyped v) + allInvolvedVars = + let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do + v <- Set.toList defsToConsider + -- We don't know whether each var is a type or term, so we try both. + tv <- [Typed v, Termed v] + guard (Rel.memberDom tv varRelation) + pure tv + in varClosure uf existingVars - depMap :: (Map (TermedOrTyped v) (Set (TermedOrTyped v))) +computeVarDeps :: + forall v. + Var v => + UF.TypecheckedUnisonFile v Ann -> + Set (TermedOrTyped v) -> + Map (TermedOrTyped v) (Set (TermedOrTyped v)) +computeVarDeps uf allInvolvedVars = + let depMap :: (Map (TermedOrTyped v) (Set (TermedOrTyped v))) depMap = allInvolvedVars & Set.toList & fmap - ( \tv -> (tv, Set.delete tv $ varClosure (Set.singleton tv)) + ( \tv -> (tv, Set.delete tv $ varClosure uf (Set.singleton tv)) ) & Map.fromListWith (<>) in pTraceShow ("all involved variables", allInvolvedVars) . pTraceShow ("depmap", depMap) $ depMap - where - -- Compute the closure of all vars which the provided vars depend on. - varClosure :: Set (TermedOrTyped v) -> Set (TermedOrTyped v) - varClosure (sortVars -> sc) = - mingleVars $ SC.closeWithDependencies uf sc + +-- Compute the closure of all vars which the provided vars depend on. +varClosure :: Ord v => UF.TypecheckedUnisonFile v a -> Set (TermedOrTyped v) -> Set (TermedOrTyped v) +varClosure uf (sortVars -> sc) = + mingleVars $ SC.closeWithDependencies uf sc -- TODO: Does this need to contain constructors? Maybe. -- Does not include constructors From 6b5db96cde0255a7ca6cd7b6e37e2519b04f4e59 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 10:46:15 -0600 Subject: [PATCH 183/297] Only return old slurp result --- .../src/Unison/Codebase/Editor/HandleInput.hs | 252 +----------------- .../src/Unison/Codebase/Editor/Output.hs | 3 - .../src/Unison/Codebase/Editor/Slurp.hs | 125 +++++---- .../src/Unison/CommandLine/OutputMessages.hs | 6 - 4 files changed, 90 insertions(+), 296 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index f6dd0c44fa..8a4e9f91f7 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -152,6 +152,7 @@ import Data.Set.NonEmpty (NESet) import Unison.Symbol (Symbol) import qualified Unison.Codebase.Editor.Input as Input import qualified Unison.Codebase.Editor.Slurp as NewSlurp +import qualified Unison.Codebase.Editor.SlurpResult as SlurpResult defaultPatchNameSegment :: NameSegment defaultPatchNameSegment = "patch" @@ -261,16 +262,11 @@ loop = do let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) withFile [] sourceName (text, lexed) $ \unisonFile -> do currentNames <- currentPathNames - let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile unisonFile - mempty - $ currentNames - let oldSlurpResult = NewSlurp.toSlurpResult unisonFile NewSlurp.UpdateOp Nothing sr - & addAliases currentNames unisonFile currentPath' - -- sr <- toSlurpResult currentPath' unisonFile <$> slurpResultNames + let sr = NewSlurp.analyzeTypecheckedUnisonFile unisonFile mempty Nothing currentNames currentPath' names <- displayNames unisonFile pped <- prettyPrintEnvDecl names let ppe = PPE.suffixifiedPPE pped - respond $ Typechecked sourceName ppe oldSlurpResult unisonFile + respond $ Typechecked sourceName ppe sr unisonFile unlessError' EvaluationFailure do (bindings, e) <- ExceptT . eval . Evaluate ppe $ unisonFile lift do @@ -1263,46 +1259,28 @@ loop = do Nothing -> respond NoUnisonFile Just uf -> do currentNames <- currentPathNames - let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf - vars - $ currentNames - -- sr <- - -- Slurp.disallowUpdates - -- . applySelection hqs uf - -- . toSlurpResult currentPath' uf - -- <$> slurpResultNames - let adds = NewSlurp.sortVars . fromMaybe mempty . Map.lookup NewSlurp.Add $ sr + let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.AddOp) currentNames currentPath' + let adds = SlurpResult.adds sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . NewSlurp.selectDefinitions adds $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf - let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.AddOp (Just vars) sr - & addAliases currentNames uf currentPath' - respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) oldSlurpResult - -- respond $ NewSlurpOutput input (PPE.suffixifiedPPE ppe) NewSlurp.AddOp sr + respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr addDefaultMetadata adds syncRoot PreviewAddI names -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names currentNames <- currentPathNames - let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf - vars - $ currentNames - let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) sr - & addAliases currentNames uf currentPath' - previewResponse sourceName oldSlurpResult uf + let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.AddOp) currentNames currentPath' + previewResponse sourceName sr uf _ -> respond NoUnisonFile UpdateI maybePatchPath names -> handleUpdate input maybePatchPath names PreviewUpdateI names -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names currentNames <- currentPathNames - let sr = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf - vars - $ currentNames - let oldSlurpResult = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) sr - & addAliases currentNames uf currentPath' - previewResponse sourceName oldSlurpResult uf + let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.UpdateOp) currentNames currentPath' + previewResponse sourceName sr uf _ -> respond NoUnisonFile TodoI patchPath branchPath' -> do patch <- getPatchAt (fromMaybe defaultPatchPath patchPath) @@ -1840,16 +1818,7 @@ handleUpdate input maybePatchPath names = do let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames - let newSR = NewSlurp.results . NewSlurp.analyzeTypecheckedUnisonFile uf - vars - $ slurpCheckNames - let sr = NewSlurp.toSlurpResult uf NewSlurp.UpdateOp (Just vars) newSR - & addAliases slurpCheckNames uf currentPath' - -- let sr :: SlurpResult v - -- sr = - -- applySelection hqs uf - -- . toSlurpResult currentPath' uf - -- $ slurpCheckNames + let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.UpdateOp) slurpCheckNames currentPath' addsAndUpdates :: SlurpComponent v addsAndUpdates = Slurp.updates sr <> Slurp.adds sr fileNames :: Names @@ -2770,205 +2739,6 @@ getEndangeredDependents getDependents namesToDelete rootNames = do in NESet.nonEmptySet remainingEndangered pure extinctToEndangered --- Applies the selection filter to the adds/updates of a slurp result, --- meaning that adds/updates should only contain the selection or its transitive --- dependencies, any unselected transitive dependencies of the selection will --- be added to `extraDefinitions`. -_applySelection :: - forall v a. - Var v => - [HQ'.HashQualified Name] -> - UF.TypecheckedUnisonFile v a -> - SlurpResult v -> - SlurpResult v -_applySelection [] _ = id -_applySelection hqs file = \sr@SlurpResult {adds, updates} -> - sr - { adds = adds `SC.intersection` closed, - updates = updates `SC.intersection` closed, - extraDefinitions = closed `SC.difference` selection - } - where - selectedNames = - Names.filterByHQs (Set.fromList hqs) (UF.typecheckedToNames file) - selection, closed :: SlurpComponent v - selection = SlurpComponent selectedTypes selectedTerms - closed = SC.closeWithDependencies file selection - selectedTypes, selectedTerms :: Set v - selectedTypes = Set.map var $ R.dom (Names.types selectedNames) - selectedTerms = Set.map var $ R.dom (Names.terms selectedNames) - -var :: Var v => Name -> v -var name = Var.named (Name.toText name) - --- _toSlurpResult :: --- forall v. --- Var v => --- Path.Absolute -> --- UF.TypecheckedUnisonFile v Ann -> --- Names -> --- SlurpResult v --- _toSlurpResult curPath uf existingNames = --- Slurp.subtractComponent (conflicts <> ctorCollisions) $ --- SlurpResult --- uf --- mempty --- adds --- dups --- mempty --- conflicts --- updates --- termCtorCollisions --- ctorTermCollisions --- termAliases --- typeAliases --- mempty --- where --- fileNames = UF.typecheckedToNames uf - --- sc :: R.Relation Name Referent -> R.Relation Name Reference -> SlurpComponent v --- sc terms types = --- SlurpComponent --- { terms = Set.map var (R.dom terms), --- types = Set.map var (R.dom types) --- } - --- -- conflict (n,r) if n is conflicted in names0 --- conflicts :: SlurpComponent v --- conflicts = sc terms types --- where --- terms = --- R.filterDom --- (conflicted . Names.termsNamed existingNames) --- (Names.terms fileNames) --- types = --- R.filterDom --- (conflicted . Names.typesNamed existingNames) --- (Names.types fileNames) --- conflicted s = Set.size s > 1 - --- ctorCollisions :: SlurpComponent v --- ctorCollisions = --- mempty {SC.terms = termCtorCollisions <> ctorTermCollisions} - --- -- termCtorCollision (n,r) if (n, r' /= r) exists in existingNames and --- -- r is Ref and r' is Con --- termCtorCollisions :: Set v --- termCtorCollisions = --- Set.fromList --- [ var n --- | (n, Referent.Ref {}) <- R.toList (Names.terms fileNames), --- [r@Referent.Con {}] <- [toList $ Names.termsNamed existingNames n], --- -- ignore collisions w/ ctors of types being updated --- Set.notMember (Referent.toReference r) typesToUpdate --- ] - --- -- the set of typerefs that are being updated by this file --- typesToUpdate :: Set Reference --- typesToUpdate = --- Set.fromList --- [ r --- | (n, r') <- R.toList (Names.types fileNames), --- r <- toList (Names.typesNamed existingNames n), --- r /= r' --- ] - --- -- ctorTermCollisions (n,r) if (n, r' /= r) exists in names0 and r is Con --- -- and r' is Ref except we relaxed it to where r' can be Con or Ref --- -- what if (n,r) and (n,r' /= r) exists in names and r, r' are Con --- ctorTermCollisions :: Set v --- ctorTermCollisions = --- Set.fromList --- [ var n --- | (n, Referent.Con {}) <- R.toList (Names.terms fileNames), --- r <- toList $ Names.termsNamed existingNames n, --- -- ignore collisions w/ ctors of types being updated --- Set.notMember (Referent.toReference r) typesToUpdate, --- Set.notMember (var n) (terms dups) --- ] - --- -- duplicate (n,r) if (n,r) exists in names0 --- dups :: SlurpComponent v --- dups = sc terms types --- where --- terms = R.intersection (Names.terms existingNames) (Names.terms fileNames) --- types = R.intersection (Names.types existingNames) (Names.types fileNames) - --- -- update (n,r) if (n,r' /= r) exists in existingNames and r, r' are Ref --- updates :: SlurpComponent v --- updates = SlurpComponent (Set.fromList types) (Set.fromList terms) --- where --- terms = --- [ var n --- | (n, r'@Referent.Ref {}) <- R.toList (Names.terms fileNames), --- [r@Referent.Ref {}] <- [toList $ Names.termsNamed existingNames n], --- r' /= r --- ] --- types = --- [ var n --- | (n, r') <- R.toList (Names.types fileNames), --- [r] <- [toList $ Names.typesNamed existingNames n], --- r' /= r --- ] - - --- -- (n,r) is in `adds` if n isn't in existingNames --- adds = sc terms types --- where --- terms = addTerms (Names.terms existingNames) (Names.terms fileNames) --- types = addTypes (Names.types existingNames) (Names.types fileNames) --- addTerms existingNames = R.filter go --- where --- go (n, Referent.Ref {}) = (not . R.memberDom n) existingNames --- go _ = False --- addTypes existingNames = R.filter go --- where --- go (n, _) = (not . R.memberDom n) existingNames - - -addAliases :: forall v a. (Ord v, Var v) => Names -> UF.TypecheckedUnisonFile v a -> Path.Absolute -> SlurpResult v -> SlurpResult v -addAliases existingNames uf curPath sr = sr{ termAlias=termAliases, typeAlias=typeAliases } - where - fileNames = UF.typecheckedToNames uf - buildAliases :: - R.Relation Name Referent -> - R.Relation Name Referent -> - Set v -> - Map v Slurp.Aliases - buildAliases existingNames namesFromFile dups = - Map.fromList - [ ( var n, - if null aliasesOfOld - then Slurp.AddAliases aliasesOfNew - else Slurp.UpdateAliases aliasesOfOld aliasesOfNew - ) - | (n, r@Referent.Ref {}) <- R.toList namesFromFile, - -- All the refs whose names include `n`, and are not `r` - let refs = Set.delete r $ R.lookupDom n existingNames - aliasesOfNew = - Set.map (Path.unprefixName curPath) . Set.delete n $ - R.lookupRan r existingNames - aliasesOfOld = - Set.map (Path.unprefixName curPath) . Set.delete n . R.dom $ - R.restrictRan existingNames refs, - not (null aliasesOfNew && null aliasesOfOld), - Set.notMember (var n) dups - ] - - termAliases :: Map v Slurp.Aliases - termAliases = - buildAliases - (Names.terms existingNames) - (Names.terms fileNames) - (SC.terms (duplicates sr)) - - typeAliases :: Map v Slurp.Aliases - typeAliases = - buildAliases - (R.mapRan Referent.Ref $ Names.types existingNames) - (R.mapRan Referent.Ref $ Names.types fileNames) - (SC.types (duplicates sr)) - displayI :: Monad m => Names -> diff --git a/unison-cli/src/Unison/Codebase/Editor/Output.hs b/unison-cli/src/Unison/Codebase/Editor/Output.hs index b4b6120c14..e1a3e297eb 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Output.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Output.hs @@ -64,7 +64,6 @@ import qualified Unison.WatchKind as WK import Data.Set.NonEmpty (NESet) import qualified Unison.CommandLine.InputPattern as Input import Data.List.NonEmpty (NonEmpty) -import qualified Unison.Codebase.Editor.Slurp as NewSlurp type ListDetailed = Bool @@ -160,7 +159,6 @@ data Output v | ListOfPatches (Set Name) | -- show the result of add/update SlurpOutput Input PPE.PrettyPrintEnv (SlurpResult v) - | NewSlurpOutput Input PPE.PrettyPrintEnv NewSlurp.SlurpOp (NewSlurp.Result v) | -- Original source, followed by the errors: ParseErrors Text [Parser.Err v] | TypeErrors Text PPE.PrettyPrintEnv [Context.ErrorNote v Ann] @@ -316,7 +314,6 @@ isFailure o = case o of ListOfDefinitions _ _ ds -> null ds ListOfPatches s -> Set.null s SlurpOutput _ _ sr -> not $ SR.isOk sr - NewSlurpOutput _ _ _ sr -> NewSlurp.anyErrors sr ParseErrors {} -> True TypeErrors {} -> True CompilerBugs {} -> True diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 4a2d56101d..615fe9e82e 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -1,6 +1,6 @@ module Unison.Codebase.Editor.Slurp ( SlurpOp (..), - Result, + VarsByStatus, BlockStatus (..), anyErrors, results, @@ -22,8 +22,10 @@ import Debug.Pretty.Simple (pTraceShow, pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp +import qualified Unison.Codebase.Editor.SlurpResult as SR import Unison.Codebase.Editor.TermsAndTypes (TermedOrTyped (Termed, Typed)) import qualified Unison.Codebase.Editor.TermsAndTypes as TT +import qualified Unison.Codebase.Path as Path import qualified Unison.DataDeclaration as DD import qualified Unison.LabeledDependency as LD import Unison.Name (Name) @@ -32,25 +34,17 @@ import Unison.Names (Names) import qualified Unison.Names as Names import Unison.Parser.Ann (Ann) import Unison.Prelude +import Unison.Referent (Referent) +import qualified Unison.Referent as Referent import qualified Unison.Referent' as Referent import qualified Unison.UnisonFile as UF +import qualified Unison.UnisonFile.Names as UF import qualified Unison.Util.Relation as Rel import qualified Unison.Util.Set as Set import Unison.Var (Var) +import qualified Unison.Var as Var import Unison.WatchKind (pattern TestWatch) --- Determine which components we're considering, i.e. find the components of all provided --- vars, then include any components they depend on. --- --- Then, compute any deprecations and build the env --- Then, consider all vars in each component and get status (collision, add, or update) --- Collect and collapse the statuses of each component. --- I.e., if any definition has an error, the whole component is an error --- if any piece needs an update --- --- --- Does depending on a type also mean depending on all its constructors - data SlurpOp = AddOp | UpdateOp deriving (Eq, Show) @@ -99,26 +93,12 @@ data DefinitionNotes | DefErr SlurpErr deriving (Show) -type SlurpResult v = Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) - -type Result v = Map (BlockStatus v) (Set (TermedOrTyped v)) - --- data Result v = Result --- { addable :: SlurpComponent v, --- needUpdate :: SlurpComponent v, --- duplicate :: SlurpComponent v, --- blockedTerms :: Map (SlurpErr v) (Set v) --- } +type SlurpAnalysis v = Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) --- instance Semigroup (Result v) where --- Result adds1 updates1 duplicates1 tcColl1 ctColl1 <> Result adds2 updates2 duplicates2 tcColl2 ctColl2 = --- Result (adds1 <> adds2) (updates1 <> updates2) (duplicates1 <> duplicates2) (tcColl1 <> tcColl2) (ctColl1 <> ctColl2) - --- instance Monoid (Result v) where --- mempty = Result mempty mempty mempty mempty mempty +type VarsByStatus v = Map (BlockStatus v) (Set (TermedOrTyped v)) -- Compute all definitions which can be added, or the reasons why a def can't be added. -results :: forall v. (Ord v, Show v) => SlurpResult v -> Result v +results :: forall v. (Ord v, Show v) => SlurpAnalysis v -> VarsByStatus v results sr = pTraceShowId $ analyzed where @@ -153,9 +133,11 @@ analyzeTypecheckedUnisonFile :: Var v => UF.TypecheckedUnisonFile v Ann -> Set v -> + Maybe SlurpOp -> Names -> - (Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes))) -analyzeTypecheckedUnisonFile uf defsToConsider unalteredCodebaseNames = + Path.Absolute -> + SR.SlurpResult v +analyzeTypecheckedUnisonFile uf defsToConsider slurpOp unalteredCodebaseNames currentPath = let varRelation :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency varRelation = labelling uf involvedVars :: Set (TermedOrTyped v) @@ -164,9 +146,15 @@ analyzeTypecheckedUnisonFile uf defsToConsider unalteredCodebaseNames = codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars varDeps :: Map (TermedOrTyped v) (Set (TermedOrTyped v)) varDeps = computeVarDeps uf involvedVars - statusMap :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) - statusMap = computeVarStatuses varDeps varRelation codebaseNames - in pTraceShowId statusMap + analysis :: SlurpAnalysis v + analysis = computeVarStatuses varDeps varRelation codebaseNames + varsByStatus :: VarsByStatus v + varsByStatus = results analysis + slurpResult :: SR.SlurpResult v + slurpResult = + toSlurpResult uf (fromMaybe UpdateOp slurpOp) defsToConsider varsByStatus + & addAliases codebaseNames currentPath + in pTraceShowId slurpResult computeNamesWithDeprecations :: Var v => @@ -395,21 +383,20 @@ toSlurpResult :: (Ord v, Show v) => UF.TypecheckedUnisonFile v Ann -> SlurpOp -> - Maybe (Set v) -> - Result v -> + Set v -> + VarsByStatus v -> OldSlurp.SlurpResult v -toSlurpResult uf op mvs r = +toSlurpResult uf op requestedVars varsByStatus = pTraceShowId $ - -- TODO: Do a proper partition to speed this up. OldSlurp.SlurpResult { OldSlurp.originalFile = uf, OldSlurp.extraDefinitions = - case mvs of - Nothing -> mempty - Just vs -> - let allVars = fold r + if Set.null requestedVars + then mempty + else + let allVars = fold varsByStatus desired = - vs + requestedVars & Set.flatMap (\v -> Set.fromList [Typed v, Termed v]) in sortVars $ Set.difference allVars desired, OldSlurp.adds = adds, @@ -430,7 +417,7 @@ toSlurpResult uf op mvs r = where adds, duplicates, updates, termCtorColl, ctorTermColl, blocked, conflicts :: SlurpComponent v (adds, duplicates, updates, termCtorColl, (ctorTermColl, blocked, conflicts)) = - r + varsByStatus & ifoldMap ( \k tvs -> let sc = sortVars $ tvs @@ -455,7 +442,7 @@ toSlurpResult uf op mvs r = Typed v -> SlurpComponent {terms = mempty, types = Set.singleton v} Termed v -> SlurpComponent {terms = Set.singleton v, types = mempty} -anyErrors :: Result v -> Bool +anyErrors :: VarsByStatus v -> Bool anyErrors r = any isError . Map.keys $ Map.filter (not . null) r where @@ -481,3 +468,49 @@ mingleVars :: Ord v => SlurpComponent v -> Set (TermedOrTyped v) mingleVars SlurpComponent {terms, types} = Set.map Typed types <> Set.map Termed terms + +addAliases :: forall v. (Ord v, Var v) => Names -> Path.Absolute -> SR.SlurpResult v -> SR.SlurpResult v +addAliases existingNames curPath sr = sr {SR.termAlias = termAliases, SR.typeAlias = typeAliases} + where + fileNames = UF.typecheckedToNames $ SR.originalFile sr + buildAliases :: + Rel.Relation Name Referent -> + Rel.Relation Name Referent -> + Set v -> + Map v SR.Aliases + buildAliases existingNames namesFromFile dups = + Map.fromList + [ ( var n, + if null aliasesOfOld + then SR.AddAliases aliasesOfNew + else SR.UpdateAliases aliasesOfOld aliasesOfNew + ) + | (n, r@Referent.Ref {}) <- Rel.toList namesFromFile, + -- All the refs whose names include `n`, and are not `r` + let refs = Set.delete r $ Rel.lookupDom n existingNames + aliasesOfNew = + Set.map (Path.unprefixName curPath) . Set.delete n $ + Rel.lookupRan r existingNames + aliasesOfOld = + Set.map (Path.unprefixName curPath) . Set.delete n . Rel.dom $ + Rel.restrictRan existingNames refs, + not (null aliasesOfNew && null aliasesOfOld), + Set.notMember (var n) dups + ] + + termAliases :: Map v SR.Aliases + termAliases = + buildAliases + (Names.terms existingNames) + (Names.terms fileNames) + (SC.terms (SR.duplicates sr)) + + typeAliases :: Map v SR.Aliases + typeAliases = + buildAliases + (Rel.mapRan Referent.Ref $ Names.types existingNames) + (Rel.mapRan Referent.Ref $ Names.types fileNames) + (SC.types (SR.duplicates sr)) + +var :: Var v => Name -> v +var name = Var.named (Name.toText name) diff --git a/unison-cli/src/Unison/CommandLine/OutputMessages.hs b/unison-cli/src/Unison/CommandLine/OutputMessages.hs index 47af6c083a..151b912281 100644 --- a/unison-cli/src/Unison/CommandLine/OutputMessages.hs +++ b/unison-cli/src/Unison/CommandLine/OutputMessages.hs @@ -750,12 +750,6 @@ notifyUser dir o = case o of Input.UpdateI {} -> True _ -> False in pure $ SlurpResult.pretty isPast ppe s - NewSlurpOutput input ppe slurpOp result -> - let isPast = case input of - Input.AddI {} -> True - Input.UpdateI {} -> True - _ -> False - in pure $ undefined isPast ppe slurpOp result NoExactTypeMatches -> pure . P.callout "☝️" $ P.wrap "I couldn't find exact type matches, resorting to fuzzy matching..." TypeParseError src e -> From 0993de48293e4279436b9e10a13aa4a62bcee909 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 10:59:32 -0600 Subject: [PATCH 184/297] Add ability-term conflict transcripts --- .../ability-term-conflicts-on-update.md | 34 ++++++- ...ability-term-conflicts-on-update.output.md | 95 ++++++++++++++++++- 2 files changed, 121 insertions(+), 8 deletions(-) diff --git a/unison-src/transcripts/ability-term-conflicts-on-update.md b/unison-src/transcripts/ability-term-conflicts-on-update.md index 8b7eed2923..28d33390a5 100644 --- a/unison-src/transcripts/ability-term-conflicts-on-update.md +++ b/unison-src/transcripts/ability-term-conflicts-on-update.md @@ -26,15 +26,41 @@ removed from Channels. unique ability Channels where sends : [a] -> {Channels} () -Channels.send : a -> {Channels} () +Channels.send : a -> () +Channels.send a = () + +thing : '() +thing _ = send 1 +``` + +These should fail with a term/ctor conflict since we exclude the ability from the update. + +```ucm:error +.ns> update patch Channels.send +.ns> update patch thing +``` + +If however, `Channels.send` and `thing` _depend_ on `Channels`, updating them should succeed since it pulls in the ability as a dependency. + +```unison +unique ability Channels where + sends : [a] -> {Channels} () + +Channels.send : a -> () Channels.send a = sends [a] -thing : '{Channels} () +thing : '() thing _ = send 1 ``` -The 'update' will succeed up until it tries to resolve the reference to `send` -within `thing`; because the old send is deleted and the new `send` isn't ever added. +These updates should succeed since `Channels` is a dependency. + +```ucm +.ns> update.preview patch Channels.send +.ns> update.preview patch thing +``` + +We should also be able to successfully update the whole thing. ```ucm .ns> update diff --git a/unison-src/transcripts/ability-term-conflicts-on-update.output.md b/unison-src/transcripts/ability-term-conflicts-on-update.output.md index a9f0c1fcaf..65e8d08abe 100644 --- a/unison-src/transcripts/ability-term-conflicts-on-update.output.md +++ b/unison-src/transcripts/ability-term-conflicts-on-update.output.md @@ -39,10 +39,63 @@ removed from Channels. unique ability Channels where sends : [a] -> {Channels} () -Channels.send : a -> {Channels} () +Channels.send : a -> () +Channels.send a = () + +thing : '() +thing _ = send 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + Channels.send : a -> () + thing : '() + + ⍟ These names already exist. You can `update` them to your + new definition: + + unique ability Channels + +``` +These should fail with a term/ctor conflict since we exclude the ability from the update. + +```ucm +.ns> update patch Channels.send + + x These definitions failed: + + Reason + term/ctor collision Channels.send : a -> () + + Tip: Use `help filestatus` to learn more. + +.ns> update patch thing + + x These definitions failed: + + Reason + term/ctor collision Channels.send : a -> () + blocked thing : '() + + Tip: Use `help filestatus` to learn more. + +``` +If however, `Channels.send` and `thing` _depend_ on `Channels`, updating them should succeed since it pulls in the ability as a dependency. + +```unison +unique ability Channels where + sends : [a] -> {Channels} () + +Channels.send : a -> () Channels.send a = sends [a] -thing : '{Channels} () +thing : '() thing _ = send 1 ``` @@ -63,8 +116,42 @@ thing _ = send 1 unique ability Channels ``` -The 'update' will succeed up until it tries to resolve the reference to `send` -within `thing`; because the old send is deleted and the new `send` isn't ever added. +These updates should succeed since `Channels` is a dependency. + +```ucm +.ns> update.preview patch Channels.send + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + Channels.send : a ->{Channels} () + + ⍟ These names already exist. You can `update` them to your + new definition: + + unique ability Channels + +.ns> update.preview patch thing + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + Channels.send : a ->{Channels} () + thing : '{Channels} () + + ⍟ These names already exist. You can `update` them to your + new definition: + + unique ability Channels + +``` +We should also be able to successfully update the whole thing. ```ucm .ns> update From cd6f039e1832228ed66368d33f24293b4b06be1b Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 10:59:56 -0600 Subject: [PATCH 185/297] Remove unused combinators from old slurp result --- .../src/Unison/Codebase/Editor/SlurpResult.hs | 44 ------------------- 1 file changed, 44 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs index 955dc5c950..a4e382ec81 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs @@ -17,7 +17,6 @@ import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.ConstructorReference as ConstructorReference -import qualified Unison.DataDeclaration as DD import qualified Unison.DeclPrinter as DeclPrinter import qualified Unison.HashQualified as HQ import qualified Unison.Name as Name @@ -83,49 +82,6 @@ constructorsFor types uf = let isOkCtor _ = False in Set.map Name.toVar $ R.dom ctorNames --- Remove `removed` from the slurp result, and move any defns with transitive --- dependencies on the removed component into `defsWithBlockedDependencies`. --- Also removes `removed` from `extraDefinitions`. -subtractComponent :: forall v. Var v => SlurpComponent v -> SlurpResult v -> SlurpResult v -subtractComponent removed sr = - sr { adds = SC.difference (adds sr) (removed <> blocked) - , updates = SC.difference (updates sr) (removed <> blocked) - , defsWithBlockedDependencies = blocked - , extraDefinitions = SC.difference (extraDefinitions sr) blocked - } - where - -- for each v in adds, move to blocked if transitive dependency in removed - blocked = defsWithBlockedDependencies sr <> - SC.difference (blockedTerms <> blockedTypes) removed - - uf = originalFile sr - constructorsFor v = case UF.lookupDecl v uf of - Nothing -> mempty - Just (_, e) -> Set.fromList . DD.constructorVars $ either DD.toDataDecl id e - - blockedTypes = foldMap doType . SC.types $ adds sr <> updates sr where - -- include this type if it or any of its dependencies are removed - doType :: v -> SlurpComponent v - doType v = - if null (Set.intersection (SC.types removed) (SC.types (SC.closeWithDependencies uf vc))) - && null (Set.intersection (SC.terms removed) (constructorsFor v)) - then mempty else vc - where vc = mempty { types = Set.singleton v } - - blockedTerms = foldMap doTerm . SC.terms $ adds sr <> updates sr where - doTerm :: v -> SlurpComponent v - doTerm v = - if mempty == SC.intersection removed (SC.closeWithDependencies uf vc) - then mempty else vc - where vc = mempty { terms = Set.singleton v } - --- Move `updates` to `collisions`, and move any dependents of those updates to `*WithBlockedDependencies`. --- Subtract stuff from `extraDefinitions` that isn't in `adds` or `updates` -disallowUpdates :: forall v. Var v => SlurpResult v -> SlurpResult v -disallowUpdates sr = - let sr2 = subtractComponent (updates sr) sr - in sr2 { collisions = collisions sr2 <> updates sr } - isNonempty :: Ord v => SlurpResult v -> Bool isNonempty s = Monoid.nonEmpty (adds s) || Monoid.nonEmpty (updates s) From 6bbc6b3a07924692840c23ae8c8e1dc70bf2026a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 11:15:21 -0600 Subject: [PATCH 186/297] Re-use old filterBySlurpResult --- unison-cli/package.yaml | 2 - .../src/Unison/Codebase/Editor/HandleInput.hs | 2 +- .../src/Unison/Codebase/Editor/Slurp.hs | 123 ++++++++---------- .../Unison/Codebase/Editor/TermsAndTypes.hs | 65 --------- unison-cli/unison-cli.cabal | 11 -- 5 files changed, 54 insertions(+), 149 deletions(-) delete mode 100644 unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 33d5b5b202..953981fbcc 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -11,8 +11,6 @@ ghc-options: -Wall dependencies: - semigroupoids - - distributive - - adjunctions - pretty-simple - ListLike - async diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 8a4e9f91f7..a2baebce01 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1262,7 +1262,7 @@ loop = do let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.AddOp) currentNames currentPath' let adds = SlurpResult.adds sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) - eval . AddDefsToCodebase . NewSlurp.selectDefinitions adds $ uf + eval . AddDefsToCodebase . filterBySlurpResult sr $ uf ppe <- prettyPrintEnvDecl =<< displayNames uf respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr addDefaultMetadata adds diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 615fe9e82e..317c57d40c 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -5,15 +5,12 @@ module Unison.Codebase.Editor.Slurp anyErrors, results, analyzeTypecheckedUnisonFile, - selectDefinitions, toSlurpResult, sortVars, ) where import Control.Lens -import Data.Bifunctor (second) -import qualified Data.List as List import qualified Data.List.NonEmpty as NEList import qualified Data.Map as Map import qualified Data.Semigroup.Foldable as Semigroup @@ -23,8 +20,6 @@ import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp import qualified Unison.Codebase.Editor.SlurpResult as SR -import Unison.Codebase.Editor.TermsAndTypes (TermedOrTyped (Termed, Typed)) -import qualified Unison.Codebase.Editor.TermsAndTypes as TT import qualified Unison.Codebase.Path as Path import qualified Unison.DataDeclaration as DD import qualified Unison.LabeledDependency as LD @@ -48,6 +43,13 @@ import Unison.WatchKind (pattern TestWatch) data SlurpOp = AddOp | UpdateOp deriving (Eq, Show) +data TermOrTypeVar v = TermVar v | TypeVar v + deriving (Eq, Ord, Show) + +unlabeled :: TermOrTypeVar v -> v +unlabeled (TermVar v) = v +unlabeled (TypeVar v) = v + data SlurpStatus = New | Updated @@ -57,9 +59,9 @@ data SlurpStatus data BlockStatus v = Add | Duplicated - | NeedsUpdate (TermedOrTyped v) + | NeedsUpdate (TermOrTypeVar v) | Update - | ErrFrom (TermedOrTyped v) SlurpErr + | ErrFrom (TermOrTypeVar v) SlurpErr | SelfErr SlurpErr deriving (Eq, Ord, Show) @@ -93,16 +95,16 @@ data DefinitionNotes | DefErr SlurpErr deriving (Show) -type SlurpAnalysis v = Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) +type SlurpAnalysis v = Map (TermOrTypeVar v) (DefinitionNotes, Map (TermOrTypeVar v) DefinitionNotes) -type VarsByStatus v = Map (BlockStatus v) (Set (TermedOrTyped v)) +type VarsByStatus v = Map (BlockStatus v) (Set (TermOrTypeVar v)) -- Compute all definitions which can be added, or the reasons why a def can't be added. results :: forall v. (Ord v, Show v) => SlurpAnalysis v -> VarsByStatus v results sr = pTraceShowId $ analyzed where - analyzed :: Map (BlockStatus v) (Set (TermedOrTyped v)) + analyzed :: Map (BlockStatus v) (Set (TermOrTypeVar v)) analyzed = sr & Map.toList @@ -114,7 +116,7 @@ results sr = ) & Map.fromListWith (<>) -getBlockStatus :: (Ord v, Show v) => Bool -> DefinitionNotes -> TermedOrTyped v -> BlockStatus v +getBlockStatus :: (Ord v, Show v) => Bool -> DefinitionNotes -> TermOrTypeVar v -> BlockStatus v getBlockStatus isDep defNotes tv = case defNotes of DefOk Updated -> if isDep then NeedsUpdate tv else Update @@ -138,13 +140,13 @@ analyzeTypecheckedUnisonFile :: Path.Absolute -> SR.SlurpResult v analyzeTypecheckedUnisonFile uf defsToConsider slurpOp unalteredCodebaseNames currentPath = - let varRelation :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency + let varRelation :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency varRelation = labelling uf - involvedVars :: Set (TermedOrTyped v) + involvedVars :: Set (TermOrTypeVar v) involvedVars = computeInvolvedVars uf defsToConsider varRelation codebaseNames :: Names codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars - varDeps :: Map (TermedOrTyped v) (Set (TermedOrTyped v)) + varDeps :: Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) varDeps = computeVarDeps uf involvedVars analysis :: SlurpAnalysis v analysis = computeVarStatuses varDeps varRelation codebaseNames @@ -160,7 +162,7 @@ computeNamesWithDeprecations :: Var v => UF.TypecheckedUnisonFile v Ann -> Names -> - Set (TermedOrTyped v) -> + Set (TermOrTypeVar v) -> Names computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = pTraceShow ("Deprecated constructors", deprecatedConstructors) @@ -175,7 +177,7 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = constructorsUnderConsideration = Map.toList (UF.dataDeclarationsId' uf) <> (fmap . fmap . fmap) DD.toDataDecl (Map.toList (UF.effectDeclarationsId' uf)) - & filter (\(typeV, _) -> Set.member (Typed typeV) involvedVars) + & filter (\(typeV, _) -> Set.member (TypeVar typeV) involvedVars) & concatMap (\(_typeV, (_refId, decl)) -> DD.constructors' decl) & fmap ( \(_ann, v, _typ) -> Name.unsafeFromVar v @@ -188,7 +190,7 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = let declNames = Map.keys (UF.dataDeclarationsId' uf) let effectNames = Map.keys (UF.effectDeclarationsId' uf) typeName <- declNames <> effectNames - when (not . null $ involvedVars) (guard (Typed typeName `Set.member` involvedVars)) + when (not . null $ involvedVars) (guard (TypeVar typeName `Set.member` involvedVars)) pure $ Names.typesNamed unalteredCodebaseNames (Name.unsafeFromVar typeName) existingConstructorsFromEditedTypes = Set.fromList $ do -- List Monad @@ -205,18 +207,18 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = computeVarStatuses :: forall v. (Ord v, Var v) => - Map (TermedOrTyped v) (Set (TermedOrTyped v)) -> - Rel.Relation (TermedOrTyped v) LD.LabeledDependency -> + Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) -> + Rel.Relation (TermOrTypeVar v) LD.LabeledDependency -> Names -> ( Map - (TermedOrTyped v) - (DefinitionNotes, Map (TermedOrTyped v) DefinitionNotes) + (TermOrTypeVar v) + (DefinitionNotes, Map (TermOrTypeVar v) DefinitionNotes) ) computeVarStatuses depMap varRelation codebaseNames = pTraceShow ("Statuses", statuses) $ statuses where - statuses :: Map (TermedOrTyped v) (DefinitionNotes, Map (TermedOrTyped v) (DefinitionNotes)) + statuses :: Map (TermOrTypeVar v) (DefinitionNotes, Map (TermOrTypeVar v) (DefinitionNotes)) statuses = let withNotes = depMap @@ -226,9 +228,9 @@ computeVarStatuses depMap varRelation codebaseNames = ) withTransitiveNotes :: ( Map - (TermedOrTyped v) + (TermOrTypeVar v) ( DefinitionNotes, - (Map (TermedOrTyped v) DefinitionNotes) + (Map (TermOrTypeVar v) DefinitionNotes) ) ) withTransitiveNotes = @@ -240,12 +242,12 @@ computeVarStatuses depMap varRelation codebaseNames = pure (tv, notes) ) in withTransitiveNotes - definitionStatus :: TermedOrTyped v -> DefinitionNotes + definitionStatus :: TermOrTypeVar v -> DefinitionNotes definitionStatus tv = let ld = case Set.toList (Rel.lookupDom tv varRelation) of [r] -> r actual -> error $ "Expected exactly one LabeledDependency in relation for var: " <> show tv <> " but got: " <> show actual - v = TT.unTermedOrTyped tv + v = unlabeled tv existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) in case ld of @@ -284,18 +286,18 @@ computeInvolvedVars :: Var v => UF.TypecheckedUnisonFile v Ann -> Set v -> - Rel.Relation (TermedOrTyped v) LD.LabeledDependency -> - Set (TermedOrTyped v) + Rel.Relation (TermOrTypeVar v) LD.LabeledDependency -> + Set (TermOrTypeVar v) computeInvolvedVars uf defsToConsider varRelation | Set.null defsToConsider = Rel.dom varRelation | otherwise = allInvolvedVars where - allInvolvedVars :: Set (TermedOrTyped v) + allInvolvedVars :: Set (TermOrTypeVar v) allInvolvedVars = - let existingVars :: Set (TermedOrTyped v) = Set.fromList $ do + let existingVars :: Set (TermOrTypeVar v) = Set.fromList $ do v <- Set.toList defsToConsider -- We don't know whether each var is a type or term, so we try both. - tv <- [Typed v, Termed v] + tv <- [TypeVar v, TermVar v] guard (Rel.memberDom tv varRelation) pure tv in varClosure uf existingVars @@ -304,10 +306,10 @@ computeVarDeps :: forall v. Var v => UF.TypecheckedUnisonFile v Ann -> - Set (TermedOrTyped v) -> - Map (TermedOrTyped v) (Set (TermedOrTyped v)) + Set (TermOrTypeVar v) -> + Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) computeVarDeps uf allInvolvedVars = - let depMap :: (Map (TermedOrTyped v) (Set (TermedOrTyped v))) + let depMap :: (Map (TermOrTypeVar v) (Set (TermOrTypeVar v))) depMap = allInvolvedVars & Set.toList @@ -320,18 +322,18 @@ computeVarDeps uf allInvolvedVars = $ depMap -- Compute the closure of all vars which the provided vars depend on. -varClosure :: Ord v => UF.TypecheckedUnisonFile v a -> Set (TermedOrTyped v) -> Set (TermedOrTyped v) +varClosure :: Ord v => UF.TypecheckedUnisonFile v a -> Set (TermOrTypeVar v) -> Set (TermOrTypeVar v) varClosure uf (sortVars -> sc) = mingleVars $ SC.closeWithDependencies uf sc -- TODO: Does this need to contain constructors? Maybe. -- Does not include constructors -labelling :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermedOrTyped v) LD.LabeledDependency +labelling :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermOrTypeVar v) LD.LabeledDependency labelling uf = let result = decls <> effects <> terms in pTraceShow ("varRelation", result) $ result where - terms :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency + terms :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency terms = UF.hashTermsId uf & Map.toList @@ -341,43 +343,24 @@ labelling uf = ( \case (v, (refId, w, _, _)) | w == Just TestWatch || w == Nothing -> - Just (Termed v, LD.derivedTerm refId) + Just (TermVar v, LD.derivedTerm refId) _ -> Nothing ) & Rel.fromList - decls :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency + decls :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency decls = UF.dataDeclarationsId' uf & Map.toList - & fmap (\(v, (refId, _)) -> (Typed v, LD.derivedType refId)) + & fmap (\(v, (refId, _)) -> (TypeVar v, LD.derivedType refId)) & Rel.fromList - effects :: Rel.Relation (TermedOrTyped v) LD.LabeledDependency + effects :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency effects = UF.effectDeclarationsId' uf & Map.toList - & fmap (\(v, (refId, _)) -> (Typed v, (LD.derivedType refId))) + & fmap (\(v, (refId, _)) -> (TypeVar v, (LD.derivedType refId))) & Rel.fromList -selectDefinitions :: Ord v => SlurpComponent v -> UF.TypecheckedUnisonFile v a -> UF.TypecheckedUnisonFile v a -selectDefinitions - (SlurpComponent {terms, types}) - ( UF.TypecheckedUnisonFileId - dataDeclarations' - effectDeclarations' - topLevelComponents' - watchComponents - hashTerms - ) = - UF.TypecheckedUnisonFileId datas effects tlcs watches hashTerms' - where - hashTerms' = Map.restrictKeys hashTerms terms - datas = Map.restrictKeys dataDeclarations' types - effects = Map.restrictKeys effectDeclarations' types - tlcs = filter (not . null) $ fmap (List.filter filterTLC) topLevelComponents' - watches = filter (not . null . snd) $ fmap (second (List.filter filterTLC)) watchComponents - filterTLC (v, _, _) = Set.member v terms - toSlurpResult :: forall v. (Ord v, Show v) => @@ -397,7 +380,7 @@ toSlurpResult uf op requestedVars varsByStatus = let allVars = fold varsByStatus desired = requestedVars - & Set.flatMap (\v -> Set.fromList [Typed v, Termed v]) + & Set.flatMap (\v -> Set.fromList [TypeVar v, TermVar v]) in sortVars $ Set.difference allVars desired, OldSlurp.adds = adds, OldSlurp.duplicates = duplicates, @@ -439,8 +422,8 @@ toSlurpResult uf op requestedVars varsByStatus = SelfErr Conflict -> (mempty, mempty, mempty, mempty, (mempty, mempty, sc)) ) singletonSC = \case - Typed v -> SlurpComponent {terms = mempty, types = Set.singleton v} - Termed v -> SlurpComponent {terms = Set.singleton v, types = mempty} + TypeVar v -> SlurpComponent {terms = mempty, types = Set.singleton v} + TermVar v -> SlurpComponent {terms = Set.singleton v, types = mempty} anyErrors :: VarsByStatus v -> Bool anyErrors r = @@ -456,18 +439,18 @@ anyErrors r = ErrFrom {} -> True SelfErr {} -> True -sortVars :: (Foldable f, Ord v) => f (TermedOrTyped v) -> SlurpComponent v +sortVars :: (Foldable f, Ord v) => f (TermOrTypeVar v) -> SlurpComponent v sortVars = foldMap ( \case - Typed v -> SC.fromTypes (Set.singleton v) - Termed v -> SC.fromTerms (Set.singleton v) + TypeVar v -> SC.fromTypes (Set.singleton v) + TermVar v -> SC.fromTerms (Set.singleton v) ) -mingleVars :: Ord v => SlurpComponent v -> Set (TermedOrTyped v) +mingleVars :: Ord v => SlurpComponent v -> Set (TermOrTypeVar v) mingleVars SlurpComponent {terms, types} = - Set.map Typed types - <> Set.map Termed terms + Set.map TypeVar types + <> Set.map TermVar terms addAliases :: forall v. (Ord v, Var v) => Names -> Path.Absolute -> SR.SlurpResult v -> SR.SlurpResult v addAliases existingNames curPath sr = sr {SR.termAlias = termAliases, SR.typeAlias = typeAliases} diff --git a/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs b/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs deleted file mode 100644 index 1c1dd1c6b5..0000000000 --- a/unison-cli/src/Unison/Codebase/Editor/TermsAndTypes.hs +++ /dev/null @@ -1,65 +0,0 @@ -{-# LANGUAGE TypeFamilies #-} - -module Unison.Codebase.Editor.TermsAndTypes where - -import Data.Distributive -import Data.Functor.Adjunction -import Data.Functor.Rep - -data TermOrType = TypeTag | TermTag - deriving stock (Show, Eq, Ord) - -data TermedOrTyped a = Typed a | Termed a - deriving stock (Functor, Foldable, Traversable) - deriving stock (Show, Eq, Ord) - -unTermedOrTyped :: TermedOrTyped a -> a -unTermedOrTyped = \case - Typed a -> a - Termed a -> a - -data TermsAndTypes a = TermsAndTypes {terms :: a, types :: a} - deriving stock (Functor, Foldable, Traversable) - -fromTypes :: Monoid a => a -> TermsAndTypes a -fromTypes a = TermsAndTypes {terms = mempty, types = a} - -fromTerms :: Monoid a => a -> TermsAndTypes a -fromTerms a = TermsAndTypes {terms = a, types = mempty} - -instance Semigroup a => Semigroup (TermsAndTypes a) where - TermsAndTypes terms1 types1 <> TermsAndTypes terms2 types2 = - TermsAndTypes (terms1 <> terms2) (types1 <> types2) - -instance Monoid a => Monoid (TermsAndTypes a) where - mempty = TermsAndTypes mempty mempty - -instance Applicative TermsAndTypes where - pure a = TermsAndTypes a a - TermsAndTypes f g <*> TermsAndTypes a b = TermsAndTypes (f a) (g b) - -instance Distributive TermsAndTypes where - distribute = distributeRep - -instance Representable TermsAndTypes where - type Rep TermsAndTypes = TermOrType - index tt = \case - TermTag -> terms tt - TypeTag -> types tt - tabulate f = TermsAndTypes {terms = f TermTag, types = f TypeTag} - -instance Adjunction TermedOrTyped TermsAndTypes where - unit a = TermsAndTypes {terms = Termed a, types = Typed a} - counit (Termed (TermsAndTypes {terms})) = terms - counit (Typed (TermsAndTypes {types})) = types - -labeled :: TermsAndTypes a -> TermsAndTypes (TermedOrTyped a) -labeled (TermsAndTypes {terms, types}) = - TermsAndTypes {terms = Termed terms, types = Typed types} - -labeledF :: Functor f => TermsAndTypes (f a) -> TermsAndTypes (f (TermedOrTyped a)) -labeledF (TermsAndTypes {terms, types}) = - TermsAndTypes {terms = fmap Termed terms, types = fmap Typed types} - -mapWithTag :: (TermOrType -> a -> b) -> TermsAndTypes a -> TermsAndTypes b -mapWithTag f (TermsAndTypes {terms, types}) = TermsAndTypes (f TermTag terms) (f TypeTag types) diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index 2eaba1efe0..01738d7f77 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -38,7 +38,6 @@ library Unison.Codebase.Editor.Slurp Unison.Codebase.Editor.SlurpComponent Unison.Codebase.Editor.SlurpResult - Unison.Codebase.Editor.TermsAndTypes Unison.Codebase.Editor.TodoOutput Unison.Codebase.Editor.UriParser Unison.Codebase.Editor.VersionParser @@ -82,7 +81,6 @@ library ghc-options: -Wall build-depends: ListLike - , adjunctions , async , base , bytestring @@ -90,7 +88,6 @@ library , containers >=0.6.3 , cryptonite , directory - , distributive , errors , extra , filepath @@ -154,7 +151,6 @@ executable integration-tests ghc-options: -Wall -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0 build-depends: ListLike - , adjunctions , async , base , bytestring @@ -162,7 +158,6 @@ executable integration-tests , containers >=0.6.3 , cryptonite , directory - , distributive , easytest , errors , extra @@ -228,7 +223,6 @@ executable transcripts unison build-depends: ListLike - , adjunctions , async , base , bytestring @@ -236,7 +230,6 @@ executable transcripts , containers >=0.6.3 , cryptonite , directory - , distributive , easytest , errors , extra @@ -302,7 +295,6 @@ executable unison ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-I0 -optP-Wno-nonportable-include-path build-depends: ListLike - , adjunctions , async , base , bytestring @@ -310,7 +302,6 @@ executable unison , containers >=0.6.3 , cryptonite , directory - , distributive , errors , extra , filepath @@ -382,7 +373,6 @@ test-suite tests ghc-options: -Wall build-depends: ListLike - , adjunctions , async , base , bytestring @@ -390,7 +380,6 @@ test-suite tests , containers >=0.6.3 , cryptonite , directory - , distributive , easytest , errors , extra From e5c314bbb9bb984cfdc0d8e4836766d3c74bbaad Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 11:36:05 -0600 Subject: [PATCH 187/297] Comments --- .../src/Unison/Codebase/Editor/Slurp.hs | 87 ++++++++----------- .../src/Unison/CommandLine/InputPatterns.hs | 8 -- 2 files changed, 38 insertions(+), 57 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 317c57d40c..e06c613479 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -1,12 +1,10 @@ module Unison.Codebase.Editor.Slurp ( SlurpOp (..), VarsByStatus, - BlockStatus (..), + DefStatus (..), anyErrors, - results, analyzeTypecheckedUnisonFile, toSlurpResult, - sortVars, ) where @@ -53,36 +51,33 @@ unlabeled (TypeVar v) = v data SlurpStatus = New | Updated - | Duplicate + | Duplicated deriving (Eq, Ord, Show) -data BlockStatus v - = Add - | Duplicated +data DefStatus v + = Ok SlurpStatus | NeedsUpdate (TermOrTypeVar v) - | Update | ErrFrom (TermOrTypeVar v) SlurpErr | SelfErr SlurpErr deriving (Eq, Ord, Show) -instance Semigroup (BlockStatus v) where +-- | This semigroup is how a definition's status is determined. +instance Semigroup (DefStatus v) where + -- If the definition has its own error, that takes highest priority. SelfErr err <> _ = SelfErr err _ <> SelfErr err = SelfErr err + -- Next we care if a dependency has an error ErrFrom v err <> _ = ErrFrom v err _ <> ErrFrom v err = ErrFrom v err - Update <> _ = Update - _ <> Update = Update + -- If our definition needs its own update then we don't care if dependencies need updates. + Ok Updated <> _ = Ok Updated + _ <> Ok Updated = Ok Updated NeedsUpdate v <> _ = NeedsUpdate v _ <> NeedsUpdate v = NeedsUpdate v - Add <> _ = Add - _ <> Add = Add - Duplicated <> _ = Duplicated - -data SlurpPrintout v = SlurpPrintout - { notOk :: Map v SlurpErr, - ok :: Map v SlurpStatus - } - deriving (Eq, Ord, Show) + -- 'New' definitions take precedence over duplicated dependencies + Ok New <> _ = Ok New + _ <> Ok New = Ok New + Ok Duplicated <> _ = Ok Duplicated data SlurpErr = TermCtorCollision @@ -97,38 +92,32 @@ data DefinitionNotes type SlurpAnalysis v = Map (TermOrTypeVar v) (DefinitionNotes, Map (TermOrTypeVar v) DefinitionNotes) -type VarsByStatus v = Map (BlockStatus v) (Set (TermOrTypeVar v)) +type VarsByStatus v = Map (DefStatus v) (Set (TermOrTypeVar v)) -- Compute all definitions which can be added, or the reasons why a def can't be added. -results :: forall v. (Ord v, Show v) => SlurpAnalysis v -> VarsByStatus v -results sr = +groupByStatus :: forall v. (Ord v, Show v) => SlurpAnalysis v -> VarsByStatus v +groupByStatus sr = pTraceShowId $ analyzed where - analyzed :: Map (BlockStatus v) (Set (TermOrTypeVar v)) + analyzed :: Map (DefStatus v) (Set (TermOrTypeVar v)) analyzed = sr & Map.toList & fmap ( \(tv, (defNotes, deps)) -> - ( Semigroup.fold1 (getBlockStatus False defNotes tv NEList.:| (Map.toList deps <&> \(depTV, depDefNotes) -> getBlockStatus True depDefNotes depTV)), + ( Semigroup.fold1 (computeDefStatus False defNotes tv NEList.:| (Map.toList deps <&> \(depTV, depDefNotes) -> computeDefStatus True depDefNotes depTV)), Set.singleton tv ) ) & Map.fromListWith (<>) -getBlockStatus :: (Ord v, Show v) => Bool -> DefinitionNotes -> TermOrTypeVar v -> BlockStatus v -getBlockStatus isDep defNotes tv = - case defNotes of - DefOk Updated -> if isDep then NeedsUpdate tv else Update - DefErr err -> ErrFrom tv err - DefOk New -> Add - DefOk Duplicate -> Duplicated - --- Need to know: --- What can be added without errors? --- What can be updated without errors? --- What has errors? --- What is blocked? + computeDefStatus :: (Ord v, Show v) => Bool -> DefinitionNotes -> TermOrTypeVar v -> DefStatus v + computeDefStatus isDep defNotes tv = + case defNotes of + DefOk Updated -> if isDep then NeedsUpdate tv else Ok Updated + DefErr err -> ErrFrom tv err + DefOk New -> Ok New + DefOk Duplicated -> Ok Duplicated analyzeTypecheckedUnisonFile :: forall v. @@ -151,7 +140,7 @@ analyzeTypecheckedUnisonFile uf defsToConsider slurpOp unalteredCodebaseNames cu analysis :: SlurpAnalysis v analysis = computeVarStatuses varDeps varRelation codebaseNames varsByStatus :: VarsByStatus v - varsByStatus = results analysis + varsByStatus = groupByStatus analysis slurpResult :: SR.SlurpResult v slurpResult = toSlurpResult uf (fromMaybe UpdateOp slurpOp) defsToConsider varsByStatus @@ -255,7 +244,7 @@ computeVarStatuses depMap varRelation codebaseNames = case Set.toList existingTypesAtName of [] -> DefOk New [r] - | LD.typeRef r == ld -> DefOk Duplicate + | LD.typeRef r == ld -> DefOk Duplicated | otherwise -> DefOk Updated -- If there are many existing terms, they must be in conflict. -- Currently we treat conflicts as errors rather than resolving them. @@ -265,7 +254,7 @@ computeVarStatuses depMap varRelation codebaseNames = [] -> DefOk New rs | any Referent.isConstructor rs -> DefErr TermCtorCollision [r] - | LD.referent r == ld -> DefOk Duplicate + | LD.referent r == ld -> DefOk Duplicated | otherwise -> DefOk Updated -- If there are many existing terms, they must be in conflict. -- Currently we treat conflicts as errors rather than resolving them. @@ -275,7 +264,7 @@ computeVarStatuses depMap varRelation codebaseNames = [] -> DefOk New rs | any (not . Referent.isConstructor) rs -> DefErr CtorTermCollision [r] - | LD.referent r == ld -> DefOk Duplicate + | LD.referent r == ld -> DefOk Duplicated | otherwise -> DefOk Updated -- If there are many existing terms, they must be in conflict. -- Currently we treat conflicts as errors rather than resolving them. @@ -405,9 +394,9 @@ toSlurpResult uf op requestedVars varsByStatus = ( \k tvs -> let sc = sortVars $ tvs in case k of - Add -> (sc, mempty, mempty, mempty, (mempty, mempty, mempty)) - Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty, mempty)) - Update -> (mempty, mempty, sc, mempty, (mempty, mempty, mempty)) + Ok New -> (sc, mempty, mempty, mempty, (mempty, mempty, mempty)) + Ok Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty, mempty)) + Ok Updated -> (mempty, mempty, sc, mempty, (mempty, mempty, mempty)) NeedsUpdate v -> case op of AddOp -> @@ -429,11 +418,11 @@ anyErrors :: VarsByStatus v -> Bool anyErrors r = any isError . Map.keys $ Map.filter (not . null) r where - isError :: BlockStatus v -> Bool + isError :: DefStatus v -> Bool isError = \case - Add -> False - Duplicated -> False - Update {} -> False + Ok New -> False + Ok Duplicated -> False + Ok Updated -> False -- NeedsUpdate is an error only if we're trying to Add NeedsUpdate {} -> True ErrFrom {} -> True diff --git a/unison-cli/src/Unison/CommandLine/InputPatterns.hs b/unison-cli/src/Unison/CommandLine/InputPatterns.hs index 24c7198ce5..f9ac72b400 100644 --- a/unison-cli/src/Unison/CommandLine/InputPatterns.hs +++ b/unison-cli/src/Unison/CommandLine/InputPatterns.hs @@ -174,14 +174,6 @@ add = <> "typechecked file." ) $ \ws -> pure $ Input.AddI (Set.fromList $ map Name.unsafeFromString ws) - -- Just ws -> pure $ Input.AddI ws - -- Nothing -> - -- Left - -- . warn - -- . P.lines - -- . fmap fromString - -- . ("I don't know what these refer to:\n" :) - -- $ collectNothings HQ'.fromString ws previewAdd :: InputPattern previewAdd = From 10e4389c5d475f8d41667ff57fcbff4182016179 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 11:54:10 -0600 Subject: [PATCH 188/297] Constructor deprecations optimization --- .../src/Unison/Codebase/Editor/HandleInput.hs | 12 +- .../src/Unison/Codebase/Editor/Slurp.hs | 109 +++++++++--------- 2 files changed, 62 insertions(+), 59 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index a2baebce01..43faa799ee 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -151,7 +151,7 @@ import qualified Data.Set.NonEmpty as NESet import Data.Set.NonEmpty (NESet) import Unison.Symbol (Symbol) import qualified Unison.Codebase.Editor.Input as Input -import qualified Unison.Codebase.Editor.Slurp as NewSlurp +import qualified Unison.Codebase.Editor.Slurp as Slurp import qualified Unison.Codebase.Editor.SlurpResult as SlurpResult defaultPatchNameSegment :: NameSegment @@ -262,7 +262,7 @@ loop = do let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) withFile [] sourceName (text, lexed) $ \unisonFile -> do currentNames <- currentPathNames - let sr = NewSlurp.analyzeTypecheckedUnisonFile unisonFile mempty Nothing currentNames currentPath' + let sr = Slurp.slurpFile unisonFile mempty Nothing currentNames currentPath' names <- displayNames unisonFile pped <- prettyPrintEnvDecl names let ppe = PPE.suffixifiedPPE pped @@ -1259,7 +1259,7 @@ loop = do Nothing -> respond NoUnisonFile Just uf -> do currentNames <- currentPathNames - let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.AddOp) currentNames currentPath' + let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames currentPath' let adds = SlurpResult.adds sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . filterBySlurpResult sr $ uf @@ -1271,7 +1271,7 @@ loop = do (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names currentNames <- currentPathNames - let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.AddOp) currentNames currentPath' + let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames currentPath' previewResponse sourceName sr uf _ -> respond NoUnisonFile UpdateI maybePatchPath names -> handleUpdate input maybePatchPath names @@ -1279,7 +1279,7 @@ loop = do (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names currentNames <- currentPathNames - let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.UpdateOp) currentNames currentPath' + let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) currentNames currentPath' previewResponse sourceName sr uf _ -> respond NoUnisonFile TodoI patchPath branchPath' -> do @@ -1818,7 +1818,7 @@ handleUpdate input maybePatchPath names = do let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames - let sr = NewSlurp.analyzeTypecheckedUnisonFile uf vars (Just NewSlurp.UpdateOp) slurpCheckNames currentPath' + let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) slurpCheckNames currentPath' addsAndUpdates :: SlurpComponent v addsAndUpdates = Slurp.updates sr <> Slurp.adds sr fileNames :: Names diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index e06c613479..b542a7e74b 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -1,10 +1,6 @@ module Unison.Codebase.Editor.Slurp ( SlurpOp (..), - VarsByStatus, - DefStatus (..), - anyErrors, - analyzeTypecheckedUnisonFile, - toSlurpResult, + slurpFile, ) where @@ -13,10 +9,11 @@ import qualified Data.List.NonEmpty as NEList import qualified Data.Map as Map import qualified Data.Semigroup.Foldable as Semigroup import qualified Data.Set as Set +import Data.Set.NonEmpty (NESet) +import qualified Data.Set.NonEmpty as NESet import Debug.Pretty.Simple (pTraceShow, pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC -import qualified Unison.Codebase.Editor.SlurpResult as OldSlurp import qualified Unison.Codebase.Editor.SlurpResult as SR import qualified Unison.Codebase.Path as Path import qualified Unison.DataDeclaration as DD @@ -38,22 +35,27 @@ import Unison.Var (Var) import qualified Unison.Var as Var import Unison.WatchKind (pattern TestWatch) +-- | The operation which is being performed or checked. data SlurpOp = AddOp | UpdateOp deriving (Eq, Show) +-- | Tag a variable as either a term/constructor or type data TermOrTypeVar v = TermVar v | TypeVar v deriving (Eq, Ord, Show) +-- | Extract the var from a TermOrTypeVar unlabeled :: TermOrTypeVar v -> v unlabeled (TermVar v) = v unlabeled (TypeVar v) = v +-- | A definition's status with relation to the codebase. data SlurpStatus = New | Updated | Duplicated deriving (Eq, Ord, Show) +-- | A definition's final status, incorporating the statuses of all of its dependencies. data DefStatus v = Ok SlurpStatus | NeedsUpdate (TermOrTypeVar v) @@ -79,34 +81,38 @@ instance Semigroup (DefStatus v) where _ <> Ok New = Ok New Ok Duplicated <> _ = Ok Duplicated +-- | Possible error conditions for a definition. data SlurpErr = TermCtorCollision | CtorTermCollision | Conflict deriving (Eq, Ord, Show) +-- | Possible error conditions for a definition. data DefinitionNotes = DefOk SlurpStatus | DefErr SlurpErr deriving (Show) +-- | A map of variables to their status, and all of their dependencies' statuses. type SlurpAnalysis v = Map (TermOrTypeVar v) (DefinitionNotes, Map (TermOrTypeVar v) DefinitionNotes) -type VarsByStatus v = Map (DefStatus v) (Set (TermOrTypeVar v)) +-- | A mapping of each status to the vars which have that status. +type VarsByStatus v = Map (DefStatus v) (NESet (TermOrTypeVar v)) -- Compute all definitions which can be added, or the reasons why a def can't be added. groupByStatus :: forall v. (Ord v, Show v) => SlurpAnalysis v -> VarsByStatus v groupByStatus sr = pTraceShowId $ analyzed where - analyzed :: Map (DefStatus v) (Set (TermOrTypeVar v)) + analyzed :: Map (DefStatus v) (NESet (TermOrTypeVar v)) analyzed = sr & Map.toList & fmap ( \(tv, (defNotes, deps)) -> ( Semigroup.fold1 (computeDefStatus False defNotes tv NEList.:| (Map.toList deps <&> \(depTV, depDefNotes) -> computeDefStatus True depDefNotes depTV)), - Set.singleton tv + NESet.singleton tv ) ) & Map.fromListWith (<>) @@ -119,7 +125,9 @@ groupByStatus sr = DefOk New -> Ok New DefOk Duplicated -> Ok Duplicated -analyzeTypecheckedUnisonFile :: +-- | Analyze a file and determine the status of all of its definitions with respect to a set +-- of vars to analyze and an operation you wish to perform. +slurpFile :: forall v. Var v => UF.TypecheckedUnisonFile v Ann -> @@ -128,13 +136,13 @@ analyzeTypecheckedUnisonFile :: Names -> Path.Absolute -> SR.SlurpResult v -analyzeTypecheckedUnisonFile uf defsToConsider slurpOp unalteredCodebaseNames currentPath = +slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames currentPath = let varRelation :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency - varRelation = labelling uf + varRelation = fileDefinitions uf involvedVars :: Set (TermOrTypeVar v) involvedVars = computeInvolvedVars uf defsToConsider varRelation codebaseNames :: Names - codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars + codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars slurpOp varDeps :: Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) varDeps = computeVarDeps uf involvedVars analysis :: SlurpAnalysis v @@ -143,17 +151,25 @@ analyzeTypecheckedUnisonFile uf defsToConsider slurpOp unalteredCodebaseNames cu varsByStatus = groupByStatus analysis slurpResult :: SR.SlurpResult v slurpResult = - toSlurpResult uf (fromMaybe UpdateOp slurpOp) defsToConsider varsByStatus + toSlurpResult uf slurpOp defsToConsider varsByStatus & addAliases codebaseNames currentPath in pTraceShowId slurpResult + where + slurpOp = fromMaybe UpdateOp maybeSlurpOp +-- | Return a modified set of names with constructors which would be deprecated by possible +-- updates are removed. computeNamesWithDeprecations :: Var v => UF.TypecheckedUnisonFile v Ann -> Names -> Set (TermOrTypeVar v) -> + SlurpOp -> Names -computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = +computeNamesWithDeprecations _uf unalteredCodebaseNames _involvedVars AddOp = + -- If we're 'adding', there won't be any deprecations. + unalteredCodebaseNames +computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars UpdateOp = pTraceShow ("Deprecated constructors", deprecatedConstructors) . pTraceShow ("constructorNamesInFile", constructorsUnderConsideration) $ codebaseNames @@ -312,13 +328,13 @@ computeVarDeps uf allInvolvedVars = -- Compute the closure of all vars which the provided vars depend on. varClosure :: Ord v => UF.TypecheckedUnisonFile v a -> Set (TermOrTypeVar v) -> Set (TermOrTypeVar v) -varClosure uf (sortVars -> sc) = +varClosure uf (partitionVars -> sc) = mingleVars $ SC.closeWithDependencies uf sc --- TODO: Does this need to contain constructors? Maybe. --- Does not include constructors -labelling :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermOrTypeVar v) LD.LabeledDependency -labelling uf = +-- | Collect a relation of term or type var to labelled dependency for all definitions mentioned in a file. +-- Contains types but not their constructors. +fileDefinitions :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermOrTypeVar v) LD.LabeledDependency +fileDefinitions uf = let result = decls <> effects <> terms in pTraceShow ("varRelation", result) $ result where @@ -350,6 +366,7 @@ labelling uf = & fmap (\(v, (refId, _)) -> (TypeVar v, (LD.derivedType refId))) & Rel.fromList +-- | Convert a 'VarsByStatus' mapping into a 'SR.SlurpResult' toSlurpResult :: forall v. (Ord v, Show v) => @@ -357,34 +374,34 @@ toSlurpResult :: SlurpOp -> Set v -> VarsByStatus v -> - OldSlurp.SlurpResult v + SR.SlurpResult v toSlurpResult uf op requestedVars varsByStatus = pTraceShowId $ - OldSlurp.SlurpResult - { OldSlurp.originalFile = uf, - OldSlurp.extraDefinitions = + SR.SlurpResult + { SR.originalFile = uf, + SR.extraDefinitions = if Set.null requestedVars then mempty else - let allVars = fold varsByStatus + let allVars = foldMap NESet.toSet varsByStatus desired = requestedVars & Set.flatMap (\v -> Set.fromList [TypeVar v, TermVar v]) - in sortVars $ Set.difference allVars desired, - OldSlurp.adds = adds, - OldSlurp.duplicates = duplicates, - OldSlurp.collisions = if op == AddOp then updates else mempty, - OldSlurp.conflicts = conflicts, - OldSlurp.updates = if op == UpdateOp then updates else mempty, - OldSlurp.termExistingConstructorCollisions = + in partitionVars $ Set.difference allVars desired, + SR.adds = adds, + SR.duplicates = duplicates, + SR.collisions = if op == AddOp then updates else mempty, + SR.conflicts = conflicts, + SR.updates = if op == UpdateOp then updates else mempty, + SR.termExistingConstructorCollisions = let SlurpComponent types terms = termCtorColl in types <> terms, - OldSlurp.constructorExistingTermCollisions = + SR.constructorExistingTermCollisions = let SlurpComponent types terms = ctorTermColl in types <> terms, - OldSlurp.termAlias = mempty, - OldSlurp.typeAlias = mempty, - OldSlurp.defsWithBlockedDependencies = blocked + SR.termAlias = mempty, + SR.typeAlias = mempty, + SR.defsWithBlockedDependencies = blocked } where adds, duplicates, updates, termCtorColl, ctorTermColl, blocked, conflicts :: SlurpComponent v @@ -392,7 +409,7 @@ toSlurpResult uf op requestedVars varsByStatus = varsByStatus & ifoldMap ( \k tvs -> - let sc = sortVars $ tvs + let sc = partitionVars $ tvs in case k of Ok New -> (sc, mempty, mempty, mempty, (mempty, mempty, mempty)) Ok Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty, mempty)) @@ -414,22 +431,8 @@ toSlurpResult uf op requestedVars varsByStatus = TypeVar v -> SlurpComponent {terms = mempty, types = Set.singleton v} TermVar v -> SlurpComponent {terms = Set.singleton v, types = mempty} -anyErrors :: VarsByStatus v -> Bool -anyErrors r = - any isError . Map.keys $ Map.filter (not . null) r - where - isError :: DefStatus v -> Bool - isError = \case - Ok New -> False - Ok Duplicated -> False - Ok Updated -> False - -- NeedsUpdate is an error only if we're trying to Add - NeedsUpdate {} -> True - ErrFrom {} -> True - SelfErr {} -> True - -sortVars :: (Foldable f, Ord v) => f (TermOrTypeVar v) -> SlurpComponent v -sortVars = +partitionVars :: (Foldable f, Ord v) => f (TermOrTypeVar v) -> SlurpComponent v +partitionVars = foldMap ( \case TypeVar v -> SC.fromTypes (Set.singleton v) From 09286a398eac1063e6faad87f9a2342d1aef5183 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 12:07:15 -0600 Subject: [PATCH 189/297] Docs --- .../src/Unison/Codebase/Editor/HandleInput.hs | 10 +- .../src/Unison/Codebase/Editor/Slurp.hs | 36 +++--- .../ability-term-conflicts-on-update.md | 41 +++++++ ...ability-term-conflicts-on-update.output.md | 81 +++++++++++++ .../transcripts/slurping/alias-clobbering.md | 34 ++++++ .../slurping/alias-clobbering.output.md | 107 ++++++++++++++++++ .../slurping/block-on-required-update.md | 28 +++++ .../block-on-required-update.output.md | 64 +++++++++++ .../slurping/sum-type-update-conflicts.md | 36 ++++++ .../slurping/update-on-conflict.md | 30 +++++ .../slurping/update-on-conflict.output.md | 96 ++++++++++++++++ ...e-with-conflicting-constructor-and-term.md | 17 +++ 12 files changed, 559 insertions(+), 21 deletions(-) create mode 100644 unison-src/transcripts/slurping/ability-term-conflicts-on-update.md create mode 100644 unison-src/transcripts/slurping/ability-term-conflicts-on-update.output.md create mode 100644 unison-src/transcripts/slurping/alias-clobbering.md create mode 100644 unison-src/transcripts/slurping/alias-clobbering.output.md create mode 100644 unison-src/transcripts/slurping/block-on-required-update.md create mode 100644 unison-src/transcripts/slurping/block-on-required-update.output.md create mode 100644 unison-src/transcripts/slurping/sum-type-update-conflicts.md create mode 100644 unison-src/transcripts/slurping/update-on-conflict.md create mode 100644 unison-src/transcripts/slurping/update-on-conflict.output.md create mode 100644 unison-src/transcripts/slurping/update-with-conflicting-constructor-and-term.md diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 43faa799ee..9660c3f4f4 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -262,7 +262,7 @@ loop = do let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) withFile [] sourceName (text, lexed) $ \unisonFile -> do currentNames <- currentPathNames - let sr = Slurp.slurpFile unisonFile mempty Nothing currentNames currentPath' + let sr = Slurp.slurpFile unisonFile mempty Nothing currentNames names <- displayNames unisonFile pped <- prettyPrintEnvDecl names let ppe = PPE.suffixifiedPPE pped @@ -1259,7 +1259,7 @@ loop = do Nothing -> respond NoUnisonFile Just uf -> do currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames currentPath' + let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames let adds = SlurpResult.adds sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . filterBySlurpResult sr $ uf @@ -1271,7 +1271,7 @@ loop = do (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames currentPath' + let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames previewResponse sourceName sr uf _ -> respond NoUnisonFile UpdateI maybePatchPath names -> handleUpdate input maybePatchPath names @@ -1279,7 +1279,7 @@ loop = do (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) currentNames currentPath' + let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) currentNames previewResponse sourceName sr uf _ -> respond NoUnisonFile TodoI patchPath branchPath' -> do @@ -1818,7 +1818,7 @@ handleUpdate input maybePatchPath names = do let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames - let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) slurpCheckNames currentPath' + let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) slurpCheckNames addsAndUpdates :: SlurpComponent v addsAndUpdates = Slurp.updates sr <> Slurp.adds sr fileNames :: Names diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index b542a7e74b..ed7b8348c7 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -15,7 +15,6 @@ import Debug.Pretty.Simple (pTraceShow, pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as SR -import qualified Unison.Codebase.Path as Path import qualified Unison.DataDeclaration as DD import qualified Unison.LabeledDependency as LD import Unison.Name (Name) @@ -134,9 +133,8 @@ slurpFile :: Set v -> Maybe SlurpOp -> Names -> - Path.Absolute -> SR.SlurpResult v -slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames currentPath = +slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames = let varRelation :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency varRelation = fileDefinitions uf involvedVars :: Set (TermOrTypeVar v) @@ -146,13 +144,13 @@ slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames currentPath = varDeps :: Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) varDeps = computeVarDeps uf involvedVars analysis :: SlurpAnalysis v - analysis = computeVarStatuses varDeps varRelation codebaseNames + analysis = computeSlurpAnalysis varDeps varRelation codebaseNames varsByStatus :: VarsByStatus v varsByStatus = groupByStatus analysis slurpResult :: SR.SlurpResult v slurpResult = toSlurpResult uf slurpOp defsToConsider varsByStatus - & addAliases codebaseNames currentPath + & addAliases codebaseNames in pTraceShowId slurpResult where slurpOp = fromMaybe UpdateOp maybeSlurpOp @@ -209,17 +207,15 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars UpdateOp = . pTraceShow ("existingConstructorsFromEditedTypes", existingConstructorsFromEditedTypes) $ existingConstructorsFromEditedTypes `Set.difference` constructorsUnderConsideration -computeVarStatuses :: +-- | Compute a mapping of each definition to its status, and its dependencies' statuses. +computeSlurpAnalysis :: forall v. (Ord v, Var v) => Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) -> Rel.Relation (TermOrTypeVar v) LD.LabeledDependency -> Names -> - ( Map - (TermOrTypeVar v) - (DefinitionNotes, Map (TermOrTypeVar v) DefinitionNotes) - ) -computeVarStatuses depMap varRelation codebaseNames = + SlurpAnalysis v +computeSlurpAnalysis depMap varRelation codebaseNames = pTraceShow ("Statuses", statuses) $ statuses where @@ -286,6 +282,9 @@ computeVarStatuses depMap varRelation codebaseNames = -- Currently we treat conflicts as errors rather than resolving them. _ -> DefErr Conflict +-- | Determine all variables which should be considered in analysis. +-- I.e. any variable requested by the user and all of their dependencies, +-- component peers, and component peers of dependencies. computeInvolvedVars :: forall v. Var v => @@ -307,6 +306,7 @@ computeInvolvedVars uf defsToConsider varRelation pure tv in varClosure uf existingVars +-- | Compute transitive dependencies for all relevant variables. computeVarDeps :: forall v. Var v => @@ -326,7 +326,7 @@ computeVarDeps uf allInvolvedVars = . pTraceShow ("depmap", depMap) $ depMap --- Compute the closure of all vars which the provided vars depend on. +-- | Compute the closure of all vars which the provided vars depend on. varClosure :: Ord v => UF.TypecheckedUnisonFile v a -> Set (TermOrTypeVar v) -> Set (TermOrTypeVar v) varClosure uf (partitionVars -> sc) = mingleVars $ SC.closeWithDependencies uf sc @@ -431,6 +431,7 @@ toSlurpResult uf op requestedVars varsByStatus = TypeVar v -> SlurpComponent {terms = mempty, types = Set.singleton v} TermVar v -> SlurpComponent {terms = Set.singleton v, types = mempty} +-- | Sort out a set of variables by whether it is a term or type. partitionVars :: (Foldable f, Ord v) => f (TermOrTypeVar v) -> SlurpComponent v partitionVars = foldMap @@ -439,13 +440,16 @@ partitionVars = TermVar v -> SC.fromTerms (Set.singleton v) ) +-- | Collapse a SlurpComponent into a tagged set. mingleVars :: Ord v => SlurpComponent v -> Set (TermOrTypeVar v) mingleVars SlurpComponent {terms, types} = Set.map TypeVar types <> Set.map TermVar terms -addAliases :: forall v. (Ord v, Var v) => Names -> Path.Absolute -> SR.SlurpResult v -> SR.SlurpResult v -addAliases existingNames curPath sr = sr {SR.termAlias = termAliases, SR.typeAlias = typeAliases} +-- | Compute which definitions in a slurp result are aliases of definitions in the current +-- tree. +addAliases :: forall v. (Ord v, Var v) => Names -> SR.SlurpResult v -> SR.SlurpResult v +addAliases existingNames sr = sr {SR.termAlias = termAliases, SR.typeAlias = typeAliases} where fileNames = UF.typecheckedToNames $ SR.originalFile sr buildAliases :: @@ -464,10 +468,10 @@ addAliases existingNames curPath sr = sr {SR.termAlias = termAliases, SR.typeAli -- All the refs whose names include `n`, and are not `r` let refs = Set.delete r $ Rel.lookupDom n existingNames aliasesOfNew = - Set.map (Path.unprefixName curPath) . Set.delete n $ + Set.delete n $ Rel.lookupRan r existingNames aliasesOfOld = - Set.map (Path.unprefixName curPath) . Set.delete n . Rel.dom $ + Set.delete n . Rel.dom $ Rel.restrictRan existingNames refs, not (null aliasesOfNew && null aliasesOfOld), Set.notMember (var n) dups diff --git a/unison-src/transcripts/slurping/ability-term-conflicts-on-update.md b/unison-src/transcripts/slurping/ability-term-conflicts-on-update.md new file mode 100644 index 0000000000..8b7eed2923 --- /dev/null +++ b/unison-src/transcripts/slurping/ability-term-conflicts-on-update.md @@ -0,0 +1,41 @@ +# Regression test for updates which conflict with an existing ability constructor + +https://github.com/unisonweb/unison/issues/2786 + +```ucm:hide +.builtins> builtins.mergeio +``` + +First we add an ability to the codebase. +Note that this will create the name `Channels.send` as an ability constructor. + +```unison +unique ability Channels where + send : a -> {Channels} () +``` + +```ucm +.ns> add +``` + +Now we update the ability, changing the name of the constructor, _but_, we simultaneously +add a new top-level term with the same name as the constructor which is being +removed from Channels. + +```unison +unique ability Channels where + sends : [a] -> {Channels} () + +Channels.send : a -> {Channels} () +Channels.send a = sends [a] + +thing : '{Channels} () +thing _ = send 1 +``` + +The 'update' will succeed up until it tries to resolve the reference to `send` +within `thing`; because the old send is deleted and the new `send` isn't ever added. + +```ucm +.ns> update +``` diff --git a/unison-src/transcripts/slurping/ability-term-conflicts-on-update.output.md b/unison-src/transcripts/slurping/ability-term-conflicts-on-update.output.md new file mode 100644 index 0000000000..a9f0c1fcaf --- /dev/null +++ b/unison-src/transcripts/slurping/ability-term-conflicts-on-update.output.md @@ -0,0 +1,81 @@ +# Regression test for updates which conflict with an existing ability constructor + +https://github.com/unisonweb/unison/issues/2786 + +First we add an ability to the codebase. +Note that this will create the name `Channels.send` as an ability constructor. + +```unison +unique ability Channels where + send : a -> {Channels} () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique ability Channels + +``` +```ucm + ☝️ The namespace .ns is empty. + +.ns> add + + ⍟ I've added these definitions: + + unique ability Channels + +``` +Now we update the ability, changing the name of the constructor, _but_, we simultaneously +add a new top-level term with the same name as the constructor which is being +removed from Channels. + +```unison +unique ability Channels where + sends : [a] -> {Channels} () + +Channels.send : a -> {Channels} () +Channels.send a = sends [a] + +thing : '{Channels} () +thing _ = send 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + Channels.send : a ->{Channels} () + thing : '{Channels} () + + ⍟ These names already exist. You can `update` them to your + new definition: + + unique ability Channels + +``` +The 'update' will succeed up until it tries to resolve the reference to `send` +within `thing`; because the old send is deleted and the new `send` isn't ever added. + +```ucm +.ns> update + + ⍟ I've added these definitions: + + Channels.send : a ->{Channels} () + thing : '{Channels} () + + ⍟ I've updated these names to your new definition: + + unique ability Channels + +``` diff --git a/unison-src/transcripts/slurping/alias-clobbering.md b/unison-src/transcripts/slurping/alias-clobbering.md new file mode 100644 index 0000000000..cd020b91f2 --- /dev/null +++ b/unison-src/transcripts/slurping/alias-clobbering.md @@ -0,0 +1,34 @@ +# Aliasing takes priority over provided definition + +```ucm:hide +.> builtins.merge +``` + +```unison +x = 1 +``` + +```ucm +.> add +``` + +```unison +-- Overwrite the existing alias +x = 2 +-- But add a _new_ definition that's an alias of the _old_ x +y = 1 +``` + +We see that `y` ends up as an alias for the new value of `x`, i.e. `y = 2`, +even though we explicitly said `y = 1`! + +Note how `update` isn't idempotent, it ping-pongs back and forth between aliases: + +```ucm +.> update +.> view x +.> view y +.> update +.> update +.> update +``` diff --git a/unison-src/transcripts/slurping/alias-clobbering.output.md b/unison-src/transcripts/slurping/alias-clobbering.output.md new file mode 100644 index 0000000000..4f1bc9dd7d --- /dev/null +++ b/unison-src/transcripts/slurping/alias-clobbering.output.md @@ -0,0 +1,107 @@ +# Aliasing takes priority over provided definition + +```unison +x = 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + x : Nat + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + x : Nat + +``` +```unison +-- Overwrite the existing alias +x = 2 +-- But add a _new_ definition that's an alias of the _old_ x +y = 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + y : Nat + (also named x) + + ⍟ These names already exist. You can `update` them to your + new definition: + + x : Nat + +``` +We see that `y` ends up as an alias for the new value of `x`, i.e. `y = 2`, +even though we explicitly said `y = 1`! + +Note how `update` isn't idempotent, it ping-pongs back and forth between aliases: + +```ucm +.> update + + ⍟ I've added these definitions: + + y : Nat + (also named x) + + ⍟ I've updated these names to your new definition: + + x : Nat + +.> view x + + x : Nat + x = 2 + +.> view y + + x : Nat + x = 2 + +.> update + + ⊡ Ignored previously added definitions: x + + ⍟ I've updated these names to your new definition: + + y : Nat + (The old definition was also named x. I updated this name + too.) + +.> update + + ⊡ Ignored previously added definitions: y + + ⍟ I've updated these names to your new definition: + + x : Nat + (The old definition was also named y. I updated this name + too.) + +.> update + + ⊡ Ignored previously added definitions: x + + ⍟ I've updated these names to your new definition: + + y : Nat + (The old definition was also named x. I updated this name + too.) + +``` diff --git a/unison-src/transcripts/slurping/block-on-required-update.md b/unison-src/transcripts/slurping/block-on-required-update.md new file mode 100644 index 0000000000..1027188b06 --- /dev/null +++ b/unison-src/transcripts/slurping/block-on-required-update.md @@ -0,0 +1,28 @@ +# Block on required update + +Should block an `add` if it requires an update on an in-file dependency. + +```ucm:hide +.> builtins.merge +``` + +```unison +x = 1 +``` + +```ucm +.> add +``` + +Update `x`, and add a new `y` which depends on the update + +```unison +x = 10 +y = x + 1 +``` + +Try to add only the new `y`. This should fail because it requires an update to `x`, but we only ran an 'add'. + +```ucm:error +.> add y +``` diff --git a/unison-src/transcripts/slurping/block-on-required-update.output.md b/unison-src/transcripts/slurping/block-on-required-update.output.md new file mode 100644 index 0000000000..b23135afd9 --- /dev/null +++ b/unison-src/transcripts/slurping/block-on-required-update.output.md @@ -0,0 +1,64 @@ +# Block on required update + +Should block an `add` if it requires an update on an in-file dependency. + +```unison +x = 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + x : Nat + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + x : Nat + +``` +Update `x`, and add a new `y` which depends on the update + +```unison +x = 10 +y = x + 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + y : Nat + + ⍟ These names already exist. You can `update` them to your + new definition: + + x : Nat + +``` +Try to add only the new `y`. This should fail because it requires an update to `x`, but we only ran an 'add'. + +```ucm +.> add y + + x These definitions failed: + + Reason + needs update x : Nat + blocked y : Nat + + Tip: Use `help filestatus` to learn more. + +``` diff --git a/unison-src/transcripts/slurping/sum-type-update-conflicts.md b/unison-src/transcripts/slurping/sum-type-update-conflicts.md new file mode 100644 index 0000000000..992c5726ea --- /dev/null +++ b/unison-src/transcripts/slurping/sum-type-update-conflicts.md @@ -0,0 +1,36 @@ +# Regression test for updates which conflict with an existing data constructor + +https://github.com/unisonweb/unison/issues/2786 + +```ucm:hide +.builtins> builtins.mergeio +``` + +First we add a sum-type to the codebase. + +```unison +structural type X = x +``` + +```ucm +.ns> add +``` + +Now we update the type, changing the name of the constructors, _but_, we simultaneously +add a new top-level term with the same name as the old constructor. + +```unison +structural type X = y | z + +X.x : Text +X.x = "some text that's not in the codebase" + +dependsOnX = Text.size X.x +``` + +The 'update' will succeed up until it tries to resolve the reference to `X.x` +within `depondsOnX`, but `X.x` is actually not in the codebase! + +```ucm:error +.ns> update +``` diff --git a/unison-src/transcripts/slurping/update-on-conflict.md b/unison-src/transcripts/slurping/update-on-conflict.md new file mode 100644 index 0000000000..30e1afe84d --- /dev/null +++ b/unison-src/transcripts/slurping/update-on-conflict.md @@ -0,0 +1,30 @@ +# Update on conflict + +```ucm:hide +.> builtins.merge +``` + +```unison +a.x = 1 +b.x = 2 +``` + +Cause a conflict: +```ucm +.> add +.merged> merge .a +.merged> merge .b +``` + +Ideally we could just define the canonical `x` that we want, and update +to accept it, but we can't: + +```unison +x = 1 + 2 +``` + +Update fails on conflicted `x`: + +```ucm:error +.merged> update +``` diff --git a/unison-src/transcripts/slurping/update-on-conflict.output.md b/unison-src/transcripts/slurping/update-on-conflict.output.md new file mode 100644 index 0000000000..2a5948c801 --- /dev/null +++ b/unison-src/transcripts/slurping/update-on-conflict.output.md @@ -0,0 +1,96 @@ +# Update on conflict + +```unison +a.x = 1 +b.x = 2 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + a.x : Nat + b.x : Nat + +``` +Cause a conflict: +```ucm +.> add + + ⍟ I've added these definitions: + + a.x : Nat + b.x : Nat + + ☝️ The namespace .merged is empty. + +.merged> merge .a + + Here's what's changed in the current namespace after the + merge: + + Added definitions: + + 1. x : Nat + + Tip: You can use `todo` to see if this generated any work to + do in this namespace and `test` to run the tests. Or you + can use `undo` or `reflog` to undo the results of this + merge. + +.merged> merge .b + + Here's what's changed in the current namespace after the + merge: + + New name conflicts: + + 1. x#jk19sm5bf8 : Nat + ↓ + 2. ┌ x#0ja1qfpej6 : Nat + 3. └ x#jk19sm5bf8 : Nat + + Tip: You can use `todo` to see if this generated any work to + do in this namespace and `test` to run the tests. Or you + can use `undo` or `reflog` to undo the results of this + merge. + +``` +Ideally we could just define the canonical `x` that we want, and update +to accept it, but we can't: + +```unison +x = 1 + 2 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + x These definitions would fail on `add` or `update`: + + Reason + conflicted x : Nat + + Tip: Use `help filestatus` to learn more. + +``` +Update fails on conflicted `x`: + +```ucm +.merged> update + + x These definitions failed: + + Reason + conflicted x : Nat + + Tip: Use `help filestatus` to learn more. + +``` diff --git a/unison-src/transcripts/slurping/update-with-conflicting-constructor-and-term.md b/unison-src/transcripts/slurping/update-with-conflicting-constructor-and-term.md new file mode 100644 index 0000000000..e3284c6f97 --- /dev/null +++ b/unison-src/transcripts/slurping/update-with-conflicting-constructor-and-term.md @@ -0,0 +1,17 @@ +# Update with conflicting ability constructor and term + +```ucm:hide +.> builtins.merge +``` + +```unison +unique ability Stream where + send : a -> () + +Stream.send : a -> () +Stream.send _ = () +``` + +```ucm +.> add +``` From 86b374467a65ac397ab17b21a2ea51ada6bfba87 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 12:07:40 -0600 Subject: [PATCH 190/297] Update transcripts --- .../transcripts/sum-type-update-conflicts.md | 6 +- .../sum-type-update-conflicts.output.md | 77 +++++++++++++++++++ ...e-with-conflicting-constructor-and-term.md | 17 ---- 3 files changed, 80 insertions(+), 20 deletions(-) create mode 100644 unison-src/transcripts/sum-type-update-conflicts.output.md delete mode 100644 unison-src/transcripts/update-with-conflicting-constructor-and-term.md diff --git a/unison-src/transcripts/sum-type-update-conflicts.md b/unison-src/transcripts/sum-type-update-conflicts.md index 992c5726ea..6c9adb1155 100644 --- a/unison-src/transcripts/sum-type-update-conflicts.md +++ b/unison-src/transcripts/sum-type-update-conflicts.md @@ -28,9 +28,9 @@ X.x = "some text that's not in the codebase" dependsOnX = Text.size X.x ``` -The 'update' will succeed up until it tries to resolve the reference to `X.x` -within `depondsOnX`, but `X.x` is actually not in the codebase! +This update should succeed since the conflicted constructor +is removed in the same update that the new term is being added. -```ucm:error +```ucm .ns> update ``` diff --git a/unison-src/transcripts/sum-type-update-conflicts.output.md b/unison-src/transcripts/sum-type-update-conflicts.output.md new file mode 100644 index 0000000000..e99695c90b --- /dev/null +++ b/unison-src/transcripts/sum-type-update-conflicts.output.md @@ -0,0 +1,77 @@ +# Regression test for updates which conflict with an existing data constructor + +https://github.com/unisonweb/unison/issues/2786 + +First we add a sum-type to the codebase. + +```unison +structural type X = x +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + structural type X + (also named builtin.Unit) + +``` +```ucm + ☝️ The namespace .ns is empty. + +.ns> add + + ⍟ I've added these definitions: + + structural type X + +``` +Now we update the type, changing the name of the constructors, _but_, we simultaneously +add a new top-level term with the same name as the old constructor. + +```unison +structural type X = y | z + +X.x : Text +X.x = "some text that's not in the codebase" + +dependsOnX = Text.size X.x +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + X.x : Text + dependsOnX : Nat + + ⍟ These names already exist. You can `update` them to your + new definition: + + structural type X + +``` +This update should succeed since the conflicted constructor +is removed in the same update that the new term is being added. + +```ucm +.ns> update + + ⍟ I've added these definitions: + + X.x : Text + dependsOnX : Nat + + ⍟ I've updated these names to your new definition: + + structural type X + +``` diff --git a/unison-src/transcripts/update-with-conflicting-constructor-and-term.md b/unison-src/transcripts/update-with-conflicting-constructor-and-term.md deleted file mode 100644 index e3284c6f97..0000000000 --- a/unison-src/transcripts/update-with-conflicting-constructor-and-term.md +++ /dev/null @@ -1,17 +0,0 @@ -# Update with conflicting ability constructor and term - -```ucm:hide -.> builtins.merge -``` - -```unison -unique ability Stream where - send : a -> () - -Stream.send : a -> () -Stream.send _ = () -``` - -```ucm -.> add -``` From 706f219b913e2234716870ec5be9cb77198ed798 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 17 Jan 2022 12:11:11 -0600 Subject: [PATCH 191/297] Docs --- unison-cli/src/Unison/Codebase/Editor/Slurp.hs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index ed7b8348c7..0b6a8cee94 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -427,9 +427,10 @@ toSlurpResult uf op requestedVars varsByStatus = SelfErr CtorTermCollision -> (mempty, mempty, mempty, mempty, (sc, mempty, mempty)) SelfErr Conflict -> (mempty, mempty, mempty, mempty, (mempty, mempty, sc)) ) + singletonSC :: TermOrTypeVar v -> SlurpComponent v singletonSC = \case - TypeVar v -> SlurpComponent {terms = mempty, types = Set.singleton v} - TermVar v -> SlurpComponent {terms = Set.singleton v, types = mempty} + TypeVar v -> SC.fromTypes (Set.singleton v) + TermVar v -> SC.fromTerms (Set.singleton v) -- | Sort out a set of variables by whether it is a term or type. partitionVars :: (Foldable f, Ord v) => f (TermOrTypeVar v) -> SlurpComponent v @@ -459,7 +460,7 @@ addAliases existingNames sr = sr {SR.termAlias = termAliases, SR.typeAlias = typ Map v SR.Aliases buildAliases existingNames namesFromFile dups = Map.fromList - [ ( var n, + [ ( varFromName n, if null aliasesOfOld then SR.AddAliases aliasesOfNew else SR.UpdateAliases aliasesOfOld aliasesOfNew @@ -474,7 +475,7 @@ addAliases existingNames sr = sr {SR.termAlias = termAliases, SR.typeAlias = typ Set.delete n . Rel.dom $ Rel.restrictRan existingNames refs, not (null aliasesOfNew && null aliasesOfOld), - Set.notMember (var n) dups + Set.notMember (varFromName n) dups ] termAliases :: Map v SR.Aliases @@ -491,5 +492,5 @@ addAliases existingNames sr = sr {SR.termAlias = termAliases, SR.typeAlias = typ (Rel.mapRan Referent.Ref $ Names.types fileNames) (SC.types (SR.duplicates sr)) -var :: Var v => Name -> v -var name = Var.named (Name.toText name) + varFromName :: Var v => Name -> v + varFromName name = Var.named (Name.toText name) From 68b387b713cdd51d03183ee16873cfcbd6156d6d Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Tue, 18 Jan 2022 11:09:42 -0700 Subject: [PATCH 192/297] Don't depend on BL.toChunks logic in V2 hashing --- parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs b/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs index eca0902717..54339a51f9 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Tokenizable.hs @@ -134,9 +134,9 @@ instance Accumulate Hash where go acc tokens = CH.hashUpdates acc (hashingVersion : tokens >>= toBS) toBS (Tag b) = [B.singleton b] toBS (Bytes bs) = [encodeLength $ B.length bs, bs] - toBS (Int i) = BL.toChunks . toLazyByteString . int64BE $ i - toBS (Nat i) = BL.toChunks . toLazyByteString . word64BE $ i - toBS (Double d) = BL.toChunks . toLazyByteString . doubleBE $ d + toBS (Int i) = [BL.toStrict . toLazyByteString . int64BE $ i] + toBS (Nat i) = [BL.toStrict . toLazyByteString . word64BE $ i] + toBS (Double d) = [BL.toStrict . toLazyByteString . doubleBE $ d] toBS (Text txt) = let tbytes = encodeUtf8 txt in [encodeLength (B.length tbytes), tbytes] From e356225573c00986782515525db7503bbea01bbf Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 18 Jan 2022 14:31:58 -0600 Subject: [PATCH 193/297] Add note on tuple monoids --- unison-cli/src/Unison/Codebase/Editor/Slurp.hs | 1 + 1 file changed, 1 insertion(+) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 0b6a8cee94..5647d04759 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -405,6 +405,7 @@ toSlurpResult uf op requestedVars varsByStatus = } where adds, duplicates, updates, termCtorColl, ctorTermColl, blocked, conflicts :: SlurpComponent v + -- Monoid instances only go up to 5-tuples :/ (adds, duplicates, updates, termCtorColl, (ctorTermColl, blocked, conflicts)) = varsByStatus & ifoldMap From f91e9cd041439f5485801779ee0ae68787c839bd Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 09:57:50 -0600 Subject: [PATCH 194/297] Update transcript output --- unison-src/transcripts-using-base/fix2027.output.md | 1 - unison-src/transcripts-using-base/tls.output.md | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/unison-src/transcripts-using-base/fix2027.output.md b/unison-src/transcripts-using-base/fix2027.output.md index c598d513e1..f356f88402 100644 --- a/unison-src/transcripts-using-base/fix2027.output.md +++ b/unison-src/transcripts-using-base/fix2027.output.md @@ -53,7 +53,6 @@ myServer = unsafeRun! '(hello "127.0.0.1" "0") change: ⊡ Previously added definitions will be ignored: Exception - Exception.raise ⍟ These new definitions are ok to `add`: diff --git a/unison-src/transcripts-using-base/tls.output.md b/unison-src/transcripts-using-base/tls.output.md index 4caf77050d..cae5b3f174 100644 --- a/unison-src/transcripts-using-base/tls.output.md +++ b/unison-src/transcripts-using-base/tls.output.md @@ -29,7 +29,7 @@ test> match (decodeCert.impl (toUtf8 self_signed_cert_pem) with ⍟ These new definitions are ok to `add`: - test.ko630itb5m (Unison bug, unknown term) + test.ko630itb5m : [Result] Now evaluating any watch expressions (lines starting with `>`)... Ctrl+C cancels. From cb44a6af455ccc9c2b680fe20d15a6ad804ef890 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 10:34:58 -0600 Subject: [PATCH 195/297] Use map instead of relation since we can. --- .../src/Unison/Codebase/Editor/Slurp.hs | 184 ++++++++++-------- 1 file changed, 106 insertions(+), 78 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 5647d04759..ba897276cf 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -135,7 +135,7 @@ slurpFile :: Names -> SR.SlurpResult v slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames = - let varRelation :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency + let varRelation :: Map (TermOrTypeVar v) LD.LabeledDependency varRelation = fileDefinitions uf involvedVars :: Set (TermOrTypeVar v) involvedVars = computeInvolvedVars uf defsToConsider varRelation @@ -144,17 +144,20 @@ slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames = varDeps :: Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) varDeps = computeVarDeps uf involvedVars analysis :: SlurpAnalysis v - analysis = computeSlurpAnalysis varDeps varRelation codebaseNames + analysis = computeSlurpAnalysis varDeps varRelation fileNames codebaseNames varsByStatus :: VarsByStatus v varsByStatus = groupByStatus analysis slurpResult :: SR.SlurpResult v slurpResult = - toSlurpResult uf slurpOp defsToConsider varsByStatus - & addAliases codebaseNames + toSlurpResult uf slurpOp defsToConsider involvedVars fileNames codebaseNames varsByStatus in pTraceShowId slurpResult where + slurpOp :: SlurpOp slurpOp = fromMaybe UpdateOp maybeSlurpOp + fileNames :: Names + fileNames = UF.typecheckedToNames uf + -- | Return a modified set of names with constructors which would be deprecated by possible -- updates are removed. computeNamesWithDeprecations :: @@ -212,10 +215,11 @@ computeSlurpAnalysis :: forall v. (Ord v, Var v) => Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) -> - Rel.Relation (TermOrTypeVar v) LD.LabeledDependency -> + Map (TermOrTypeVar v) LD.LabeledDependency -> + Names -> Names -> SlurpAnalysis v -computeSlurpAnalysis depMap varRelation codebaseNames = +computeSlurpAnalysis depMap varRelation _fileNames codebaseNames = pTraceShow ("Statuses", statuses) $ statuses where @@ -245,22 +249,29 @@ computeSlurpAnalysis depMap varRelation codebaseNames = in withTransitiveNotes definitionStatus :: TermOrTypeVar v -> DefinitionNotes definitionStatus tv = - let ld = case Set.toList (Rel.lookupDom tv varRelation) of - [r] -> r - actual -> error $ "Expected exactly one LabeledDependency in relation for var: " <> show tv <> " but got: " <> show actual + let ld = case Map.lookup tv varRelation of + Just r -> r + Nothing -> error $ "Expected LabeledDependency in map for var: " <> show tv v = unlabeled tv existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) in case ld of - LD.TypeReference {} -> - case Set.toList existingTypesAtName of - [] -> DefOk New - [r] - | LD.typeRef r == ld -> DefOk Duplicated - | otherwise -> DefOk Updated - -- If there are many existing terms, they must be in conflict. - -- Currently we treat conflicts as errors rather than resolving them. - _ -> DefErr Conflict + LD.TypeReference _typeRef -> + let typeStatus = case Set.toList existingTypesAtName of + [] -> DefOk New + [r] + | LD.typeRef r == ld -> DefOk Duplicated + | otherwise -> DefOk Updated + -- If there are many existing terms, they must be in conflict. + -- Currently we treat conflicts as errors rather than resolving them. + _ -> DefErr Conflict + in -- ctorConflicts = do + -- (ctorName, ctorRef) <- Names.constructorsForType typeRef fileNames + -- existing <- Set.toList $ Names.termsNamed codebaseNames ctorName + -- case existing of + -- Referent.Ref _ -> pure _ + -- Referent.Con {} -> empty + typeStatus LD.TermReference {} -> case Set.toList existingTermsAtName of [] -> DefOk New @@ -290,10 +301,10 @@ computeInvolvedVars :: Var v => UF.TypecheckedUnisonFile v Ann -> Set v -> - Rel.Relation (TermOrTypeVar v) LD.LabeledDependency -> + Map (TermOrTypeVar v) LD.LabeledDependency -> Set (TermOrTypeVar v) computeInvolvedVars uf defsToConsider varRelation - | Set.null defsToConsider = Rel.dom varRelation + | Set.null defsToConsider = Set.fromList $ Map.keys varRelation | otherwise = allInvolvedVars where allInvolvedVars :: Set (TermOrTypeVar v) @@ -302,7 +313,7 @@ computeInvolvedVars uf defsToConsider varRelation v <- Set.toList defsToConsider -- We don't know whether each var is a type or term, so we try both. tv <- [TypeVar v, TermVar v] - guard (Rel.memberDom tv varRelation) + guard (Map.member tv varRelation) pure tv in varClosure uf existingVars @@ -333,12 +344,12 @@ varClosure uf (partitionVars -> sc) = -- | Collect a relation of term or type var to labelled dependency for all definitions mentioned in a file. -- Contains types but not their constructors. -fileDefinitions :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Rel.Relation (TermOrTypeVar v) LD.LabeledDependency +fileDefinitions :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Map (TermOrTypeVar v) LD.LabeledDependency fileDefinitions uf = let result = decls <> effects <> terms in pTraceShow ("varRelation", result) $ result where - terms :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency + terms :: Map (TermOrTypeVar v) LD.LabeledDependency terms = UF.hashTermsId uf & Map.toList @@ -351,31 +362,60 @@ fileDefinitions uf = Just (TermVar v, LD.derivedTerm refId) _ -> Nothing ) - & Rel.fromList - decls :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency + & Map.fromList + decls :: Map (TermOrTypeVar v) LD.LabeledDependency decls = UF.dataDeclarationsId' uf & Map.toList & fmap (\(v, (refId, _)) -> (TypeVar v, LD.derivedType refId)) - & Rel.fromList + & Map.fromList - effects :: Rel.Relation (TermOrTypeVar v) LD.LabeledDependency + effects :: Map (TermOrTypeVar v) LD.LabeledDependency effects = UF.effectDeclarationsId' uf & Map.toList & fmap (\(v, (refId, _)) -> (TypeVar v, (LD.derivedType refId))) - & Rel.fromList + & Map.fromList + +-- A helper type just used by 'toSlurpResult' for partitioning results. +data SlurpingSummary v = SlurpingSummary + { adds :: SlurpComponent v, + duplicates :: SlurpComponent v, + updates :: SlurpComponent v, + termCtorColl :: SlurpComponent v, + ctorTermColl :: SlurpComponent v, + blocked :: SlurpComponent v, + conflicts :: SlurpComponent v + } + +instance (Ord v) => Semigroup (SlurpingSummary v) where + SlurpingSummary a b c d e f g + <> SlurpingSummary a' b' c' d' e' f' g' = + SlurpingSummary + (a <> a') + (b <> b') + (c <> c') + (d <> d') + (e <> e') + (f <> f') + (g <> g') + +instance (Ord v) => Monoid (SlurpingSummary v) where + mempty = SlurpingSummary mempty mempty mempty mempty mempty mempty mempty -- | Convert a 'VarsByStatus' mapping into a 'SR.SlurpResult' toSlurpResult :: forall v. - (Ord v, Show v) => + (Var v) => UF.TypecheckedUnisonFile v Ann -> SlurpOp -> Set v -> + Set (TermOrTypeVar v) -> + Names -> + Names -> VarsByStatus v -> SR.SlurpResult v -toSlurpResult uf op requestedVars varsByStatus = +toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsByStatus = pTraceShowId $ SR.SlurpResult { SR.originalFile = uf, @@ -383,11 +423,10 @@ toSlurpResult uf op requestedVars varsByStatus = if Set.null requestedVars then mempty else - let allVars = foldMap NESet.toSet varsByStatus - desired = + let desired = requestedVars & Set.flatMap (\v -> Set.fromList [TypeVar v, TermVar v]) - in partitionVars $ Set.difference allVars desired, + in partitionVars $ Set.difference involvedVars desired, SR.adds = adds, SR.duplicates = duplicates, SR.collisions = if op == AddOp then updates else mempty, @@ -399,61 +438,35 @@ toSlurpResult uf op requestedVars varsByStatus = SR.constructorExistingTermCollisions = let SlurpComponent types terms = ctorTermColl in types <> terms, - SR.termAlias = mempty, - SR.typeAlias = mempty, + SR.termAlias = termAliases, + SR.typeAlias = typeAliases, SR.defsWithBlockedDependencies = blocked } where - adds, duplicates, updates, termCtorColl, ctorTermColl, blocked, conflicts :: SlurpComponent v -- Monoid instances only go up to 5-tuples :/ - (adds, duplicates, updates, termCtorColl, (ctorTermColl, blocked, conflicts)) = + SlurpingSummary {adds, duplicates, updates, termCtorColl, ctorTermColl, blocked, conflicts} = varsByStatus & ifoldMap ( \k tvs -> let sc = partitionVars $ tvs in case k of - Ok New -> (sc, mempty, mempty, mempty, (mempty, mempty, mempty)) - Ok Duplicated -> (mempty, sc, mempty, mempty, (mempty, mempty, mempty)) - Ok Updated -> (mempty, mempty, sc, mempty, (mempty, mempty, mempty)) - NeedsUpdate v -> + Ok New -> mempty {adds = sc} + Ok Duplicated -> mempty {duplicates = sc} + Ok Updated -> mempty {updates = sc} + NeedsUpdate _ -> case op of AddOp -> - (mempty, mempty, singletonSC v, mempty, (mempty, sc `SC.difference` singletonSC v, mempty)) + mempty {blocked = sc} UpdateOp -> - (sc, mempty, mempty, mempty, (mempty, mempty, mempty)) - ErrFrom v TermCtorCollision -> (mempty, mempty, mempty, singletonSC v, (mempty, sc `SC.difference` singletonSC v, mempty)) - ErrFrom v CtorTermCollision -> (mempty, mempty, mempty, mempty, (singletonSC v, sc `SC.difference` singletonSC v, mempty)) - ErrFrom v Conflict -> (mempty, mempty, mempty, mempty, (mempty, sc `SC.difference` singletonSC v, singletonSC v)) - SelfErr TermCtorCollision -> (mempty, mempty, mempty, sc, (mempty, mempty, mempty)) - SelfErr CtorTermCollision -> (mempty, mempty, mempty, mempty, (sc, mempty, mempty)) - SelfErr Conflict -> (mempty, mempty, mempty, mempty, (mempty, mempty, sc)) + mempty {adds = sc} + ErrFrom _ TermCtorCollision -> mempty {blocked = sc} + ErrFrom _ CtorTermCollision -> mempty {blocked = sc} + ErrFrom _ Conflict -> mempty {blocked = sc} + SelfErr TermCtorCollision -> mempty {termCtorColl = sc} + SelfErr CtorTermCollision -> mempty {ctorTermColl = sc} + SelfErr Conflict -> mempty {conflicts = sc} ) - singletonSC :: TermOrTypeVar v -> SlurpComponent v - singletonSC = \case - TypeVar v -> SC.fromTypes (Set.singleton v) - TermVar v -> SC.fromTerms (Set.singleton v) - --- | Sort out a set of variables by whether it is a term or type. -partitionVars :: (Foldable f, Ord v) => f (TermOrTypeVar v) -> SlurpComponent v -partitionVars = - foldMap - ( \case - TypeVar v -> SC.fromTypes (Set.singleton v) - TermVar v -> SC.fromTerms (Set.singleton v) - ) --- | Collapse a SlurpComponent into a tagged set. -mingleVars :: Ord v => SlurpComponent v -> Set (TermOrTypeVar v) -mingleVars SlurpComponent {terms, types} = - Set.map TypeVar types - <> Set.map TermVar terms - --- | Compute which definitions in a slurp result are aliases of definitions in the current --- tree. -addAliases :: forall v. (Ord v, Var v) => Names -> SR.SlurpResult v -> SR.SlurpResult v -addAliases existingNames sr = sr {SR.termAlias = termAliases, SR.typeAlias = typeAliases} - where - fileNames = UF.typecheckedToNames $ SR.originalFile sr buildAliases :: Rel.Relation Name Referent -> Rel.Relation Name Referent -> @@ -482,16 +495,31 @@ addAliases existingNames sr = sr {SR.termAlias = termAliases, SR.typeAlias = typ termAliases :: Map v SR.Aliases termAliases = buildAliases - (Names.terms existingNames) + (Names.terms codebaseNames) (Names.terms fileNames) - (SC.terms (SR.duplicates sr)) + (SC.terms duplicates) typeAliases :: Map v SR.Aliases typeAliases = buildAliases - (Rel.mapRan Referent.Ref $ Names.types existingNames) + (Rel.mapRan Referent.Ref $ Names.types codebaseNames) (Rel.mapRan Referent.Ref $ Names.types fileNames) - (SC.types (SR.duplicates sr)) + (SC.types duplicates) varFromName :: Var v => Name -> v varFromName name = Var.named (Name.toText name) + +-- | Sort out a set of variables by whether it is a term or type. +partitionVars :: (Foldable f, Ord v) => f (TermOrTypeVar v) -> SlurpComponent v +partitionVars = + foldMap + ( \case + TypeVar v -> SC.fromTypes (Set.singleton v) + TermVar v -> SC.fromTerms (Set.singleton v) + ) + +-- | Collapse a SlurpComponent into a tagged set. +mingleVars :: Ord v => SlurpComponent v -> Set (TermOrTypeVar v) +mingleVars SlurpComponent {terms, types} = + Set.map TypeVar types + <> Set.map TermVar terms From e2a5f94646bcd76a2ba11a8690f42112251dab5d Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 11:55:13 -0600 Subject: [PATCH 196/297] Handle constructors explicitly --- .../src/Unison/Codebase/Editor/Slurp.hs | 92 ++++++++++++------- .../Unison/Codebase/Editor/SlurpComponent.hs | 15 +-- .../src/Unison/Codebase/Editor/SlurpResult.hs | 2 +- 3 files changed, 69 insertions(+), 40 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index ba897276cf..20b48e2d88 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -15,6 +15,7 @@ import Debug.Pretty.Simple (pTraceShow, pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as SR +import qualified Unison.ConstructorReference as CR import qualified Unison.DataDeclaration as DD import qualified Unison.LabeledDependency as LD import Unison.Name (Name) @@ -23,6 +24,7 @@ import Unison.Names (Names) import qualified Unison.Names as Names import Unison.Parser.Ann (Ann) import Unison.Prelude +import qualified Unison.Reference as Ref import Unison.Referent (Referent) import qualified Unison.Referent as Referent import qualified Unison.Referent' as Referent @@ -39,13 +41,14 @@ data SlurpOp = AddOp | UpdateOp deriving (Eq, Show) -- | Tag a variable as either a term/constructor or type -data TermOrTypeVar v = TermVar v | TypeVar v +data TermOrTypeVar v = TermVar v | TypeVar v | ConstructorVar v deriving (Eq, Ord, Show) -- | Extract the var from a TermOrTypeVar unlabeled :: TermOrTypeVar v -> v unlabeled (TermVar v) = v unlabeled (TypeVar v) = v +unlabeled (ConstructorVar v) = v -- | A definition's status with relation to the codebase. data SlurpStatus @@ -120,7 +123,10 @@ groupByStatus sr = computeDefStatus isDep defNotes tv = case defNotes of DefOk Updated -> if isDep then NeedsUpdate tv else Ok Updated - DefErr err -> ErrFrom tv err + DefErr err -> + if isDep + then ErrFrom tv err + else SelfErr err DefOk New -> Ok New DefOk Duplicated -> Ok Duplicated @@ -171,9 +177,7 @@ computeNamesWithDeprecations _uf unalteredCodebaseNames _involvedVars AddOp = -- If we're 'adding', there won't be any deprecations. unalteredCodebaseNames computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars UpdateOp = - pTraceShow ("Deprecated constructors", deprecatedConstructors) - . pTraceShow ("constructorNamesInFile", constructorsUnderConsideration) - $ codebaseNames + codebaseNames where -- Get the set of all DIRECT definitions in the file which a definition depends on. codebaseNames :: Names @@ -204,11 +208,8 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars UpdateOp = (name, _ref) <- Names.constructorsForType ref unalteredCodebaseNames pure name in -- Compute any constructors which were deleted - pTraceShow ("defsToConsider", involvedVars) - . pTraceShow ("codebaseNames", unalteredCodebaseNames) - . pTraceShow ("allRefIds", oldRefsForEditedTypes) - . pTraceShow ("existingConstructorsFromEditedTypes", existingConstructorsFromEditedTypes) - $ existingConstructorsFromEditedTypes `Set.difference` constructorsUnderConsideration + pTraceShow ("defsToConsider", involvedVars) $ + existingConstructorsFromEditedTypes `Set.difference` constructorsUnderConsideration -- | Compute a mapping of each definition to its status, and its dependencies' statuses. computeSlurpAnalysis :: @@ -262,7 +263,7 @@ computeSlurpAnalysis depMap varRelation _fileNames codebaseNames = [r] | LD.typeRef r == ld -> DefOk Duplicated | otherwise -> DefOk Updated - -- If there are many existing terms, they must be in conflict. + -- If there are many existing types, they must be in conflict. -- Currently we treat conflicts as errors rather than resolving them. _ -> DefErr Conflict in -- ctorConflicts = do @@ -309,13 +310,14 @@ computeInvolvedVars uf defsToConsider varRelation where allInvolvedVars :: Set (TermOrTypeVar v) allInvolvedVars = - let existingVars :: Set (TermOrTypeVar v) = Set.fromList $ do + let requestedVarsWhichActuallyExist :: Set (TermOrTypeVar v) + requestedVarsWhichActuallyExist = Set.fromList $ do v <- Set.toList defsToConsider -- We don't know whether each var is a type or term, so we try both. tv <- [TypeVar v, TermVar v] guard (Map.member tv varRelation) pure tv - in varClosure uf existingVars + in varClosure uf requestedVarsWhichActuallyExist -- | Compute transitive dependencies for all relevant variables. computeVarDeps :: @@ -338,15 +340,18 @@ computeVarDeps uf allInvolvedVars = $ depMap -- | Compute the closure of all vars which the provided vars depend on. -varClosure :: Ord v => UF.TypecheckedUnisonFile v a -> Set (TermOrTypeVar v) -> Set (TermOrTypeVar v) -varClosure uf (partitionVars -> sc) = - mingleVars $ SC.closeWithDependencies uf sc +-- A type depends on its constructors. +varClosure :: (Var v) => UF.TypecheckedUnisonFile v a -> Set (TermOrTypeVar v) -> Set (TermOrTypeVar v) +varClosure uf (partitionVars OmitConstructors -> sc) = + let deps = SC.closeWithDependencies uf sc + in mingleVars deps + <> Set.map ConstructorVar (SR.constructorsFor (SC.types sc) uf) -- | Collect a relation of term or type var to labelled dependency for all definitions mentioned in a file. -- Contains types but not their constructors. fileDefinitions :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Map (TermOrTypeVar v) LD.LabeledDependency fileDefinitions uf = - let result = decls <> effects <> terms + let result = decls <> effects <> terms <> constructors in pTraceShow ("varRelation", result) $ result where terms :: Map (TermOrTypeVar v) LD.LabeledDependency @@ -377,6 +382,22 @@ fileDefinitions uf = & fmap (\(v, (refId, _)) -> (TypeVar v, (LD.derivedType refId))) & Map.fromList + constructors :: Map (TermOrTypeVar v) LD.LabeledDependency + constructors = + let effectConstructors :: Map (TermOrTypeVar v) LD.LabeledDependency + effectConstructors = Map.fromList $ do + (_, (typeRefId, effect)) <- Map.toList (UF.effectDeclarationsId' uf) + let decl = DD.toDataDecl effect + (conId, constructorV) <- zip (DD.constructorIds decl) (DD.constructorVars decl) + pure $ (ConstructorVar constructorV, LD.effectConstructor (CR.ConstructorReference (Ref.fromId typeRefId) conId)) + + dataConstructors :: Map (TermOrTypeVar v) LD.LabeledDependency + dataConstructors = Map.fromList $ do + (_, (typeRefId, decl)) <- Map.toList (UF.dataDeclarationsId' uf) + (conId, constructorV) <- zip (DD.constructorIds decl) (DD.constructorVars decl) + pure $ (ConstructorVar constructorV, LD.dataConstructor (CR.ConstructorReference (Ref.fromId typeRefId) conId)) + in effectConstructors <> dataConstructors + -- A helper type just used by 'toSlurpResult' for partitioning results. data SlurpingSummary v = SlurpingSummary { adds :: SlurpComponent v, @@ -426,7 +447,7 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsBySta let desired = requestedVars & Set.flatMap (\v -> Set.fromList [TypeVar v, TermVar v]) - in partitionVars $ Set.difference involvedVars desired, + in partitionVars OmitConstructors $ Set.difference involvedVars desired, SR.adds = adds, SR.duplicates = duplicates, SR.collisions = if op == AddOp then updates else mempty, @@ -448,23 +469,24 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsBySta varsByStatus & ifoldMap ( \k tvs -> - let sc = partitionVars $ tvs + let scWithConstructors = partitionVars IncludedConstructors $ tvs + scWithoutConstructors = partitionVars OmitConstructors $ tvs in case k of - Ok New -> mempty {adds = sc} - Ok Duplicated -> mempty {duplicates = sc} - Ok Updated -> mempty {updates = sc} + Ok New -> mempty {adds = scWithoutConstructors} + Ok Duplicated -> mempty {duplicates = scWithoutConstructors} + Ok Updated -> mempty {updates = scWithoutConstructors} NeedsUpdate _ -> case op of AddOp -> - mempty {blocked = sc} + mempty {blocked = scWithoutConstructors} UpdateOp -> - mempty {adds = sc} - ErrFrom _ TermCtorCollision -> mempty {blocked = sc} - ErrFrom _ CtorTermCollision -> mempty {blocked = sc} - ErrFrom _ Conflict -> mempty {blocked = sc} - SelfErr TermCtorCollision -> mempty {termCtorColl = sc} - SelfErr CtorTermCollision -> mempty {ctorTermColl = sc} - SelfErr Conflict -> mempty {conflicts = sc} + mempty {adds = scWithoutConstructors} + ErrFrom _ TermCtorCollision -> mempty {blocked = scWithoutConstructors} + ErrFrom _ CtorTermCollision -> mempty {blocked = scWithoutConstructors} + ErrFrom _ Conflict -> mempty {blocked = scWithoutConstructors} + SelfErr TermCtorCollision -> mempty {termCtorColl = scWithConstructors} + SelfErr CtorTermCollision -> mempty {ctorTermColl = scWithConstructors} + SelfErr Conflict -> mempty {conflicts = scWithConstructors} ) buildAliases :: @@ -509,13 +531,19 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsBySta varFromName :: Var v => Name -> v varFromName name = Var.named (Name.toText name) +data HandleConstructors = OmitConstructors | IncludedConstructors + -- | Sort out a set of variables by whether it is a term or type. -partitionVars :: (Foldable f, Ord v) => f (TermOrTypeVar v) -> SlurpComponent v -partitionVars = +partitionVars :: (Foldable f, Ord v) => HandleConstructors -> f (TermOrTypeVar v) -> SlurpComponent v +partitionVars ctorHandling = foldMap ( \case TypeVar v -> SC.fromTypes (Set.singleton v) TermVar v -> SC.fromTerms (Set.singleton v) + ConstructorVar v -> + case ctorHandling of + OmitConstructors -> mempty + IncludedConstructors -> SC.fromTerms (Set.singleton v) ) -- | Collapse a SlurpComponent into a tagged set. diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs index eb877ed1ae..167e08336b 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs @@ -22,23 +22,24 @@ isEmpty :: SlurpComponent v -> Bool isEmpty sc = Set.null (types sc) && Set.null (terms sc) empty :: Ord v => SlurpComponent v -empty = SlurpComponent mempty mempty +empty = SlurpComponent {types=mempty, terms=mempty} difference :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v -difference c1 c2 = SlurpComponent types' terms' where +difference c1 c2 = SlurpComponent {types=types', terms=terms'} where types' = types c1 `Set.difference` types c2 terms' = terms c1 `Set.difference` terms c2 intersection :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v -intersection c1 c2 = SlurpComponent types' terms' where +intersection c1 c2 = SlurpComponent {types=types', terms=terms'} where types' = types c1 `Set.intersection` types c2 terms' = terms c1 `Set.intersection` terms c2 instance Ord v => Semigroup (SlurpComponent v) where (<>) = mappend instance Ord v => Monoid (SlurpComponent v) where - mempty = SlurpComponent mempty mempty - c1 `mappend` c2 = SlurpComponent (types c1 <> types c2) - (terms c1 <> terms c2) + mempty = SlurpComponent {types=mempty, terms=mempty} + c1 `mappend` c2 = SlurpComponent { types = types c1 <> types c2 + , terms = terms c1 <> terms c2 + } -- I'm calling this `closeWithDependencies` because it doesn't just compute @@ -47,7 +48,7 @@ instance Ord v => Monoid (SlurpComponent v) where closeWithDependencies :: forall v a. Ord v => TypecheckedUnisonFile v a -> SlurpComponent v -> SlurpComponent v closeWithDependencies uf inputs = seenDefns where - seenDefns = foldl' termDeps (SlurpComponent mempty seenTypes) (terms inputs) + seenDefns = foldl' termDeps (SlurpComponent {terms=mempty, types=seenTypes}) (terms inputs) seenTypes = foldl' typeDeps mempty (types inputs) termDeps :: SlurpComponent v -> v -> SlurpComponent v diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs index a4e382ec81..561cfc7453 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs @@ -73,7 +73,7 @@ data SlurpResult v = SlurpResult { } deriving (Show) -- Returns the set of constructor names for type names in the given `Set`. -constructorsFor :: Var v => Set v -> UF.TypecheckedUnisonFile v Ann -> Set v +constructorsFor :: Var v => Set v -> UF.TypecheckedUnisonFile v a -> Set v constructorsFor types uf = let names = UF.typecheckedToNames uf typesRefs = Set.unions $ Names.typesNamed names . Name.unsafeFromVar <$> toList types From 1346489fe9348175c7c5cbeb171fc9d04b1e1b31 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 14:45:23 -0600 Subject: [PATCH 197/297] WIP --- parser-typechecker/src/Unison/UnisonFile.hs | 2 +- unison-core/src/Unison/ABT.hs | 11 ++++++----- unison-core/src/Unison/DataDeclaration.hs | 1 + 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/parser-typechecker/src/Unison/UnisonFile.hs b/parser-typechecker/src/Unison/UnisonFile.hs index 8b8477b563..22fa362e9c 100644 --- a/parser-typechecker/src/Unison/UnisonFile.hs +++ b/parser-typechecker/src/Unison/UnisonFile.hs @@ -138,7 +138,7 @@ indexByReference uf = (tms, tys) allTerms :: Ord v => TypecheckedUnisonFile v a -> Map v (Term v a) allTerms uf = - Map.fromList [ (v, t) | (v, t, _) <- join $ topLevelComponents' uf ] + Map.fromList [ (v, t) | (v, t, _) <- join $ topLevelComponents uf ] -- |the top level components (no watches) plus test watches. topLevelComponents :: TypecheckedUnisonFile v a diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index 719b88a943..d4f12e3231 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -734,8 +734,9 @@ hash = hash' [] where instance (Show1 f, Show v) => Show (Term f v a) where -- annotations not shown - showsPrec p (Term _ _ out) = case out of - Var v -> showParen (p>=9) $ \x -> "Var " ++ show v ++ x - Cycle body -> ("Cycle " ++) . showsPrec p body - Abs v body -> showParen True $ (show v ++) . showString ". " . showsPrec p body - Tm f -> showsPrec1 p f + showsPrec p (Term freevars _ out) = case out of + -- showsPrec p (Term _ _ out) = case out of + Var v -> showParen (p>=9) $ \x -> show freevars <> "Var " ++ show v ++ x + Cycle body -> (show freevars <>) . ("Cycle " ++) . showsPrec p body + Abs v body -> showParen True $ (show freevars <>) . (show v ++) . showString ". " . showsPrec p body + Tm f -> (show freevars <>) . showsPrec1 p f diff --git a/unison-core/src/Unison/DataDeclaration.hs b/unison-core/src/Unison/DataDeclaration.hs index 8404f5d22d..38e1d7ac83 100644 --- a/unison-core/src/Unison/DataDeclaration.hs +++ b/unison-core/src/Unison/DataDeclaration.hs @@ -21,6 +21,7 @@ module Unison.DataDeclaration constructorType, constructorTypes, constructorVars, + constructorIds, declConstructorReferents, declDependencies, declFields, From 2dc707dc78eb89b591becad5d278ea1713bd87c7 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 14:45:43 -0600 Subject: [PATCH 198/297] Add transcript for ctor/term conflicts --- .../ability-term-conflicts-on-update.md | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/unison-src/transcripts/ability-term-conflicts-on-update.md b/unison-src/transcripts/ability-term-conflicts-on-update.md index 28d33390a5..72c319f3ac 100644 --- a/unison-src/transcripts/ability-term-conflicts-on-update.md +++ b/unison-src/transcripts/ability-term-conflicts-on-update.md @@ -65,3 +65,24 @@ We should also be able to successfully update the whole thing. ```ucm .ns> update ``` + +# Constructor-term conflict + +```unison +X.x = 1 +``` + +```ucm +.ns2> add +``` + +```unison:error +structural ability X where + x : () +``` + +This should fail with a ctor/term conflict. + +```ucm:error +.ns2> add +``` From 223f8ad3ae05fa96001dd8616846665a701ed9e6 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 15:18:33 -0600 Subject: [PATCH 199/297] Handle constructors explicitly in slurping --- parser-typechecker/src/Unison/UnisonFile.hs | 19 ++++++++++++++ .../src/Unison/Codebase/Editor/HandleInput.hs | 10 +++---- .../src/Unison/Codebase/Editor/Slurp.hs | 12 ++++----- .../Unison/Codebase/Editor/SlurpComponent.hs | 26 ++++++++++++------- .../src/Unison/Codebase/Editor/SlurpResult.hs | 17 ------------ 5 files changed, 46 insertions(+), 38 deletions(-) diff --git a/parser-typechecker/src/Unison/UnisonFile.hs b/parser-typechecker/src/Unison/UnisonFile.hs index 22fa362e9c..be63cdcdb9 100644 --- a/parser-typechecker/src/Unison/UnisonFile.hs +++ b/parser-typechecker/src/Unison/UnisonFile.hs @@ -23,6 +23,7 @@ module Unison.UnisonFile discardTypes, effectDeclarations', hashConstructors, + constructorsForTypeVars, hashTerms, indexByReference, lookupDecl, @@ -59,6 +60,7 @@ import Unison.UnisonFile.Type (TypecheckedUnisonFile (..), UnisonFile (..), patt import qualified Unison.Util.List as List import Unison.Var (Var) import Unison.WatchKind (WatchKind, pattern TestWatch) + dataDeclarations :: UnisonFile v a -> Map v (Reference, DataDeclaration v a) dataDeclarations = fmap (first Reference.DerivedId) . dataDeclarationsId @@ -199,3 +201,20 @@ hashConstructors file = ctors2 = Map.elems (effectDeclarationsId' file) >>= \(ref, dd) -> [ (v, Referent.ConId (ConstructorReference ref i) CT.Effect) | (v,i) <- DD.constructorVars (DD.toDataDecl dd) `zip` [0 ..] ] in Map.fromList (ctors1 ++ ctors2) + +-- | Returns the set of constructor names for type names in the given `Set`. +constructorsForTypeVars :: Ord v => Set v -> TypecheckedUnisonFile v a -> Set v +constructorsForTypeVars types uf = + let dataConstructors = + dataDeclarationsId' uf + & Map.filterWithKey (\k _ -> Set.member k types) + & Map.elems + & fmap snd + & concatMap DD.constructorVars + effectConstructors = + effectDeclarationsId' uf + & Map.filterWithKey (\k _ -> Set.member k types) + & Map.elems + & fmap (DD.toDataDecl . snd) + & concatMap DD.constructorVars + in Set.fromList (dataConstructors <> effectConstructors) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 9660c3f4f4..f3c9210b53 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1259,7 +1259,7 @@ loop = do Nothing -> respond NoUnisonFile Just uf -> do currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames + let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames let adds = SlurpResult.adds sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . filterBySlurpResult sr $ uf @@ -1271,7 +1271,7 @@ loop = do (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames + let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames previewResponse sourceName sr uf _ -> respond NoUnisonFile UpdateI maybePatchPath names -> handleUpdate input maybePatchPath names @@ -1279,7 +1279,7 @@ loop = do (Just (sourceName, _), Just uf) -> do let vars = Set.map Name.toVar names currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) currentNames + let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) currentNames previewResponse sourceName sr uf _ -> respond NoUnisonFile TodoI patchPath branchPath' -> do @@ -1818,7 +1818,7 @@ handleUpdate input maybePatchPath names = do let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames - let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) slurpCheckNames + let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) slurpCheckNames addsAndUpdates :: SlurpComponent v addsAndUpdates = Slurp.updates sr <> Slurp.adds sr fileNames :: Names @@ -2871,7 +2871,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions) typeActions = map doType . toList $ SC.types slurp termActions = map doTerm . toList $ - SC.terms slurp <> Slurp.constructorsFor (SC.types slurp) uf + SC.terms slurp <> UF.constructorsForTypeVars (SC.types slurp) uf names = UF.typecheckedToNames uf tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf) (isTestType, isTestValue) = isTest diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 20b48e2d88..c61f74dd08 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -345,7 +345,6 @@ varClosure :: (Var v) => UF.TypecheckedUnisonFile v a -> Set (TermOrTypeVar v) - varClosure uf (partitionVars OmitConstructors -> sc) = let deps = SC.closeWithDependencies uf sc in mingleVars deps - <> Set.map ConstructorVar (SR.constructorsFor (SC.types sc) uf) -- | Collect a relation of term or type var to labelled dependency for all definitions mentioned in a file. -- Contains types but not their constructors. @@ -454,11 +453,11 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsBySta SR.conflicts = conflicts, SR.updates = if op == UpdateOp then updates else mempty, SR.termExistingConstructorCollisions = - let SlurpComponent types terms = termCtorColl - in types <> terms, + let SlurpComponent {types, terms, ctors} = termCtorColl + in types <> terms <> ctors, SR.constructorExistingTermCollisions = - let SlurpComponent types terms = ctorTermColl - in types <> terms, + let SlurpComponent {types, terms, ctors} = ctorTermColl + in types <> terms <> ctors, SR.termAlias = termAliases, SR.typeAlias = typeAliases, SR.defsWithBlockedDependencies = blocked @@ -548,6 +547,7 @@ partitionVars ctorHandling = -- | Collapse a SlurpComponent into a tagged set. mingleVars :: Ord v => SlurpComponent v -> Set (TermOrTypeVar v) -mingleVars SlurpComponent {terms, types} = +mingleVars SlurpComponent {terms, types, ctors} = Set.map TypeVar types <> Set.map TermVar terms + <> Set.map ConstructorVar ctors diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs index 167e08336b..2b8af63c6c 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs @@ -15,30 +15,33 @@ import qualified Unison.Term as Term import qualified Unison.UnisonFile as UF data SlurpComponent v = - SlurpComponent { types :: Set v, terms :: Set v } + SlurpComponent { types :: Set v, terms :: Set v, ctors :: Set v } deriving (Eq,Ord,Show) isEmpty :: SlurpComponent v -> Bool -isEmpty sc = Set.null (types sc) && Set.null (terms sc) +isEmpty sc = Set.null (types sc) && Set.null (terms sc) && Set.null (ctors sc) empty :: Ord v => SlurpComponent v -empty = SlurpComponent {types=mempty, terms=mempty} +empty = SlurpComponent {types=mempty, terms=mempty, ctors=mempty} difference :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v -difference c1 c2 = SlurpComponent {types=types', terms=terms'} where +difference c1 c2 = SlurpComponent {types=types', terms=terms', ctors=ctors'} where types' = types c1 `Set.difference` types c2 terms' = terms c1 `Set.difference` terms c2 + ctors' = ctors c1 `Set.difference` ctors c2 intersection :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v -intersection c1 c2 = SlurpComponent {types=types', terms=terms'} where +intersection c1 c2 = SlurpComponent {types=types', terms=terms', ctors=ctors'} where types' = types c1 `Set.intersection` types c2 terms' = terms c1 `Set.intersection` terms c2 + ctors' = ctors c1 `Set.intersection` ctors c2 instance Ord v => Semigroup (SlurpComponent v) where (<>) = mappend instance Ord v => Monoid (SlurpComponent v) where - mempty = SlurpComponent {types=mempty, terms=mempty} + mempty = SlurpComponent {types=mempty, terms=mempty, ctors=mempty} c1 `mappend` c2 = SlurpComponent { types = types c1 <> types c2 , terms = terms c1 <> terms c2 + , ctors = ctors c1 <> ctors c2 } @@ -47,10 +50,13 @@ instance Ord v => Monoid (SlurpComponent v) where -- is what you want. closeWithDependencies :: forall v a. Ord v => TypecheckedUnisonFile v a -> SlurpComponent v -> SlurpComponent v -closeWithDependencies uf inputs = seenDefns where - seenDefns = foldl' termDeps (SlurpComponent {terms=mempty, types=seenTypes}) (terms inputs) +closeWithDependencies uf inputs = seenDefns{ctors=constructorDeps} where + seenDefns = foldl' termDeps (SlurpComponent {terms=mempty, types=seenTypes, ctors=mempty}) (terms inputs) seenTypes = foldl' typeDeps mempty (types inputs) + constructorDeps :: Set v + constructorDeps = UF.constructorsForTypeVars seenTypes uf + termDeps :: SlurpComponent v -> v -> SlurpComponent v termDeps seen v | Set.member v (terms seen) = seen termDeps seen v = fromMaybe seen $ do @@ -89,7 +95,7 @@ closeWithDependencies uf inputs = seenDefns where invert m = Map.fromList (swap <$> Map.toList m) fromTypes :: Ord v => Set v -> SlurpComponent v -fromTypes vs = SlurpComponent {terms = mempty, types = vs} +fromTypes vs = SlurpComponent {terms = mempty, types = vs, ctors=mempty} fromTerms :: Ord v => Set v -> SlurpComponent v -fromTerms vs = SlurpComponent {terms = vs, types = mempty} +fromTerms vs = SlurpComponent {terms = vs, types = mempty, ctors=mempty} diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs index 561cfc7453..38102b6ea1 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs @@ -8,7 +8,6 @@ module Unison.Codebase.Editor.SlurpResult where import Unison.Prelude -import Control.Lens ((^.)) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent(..)) import Unison.Name ( Name ) import Unison.Parser.Ann ( Ann ) @@ -16,19 +15,13 @@ import Unison.Var (Var) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Unison.Codebase.Editor.SlurpComponent as SC -import qualified Unison.ConstructorReference as ConstructorReference import qualified Unison.DeclPrinter as DeclPrinter import qualified Unison.HashQualified as HQ -import qualified Unison.Name as Name -import qualified Unison.Names as Names import qualified Unison.PrettyPrintEnv as PPE -import qualified Unison.Referent as Referent import qualified Unison.TypePrinter as TP import qualified Unison.UnisonFile as UF -import qualified Unison.UnisonFile.Names as UF import qualified Unison.Util.Monoid as Monoid import qualified Unison.Util.Pretty as P -import qualified Unison.Util.Relation as R import qualified Unison.Var as Var -- `oldRefNames` are the previously existing names for the old reference @@ -72,16 +65,6 @@ data SlurpResult v = SlurpResult { , defsWithBlockedDependencies :: SlurpComponent v } deriving (Show) --- Returns the set of constructor names for type names in the given `Set`. -constructorsFor :: Var v => Set v -> UF.TypecheckedUnisonFile v a -> Set v -constructorsFor types uf = let - names = UF.typecheckedToNames uf - typesRefs = Set.unions $ Names.typesNamed names . Name.unsafeFromVar <$> toList types - ctorNames = R.filterRan isOkCtor (Names.terms names) - isOkCtor (Referent.Con r _) | Set.member (r ^. ConstructorReference.reference_) typesRefs = True - isOkCtor _ = False - in Set.map Name.toVar $ R.dom ctorNames - isNonempty :: Ord v => SlurpResult v -> Bool isNonempty s = Monoid.nonEmpty (adds s) || Monoid.nonEmpty (updates s) From bf82f3e9579b1cfc090ebe083dc6fb1d2362d7cb Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 15:39:16 -0600 Subject: [PATCH 200/297] Clean up --- unison-cli/package.yaml | 1 - .../src/Unison/Codebase/Editor/Slurp.hs | 202 +++++++++--------- .../Unison/Codebase/Editor/SlurpComponent.hs | 7 +- 3 files changed, 103 insertions(+), 107 deletions(-) diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 953981fbcc..9228fea898 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -10,7 +10,6 @@ flags: ghc-options: -Wall dependencies: - - semigroupoids - pretty-simple - ListLike - async diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index c61f74dd08..b90843c2ac 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -5,9 +5,7 @@ module Unison.Codebase.Editor.Slurp where import Control.Lens -import qualified Data.List.NonEmpty as NEList import qualified Data.Map as Map -import qualified Data.Semigroup.Foldable as Semigroup import qualified Data.Set as Set import Data.Set.NonEmpty (NESet) import qualified Data.Set.NonEmpty as NESet @@ -40,87 +38,96 @@ import Unison.WatchKind (pattern TestWatch) data SlurpOp = AddOp | UpdateOp deriving (Eq, Show) --- | Tag a variable as either a term/constructor or type -data TermOrTypeVar v = TermVar v | TypeVar v | ConstructorVar v +-- | Tag a variable as representing a term, type, or constructor +data TaggedVar v = TermVar v | TypeVar v | ConstructorVar v deriving (Eq, Ord, Show) --- | Extract the var from a TermOrTypeVar -unlabeled :: TermOrTypeVar v -> v -unlabeled (TermVar v) = v -unlabeled (TypeVar v) = v -unlabeled (ConstructorVar v) = v +-- | Extract the var from a TaggedVar +untagged :: TaggedVar v -> v +untagged (TermVar v) = v +untagged (TypeVar v) = v +untagged (ConstructorVar v) = v -- | A definition's status with relation to the codebase. -data SlurpStatus +data SlurpOk = New | Updated | Duplicated deriving (Eq, Ord, Show) -- | A definition's final status, incorporating the statuses of all of its dependencies. -data DefStatus v - = Ok SlurpStatus - | NeedsUpdate (TermOrTypeVar v) - | ErrFrom (TermOrTypeVar v) SlurpErr +data SummarizedStatus v + = Ok SlurpOk + | NeedsUpdate (TaggedVar v) + | ErrFrom (TaggedVar v) SlurpErr | SelfErr SlurpErr deriving (Eq, Ord, Show) --- | This semigroup is how a definition's status is determined. -instance Semigroup (DefStatus v) where - -- If the definition has its own error, that takes highest priority. - SelfErr err <> _ = SelfErr err - _ <> SelfErr err = SelfErr err - -- Next we care if a dependency has an error - ErrFrom v err <> _ = ErrFrom v err - _ <> ErrFrom v err = ErrFrom v err - -- If our definition needs its own update then we don't care if dependencies need updates. - Ok Updated <> _ = Ok Updated - _ <> Ok Updated = Ok Updated - NeedsUpdate v <> _ = NeedsUpdate v - _ <> NeedsUpdate v = NeedsUpdate v - -- 'New' definitions take precedence over duplicated dependencies - Ok New <> _ = Ok New - _ <> Ok New = Ok New - Ok Duplicated <> _ = Ok Duplicated +-- | Ideally we would display all available information about each var to the end-user, +-- but for now we simply pick the "most important" issue to maintain backwards compatibility +-- with current behaviour. +pickPriorityStatus :: SummarizedStatus v -> SummarizedStatus v -> SummarizedStatus v +pickPriorityStatus a b = + case (a, b) of + -- If the definition has its own error, that takes highest priority. + (SelfErr err, _) -> SelfErr err + (_, SelfErr err) -> SelfErr err + -- Next we care if a dependency has an error + (ErrFrom v err, _) -> ErrFrom v err + (_, ErrFrom v err) -> ErrFrom v err + -- If our definition needs its own update then we don't care if dependencies need updates. + (Ok Updated, _) -> Ok Updated + (_, Ok Updated) -> Ok Updated + (NeedsUpdate v, _) -> NeedsUpdate v + (_, NeedsUpdate v) -> NeedsUpdate v + -- 'New' definitions take precedence over duplicated dependencies + (Ok New, _) -> Ok New + (_, Ok New) -> Ok New + (Ok Duplicated, _) -> Ok Duplicated -- | Possible error conditions for a definition. data SlurpErr - = TermCtorCollision - | CtorTermCollision - | Conflict + = -- | A term in the scratch file conflicts with a Ctor in the codebase + TermCtorCollision + | -- | A constructor in the scratch file conflicts with a term in the codebase + CtorTermCollision + | -- | The name of this term is conflicted in the codebase. + Conflict deriving (Eq, Ord, Show) --- | Possible error conditions for a definition. -data DefinitionNotes - = DefOk SlurpStatus +-- | Possible statuses for a given definition +data DefnStatus + = DefOk SlurpOk | DefErr SlurpErr deriving (Show) -- | A map of variables to their status, and all of their dependencies' statuses. -type SlurpAnalysis v = Map (TermOrTypeVar v) (DefinitionNotes, Map (TermOrTypeVar v) DefinitionNotes) +type SlurpAnalysis v = Map (TaggedVar v) (DefnStatus, Map (TaggedVar v) DefnStatus) -- | A mapping of each status to the vars which have that status. -type VarsByStatus v = Map (DefStatus v) (NESet (TermOrTypeVar v)) +type VarsByStatus v = Map (SummarizedStatus v) (NESet (TaggedVar v)) --- Compute all definitions which can be added, or the reasons why a def can't be added. +-- | Compute all definitions which can be added, or the reasons why a def can't be added. groupByStatus :: forall v. (Ord v, Show v) => SlurpAnalysis v -> VarsByStatus v groupByStatus sr = pTraceShowId $ analyzed where - analyzed :: Map (DefStatus v) (NESet (TermOrTypeVar v)) + analyzed :: Map (SummarizedStatus v) (NESet (TaggedVar v)) analyzed = sr & Map.toList & fmap ( \(tv, (defNotes, deps)) -> - ( Semigroup.fold1 (computeDefStatus False defNotes tv NEList.:| (Map.toList deps <&> \(depTV, depDefNotes) -> computeDefStatus True depDefNotes depTV)), + ( let selfStatus = toSummary False defNotes tv + summaryOfDeps = (Map.toList deps <&> \(depTV, depDefNotes) -> toSummary True depDefNotes depTV) + in foldl' pickPriorityStatus selfStatus summaryOfDeps, NESet.singleton tv ) ) & Map.fromListWith (<>) - computeDefStatus :: (Ord v, Show v) => Bool -> DefinitionNotes -> TermOrTypeVar v -> DefStatus v - computeDefStatus isDep defNotes tv = + toSummary :: (Ord v, Show v) => Bool -> DefnStatus -> TaggedVar v -> SummarizedStatus v + toSummary isDep defNotes tv = case defNotes of DefOk Updated -> if isDep then NeedsUpdate tv else Ok Updated DefErr err -> @@ -141,13 +148,13 @@ slurpFile :: Names -> SR.SlurpResult v slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames = - let varRelation :: Map (TermOrTypeVar v) LD.LabeledDependency + let varRelation :: Map (TaggedVar v) LD.LabeledDependency varRelation = fileDefinitions uf - involvedVars :: Set (TermOrTypeVar v) + involvedVars :: Set (TaggedVar v) involvedVars = computeInvolvedVars uf defsToConsider varRelation codebaseNames :: Names codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars slurpOp - varDeps :: Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) + varDeps :: Map (TaggedVar v) (Set (TaggedVar v)) varDeps = computeVarDeps uf involvedVars analysis :: SlurpAnalysis v analysis = computeSlurpAnalysis varDeps varRelation fileNames codebaseNames @@ -170,7 +177,7 @@ computeNamesWithDeprecations :: Var v => UF.TypecheckedUnisonFile v Ann -> Names -> - Set (TermOrTypeVar v) -> + Set (TaggedVar v) -> SlurpOp -> Names computeNamesWithDeprecations _uf unalteredCodebaseNames _involvedVars AddOp = @@ -215,8 +222,8 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars UpdateOp = computeSlurpAnalysis :: forall v. (Ord v, Var v) => - Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) -> - Map (TermOrTypeVar v) LD.LabeledDependency -> + Map (TaggedVar v) (Set (TaggedVar v)) -> + Map (TaggedVar v) LD.LabeledDependency -> Names -> Names -> SlurpAnalysis v @@ -224,7 +231,7 @@ computeSlurpAnalysis depMap varRelation _fileNames codebaseNames = pTraceShow ("Statuses", statuses) $ statuses where - statuses :: Map (TermOrTypeVar v) (DefinitionNotes, Map (TermOrTypeVar v) (DefinitionNotes)) + statuses :: Map (TaggedVar v) (DefnStatus, Map (TaggedVar v) (DefnStatus)) statuses = let withNotes = depMap @@ -234,9 +241,9 @@ computeSlurpAnalysis depMap varRelation _fileNames codebaseNames = ) withTransitiveNotes :: ( Map - (TermOrTypeVar v) - ( DefinitionNotes, - (Map (TermOrTypeVar v) DefinitionNotes) + (TaggedVar v) + ( DefnStatus, + (Map (TaggedVar v) DefnStatus) ) ) withTransitiveNotes = @@ -248,33 +255,26 @@ computeSlurpAnalysis depMap varRelation _fileNames codebaseNames = pure (tv, notes) ) in withTransitiveNotes - definitionStatus :: TermOrTypeVar v -> DefinitionNotes + definitionStatus :: TaggedVar v -> DefnStatus definitionStatus tv = let ld = case Map.lookup tv varRelation of Just r -> r Nothing -> error $ "Expected LabeledDependency in map for var: " <> show tv - v = unlabeled tv + v = untagged tv existingTypesAtName = Names.typesNamed codebaseNames (Name.unsafeFromVar v) - existingTermsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) + existingTermsOrCtorsAtName = Names.termsNamed codebaseNames (Name.unsafeFromVar v) in case ld of LD.TypeReference _typeRef -> - let typeStatus = case Set.toList existingTypesAtName of - [] -> DefOk New - [r] - | LD.typeRef r == ld -> DefOk Duplicated - | otherwise -> DefOk Updated - -- If there are many existing types, they must be in conflict. - -- Currently we treat conflicts as errors rather than resolving them. - _ -> DefErr Conflict - in -- ctorConflicts = do - -- (ctorName, ctorRef) <- Names.constructorsForType typeRef fileNames - -- existing <- Set.toList $ Names.termsNamed codebaseNames ctorName - -- case existing of - -- Referent.Ref _ -> pure _ - -- Referent.Con {} -> empty - typeStatus + case Set.toList existingTypesAtName of + [] -> DefOk New + [r] + | LD.typeRef r == ld -> DefOk Duplicated + | otherwise -> DefOk Updated + -- If there are many existing types, they must be in conflict. + -- Currently we treat conflicts as errors rather than resolving them. + _ -> DefErr Conflict LD.TermReference {} -> - case Set.toList existingTermsAtName of + case Set.toList existingTermsOrCtorsAtName of [] -> DefOk New rs | any Referent.isConstructor rs -> DefErr TermCtorCollision [r] @@ -284,7 +284,7 @@ computeSlurpAnalysis depMap varRelation _fileNames codebaseNames = -- Currently we treat conflicts as errors rather than resolving them. _ -> DefErr Conflict LD.ConReference {} -> - case Set.toList existingTermsAtName of + case Set.toList existingTermsOrCtorsAtName of [] -> DefOk New rs | any (not . Referent.isConstructor) rs -> DefErr CtorTermCollision [r] @@ -302,15 +302,15 @@ computeInvolvedVars :: Var v => UF.TypecheckedUnisonFile v Ann -> Set v -> - Map (TermOrTypeVar v) LD.LabeledDependency -> - Set (TermOrTypeVar v) + Map (TaggedVar v) LD.LabeledDependency -> + Set (TaggedVar v) computeInvolvedVars uf defsToConsider varRelation | Set.null defsToConsider = Set.fromList $ Map.keys varRelation | otherwise = allInvolvedVars where - allInvolvedVars :: Set (TermOrTypeVar v) + allInvolvedVars :: Set (TaggedVar v) allInvolvedVars = - let requestedVarsWhichActuallyExist :: Set (TermOrTypeVar v) + let requestedVarsWhichActuallyExist :: Set (TaggedVar v) requestedVarsWhichActuallyExist = Set.fromList $ do v <- Set.toList defsToConsider -- We don't know whether each var is a type or term, so we try both. @@ -324,10 +324,10 @@ computeVarDeps :: forall v. Var v => UF.TypecheckedUnisonFile v Ann -> - Set (TermOrTypeVar v) -> - Map (TermOrTypeVar v) (Set (TermOrTypeVar v)) + Set (TaggedVar v) -> + Map (TaggedVar v) (Set (TaggedVar v)) computeVarDeps uf allInvolvedVars = - let depMap :: (Map (TermOrTypeVar v) (Set (TermOrTypeVar v))) + let depMap :: (Map (TaggedVar v) (Set (TaggedVar v))) depMap = allInvolvedVars & Set.toList @@ -341,19 +341,18 @@ computeVarDeps uf allInvolvedVars = -- | Compute the closure of all vars which the provided vars depend on. -- A type depends on its constructors. -varClosure :: (Var v) => UF.TypecheckedUnisonFile v a -> Set (TermOrTypeVar v) -> Set (TermOrTypeVar v) -varClosure uf (partitionVars OmitConstructors -> sc) = +varClosure :: (Var v) => UF.TypecheckedUnisonFile v a -> Set (TaggedVar v) -> Set (TaggedVar v) +varClosure uf (partitionVars -> sc) = let deps = SC.closeWithDependencies uf sc in mingleVars deps -- | Collect a relation of term or type var to labelled dependency for all definitions mentioned in a file. --- Contains types but not their constructors. -fileDefinitions :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Map (TermOrTypeVar v) LD.LabeledDependency +fileDefinitions :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Map (TaggedVar v) LD.LabeledDependency fileDefinitions uf = let result = decls <> effects <> terms <> constructors in pTraceShow ("varRelation", result) $ result where - terms :: Map (TermOrTypeVar v) LD.LabeledDependency + terms :: Map (TaggedVar v) LD.LabeledDependency terms = UF.hashTermsId uf & Map.toList @@ -367,30 +366,30 @@ fileDefinitions uf = _ -> Nothing ) & Map.fromList - decls :: Map (TermOrTypeVar v) LD.LabeledDependency + decls :: Map (TaggedVar v) LD.LabeledDependency decls = UF.dataDeclarationsId' uf & Map.toList & fmap (\(v, (refId, _)) -> (TypeVar v, LD.derivedType refId)) & Map.fromList - effects :: Map (TermOrTypeVar v) LD.LabeledDependency + effects :: Map (TaggedVar v) LD.LabeledDependency effects = UF.effectDeclarationsId' uf & Map.toList & fmap (\(v, (refId, _)) -> (TypeVar v, (LD.derivedType refId))) & Map.fromList - constructors :: Map (TermOrTypeVar v) LD.LabeledDependency + constructors :: Map (TaggedVar v) LD.LabeledDependency constructors = - let effectConstructors :: Map (TermOrTypeVar v) LD.LabeledDependency + let effectConstructors :: Map (TaggedVar v) LD.LabeledDependency effectConstructors = Map.fromList $ do (_, (typeRefId, effect)) <- Map.toList (UF.effectDeclarationsId' uf) let decl = DD.toDataDecl effect (conId, constructorV) <- zip (DD.constructorIds decl) (DD.constructorVars decl) pure $ (ConstructorVar constructorV, LD.effectConstructor (CR.ConstructorReference (Ref.fromId typeRefId) conId)) - dataConstructors :: Map (TermOrTypeVar v) LD.LabeledDependency + dataConstructors :: Map (TaggedVar v) LD.LabeledDependency dataConstructors = Map.fromList $ do (_, (typeRefId, decl)) <- Map.toList (UF.dataDeclarationsId' uf) (conId, constructorV) <- zip (DD.constructorIds decl) (DD.constructorVars decl) @@ -430,7 +429,7 @@ toSlurpResult :: UF.TypecheckedUnisonFile v Ann -> SlurpOp -> Set v -> - Set (TermOrTypeVar v) -> + Set (TaggedVar v) -> Names -> Names -> VarsByStatus v -> @@ -446,7 +445,7 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsBySta let desired = requestedVars & Set.flatMap (\v -> Set.fromList [TypeVar v, TermVar v]) - in partitionVars OmitConstructors $ Set.difference involvedVars desired, + in partitionVars $ Set.difference involvedVars desired, SR.adds = adds, SR.duplicates = duplicates, SR.collisions = if op == AddOp then updates else mempty, @@ -468,8 +467,8 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsBySta varsByStatus & ifoldMap ( \k tvs -> - let scWithConstructors = partitionVars IncludedConstructors $ tvs - scWithoutConstructors = partitionVars OmitConstructors $ tvs + let scWithConstructors = partitionVars tvs + scWithoutConstructors = partitionVars tvs in case k of Ok New -> mempty {adds = scWithoutConstructors} Ok Duplicated -> mempty {duplicates = scWithoutConstructors} @@ -530,23 +529,18 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsBySta varFromName :: Var v => Name -> v varFromName name = Var.named (Name.toText name) -data HandleConstructors = OmitConstructors | IncludedConstructors - -- | Sort out a set of variables by whether it is a term or type. -partitionVars :: (Foldable f, Ord v) => HandleConstructors -> f (TermOrTypeVar v) -> SlurpComponent v -partitionVars ctorHandling = +partitionVars :: (Foldable f, Ord v) => f (TaggedVar v) -> SlurpComponent v +partitionVars = foldMap ( \case TypeVar v -> SC.fromTypes (Set.singleton v) TermVar v -> SC.fromTerms (Set.singleton v) - ConstructorVar v -> - case ctorHandling of - OmitConstructors -> mempty - IncludedConstructors -> SC.fromTerms (Set.singleton v) + ConstructorVar v -> SC.fromCtors (Set.singleton v) ) -- | Collapse a SlurpComponent into a tagged set. -mingleVars :: Ord v => SlurpComponent v -> Set (TermOrTypeVar v) +mingleVars :: Ord v => SlurpComponent v -> Set (TaggedVar v) mingleVars SlurpComponent {terms, types, ctors} = Set.map TypeVar types <> Set.map TermVar terms diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs index 2b8af63c6c..ea9acbcb82 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs @@ -95,7 +95,10 @@ closeWithDependencies uf inputs = seenDefns{ctors=constructorDeps} where invert m = Map.fromList (swap <$> Map.toList m) fromTypes :: Ord v => Set v -> SlurpComponent v -fromTypes vs = SlurpComponent {terms = mempty, types = vs, ctors=mempty} +fromTypes vs = mempty{types=vs} fromTerms :: Ord v => Set v -> SlurpComponent v -fromTerms vs = SlurpComponent {terms = vs, types = mempty, ctors=mempty} +fromTerms vs = mempty {terms = vs} + +fromCtors :: Ord v => Set v -> SlurpComponent v +fromCtors vs = mempty {ctors=vs} From ce4f6edb6516df6f223e7e741918faa241a11fb9 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 15:39:22 -0600 Subject: [PATCH 201/297] Update ability-term transcript --- .../ability-term-conflicts-on-update.md | 2 +- ...ability-term-conflicts-on-update.output.md | 61 +++++++++++++++++++ 2 files changed, 62 insertions(+), 1 deletion(-) diff --git a/unison-src/transcripts/ability-term-conflicts-on-update.md b/unison-src/transcripts/ability-term-conflicts-on-update.md index 72c319f3ac..f0f5b68bb4 100644 --- a/unison-src/transcripts/ability-term-conflicts-on-update.md +++ b/unison-src/transcripts/ability-term-conflicts-on-update.md @@ -76,7 +76,7 @@ X.x = 1 .ns2> add ``` -```unison:error +```unison structural ability X where x : () ``` diff --git a/unison-src/transcripts/ability-term-conflicts-on-update.output.md b/unison-src/transcripts/ability-term-conflicts-on-update.output.md index 65e8d08abe..7a1fb4134e 100644 --- a/unison-src/transcripts/ability-term-conflicts-on-update.output.md +++ b/unison-src/transcripts/ability-term-conflicts-on-update.output.md @@ -166,3 +166,64 @@ We should also be able to successfully update the whole thing. unique ability Channels ``` +# Constructor-term conflict + +```unison +X.x = 1 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + X.x : Nat + +``` +```ucm + ☝️ The namespace .ns2 is empty. + +.ns2> add + + ⍟ I've added these definitions: + + X.x : Nat + +``` +```unison +structural ability X where + x : () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + x These definitions would fail on `add` or `update`: + + Reason + blocked structural ability X + ctor/term collision X.x + + Tip: Use `help filestatus` to learn more. + +``` +This should fail with a ctor/term conflict. + +```ucm +.ns2> add + + x These definitions failed: + + Reason + blocked structural ability X + ctor/term collision X.x + + Tip: Use `help filestatus` to learn more. + +``` From 7146712c80b085f2ce830f33dfb93711cbef326a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 16:53:19 -0600 Subject: [PATCH 202/297] Properly pipe constructors through everything --- unison-cli/package.yaml | 2 + .../src/Unison/Codebase/Editor/Slurp.hs | 227 +++++++++--------- unison-cli/unison-cli.cabal | 15 +- 3 files changed, 122 insertions(+), 122 deletions(-) diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 9228fea898..3e2ec98834 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -11,6 +11,8 @@ ghc-options: -Wall dependencies: - pretty-simple + - semialign + - these - ListLike - async - base diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index b90843c2ac..571c27e406 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -6,9 +6,9 @@ where import Control.Lens import qualified Data.Map as Map +import qualified Data.Semialign as Align import qualified Data.Set as Set -import Data.Set.NonEmpty (NESet) -import qualified Data.Set.NonEmpty as NESet +import Data.These import Debug.Pretty.Simple (pTraceShow, pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC @@ -61,6 +61,7 @@ data SummarizedStatus v | NeedsUpdate (TaggedVar v) | ErrFrom (TaggedVar v) SlurpErr | SelfErr SlurpErr + | InertConstructorSummary deriving (Eq, Ord, Show) -- | Ideally we would display all available information about each var to the end-user, @@ -84,6 +85,8 @@ pickPriorityStatus a b = (Ok New, _) -> Ok New (_, Ok New) -> Ok New (Ok Duplicated, _) -> Ok Duplicated + (_, Ok Duplicated) -> Ok Duplicated + (InertConstructorSummary, InertConstructorSummary) -> InertConstructorSummary -- | Possible error conditions for a definition. data SlurpErr @@ -99,44 +102,9 @@ data SlurpErr data DefnStatus = DefOk SlurpOk | DefErr SlurpErr + | InertConstructor deriving (Show) --- | A map of variables to their status, and all of their dependencies' statuses. -type SlurpAnalysis v = Map (TaggedVar v) (DefnStatus, Map (TaggedVar v) DefnStatus) - --- | A mapping of each status to the vars which have that status. -type VarsByStatus v = Map (SummarizedStatus v) (NESet (TaggedVar v)) - --- | Compute all definitions which can be added, or the reasons why a def can't be added. -groupByStatus :: forall v. (Ord v, Show v) => SlurpAnalysis v -> VarsByStatus v -groupByStatus sr = - pTraceShowId $ analyzed - where - analyzed :: Map (SummarizedStatus v) (NESet (TaggedVar v)) - analyzed = - sr - & Map.toList - & fmap - ( \(tv, (defNotes, deps)) -> - ( let selfStatus = toSummary False defNotes tv - summaryOfDeps = (Map.toList deps <&> \(depTV, depDefNotes) -> toSummary True depDefNotes depTV) - in foldl' pickPriorityStatus selfStatus summaryOfDeps, - NESet.singleton tv - ) - ) - & Map.fromListWith (<>) - - toSummary :: (Ord v, Show v) => Bool -> DefnStatus -> TaggedVar v -> SummarizedStatus v - toSummary isDep defNotes tv = - case defNotes of - DefOk Updated -> if isDep then NeedsUpdate tv else Ok Updated - DefErr err -> - if isDep - then ErrFrom tv err - else SelfErr err - DefOk New -> Ok New - DefOk Duplicated -> Ok Duplicated - -- | Analyze a file and determine the status of all of its definitions with respect to a set -- of vars to analyze and an operation you wish to perform. slurpFile :: @@ -148,22 +116,35 @@ slurpFile :: Names -> SR.SlurpResult v slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames = - let varRelation :: Map (TaggedVar v) LD.LabeledDependency - varRelation = fileDefinitions uf + let -- A mapping of all vars in the file to their references. + varReferences :: Map (TaggedVar v) LD.LabeledDependency + varReferences = buildVarReferences uf + -- All variables which were either: + -- 1. specified explicitly by the end-user + -- 2. An in-file transitive dependency (within the file) of a var specified by the end-user. involvedVars :: Set (TaggedVar v) - involvedVars = computeInvolvedVars uf defsToConsider varRelation + involvedVars = computeInvolvedVars uf defsToConsider varReferences + -- The set of names after removing any constructors which would removed by the requested + -- operation. codebaseNames :: Names codebaseNames = computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars slurpOp + -- A mapping of every involved variable to its transitive dependencies. + -- Dependency here is any type or term referenced within the definition (transitively). + -- This also includes all Constructors of any type used by a term. varDeps :: Map (TaggedVar v) (Set (TaggedVar v)) varDeps = computeVarDeps uf involvedVars - analysis :: SlurpAnalysis v - analysis = computeSlurpAnalysis varDeps varRelation fileNames codebaseNames - varsByStatus :: VarsByStatus v - varsByStatus = groupByStatus analysis + -- Compute the status of each definition on its own. + -- This doesn't consider the vars dependencies. + selfStatuses :: Map (TaggedVar v) DefnStatus + selfStatuses = computeVarStatuses varDeps varReferences codebaseNames + -- Determine the _actual_ status of each var by summarizing all of the statuses of its + -- dependencies. + summaries :: Map (TaggedVar v) (SummarizedStatus v) + summaries = summarizeTransitiveStatus selfStatuses varDeps slurpResult :: SR.SlurpResult v slurpResult = - toSlurpResult uf slurpOp defsToConsider involvedVars fileNames codebaseNames varsByStatus - in pTraceShowId slurpResult + toSlurpResult uf slurpOp defsToConsider involvedVars fileNames codebaseNames summaries + in pTraceShow ("selfStatuses", selfStatuses) $ pTraceShowId slurpResult where slurpOp :: SlurpOp slurpOp = fromMaybe UpdateOp maybeSlurpOp @@ -219,45 +200,22 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars UpdateOp = existingConstructorsFromEditedTypes `Set.difference` constructorsUnderConsideration -- | Compute a mapping of each definition to its status, and its dependencies' statuses. -computeSlurpAnalysis :: +computeVarStatuses :: forall v. (Ord v, Var v) => Map (TaggedVar v) (Set (TaggedVar v)) -> Map (TaggedVar v) LD.LabeledDependency -> Names -> - Names -> - SlurpAnalysis v -computeSlurpAnalysis depMap varRelation _fileNames codebaseNames = - pTraceShow ("Statuses", statuses) $ - statuses + Map (TaggedVar v) DefnStatus +computeVarStatuses depMap varReferences codebaseNames = + depMap + & Map.mapWithKey + ( \tv _ -> definitionStatus tv + ) where - statuses :: Map (TaggedVar v) (DefnStatus, Map (TaggedVar v) (DefnStatus)) - statuses = - let withNotes = - depMap - & Map.mapWithKey - ( \tv deps -> - (definitionStatus tv, deps) - ) - withTransitiveNotes :: - ( Map - (TaggedVar v) - ( DefnStatus, - (Map (TaggedVar v) DefnStatus) - ) - ) - withTransitiveNotes = - withNotes - & (fmap . fmap) - ( \deps -> Map.fromList $ do - tv <- Set.toList deps - (notes, _) <- maybeToList (Map.lookup tv withNotes) - pure (tv, notes) - ) - in withTransitiveNotes definitionStatus :: TaggedVar v -> DefnStatus definitionStatus tv = - let ld = case Map.lookup tv varRelation of + let ld = case Map.lookup tv varReferences of Just r -> r Nothing -> error $ "Expected LabeledDependency in map for var: " <> show tv v = untagged tv @@ -285,15 +243,45 @@ computeSlurpAnalysis depMap varRelation _fileNames codebaseNames = _ -> DefErr Conflict LD.ConReference {} -> case Set.toList existingTermsOrCtorsAtName of - [] -> DefOk New rs | any (not . Referent.isConstructor) rs -> DefErr CtorTermCollision - [r] - | LD.referent r == ld -> DefOk Duplicated - | otherwise -> DefOk Updated + [] -> InertConstructor + [_r] -> InertConstructor -- If there are many existing terms, they must be in conflict. -- Currently we treat conflicts as errors rather than resolving them. _ -> DefErr Conflict +-- | Compute all definitions which can be added, or the reasons why a def can't be added. +summarizeTransitiveStatus :: + forall v. + (Ord v, Show v) => + Map (TaggedVar v) DefnStatus -> + Map (TaggedVar v) (Set (TaggedVar v)) -> + Map (TaggedVar v) (SummarizedStatus v) +summarizeTransitiveStatus statuses deps = + flip imap (Align.align statuses deps) $ \tv -> \case + -- No dependencies + This selfStatus -> toSummary False selfStatus tv + That _set -> error $ "Encountered var without a status during slurping: " <> show tv + These selfStatus deps -> + let selfSummary = toSummary False selfStatus tv + summaryOfDeps = do + v <- Set.toList deps + depStatus <- maybeToList $ Map.lookup v statuses + pure $ toSummary True depStatus v + in foldl' pickPriorityStatus selfSummary summaryOfDeps + where + toSummary :: (Ord v, Show v) => Bool -> DefnStatus -> TaggedVar v -> SummarizedStatus v + toSummary isDep defNotes tv = + case defNotes of + DefOk Updated -> if isDep then NeedsUpdate tv else Ok Updated + DefErr err -> + if isDep + then ErrFrom tv err + else SelfErr err + DefOk New -> Ok New + DefOk Duplicated -> Ok Duplicated + InertConstructor -> InertConstructorSummary + -- | Determine all variables which should be considered in analysis. -- I.e. any variable requested by the user and all of their dependencies, -- component peers, and component peers of dependencies. @@ -304,20 +292,20 @@ computeInvolvedVars :: Set v -> Map (TaggedVar v) LD.LabeledDependency -> Set (TaggedVar v) -computeInvolvedVars uf defsToConsider varRelation - | Set.null defsToConsider = Set.fromList $ Map.keys varRelation - | otherwise = allInvolvedVars +computeInvolvedVars uf defsToConsider varReferences + -- If nothing was specified, consider every var in the file. + | Set.null defsToConsider = Set.fromList $ Map.keys varReferences + | otherwise = varClosure uf requestedVarsWhichActuallyExist where - allInvolvedVars :: Set (TaggedVar v) - allInvolvedVars = - let requestedVarsWhichActuallyExist :: Set (TaggedVar v) - requestedVarsWhichActuallyExist = Set.fromList $ do - v <- Set.toList defsToConsider - -- We don't know whether each var is a type or term, so we try both. - tv <- [TypeVar v, TermVar v] - guard (Map.member tv varRelation) - pure tv - in varClosure uf requestedVarsWhichActuallyExist + -- The user specifies _untyped_ names, which may not even exist in the file. + -- We need to figure out which vars exist, and what type they are if they do. + requestedVarsWhichActuallyExist :: Set (TaggedVar v) + requestedVarsWhichActuallyExist = Set.fromList $ do + v <- Set.toList defsToConsider + -- We don't know whether each var is a type or term, so we try both. + tv <- [TypeVar v, TermVar v] + guard (Map.member tv varReferences) + pure tv -- | Compute transitive dependencies for all relevant variables. computeVarDeps :: @@ -347,10 +335,10 @@ varClosure uf (partitionVars -> sc) = in mingleVars deps -- | Collect a relation of term or type var to labelled dependency for all definitions mentioned in a file. -fileDefinitions :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Map (TaggedVar v) LD.LabeledDependency -fileDefinitions uf = +buildVarReferences :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Map (TaggedVar v) LD.LabeledDependency +buildVarReferences uf = let result = decls <> effects <> terms <> constructors - in pTraceShow ("varRelation", result) $ result + in pTraceShow ("varReferences", result) $ result where terms :: Map (TaggedVar v) LD.LabeledDependency terms = @@ -432,9 +420,9 @@ toSlurpResult :: Set (TaggedVar v) -> Names -> Names -> - VarsByStatus v -> + Map (TaggedVar v) (SummarizedStatus v) -> SR.SlurpResult v -toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsByStatus = +toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames summarizedStatuses = pTraceShowId $ SR.SlurpResult { SR.originalFile = uf, @@ -462,31 +450,36 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames varsBySta SR.defsWithBlockedDependencies = blocked } where - -- Monoid instances only go up to 5-tuples :/ SlurpingSummary {adds, duplicates, updates, termCtorColl, ctorTermColl, blocked, conflicts} = - varsByStatus + summarizedStatuses & ifoldMap - ( \k tvs -> - let scWithConstructors = partitionVars tvs - scWithoutConstructors = partitionVars tvs - in case k of - Ok New -> mempty {adds = scWithoutConstructors} - Ok Duplicated -> mempty {duplicates = scWithoutConstructors} - Ok Updated -> mempty {updates = scWithoutConstructors} + ( \tv status -> + let sc = scFromTaggedVar tv + in case status of + Ok New -> mempty {adds = sc} + Ok Duplicated -> mempty {duplicates = sc} + Ok Updated -> mempty {updates = sc} NeedsUpdate _ -> case op of AddOp -> - mempty {blocked = scWithoutConstructors} + mempty {blocked = sc} UpdateOp -> - mempty {adds = scWithoutConstructors} - ErrFrom _ TermCtorCollision -> mempty {blocked = scWithoutConstructors} - ErrFrom _ CtorTermCollision -> mempty {blocked = scWithoutConstructors} - ErrFrom _ Conflict -> mempty {blocked = scWithoutConstructors} - SelfErr TermCtorCollision -> mempty {termCtorColl = scWithConstructors} - SelfErr CtorTermCollision -> mempty {ctorTermColl = scWithConstructors} - SelfErr Conflict -> mempty {conflicts = scWithConstructors} + mempty {adds = sc} + ErrFrom _ TermCtorCollision -> mempty {blocked = sc} + ErrFrom _ CtorTermCollision -> mempty {blocked = sc} + ErrFrom _ Conflict -> mempty {blocked = sc} + SelfErr TermCtorCollision -> mempty {termCtorColl = sc} + SelfErr CtorTermCollision -> mempty {ctorTermColl = sc} + SelfErr Conflict -> mempty {conflicts = sc} + InertConstructorSummary -> mempty ) + scFromTaggedVar :: TaggedVar v -> SlurpComponent v + scFromTaggedVar = \case + TermVar v -> SC.fromTerms (Set.singleton v) + TypeVar v -> SC.fromTypes (Set.singleton v) + ConstructorVar v -> SC.fromCtors (Set.singleton v) + buildAliases :: Rel.Relation Name Referent -> Rel.Relation Name Referent -> diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index 01738d7f77..caa1065b4a 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -100,9 +100,10 @@ library , pretty-simple , random >=1.2.0 , regex-tdfa - , semigroupoids + , semialign , stm , text + , these , transformers , unison-codebase-sqlite , unison-core1 @@ -172,10 +173,11 @@ executable integration-tests , process , random >=1.2.0 , regex-tdfa - , semigroupoids + , semialign , shellmet , stm , text + , these , time , transformers , unison-codebase-sqlite @@ -244,10 +246,11 @@ executable transcripts , process , random >=1.2.0 , regex-tdfa - , semigroupoids + , semialign , shellmet , stm , text + , these , transformers , unison-codebase-sqlite , unison-core1 @@ -315,12 +318,13 @@ executable unison , pretty-simple , random >=1.2.0 , regex-tdfa - , semigroupoids + , semialign , shellmet , stm , template-haskell , temporary , text + , these , transformers , unison-cli , unison-codebase-sqlite @@ -394,11 +398,12 @@ test-suite tests , pretty-simple , random >=1.2.0 , regex-tdfa - , semigroupoids + , semialign , shellmet , stm , temporary , text + , these , transformers , unison-cli , unison-codebase-sqlite From 952b49ec4263470f0dc93ed5376f755ddd58b0fd Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 17:06:39 -0600 Subject: [PATCH 203/297] Ensure constructors are ignored when necessary. --- .../src/Unison/Codebase/Editor/HandleInput.hs | 2 +- .../src/Unison/Codebase/Editor/Slurp.hs | 14 +++++------- .../src/Unison/Codebase/Editor/SlurpResult.hs | 22 +++++++++++++------ 3 files changed, 21 insertions(+), 17 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index f3c9210b53..cf98c138fb 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1914,7 +1914,7 @@ handleUpdate input maybePatchPath names = do updatePatches :: Branch0 m -> m (Branch0 m) updatePatches = Branch.modifyPatches seg updatePatch - when (Slurp.isNonempty sr) $ do + when (Slurp.hasAddsOrUpdates sr) $ do -- take a look at the `updates` from the SlurpResult -- and make a patch diff to record a replacement from the old to new references stepManyAtMNoSync Branch.CompressHistory diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 571c27e406..2b82bcd92a 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -61,7 +61,6 @@ data SummarizedStatus v | NeedsUpdate (TaggedVar v) | ErrFrom (TaggedVar v) SlurpErr | SelfErr SlurpErr - | InertConstructorSummary deriving (Eq, Ord, Show) -- | Ideally we would display all available information about each var to the end-user, @@ -85,8 +84,6 @@ pickPriorityStatus a b = (Ok New, _) -> Ok New (_, Ok New) -> Ok New (Ok Duplicated, _) -> Ok Duplicated - (_, Ok Duplicated) -> Ok Duplicated - (InertConstructorSummary, InertConstructorSummary) -> InertConstructorSummary -- | Possible error conditions for a definition. data SlurpErr @@ -102,7 +99,6 @@ data SlurpErr data DefnStatus = DefOk SlurpOk | DefErr SlurpErr - | InertConstructor deriving (Show) -- | Analyze a file and determine the status of all of its definitions with respect to a set @@ -144,7 +140,7 @@ slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames = slurpResult :: SR.SlurpResult v slurpResult = toSlurpResult uf slurpOp defsToConsider involvedVars fileNames codebaseNames summaries - in pTraceShow ("selfStatuses", selfStatuses) $ pTraceShowId slurpResult + in pTraceShowId slurpResult where slurpOp :: SlurpOp slurpOp = fromMaybe UpdateOp maybeSlurpOp @@ -243,9 +239,11 @@ computeVarStatuses depMap varReferences codebaseNames = _ -> DefErr Conflict LD.ConReference {} -> case Set.toList existingTermsOrCtorsAtName of + [] -> DefOk New rs | any (not . Referent.isConstructor) rs -> DefErr CtorTermCollision - [] -> InertConstructor - [_r] -> InertConstructor + [r] + | LD.referent r == ld -> DefOk Duplicated + | otherwise -> DefOk Updated -- If there are many existing terms, they must be in conflict. -- Currently we treat conflicts as errors rather than resolving them. _ -> DefErr Conflict @@ -280,7 +278,6 @@ summarizeTransitiveStatus statuses deps = else SelfErr err DefOk New -> Ok New DefOk Duplicated -> Ok Duplicated - InertConstructor -> InertConstructorSummary -- | Determine all variables which should be considered in analysis. -- I.e. any variable requested by the user and all of their dependencies, @@ -471,7 +468,6 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames summarize SelfErr TermCtorCollision -> mempty {termCtorColl = sc} SelfErr CtorTermCollision -> mempty {ctorTermColl = sc} SelfErr Conflict -> mempty {conflicts = sc} - InertConstructorSummary -> mempty ) scFromTaggedVar :: TaggedVar v -> SlurpComponent v diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs index 38102b6ea1..b9d5bbc0da 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs @@ -20,7 +20,6 @@ import qualified Unison.HashQualified as HQ import qualified Unison.PrettyPrintEnv as PPE import qualified Unison.TypePrinter as TP import qualified Unison.UnisonFile as UF -import qualified Unison.Util.Monoid as Monoid import qualified Unison.Util.Pretty as P import qualified Unison.Var as Var @@ -65,8 +64,13 @@ data SlurpResult v = SlurpResult { , defsWithBlockedDependencies :: SlurpComponent v } deriving (Show) -isNonempty :: Ord v => SlurpResult v -> Bool -isNonempty s = Monoid.nonEmpty (adds s) || Monoid.nonEmpty (updates s) +hasAddsOrUpdates :: Ord v => SlurpResult v -> Bool +hasAddsOrUpdates s = + -- We intentionally ignore constructors here since they are added as part of adding their + -- types. + let SC.SlurpComponent{terms=termAdds, types=typeAdds} = adds s + SC.SlurpComponent{terms=termUpdates, types=typeUpdates} = updates s + in not . null $ termAdds <> typeAdds <> termUpdates <> typeUpdates data Status = Add | Update | Duplicate | Collision | Conflicted | @@ -293,16 +297,20 @@ isOk SlurpResult {..} = isAllDuplicates :: Ord v => SlurpResult v -> Bool isAllDuplicates SlurpResult {..} = - SC.isEmpty adds && - SC.isEmpty updates && - SC.isEmpty extraDefinitions && + emptyIgnoringConstructors adds && + emptyIgnoringConstructors updates && + emptyIgnoringConstructors extraDefinitions && SC.isEmpty collisions && SC.isEmpty conflicts && Map.null typeAlias && Map.null termAlias && Set.null termExistingConstructorCollisions && Set.null constructorExistingTermCollisions && - SC.isEmpty defsWithBlockedDependencies + emptyIgnoringConstructors defsWithBlockedDependencies + where + emptyIgnoringConstructors :: SlurpComponent v -> Bool + emptyIgnoringConstructors SlurpComponent{types, terms} = + null types && null terms -- stack repl -- From 6db90f043fef2bfab1627e1218f1c5da7278d09a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 20 Jan 2022 17:17:04 -0600 Subject: [PATCH 204/297] Check explicitly on types & terms for slurp components --- unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs | 4 ++-- unison-core/src/Unison/ABT.hs | 11 +++++------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs index b9d5bbc0da..e1fd7f63c0 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs @@ -187,7 +187,7 @@ pretty isPast ppe sr = : ((, Nothing) <$> aliases) where aliases = fmap (P.indentN 2) . aliasesMessage . Map.lookup v $ termAlias sr - ok _ _ sc | SC.isEmpty sc = mempty + ok _ _ sc | null (SC.terms sc) && null (SC.types sc) = mempty ok past present sc = let header = goodIcon <> P.indentNAfterNewline 2 @@ -265,7 +265,7 @@ pretty isPast ppe sr = in P.sepNonEmpty "\n\n" - [ if SC.isEmpty (duplicates sr) + [ if null (terms (duplicates sr)) && null (types (duplicates sr)) then mempty else (if isPast diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index d4f12e3231..719b88a943 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -734,9 +734,8 @@ hash = hash' [] where instance (Show1 f, Show v) => Show (Term f v a) where -- annotations not shown - showsPrec p (Term freevars _ out) = case out of - -- showsPrec p (Term _ _ out) = case out of - Var v -> showParen (p>=9) $ \x -> show freevars <> "Var " ++ show v ++ x - Cycle body -> (show freevars <>) . ("Cycle " ++) . showsPrec p body - Abs v body -> showParen True $ (show freevars <>) . (show v ++) . showString ". " . showsPrec p body - Tm f -> (show freevars <>) . showsPrec1 p f + showsPrec p (Term _ _ out) = case out of + Var v -> showParen (p>=9) $ \x -> "Var " ++ show v ++ x + Cycle body -> ("Cycle " ++) . showsPrec p body + Abs v body -> showParen True $ (show v ++) . showString ". " . showsPrec p body + Tm f -> showsPrec1 p f From 443eb67e25234069bd5bd9a30f09b19d9e5b6e8e Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 21 Jan 2022 10:29:23 -0600 Subject: [PATCH 205/297] Update transcripts --- unison-src/transcripts/alias-clobbering.md | 34 ----- .../transcripts/alias-clobbering.output.md | 107 -------------- unison-src/transcripts/duplicate-names.md | 54 +++++++ .../transcripts/duplicate-names.output.md | 133 ++++++++++++++++++ .../ability-term-conflicts-on-update.md | 41 ------ ...ability-term-conflicts-on-update.output.md | 81 ----------- .../transcripts/slurping/alias-clobbering.md | 34 ----- .../slurping/alias-clobbering.output.md | 107 -------------- .../slurping/block-on-required-update.md | 28 ---- .../block-on-required-update.output.md | 64 --------- .../slurping/sum-type-update-conflicts.md | 36 ----- .../slurping/update-on-conflict.md | 30 ---- .../slurping/update-on-conflict.output.md | 96 ------------- ...e-with-conflicting-constructor-and-term.md | 17 --- 14 files changed, 187 insertions(+), 675 deletions(-) delete mode 100644 unison-src/transcripts/alias-clobbering.md delete mode 100644 unison-src/transcripts/alias-clobbering.output.md create mode 100644 unison-src/transcripts/duplicate-names.md create mode 100644 unison-src/transcripts/duplicate-names.output.md delete mode 100644 unison-src/transcripts/slurping/ability-term-conflicts-on-update.md delete mode 100644 unison-src/transcripts/slurping/ability-term-conflicts-on-update.output.md delete mode 100644 unison-src/transcripts/slurping/alias-clobbering.md delete mode 100644 unison-src/transcripts/slurping/alias-clobbering.output.md delete mode 100644 unison-src/transcripts/slurping/block-on-required-update.md delete mode 100644 unison-src/transcripts/slurping/block-on-required-update.output.md delete mode 100644 unison-src/transcripts/slurping/sum-type-update-conflicts.md delete mode 100644 unison-src/transcripts/slurping/update-on-conflict.md delete mode 100644 unison-src/transcripts/slurping/update-on-conflict.output.md delete mode 100644 unison-src/transcripts/slurping/update-with-conflicting-constructor-and-term.md diff --git a/unison-src/transcripts/alias-clobbering.md b/unison-src/transcripts/alias-clobbering.md deleted file mode 100644 index cd020b91f2..0000000000 --- a/unison-src/transcripts/alias-clobbering.md +++ /dev/null @@ -1,34 +0,0 @@ -# Aliasing takes priority over provided definition - -```ucm:hide -.> builtins.merge -``` - -```unison -x = 1 -``` - -```ucm -.> add -``` - -```unison --- Overwrite the existing alias -x = 2 --- But add a _new_ definition that's an alias of the _old_ x -y = 1 -``` - -We see that `y` ends up as an alias for the new value of `x`, i.e. `y = 2`, -even though we explicitly said `y = 1`! - -Note how `update` isn't idempotent, it ping-pongs back and forth between aliases: - -```ucm -.> update -.> view x -.> view y -.> update -.> update -.> update -``` diff --git a/unison-src/transcripts/alias-clobbering.output.md b/unison-src/transcripts/alias-clobbering.output.md deleted file mode 100644 index 4f1bc9dd7d..0000000000 --- a/unison-src/transcripts/alias-clobbering.output.md +++ /dev/null @@ -1,107 +0,0 @@ -# Aliasing takes priority over provided definition - -```unison -x = 1 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - x : Nat - -``` -```ucm -.> add - - ⍟ I've added these definitions: - - x : Nat - -``` -```unison --- Overwrite the existing alias -x = 2 --- But add a _new_ definition that's an alias of the _old_ x -y = 1 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - y : Nat - (also named x) - - ⍟ These names already exist. You can `update` them to your - new definition: - - x : Nat - -``` -We see that `y` ends up as an alias for the new value of `x`, i.e. `y = 2`, -even though we explicitly said `y = 1`! - -Note how `update` isn't idempotent, it ping-pongs back and forth between aliases: - -```ucm -.> update - - ⍟ I've added these definitions: - - y : Nat - (also named x) - - ⍟ I've updated these names to your new definition: - - x : Nat - -.> view x - - x : Nat - x = 2 - -.> view y - - x : Nat - x = 2 - -.> update - - ⊡ Ignored previously added definitions: x - - ⍟ I've updated these names to your new definition: - - y : Nat - (The old definition was also named x. I updated this name - too.) - -.> update - - ⊡ Ignored previously added definitions: y - - ⍟ I've updated these names to your new definition: - - x : Nat - (The old definition was also named y. I updated this name - too.) - -.> update - - ⊡ Ignored previously added definitions: x - - ⍟ I've updated these names to your new definition: - - y : Nat - (The old definition was also named x. I updated this name - too.) - -``` diff --git a/unison-src/transcripts/duplicate-names.md b/unison-src/transcripts/duplicate-names.md new file mode 100644 index 0000000000..2935a401b9 --- /dev/null +++ b/unison-src/transcripts/duplicate-names.md @@ -0,0 +1,54 @@ +# Duplicate names in scratch file. + +```ucm:hide +.> builtins.merge +``` + +Term and ability constructor collisions should cause a parse error. + +```unison:error +structural ability Stream where + send : a -> () + +Stream.send : a -> () +Stream.send _ = () +``` + +Term and type constructor collisions should cause a parse error. + +```unison:error +structural type X = x + +X.x : a -> () +X.x _ = () +``` + +Ability and type constructor collisions should cause a parse error. + +```unison:error +structural type X = x +structural ability X where + x : () +``` + +Field accessors and terms with the same name should cause a parse error. + +```unison:error +structural type X = {x : ()} +X.x.modify = () +X.x.set = () +X.x = () +``` + +Types and terms with the same name are allowed. + +```unison +structural type X = Z + +X = () +``` + +```ucm +.> add +.> view X +``` diff --git a/unison-src/transcripts/duplicate-names.output.md b/unison-src/transcripts/duplicate-names.output.md new file mode 100644 index 0000000000..311202b91a --- /dev/null +++ b/unison-src/transcripts/duplicate-names.output.md @@ -0,0 +1,133 @@ +# Duplicate names in scratch file. + +Term and ability constructor collisions should cause a parse error. + +```unison +structural ability Stream where + send : a -> () + +Stream.send : a -> () +Stream.send _ = () +``` + +```ucm + + ❗️ + + I found multiple bindings with the name Stream.send: + 2 | send : a -> () + 3 | + 4 | Stream.send : a -> () + 5 | Stream.send _ = () + + +``` +Term and type constructor collisions should cause a parse error. + +```unison +structural type X = x + +X.x : a -> () +X.x _ = () +``` + +```ucm + + ❗️ + + I found multiple bindings with the name X.x: + 1 | structural type X = x + 2 | + 3 | X.x : a -> () + 4 | X.x _ = () + + +``` +Ability and type constructor collisions should cause a parse error. + +```unison +structural type X = x +structural ability X where + x : () +``` + +```ucm + + I found two types called X: + + 1 | structural type X = x + 2 | structural ability X where + 3 | x : () + + +``` +Field accessors and terms with the same name should cause a parse error. + +```unison +structural type X = {x : ()} +X.x.modify = () +X.x.set = () +X.x = () +``` + +```ucm + + ❗️ + + I found multiple bindings with the name X.x: + 1 | structural type X = {x : ()} + 2 | X.x.modify = () + 3 | X.x.set = () + 4 | X.x = () + + + I found multiple bindings with the name X.x.modify: + 1 | structural type X = {x : ()} + 2 | X.x.modify = () + + + I found multiple bindings with the name X.x.set: + 1 | structural type X = {x : ()} + 2 | X.x.modify = () + 3 | X.x.set = () + + +``` +Types and terms with the same name are allowed. + +```unison +structural type X = Z + +X = () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + structural type X + (also named builtin.Unit) + X : () + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + structural type X + (also named builtin.Unit) + X : () + +.> view X + + structural type X = Z + + X : () + X = () + +``` diff --git a/unison-src/transcripts/slurping/ability-term-conflicts-on-update.md b/unison-src/transcripts/slurping/ability-term-conflicts-on-update.md deleted file mode 100644 index 8b7eed2923..0000000000 --- a/unison-src/transcripts/slurping/ability-term-conflicts-on-update.md +++ /dev/null @@ -1,41 +0,0 @@ -# Regression test for updates which conflict with an existing ability constructor - -https://github.com/unisonweb/unison/issues/2786 - -```ucm:hide -.builtins> builtins.mergeio -``` - -First we add an ability to the codebase. -Note that this will create the name `Channels.send` as an ability constructor. - -```unison -unique ability Channels where - send : a -> {Channels} () -``` - -```ucm -.ns> add -``` - -Now we update the ability, changing the name of the constructor, _but_, we simultaneously -add a new top-level term with the same name as the constructor which is being -removed from Channels. - -```unison -unique ability Channels where - sends : [a] -> {Channels} () - -Channels.send : a -> {Channels} () -Channels.send a = sends [a] - -thing : '{Channels} () -thing _ = send 1 -``` - -The 'update' will succeed up until it tries to resolve the reference to `send` -within `thing`; because the old send is deleted and the new `send` isn't ever added. - -```ucm -.ns> update -``` diff --git a/unison-src/transcripts/slurping/ability-term-conflicts-on-update.output.md b/unison-src/transcripts/slurping/ability-term-conflicts-on-update.output.md deleted file mode 100644 index a9f0c1fcaf..0000000000 --- a/unison-src/transcripts/slurping/ability-term-conflicts-on-update.output.md +++ /dev/null @@ -1,81 +0,0 @@ -# Regression test for updates which conflict with an existing ability constructor - -https://github.com/unisonweb/unison/issues/2786 - -First we add an ability to the codebase. -Note that this will create the name `Channels.send` as an ability constructor. - -```unison -unique ability Channels where - send : a -> {Channels} () -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - unique ability Channels - -``` -```ucm - ☝️ The namespace .ns is empty. - -.ns> add - - ⍟ I've added these definitions: - - unique ability Channels - -``` -Now we update the ability, changing the name of the constructor, _but_, we simultaneously -add a new top-level term with the same name as the constructor which is being -removed from Channels. - -```unison -unique ability Channels where - sends : [a] -> {Channels} () - -Channels.send : a -> {Channels} () -Channels.send a = sends [a] - -thing : '{Channels} () -thing _ = send 1 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - Channels.send : a ->{Channels} () - thing : '{Channels} () - - ⍟ These names already exist. You can `update` them to your - new definition: - - unique ability Channels - -``` -The 'update' will succeed up until it tries to resolve the reference to `send` -within `thing`; because the old send is deleted and the new `send` isn't ever added. - -```ucm -.ns> update - - ⍟ I've added these definitions: - - Channels.send : a ->{Channels} () - thing : '{Channels} () - - ⍟ I've updated these names to your new definition: - - unique ability Channels - -``` diff --git a/unison-src/transcripts/slurping/alias-clobbering.md b/unison-src/transcripts/slurping/alias-clobbering.md deleted file mode 100644 index cd020b91f2..0000000000 --- a/unison-src/transcripts/slurping/alias-clobbering.md +++ /dev/null @@ -1,34 +0,0 @@ -# Aliasing takes priority over provided definition - -```ucm:hide -.> builtins.merge -``` - -```unison -x = 1 -``` - -```ucm -.> add -``` - -```unison --- Overwrite the existing alias -x = 2 --- But add a _new_ definition that's an alias of the _old_ x -y = 1 -``` - -We see that `y` ends up as an alias for the new value of `x`, i.e. `y = 2`, -even though we explicitly said `y = 1`! - -Note how `update` isn't idempotent, it ping-pongs back and forth between aliases: - -```ucm -.> update -.> view x -.> view y -.> update -.> update -.> update -``` diff --git a/unison-src/transcripts/slurping/alias-clobbering.output.md b/unison-src/transcripts/slurping/alias-clobbering.output.md deleted file mode 100644 index 4f1bc9dd7d..0000000000 --- a/unison-src/transcripts/slurping/alias-clobbering.output.md +++ /dev/null @@ -1,107 +0,0 @@ -# Aliasing takes priority over provided definition - -```unison -x = 1 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - x : Nat - -``` -```ucm -.> add - - ⍟ I've added these definitions: - - x : Nat - -``` -```unison --- Overwrite the existing alias -x = 2 --- But add a _new_ definition that's an alias of the _old_ x -y = 1 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - y : Nat - (also named x) - - ⍟ These names already exist. You can `update` them to your - new definition: - - x : Nat - -``` -We see that `y` ends up as an alias for the new value of `x`, i.e. `y = 2`, -even though we explicitly said `y = 1`! - -Note how `update` isn't idempotent, it ping-pongs back and forth between aliases: - -```ucm -.> update - - ⍟ I've added these definitions: - - y : Nat - (also named x) - - ⍟ I've updated these names to your new definition: - - x : Nat - -.> view x - - x : Nat - x = 2 - -.> view y - - x : Nat - x = 2 - -.> update - - ⊡ Ignored previously added definitions: x - - ⍟ I've updated these names to your new definition: - - y : Nat - (The old definition was also named x. I updated this name - too.) - -.> update - - ⊡ Ignored previously added definitions: y - - ⍟ I've updated these names to your new definition: - - x : Nat - (The old definition was also named y. I updated this name - too.) - -.> update - - ⊡ Ignored previously added definitions: x - - ⍟ I've updated these names to your new definition: - - y : Nat - (The old definition was also named x. I updated this name - too.) - -``` diff --git a/unison-src/transcripts/slurping/block-on-required-update.md b/unison-src/transcripts/slurping/block-on-required-update.md deleted file mode 100644 index 1027188b06..0000000000 --- a/unison-src/transcripts/slurping/block-on-required-update.md +++ /dev/null @@ -1,28 +0,0 @@ -# Block on required update - -Should block an `add` if it requires an update on an in-file dependency. - -```ucm:hide -.> builtins.merge -``` - -```unison -x = 1 -``` - -```ucm -.> add -``` - -Update `x`, and add a new `y` which depends on the update - -```unison -x = 10 -y = x + 1 -``` - -Try to add only the new `y`. This should fail because it requires an update to `x`, but we only ran an 'add'. - -```ucm:error -.> add y -``` diff --git a/unison-src/transcripts/slurping/block-on-required-update.output.md b/unison-src/transcripts/slurping/block-on-required-update.output.md deleted file mode 100644 index b23135afd9..0000000000 --- a/unison-src/transcripts/slurping/block-on-required-update.output.md +++ /dev/null @@ -1,64 +0,0 @@ -# Block on required update - -Should block an `add` if it requires an update on an in-file dependency. - -```unison -x = 1 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - x : Nat - -``` -```ucm -.> add - - ⍟ I've added these definitions: - - x : Nat - -``` -Update `x`, and add a new `y` which depends on the update - -```unison -x = 10 -y = x + 1 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - y : Nat - - ⍟ These names already exist. You can `update` them to your - new definition: - - x : Nat - -``` -Try to add only the new `y`. This should fail because it requires an update to `x`, but we only ran an 'add'. - -```ucm -.> add y - - x These definitions failed: - - Reason - needs update x : Nat - blocked y : Nat - - Tip: Use `help filestatus` to learn more. - -``` diff --git a/unison-src/transcripts/slurping/sum-type-update-conflicts.md b/unison-src/transcripts/slurping/sum-type-update-conflicts.md deleted file mode 100644 index 992c5726ea..0000000000 --- a/unison-src/transcripts/slurping/sum-type-update-conflicts.md +++ /dev/null @@ -1,36 +0,0 @@ -# Regression test for updates which conflict with an existing data constructor - -https://github.com/unisonweb/unison/issues/2786 - -```ucm:hide -.builtins> builtins.mergeio -``` - -First we add a sum-type to the codebase. - -```unison -structural type X = x -``` - -```ucm -.ns> add -``` - -Now we update the type, changing the name of the constructors, _but_, we simultaneously -add a new top-level term with the same name as the old constructor. - -```unison -structural type X = y | z - -X.x : Text -X.x = "some text that's not in the codebase" - -dependsOnX = Text.size X.x -``` - -The 'update' will succeed up until it tries to resolve the reference to `X.x` -within `depondsOnX`, but `X.x` is actually not in the codebase! - -```ucm:error -.ns> update -``` diff --git a/unison-src/transcripts/slurping/update-on-conflict.md b/unison-src/transcripts/slurping/update-on-conflict.md deleted file mode 100644 index 30e1afe84d..0000000000 --- a/unison-src/transcripts/slurping/update-on-conflict.md +++ /dev/null @@ -1,30 +0,0 @@ -# Update on conflict - -```ucm:hide -.> builtins.merge -``` - -```unison -a.x = 1 -b.x = 2 -``` - -Cause a conflict: -```ucm -.> add -.merged> merge .a -.merged> merge .b -``` - -Ideally we could just define the canonical `x` that we want, and update -to accept it, but we can't: - -```unison -x = 1 + 2 -``` - -Update fails on conflicted `x`: - -```ucm:error -.merged> update -``` diff --git a/unison-src/transcripts/slurping/update-on-conflict.output.md b/unison-src/transcripts/slurping/update-on-conflict.output.md deleted file mode 100644 index 2a5948c801..0000000000 --- a/unison-src/transcripts/slurping/update-on-conflict.output.md +++ /dev/null @@ -1,96 +0,0 @@ -# Update on conflict - -```unison -a.x = 1 -b.x = 2 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - a.x : Nat - b.x : Nat - -``` -Cause a conflict: -```ucm -.> add - - ⍟ I've added these definitions: - - a.x : Nat - b.x : Nat - - ☝️ The namespace .merged is empty. - -.merged> merge .a - - Here's what's changed in the current namespace after the - merge: - - Added definitions: - - 1. x : Nat - - Tip: You can use `todo` to see if this generated any work to - do in this namespace and `test` to run the tests. Or you - can use `undo` or `reflog` to undo the results of this - merge. - -.merged> merge .b - - Here's what's changed in the current namespace after the - merge: - - New name conflicts: - - 1. x#jk19sm5bf8 : Nat - ↓ - 2. ┌ x#0ja1qfpej6 : Nat - 3. └ x#jk19sm5bf8 : Nat - - Tip: You can use `todo` to see if this generated any work to - do in this namespace and `test` to run the tests. Or you - can use `undo` or `reflog` to undo the results of this - merge. - -``` -Ideally we could just define the canonical `x` that we want, and update -to accept it, but we can't: - -```unison -x = 1 + 2 -``` - -```ucm - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - x These definitions would fail on `add` or `update`: - - Reason - conflicted x : Nat - - Tip: Use `help filestatus` to learn more. - -``` -Update fails on conflicted `x`: - -```ucm -.merged> update - - x These definitions failed: - - Reason - conflicted x : Nat - - Tip: Use `help filestatus` to learn more. - -``` diff --git a/unison-src/transcripts/slurping/update-with-conflicting-constructor-and-term.md b/unison-src/transcripts/slurping/update-with-conflicting-constructor-and-term.md deleted file mode 100644 index e3284c6f97..0000000000 --- a/unison-src/transcripts/slurping/update-with-conflicting-constructor-and-term.md +++ /dev/null @@ -1,17 +0,0 @@ -# Update with conflicting ability constructor and term - -```ucm:hide -.> builtins.merge -``` - -```unison -unique ability Stream where - send : a -> () - -Stream.send : a -> () -Stream.send _ = () -``` - -```ucm -.> add -``` From ab0e3e52d20f31ee0155358844d767b115be2653 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 21 Jan 2022 10:34:33 -0600 Subject: [PATCH 206/297] Make an explicit 'CheckOp' constructor --- .../src/Unison/Codebase/Editor/HandleInput.hs | 28 +++++++++---------- .../src/Unison/Codebase/Editor/Slurp.hs | 28 +++++++++++-------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index cf98c138fb..f9b79b70c4 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -262,7 +262,7 @@ loop = do let lexed = L.lexer (Text.unpack sourceName) (Text.unpack text) withFile [] sourceName (text, lexed) $ \unisonFile -> do currentNames <- currentPathNames - let sr = Slurp.slurpFile unisonFile mempty Nothing currentNames + let sr = Slurp.slurpFile unisonFile mempty Slurp.CheckOp currentNames names <- displayNames unisonFile pped <- prettyPrintEnvDecl names let ppe = PPE.suffixifiedPPE pped @@ -1253,13 +1253,13 @@ loop = do InvalidSourceNameError -> respond $ InvalidSourceName path LoadError -> respond $ SourceLoadFailed path LoadSuccess contents -> loadUnisonFile (Text.pack path) contents - AddI names -> do - let vars = Set.map Name.toVar names + AddI requestedNames -> do + let vars = Set.map Name.toVar requestedNames case uf of Nothing -> respond NoUnisonFile Just uf -> do currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames + let sr = Slurp.slurpFile uf vars Slurp.AddOp currentNames let adds = SlurpResult.adds sr stepAtNoSync Branch.CompressHistory (Path.unabsolute currentPath', doSlurpAdds adds uf) eval . AddDefsToCodebase . filterBySlurpResult sr $ uf @@ -1267,19 +1267,19 @@ loop = do respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr addDefaultMetadata adds syncRoot - PreviewAddI names -> case (latestFile', uf) of + PreviewAddI requestedNames -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do - let vars = Set.map Name.toVar names + let vars = Set.map Name.toVar requestedNames currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.AddOp) currentNames + let sr = Slurp.slurpFile uf vars Slurp.AddOp currentNames previewResponse sourceName sr uf _ -> respond NoUnisonFile - UpdateI maybePatchPath names -> handleUpdate input maybePatchPath names - PreviewUpdateI names -> case (latestFile', uf) of + UpdateI maybePatchPath requestedNames -> handleUpdate input maybePatchPath requestedNames + PreviewUpdateI requestedNames -> case (latestFile', uf) of (Just (sourceName, _), Just uf) -> do - let vars = Set.map Name.toVar names + let vars = Set.map Name.toVar requestedNames currentNames <- currentPathNames - let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) currentNames + let sr = Slurp.slurpFile uf vars Slurp.UpdateOp currentNames previewResponse sourceName sr uf _ -> respond NoUnisonFile TodoI patchPath branchPath' -> do @@ -1802,8 +1802,8 @@ handleShowDefinition outputLoc inputQuery = do -- | Handle an @update@ command. handleUpdate :: forall m v. (Monad m, Var v) => Input -> Maybe PatchPath -> Set Name -> Action' m v () -handleUpdate input maybePatchPath names = do - let vars = Set.map Name.toVar names +handleUpdate input maybePatchPath requestedNames = do + let requestedVars = Set.map Name.toVar requestedNames use LoopState.latestTypecheckedFile >>= \case Nothing -> respond NoUnisonFile Just uf -> do @@ -1818,7 +1818,7 @@ handleUpdate input maybePatchPath names = do let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames let currentPathNames = slurpCheckNames - let sr = Slurp.slurpFile uf vars (Just Slurp.UpdateOp) slurpCheckNames + let sr = Slurp.slurpFile uf requestedVars Slurp.UpdateOp slurpCheckNames addsAndUpdates :: SlurpComponent v addsAndUpdates = Slurp.updates sr <> Slurp.adds sr fileNames :: Names diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 2b82bcd92a..2ac1076166 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -35,7 +35,11 @@ import qualified Unison.Var as Var import Unison.WatchKind (pattern TestWatch) -- | The operation which is being performed or checked. -data SlurpOp = AddOp | UpdateOp +data SlurpOp + = AddOp + | UpdateOp + | -- Run when the user saves the scratch file. + CheckOp deriving (Eq, Show) -- | Tag a variable as representing a term, type, or constructor @@ -108,10 +112,10 @@ slurpFile :: Var v => UF.TypecheckedUnisonFile v Ann -> Set v -> - Maybe SlurpOp -> + SlurpOp -> Names -> SR.SlurpResult v -slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames = +slurpFile uf defsToConsider slurpOp unalteredCodebaseNames = let -- A mapping of all vars in the file to their references. varReferences :: Map (TaggedVar v) LD.LabeledDependency varReferences = buildVarReferences uf @@ -142,9 +146,6 @@ slurpFile uf defsToConsider maybeSlurpOp unalteredCodebaseNames = toSlurpResult uf slurpOp defsToConsider involvedVars fileNames codebaseNames summaries in pTraceShowId slurpResult where - slurpOp :: SlurpOp - slurpOp = fromMaybe UpdateOp maybeSlurpOp - fileNames :: Names fileNames = UF.typecheckedToNames uf @@ -157,11 +158,12 @@ computeNamesWithDeprecations :: Set (TaggedVar v) -> SlurpOp -> Names -computeNamesWithDeprecations _uf unalteredCodebaseNames _involvedVars AddOp = - -- If we're 'adding', there won't be any deprecations. - unalteredCodebaseNames -computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars UpdateOp = - codebaseNames +computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars op = + case op of + AddOp -> + -- If we're 'adding', there won't be any deprecations to worry about. + unalteredCodebaseNames + _ -> codebaseNames where -- Get the set of all DIRECT definitions in the file which a definition depends on. codebaseNames :: Names @@ -435,7 +437,7 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames summarize SR.duplicates = duplicates, SR.collisions = if op == AddOp then updates else mempty, SR.conflicts = conflicts, - SR.updates = if op == UpdateOp then updates else mempty, + SR.updates = if op /= AddOp then updates else mempty, SR.termExistingConstructorCollisions = let SlurpComponent {types, terms, ctors} = termCtorColl in types <> terms <> ctors, @@ -462,6 +464,8 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames summarize mempty {blocked = sc} UpdateOp -> mempty {adds = sc} + CheckOp -> + mempty {adds = sc} ErrFrom _ TermCtorCollision -> mempty {blocked = sc} ErrFrom _ CtorTermCollision -> mempty {blocked = sc} ErrFrom _ Conflict -> mempty {blocked = sc} From 6d5cf29c26643f39727910871731af888b24c1d3 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 21 Jan 2022 10:36:06 -0600 Subject: [PATCH 207/297] Reorganize --- .../src/Unison/Codebase/Editor/Slurp.hs | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 2ac1076166..96b344af71 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -59,6 +59,22 @@ data SlurpOk | Duplicated deriving (Eq, Ord, Show) +-- | Possible error conditions for a definition. +data SlurpErr + = -- | A term in the scratch file conflicts with a Ctor in the codebase + TermCtorCollision + | -- | A constructor in the scratch file conflicts with a term in the codebase + CtorTermCollision + | -- | The name of this term is conflicted in the codebase. + Conflict + deriving (Eq, Ord, Show) + +-- | Possible statuses for a given definition +data DefnStatus + = DefOk SlurpOk + | DefErr SlurpErr + deriving (Show) + -- | A definition's final status, incorporating the statuses of all of its dependencies. data SummarizedStatus v = Ok SlurpOk @@ -89,22 +105,6 @@ pickPriorityStatus a b = (_, Ok New) -> Ok New (Ok Duplicated, _) -> Ok Duplicated --- | Possible error conditions for a definition. -data SlurpErr - = -- | A term in the scratch file conflicts with a Ctor in the codebase - TermCtorCollision - | -- | A constructor in the scratch file conflicts with a term in the codebase - CtorTermCollision - | -- | The name of this term is conflicted in the codebase. - Conflict - deriving (Eq, Ord, Show) - --- | Possible statuses for a given definition -data DefnStatus - = DefOk SlurpOk - | DefErr SlurpErr - deriving (Show) - -- | Analyze a file and determine the status of all of its definitions with respect to a set -- of vars to analyze and an operation you wish to perform. slurpFile :: From 725acab049af1d0faefe0568df0c6b5fe68b7e63 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 21 Jan 2022 10:36:55 -0600 Subject: [PATCH 208/297] Remove tracing --- unison-cli/package.yaml | 1 - .../src/Unison/Codebase/Editor/Slurp.hs | 64 +++++++++---------- unison-cli/unison-cli.cabal | 5 -- 3 files changed, 29 insertions(+), 41 deletions(-) diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 3e2ec98834..50cb2a3b9c 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -10,7 +10,6 @@ flags: ghc-options: -Wall dependencies: - - pretty-simple - semialign - these - ListLike diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 96b344af71..459965ed54 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -9,7 +9,6 @@ import qualified Data.Map as Map import qualified Data.Semialign as Align import qualified Data.Set as Set import Data.These -import Debug.Pretty.Simple (pTraceShow, pTraceShowId) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import qualified Unison.Codebase.Editor.SlurpResult as SR @@ -144,7 +143,7 @@ slurpFile uf defsToConsider slurpOp unalteredCodebaseNames = slurpResult :: SR.SlurpResult v slurpResult = toSlurpResult uf slurpOp defsToConsider involvedVars fileNames codebaseNames summaries - in pTraceShowId slurpResult + in slurpResult where fileNames :: Names fileNames = UF.typecheckedToNames uf @@ -194,8 +193,7 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars op = (name, _ref) <- Names.constructorsForType ref unalteredCodebaseNames pure name in -- Compute any constructors which were deleted - pTraceShow ("defsToConsider", involvedVars) $ - existingConstructorsFromEditedTypes `Set.difference` constructorsUnderConsideration + existingConstructorsFromEditedTypes `Set.difference` constructorsUnderConsideration -- | Compute a mapping of each definition to its status, and its dependencies' statuses. computeVarStatuses :: @@ -322,9 +320,7 @@ computeVarDeps uf allInvolvedVars = ( \tv -> (tv, Set.delete tv $ varClosure uf (Set.singleton tv)) ) & Map.fromListWith (<>) - in pTraceShow ("all involved variables", allInvolvedVars) - . pTraceShow ("depmap", depMap) - $ depMap + in depMap -- | Compute the closure of all vars which the provided vars depend on. -- A type depends on its constructors. @@ -336,8 +332,7 @@ varClosure uf (partitionVars -> sc) = -- | Collect a relation of term or type var to labelled dependency for all definitions mentioned in a file. buildVarReferences :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Map (TaggedVar v) LD.LabeledDependency buildVarReferences uf = - let result = decls <> effects <> terms <> constructors - in pTraceShow ("varReferences", result) $ result + decls <> effects <> terms <> constructors where terms :: Map (TaggedVar v) LD.LabeledDependency terms = @@ -422,32 +417,31 @@ toSlurpResult :: Map (TaggedVar v) (SummarizedStatus v) -> SR.SlurpResult v toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames summarizedStatuses = - pTraceShowId $ - SR.SlurpResult - { SR.originalFile = uf, - SR.extraDefinitions = - if Set.null requestedVars - then mempty - else - let desired = - requestedVars - & Set.flatMap (\v -> Set.fromList [TypeVar v, TermVar v]) - in partitionVars $ Set.difference involvedVars desired, - SR.adds = adds, - SR.duplicates = duplicates, - SR.collisions = if op == AddOp then updates else mempty, - SR.conflicts = conflicts, - SR.updates = if op /= AddOp then updates else mempty, - SR.termExistingConstructorCollisions = - let SlurpComponent {types, terms, ctors} = termCtorColl - in types <> terms <> ctors, - SR.constructorExistingTermCollisions = - let SlurpComponent {types, terms, ctors} = ctorTermColl - in types <> terms <> ctors, - SR.termAlias = termAliases, - SR.typeAlias = typeAliases, - SR.defsWithBlockedDependencies = blocked - } + SR.SlurpResult + { SR.originalFile = uf, + SR.extraDefinitions = + if Set.null requestedVars + then mempty + else + let desired = + requestedVars + & Set.flatMap (\v -> Set.fromList [TypeVar v, TermVar v]) + in partitionVars $ Set.difference involvedVars desired, + SR.adds = adds, + SR.duplicates = duplicates, + SR.collisions = if op == AddOp then updates else mempty, + SR.conflicts = conflicts, + SR.updates = if op /= AddOp then updates else mempty, + SR.termExistingConstructorCollisions = + let SlurpComponent {types, terms, ctors} = termCtorColl + in types <> terms <> ctors, + SR.constructorExistingTermCollisions = + let SlurpComponent {types, terms, ctors} = ctorTermColl + in types <> terms <> ctors, + SR.termAlias = termAliases, + SR.typeAlias = typeAliases, + SR.defsWithBlockedDependencies = blocked + } where SlurpingSummary {adds, duplicates, updates, termCtorColl, ctorTermColl, blocked, conflicts} = summarizedStatuses diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index caa1065b4a..8f7aa870d2 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -97,7 +97,6 @@ library , mtl , nonempty-containers , open-browser - , pretty-simple , random >=1.2.0 , regex-tdfa , semialign @@ -169,7 +168,6 @@ executable integration-tests , mtl , nonempty-containers , open-browser - , pretty-simple , process , random >=1.2.0 , regex-tdfa @@ -242,7 +240,6 @@ executable transcripts , mtl , nonempty-containers , open-browser - , pretty-simple , process , random >=1.2.0 , regex-tdfa @@ -315,7 +312,6 @@ executable unison , nonempty-containers , open-browser , optparse-applicative >=0.16.1.0 - , pretty-simple , random >=1.2.0 , regex-tdfa , semialign @@ -395,7 +391,6 @@ test-suite tests , mtl , nonempty-containers , open-browser - , pretty-simple , random >=1.2.0 , regex-tdfa , semialign From acd6e049c4628f595546a750d83bd9097829b929 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 21 Jan 2022 12:01:29 -0600 Subject: [PATCH 209/297] Add progress bar to migration and add prompt to begin migration. --- .../U/Codebase/Sqlite/Queries.hs | 17 +++++++ .../src/Unison/Codebase/SqliteCodebase.hs | 6 ++- .../SqliteCodebase/MigrateSchema12.hs | 46 ++++++++++++++----- 3 files changed, 56 insertions(+), 13 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 492d1233b8..984669ad8b 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -112,6 +112,11 @@ module U.Codebase.Sqlite.Queries ( garbageCollectObjectsWithoutHashes, garbageCollectWatchesWithoutObjects, + -- migrations + countObjects, + countCausals, + countWatches, + -- * db misc createSchema, schemaVersion, @@ -279,6 +284,18 @@ setSchemaVersion schemaVersion = execute sql (Only schemaVersion) SET version = ? |] +countObjects :: DB m => m Int +countObjects = head <$> queryAtoms_ sql + where sql = [here| SELECT COUNT(*) FROM object |] + +countCausals :: DB m => m Int +countCausals = head <$> queryAtoms_ sql + where sql = [here| SELECT COUNT(*) FROM causal |] + +countWatches :: DB m => m Int +countWatches = head <$> queryAtoms_ sql + where sql = [here| SELECT COUNT(*) FROM watch |] + saveHash :: DB m => Base32Hex -> m HashId saveHash base32 = execute sql (Only base32) >> queryOne (loadHashId base32) where sql = [here| diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index befccabdfd..5f3d0017b4 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -839,13 +839,17 @@ sqliteCodebase debugName root localOrRemote action = do (`finally` finalizer) $ runReaderT Q.schemaVersion conn >>= \case SchemaVersion 2 -> Right <$> action codebase SchemaVersion 1 -> do + liftIO $ putStrLn ("Migrating from schema version 1 -> 2.") case localOrRemote of Local -> liftIO do backupPath <- backupCodebasePath <$> getPOSIXTime copyFile (root codebasePath) (root backupPath) -- FIXME prettify - putStrLn ("I backed up your codebase to " ++ (root backupPath)) + putStrLn ("📋 I backed up your codebase to " ++ (root backupPath)) + putStrLn "⚠️ Please close all other ucm processes and wait for the migration to complete before interacting with your codebase." + putStrLn "Press to start the migration once all other ucm processes are shutdown..." + void $ liftIO getLine Remote -> pure () migrateSchema12 conn codebase -- it's ok to pass codebase along; whatever it cached during the migration won't break anything diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 467413594c..10b2542037 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -24,6 +24,8 @@ import qualified Data.Set as Set import Data.Tuple (swap) import Data.Tuple.Extra ((***)) import qualified Data.Zip as Zip +import System.Environment (lookupEnv) +import System.IO.Unsafe (unsafePerformIO) import U.Codebase.HashTags (BranchHash (..), CausalHash (..), PatchHash (..)) import qualified U.Codebase.Reference as UReference import qualified U.Codebase.Referent as UReferent @@ -104,18 +106,26 @@ import UnliftIO.Exception (bracket_, onException) -- * [x] Update the schema version in the database after migrating so we only migrate -- once. +verboseOutput :: Bool +verboseOutput = + isJust (unsafePerformIO (lookupEnv "UNISON_MIGRATION_DEBUG")) +{-# NOINLINE verboseOutput #-} + migrateSchema12 :: forall a m v. (MonadUnliftIO m, Var v) => Connection -> Codebase m v a -> m () migrateSchema12 conn codebase = do withinSavepoint "MIGRATESCHEMA12" $ do + liftIO $ putStrLn $ "Starting codebase migration, this may take a while." rootCausalHashId <- runDB conn (liftQ Q.loadNamespaceRoot) + numEntitiesToMigrate <- runDB conn . liftQ $ do + sum <$> sequenceA [Q.countObjects, Q.countCausals, Q.countWatches] watches <- foldMapM (\watchKind -> map (W watchKind) <$> Codebase.watches codebase (Cv.watchKind2to1 watchKind)) [WK.RegularWatch, WK.TestWatch] migrationState <- - (Sync.sync @_ @Entity migrationSync progress (CausalE rootCausalHashId : watches)) + (Sync.sync @_ @Entity migrationSync (progress numEntitiesToMigrate) (CausalE rootCausalHashId : watches)) `runReaderT` Env {db = conn, codebase} - `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty + `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty 0 let (_, newRootCausalHashId) = causalMapping migrationState ^?! ix rootCausalHashId liftIO $ putStrLn $ "Updating Namespace Root..." runDB conn . liftQ $ Q.setNamespaceRoot newRootCausalHashId @@ -138,17 +148,28 @@ migrateSchema12 conn codebase = do (runDB conn $ Q.savepoint name) (runDB conn $ Q.release name) (act `onException` runDB conn (Q.rollbackTo name)) - progress :: Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity - progress = - let need :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () - need e = liftIO $ putStrLn $ "Need: " ++ show e + progress :: Int -> Sync.Progress (ReaderT (Env m v a) (StateT MigrationState m)) Entity + progress numToMigrate = + let incrementProgress :: ReaderT (Env m v a) (StateT MigrationState m) () + incrementProgress = do + numDone <- field @"numMigrated" <+= 1 + liftIO $ putStr $ "\r 🏗 " <> show numDone <> " / ~" <> show numToMigrate <> " entities migrated. 🚧" + need :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () + need e = when verboseOutput $ liftIO $ putStrLn $ "Need: " ++ show e done :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () - done e = liftIO $ putStrLn $ "Done: " ++ show e - error :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () - error e = liftIO $ putStrLn $ "Error: " ++ show e + done e = do + when verboseOutput $ liftIO $ putStrLn $ "Done: " ++ show e + incrementProgress + errorHandler :: Entity -> ReaderT (Env m v a) (StateT MigrationState m) () + errorHandler e = do + case e of + -- We expect non-fatal errors when migrating watches. + W {} -> pure () + e -> liftIO $ putStrLn $ "Error: " ++ show e + incrementProgress allDone :: ReaderT (Env m v a) (StateT MigrationState m) () - allDone = liftIO $ putStrLn $ "Finished migrating, initiating cleanup." - in Sync.Progress {need, done, error, allDone} + allDone = liftIO $ putStrLn $ "\nFinished migrating, initiating cleanup.\n This will take a moment..." + in Sync.Progress {need, done, error = errorHandler, allDone} type Old a = a @@ -166,7 +187,8 @@ data MigrationState = MigrationState -- convert Object Reference IDs into Hash Reference IDs so we can use the referenceMapping. objLookup :: Map (Old ObjectId) (New ObjectId, New HashId, New Hash, Old Hash), -- Remember the hashes of term/decls that we have already migrated to avoid migrating them twice. - migratedDefnHashes :: Set (Old Hash) + migratedDefnHashes :: Set (Old Hash), + numMigrated :: Int } deriving (Generic) From 1f945a5caec8448a70bf4c20ef80f8fab743a81a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 21 Jan 2022 12:07:05 -0600 Subject: [PATCH 210/297] Formatting cleanups --- .../src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 10b2542037..46c7a36d83 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -114,7 +114,7 @@ verboseOutput = migrateSchema12 :: forall a m v. (MonadUnliftIO m, Var v) => Connection -> Codebase m v a -> m () migrateSchema12 conn codebase = do withinSavepoint "MIGRATESCHEMA12" $ do - liftIO $ putStrLn $ "Starting codebase migration, this may take a while." + liftIO $ putStrLn $ "Starting codebase migration. This may take a while, it's a good time to make some tea ☕️" rootCausalHashId <- runDB conn (liftQ Q.loadNamespaceRoot) numEntitiesToMigrate <- runDB conn . liftQ $ do sum <$> sequenceA [Q.countObjects, Q.countCausals, Q.countWatches] @@ -168,7 +168,7 @@ migrateSchema12 conn codebase = do e -> liftIO $ putStrLn $ "Error: " ++ show e incrementProgress allDone :: ReaderT (Env m v a) (StateT MigrationState m) () - allDone = liftIO $ putStrLn $ "\nFinished migrating, initiating cleanup.\n This will take a moment..." + allDone = liftIO $ putStrLn $ "\nFinished migrating, initiating cleanup." in Sync.Progress {need, done, error = errorHandler, allDone} type Old a = a From c1e659d0d9e5e061d332662de5e0076fe99a4821 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 21 Jan 2022 12:12:26 -0600 Subject: [PATCH 211/297] Update hash version in sqlite query. --- codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 984669ad8b..fff34f834c 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -363,7 +363,7 @@ saveHashObject hId oId version = execute sql (hId, oId, version) where saveObject :: DB m => HashId -> ObjectType -> ByteString -> m ObjectId saveObject h t blob = do oId <- execute sql (h, t, blob) >> queryOne (maybeObjectIdForPrimaryHashId h) - saveHashObject h oId 1 -- todo: remove this from here, and add it to other relevant places once there are v1 and v2 hashes + saveHashObject h oId 2 -- todo: remove this from here, and add it to other relevant places once there are v1 and v2 hashes pure oId where sql = [here| From c3c5d69ac3c77ff3b08e60e5c441ff6749a1b375 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 21 Jan 2022 15:06:34 -0600 Subject: [PATCH 212/297] Fix hashes in gitsync tests --- unison-cli/tests/Unison/Test/GitSync.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/unison-cli/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs index 9515ca0978..747de5b60f 100644 --- a/unison-cli/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -283,7 +283,7 @@ test = scope "gitsync22" . tests $ ```ucm .> pull ${repo} .> history - .> reset-root #0u7no051k7 + .> reset-root #l43v9nr16v .> history ``` |]) -- Not sure why this hash is here. @@ -595,7 +595,7 @@ gistTest fmt = userScript repo = [i| ```ucm - .> pull ${repo}:#frj8ob9ugr + .> pull ${repo}:#td09c6jlks .> find ``` ```unison From f430921b121cc7e5f2af12b9a9ed2d1d4b221a04 Mon Sep 17 00:00:00 2001 From: Mitchell Rosen Date: Mon, 24 Jan 2022 12:43:11 -0500 Subject: [PATCH 213/297] =?UTF-8?q?=E2=85=84=20trunk=20=E2=86=92=20topic/r?= =?UTF-8?q?ehash-codebase?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CONTRIBUTORS.markdown | 2 + .../src/Unison/Codebase/Editor/Git.hs | 23 ++- .../Codebase/SqliteCodebase/Conversions.hs | 15 +- parser-typechecker/src/Unison/FileParser.hs | 45 ++++++ parser-typechecker/src/Unison/Lexer.hs | 6 +- parser-typechecker/src/Unison/Parser.hs | 17 +-- parser-typechecker/src/Unison/PrintError.hs | 100 +++++++------ parser-typechecker/src/Unison/Runtime/ANF.hs | 4 +- .../src/Unison/Runtime/Builtin.hs | 43 ++++-- .../src/Unison/Runtime/Foreign.hs | 24 ++- .../src/Unison/Runtime/IOSource.hs | 4 +- .../src/Unison/Runtime/Interface.hs | 32 +++- .../src/Unison/Runtime/Machine.hs | 4 +- .../src/Unison/Runtime/Pattern.hs | 4 +- parser-typechecker/src/Unison/TermParser.hs | 3 +- parser-typechecker/src/Unison/TermPrinter.hs | 56 ++++--- .../src/Unison/Typechecker/Context.hs | 96 ++++++------ parser-typechecker/src/Unison/UnisonFile.hs | 6 +- parser-typechecker/src/Unison/Util/Star3.hs | 15 +- parser-typechecker/tests/Unison/Test/MCode.hs | 2 +- .../tests/Unison/Test/TermPrinter.hs | 1 - .../src/Unison/Codebase/Editor/HandleInput.hs | 5 +- .../src/Unison/Codebase/Editor/Propagate.hs | 28 +++- .../Unison/Codebase/Editor/SlurpComponent.hs | 138 ++++++++++-------- .../src/Unison/Codebase/TranscriptParser.hs | 79 ++++++---- unison-cli/src/Unison/CommandLine/Globbing.hs | 79 +++++----- unison-cli/tests/Unison/Test/GitSync.hs | 75 +++++++++- unison-core/src/Unison/HashQualified.hs | 1 + unison-core/src/Unison/Reference.hs | 5 +- unison-core/src/Unison/Referent.hs | 8 +- unison-core/src/Unison/Term.hs | 41 ++++++ unison-src/tests/324.u | 2 +- unison-src/tests/cce.u | 6 +- unison-src/tests/compose-inference.u | 2 +- unison-src/tests/handler-stacking.u | 6 +- unison-src/tests/hang.u | 6 +- unison-src/tests/inner-lambda1.u | 2 +- unison-src/tests/inner-lambda2.u | 2 +- .../tests/lambda-closing-over-effectful-fn.u | 2 +- unison-src/tests/mergesort.u | 6 +- unison-src/tests/methodical/cycle-minimize.u | 2 +- unison-src/tests/methodical/loop.u | 2 +- unison-src/tests/methodical/nat.u | 8 +- .../tests/methodical/overapply-ability.u | 2 +- unison-src/tests/methodical/scopedtypevars.u | 6 +- unison-src/tests/methodical/universals.u | 8 +- unison-src/tests/soe.u | 6 +- unison-src/tests/soe2.u | 4 +- unison-src/tests/spurious-ability-fail.u | 6 +- unison-src/tests/stream.u | 4 +- unison-src/tests/tictactoe.u | 8 +- unison-src/tests/underscore-parsing.u | 4 - .../doc.md.files/syntax.u | 8 + .../transcripts-using-base/doc.output.md | 24 +++ .../test-watch-dependencies.md | 43 ++++++ .../test-watch-dependencies.output.md | 91 ++++++++++++ unison-src/transcripts/blocks.md | 2 +- unison-src/transcripts/blocks.output.md | 2 +- .../boolean-op-pretty-print-2819.md | 18 +++ .../boolean-op-pretty-print-2819.output.md | 35 +++++ .../transcripts/bug-strange-closure.output.md | 55 +++++++ unison-src/transcripts/builtins.md | 72 ++++----- unison-src/transcripts/builtins.output.md | 72 ++++----- .../transcripts/duplicate-term-detection.md | 42 ++++++ .../duplicate-term-detection.output.md | 98 +++++++++++++ unison-src/transcripts/fix1578.md | 2 +- unison-src/transcripts/fix1578.output.md | 2 +- unison-src/transcripts/fix2053.output.md | 9 +- unison-src/transcripts/io.md | 5 +- unison-src/transcripts/io.output.md | 18 ++- unison-src/transcripts/type-deps.md | 32 ++++ unison-src/transcripts/type-deps.output.md | 56 +++++++ unison-src/transcripts/universal-cmp.md | 10 ++ .../transcripts/universal-cmp.output.md | 43 ++++++ 74 files changed, 1334 insertions(+), 460 deletions(-) create mode 100644 unison-src/transcripts-using-base/test-watch-dependencies.md create mode 100644 unison-src/transcripts-using-base/test-watch-dependencies.output.md create mode 100644 unison-src/transcripts/boolean-op-pretty-print-2819.md create mode 100644 unison-src/transcripts/boolean-op-pretty-print-2819.output.md create mode 100644 unison-src/transcripts/duplicate-term-detection.md create mode 100644 unison-src/transcripts/duplicate-term-detection.output.md create mode 100644 unison-src/transcripts/type-deps.md create mode 100644 unison-src/transcripts/type-deps.output.md diff --git a/CONTRIBUTORS.markdown b/CONTRIBUTORS.markdown index 68040bd282..5cc30bb67e 100644 --- a/CONTRIBUTORS.markdown +++ b/CONTRIBUTORS.markdown @@ -63,3 +63,5 @@ The format for this list: name, GitHub handle, and then optional blurb about wha * Alberto Flores (@albertoefguerrero) * Shawn Bachlet (@shawn-bachlet) * Solomon Bothwell (@solomon-b) +* Sameer Kolhar (@kolharsam) +* Nicole Prindle (@nprindle) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs index a68072d9d3..7e253cde11 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs @@ -9,6 +9,9 @@ module Unison.Codebase.Editor.Git withIOError, withStatus, withIsolatedRepo, + + -- * Exported for testing + gitCacheDir, ) where @@ -126,10 +129,18 @@ pullRepo repo@(ReadGitRepo uri) = do withStatus ("Updating cached copy of " ++ Text.unpack uri ++ " ...") $ do -- Fetch only the latest commit, we don't need history. gitIn localPath (["fetch", "origin", remoteRef, "--quiet"] ++ ["--depth", "1"]) - -- Reset our branch to point at the latest code from the remote. - gitIn localPath ["reset", "--hard", "--quiet", "FETCH_HEAD"] - -- Wipe out any unwanted files which might be sitting around, but aren't in the commit. - gitIn localPath ["clean", "-d", "--force", "--quiet"] + fetchHeadHash <- gitTextIn localPath ["rev-parse", "FETCH_HEAD"] + headHash <- gitTextIn localPath ["rev-parse", "HEAD"] + -- Only do a hard reset if the remote has actually changed. + -- This allows us to persist any codebase migrations in the dirty work tree, + -- and avoid re-migrating a codebase we've migrated before. + when (fetchHeadHash /= headHash) do + -- Reset our branch to point at the latest code from the remote. + gitIn localPath ["reset", "--hard", "--quiet", "FETCH_HEAD"] + -- Wipe out any unwanted files which might be sitting around, but aren't in the commit. + -- Note that this wipes out any in-progress work which other ucm processes may + -- have in progress, which we may want to handle more nicely in the future. + gitIn localPath ["clean", "-d", "--force", "--quiet"] pure True when (not succeeded) $ goFromScratch @@ -137,7 +148,9 @@ pullRepo repo@(ReadGitRepo uri) = do remoteRef :: Text remoteRef = fromMaybe "HEAD" maybeRemoteRef goFromScratch :: (MonadIO m, MonadError GitProtocolError m) => m () - goFromScratch = do wipeDir localPath; checkOutNew localPath Nothing + goFromScratch = do + wipeDir localPath + checkOutNew localPath Nothing isEmptyGitRepo :: MonadIO m => FilePath -> m Bool isEmptyGitRepo localPath = liftIO $ diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index 3bad48d580..191746ff96 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -4,12 +4,13 @@ module Unison.Codebase.SqliteCodebase.Conversions where import Data.Bifunctor (Bifunctor (bimap)) +import Data.Bitraversable (Bitraversable (bitraverse)) import Data.Either (fromRight) import Data.Foldable (Foldable (foldl', toList)) import Data.Map (Map) import qualified Data.Map as Map import qualified Data.Set as Set -import Data.Text (Text, pack) +import Data.Text (Text, pack, unpack) import qualified U.Codebase.Branch as V2.Branch import qualified U.Codebase.Causal as V2 import qualified U.Codebase.Decl as V2.Decl @@ -30,7 +31,6 @@ import qualified U.Codebase.WatchKind as V2.WatchKind import qualified U.Core.ABT as V2.ABT import qualified U.Util.Hash as V2 import qualified U.Util.Hash as V2.Hash -import qualified Unison.Util.Map as Map import qualified Unison.ABT as V1.ABT import qualified Unison.Codebase.Branch as V1.Branch import qualified Unison.Codebase.Causal.Type as V1.Causal @@ -39,10 +39,10 @@ import qualified Unison.Codebase.Patch as V1 import qualified Unison.Codebase.ShortBranchHash as V1 import qualified Unison.Codebase.TermEdit as V1.TermEdit import qualified Unison.Codebase.TypeEdit as V1.TypeEdit -import qualified Unison.ConstructorReference as V1 (GConstructorReference(..)) +import qualified Unison.ConstructorReference as V1 (GConstructorReference (..)) import qualified Unison.ConstructorType as CT import qualified Unison.DataDeclaration as V1.Decl -import Unison.Hash (Hash) +import Unison.Hash (Hash, base32Hex) import qualified Unison.Hash as V1 import qualified Unison.Kind as V1.Kind import qualified Unison.NameSegment as V1 @@ -56,6 +56,7 @@ import qualified Unison.Referent as V1.Referent import qualified Unison.Symbol as V1 import qualified Unison.Term as V1.Term import qualified Unison.Type as V1.Type +import qualified Unison.Util.Map as Map import qualified Unison.Util.Relation as Relation import qualified Unison.Util.Star3 as V1.Star3 import qualified Unison.Var as Var @@ -119,7 +120,7 @@ term1to2 h = V1.Term.Match e cases -> V2.Term.Match e (goCase <$> cases) V1.Term.TermLink r -> V2.Term.TermLink (rreferent1to2 h r) V1.Term.TypeLink r -> V2.Term.TypeLink (reference1to2 r) - V1.Term.Blank _ -> error "can't serialize term with blanks" + V1.Term.Blank _ -> error ("can't serialize term with blanks (" ++ unpack (base32Hex h) ++ ")") goCase (V1.Term.MatchCase p g b) = V2.Term.MatchCase (goPat p) g b @@ -248,8 +249,8 @@ symbol1to2 (V1.Symbol i varType) = V2.Symbol i (Var.rawName varType) shortHashSuffix1to2 :: Text -> V1.Reference.Pos shortHashSuffix1to2 = - fromRight (error "todo: move suffix parsing to frontend") - . V1.Reference.readSuffix + -- todo: move suffix parsing to frontend + either error id . V1.Reference.readSuffix abt2to1 :: Functor f => V2.ABT.Term f v a -> V1.ABT.Term f v a abt2to1 (V2.ABT.Term fv a out) = V1.ABT.Term fv a (go out) diff --git a/parser-typechecker/src/Unison/FileParser.hs b/parser-typechecker/src/Unison/FileParser.hs index a63801b849..bc8d7cdc1a 100644 --- a/parser-typechecker/src/Unison/FileParser.hs +++ b/parser-typechecker/src/Unison/FileParser.hs @@ -37,6 +37,7 @@ import qualified Unison.NamesWithHistory as NamesWithHistory import qualified Unison.Names as Names import qualified Unison.Names.ResolutionResult as Names import qualified Unison.Name as Name +import qualified Unison.UnisonFile as UF resolutionFailures :: Ord v => [Names.ResolutionFailure v Ann] -> P v x resolutionFailures es = P.customFailure (ResolutionFailures es) @@ -123,8 +124,52 @@ file = do ] uf = UnisonFileId (UF.datasId env) (UF.effectsId env) (terms <> join accessors) (List.multimap watches) + validateUnisonFile uf pure uf +-- | Final validations and sanity checks to perform before finishing parsing. +validateUnisonFile :: forall v . Var v => UnisonFile v Ann -> P v () +validateUnisonFile uf = + checkForDuplicateTermsAndConstructors uf + +-- | Because types and abilities can introduce their own constructors and fields it's difficult +-- to detect all duplicate terms during parsing itself. Here we collect all terms and +-- constructors and verify that no duplicates exist in the file, triggering an error if needed. +checkForDuplicateTermsAndConstructors :: + forall v. + (Ord v) => + UnisonFile v Ann -> + P v () +checkForDuplicateTermsAndConstructors uf = do + when (not . null $ duplicates) $ do + let dupeList :: [(v, [Ann])] + dupeList = duplicates + & fmap Set.toList + & Map.toList + P.customFailure (DuplicateTermNames dupeList) + where + effectDecls :: [DataDeclaration v Ann] + effectDecls = (Map.elems . fmap (DD.toDataDecl . snd) $ (effectDeclarationsId uf)) + dataDecls :: [DataDeclaration v Ann] + dataDecls = fmap snd $ Map.elems (dataDeclarationsId uf) + allConstructors :: [(v, Ann)] + allConstructors = + (dataDecls <> effectDecls) + & foldMap DD.constructors' + & fmap (\(ann, v, _typ) -> (v, ann)) + allTerms :: [(v, Ann)] + allTerms = + UF.terms uf + <&> (\(v, t) -> (v, ABT.annotation t)) + mergedTerms :: Map v (Set Ann) + mergedTerms = (allConstructors <> allTerms) + & (fmap . fmap) Set.singleton + & Map.fromListWith (<>) + duplicates :: Map v (Set Ann) + duplicates = + -- Any vars with multiple annotations are duplicates. + Map.filter ((> 1) . Set.size) mergedTerms + -- A stanza is either a watch expression like: -- > 1 + x -- > z = x + 1 diff --git a/parser-typechecker/src/Unison/Lexer.hs b/parser-typechecker/src/Unison/Lexer.hs index cc7fcb9ba3..d0177903dc 100644 --- a/parser-typechecker/src/Unison/Lexer.hs +++ b/parser-typechecker/src/Unison/Lexer.hs @@ -295,7 +295,7 @@ lexemes' eof = P.optional space >> do where toks = doc2 <|> doc <|> token numeric <|> token character <|> reserved <|> token symbolyId <|> token blank <|> token wordyId - <|> (asum . map token) [ semi, textual, backticks, hash ] + <|> (asum . map token) [ semi, textual, hash ] wordySep c = isSpace c || not (wordyIdChar c) positioned p = do start <- pos; a <- p; stop <- pos; pure (start, a, stop) @@ -682,10 +682,6 @@ lexemes' eof = P.optional space >> do where sp = lit "\\s" $> ' ' character = Character <$> (char '?' *> (spEsc <|> LP.charLiteral)) where spEsc = P.try (char '\\' *> char 's' $> ' ') - backticks = tick <$> (t *> wordyId <* t) - where tick (WordyId n sh) = Backticks n sh - tick t = t - t = char '`' <* P.notFollowedBy (char '`') wordyId :: P Lexeme wordyId = P.label wordyMsg . P.try $ do dot <- P.optional (lit ".") diff --git a/parser-typechecker/src/Unison/Parser.hs b/parser-typechecker/src/Unison/Parser.hs index c7afddb719..5c4d7d599d 100644 --- a/parser-typechecker/src/Unison/Parser.hs +++ b/parser-typechecker/src/Unison/Parser.hs @@ -325,13 +325,9 @@ symbolyIdString = queryToken $ \case L.SymbolyId s Nothing -> Just s _ -> Nothing --- Parse an infix id e.g. + or `cons`, discarding any hash +-- Parse an infix id e.g. + or Docs.++, discarding any hash infixDefinitionName :: Var v => P v (L.Token v) -infixDefinitionName = symbolyDefinitionName <|> backticked where - backticked :: Var v => P v (L.Token v) - backticked = queryToken $ \case - L.Backticks s _ -> Just $ Var.nameds s - _ -> Nothing +infixDefinitionName = symbolyDefinitionName -- Parse a symboly ID like >>= or &&, discarding any hash symbolyDefinitionName :: Var v => P v (L.Token v) @@ -344,7 +340,7 @@ parenthesize p = P.try (openBlockWith "(" *> p) <* closeBlock hqPrefixId, hqInfixId :: Ord v => P v (L.Token (HQ.HashQualified Name)) hqPrefixId = hqWordyId_ <|> parenthesize hqSymbolyId_ -hqInfixId = hqSymbolyId_ <|> hqBacktickedId_ +hqInfixId = hqSymbolyId_ -- Parse a hash-qualified alphanumeric identifier hqWordyId_ :: Ord v => P v (L.Token (HQ.HashQualified Name)) @@ -364,13 +360,6 @@ hqSymbolyId_ = queryToken $ \case L.SymbolyId s Nothing -> Just $ HQ.NameOnly (Name.unsafeFromString s) _ -> Nothing -hqBacktickedId_ :: Ord v => P v (L.Token (HQ.HashQualified Name)) -hqBacktickedId_ = queryToken $ \case - L.Backticks "" (Just h) -> Just $ HQ.HashOnly h - L.Backticks s (Just h) -> Just $ HQ.HashQualified (Name.unsafeFromString s) h - L.Backticks s Nothing -> Just $ HQ.NameOnly (Name.unsafeFromString s) - _ -> Nothing - -- Parse a reserved word reserved :: Ord v => String -> P v (L.Token String) reserved w = label w $ queryToken getReserved diff --git a/parser-typechecker/src/Unison/PrintError.hs b/parser-typechecker/src/Unison/PrintError.hs index 02e48f1adb..61dd156fff 100644 --- a/parser-typechecker/src/Unison/PrintError.hs +++ b/parser-typechecker/src/Unison/PrintError.hs @@ -526,6 +526,15 @@ renderTypeError e env src = case e of [Type.var () (Var.named "e"), Type.var () (Var.named "a") ]) (Type.var () (Var.named "o")) + Other (C.cause -> C.PatternArityMismatch loc typ num) -> + Pr.lines [ + Pr.wrap "This pattern has the wrong number of arguments", "", + annotatedAsErrorSite src loc, + "The constructor has type ", "", + Pr.indentN 2 (stylePretty Type1 (Pr.group (renderType' env typ))), "", + "but you supplied " <> (Pr.shown num) <> " arguments." + ] + Other note -> mconcat [ "Sorry, you hit an error we didn't make a nice message for yet.\n\n" , "Here is a summary of the Note:\n" @@ -589,52 +598,53 @@ renderTypeError e env src = case e of , " " , simpleCause (C.cause note) , "\n" - , case toList (C.path note) of - [] -> " path: (empty)\n" - l -> " path:\n" <> mconcat (simplePath <$> l) +-- This can be very slow to print in large file. This was taking several minutes to print out the path in a file when the error occurred deep in the file after many other let bindings - stew +-- , case toList (C.path note) of +-- [] -> " path: (empty)\n" +-- l -> " path:\n" <> mconcat (simplePath <$> l) ] - simplePath :: C.PathElement v loc -> Pretty ColorText - simplePath e = " " <> simplePath' e <> "\n" - simplePath' :: C.PathElement v loc -> Pretty ColorText - simplePath' = \case - C.InSynthesize e -> "InSynthesize e=" <> renderTerm env e - C.InEquate t1 t2 -> - "InEquate t1=" <> renderType' env t1 <> - ", t2=" <> renderType' env t2 - C.InSubtype t1 t2 -> - "InSubtype t1=" <> renderType' env t1 <> ", t2=" <> renderType' env t2 - C.InCheck e t -> - "InCheck e=" <> renderTerm env e <> "," <> " t=" <> renderType' env t - C.InInstantiateL v t -> - "InInstantiateL v=" <> renderVar v <> ", t=" <> renderType' env t - C.InInstantiateR t v -> - "InInstantiateR t=" <> renderType' env t <> " v=" <> renderVar v - C.InSynthesizeApp t e n -> - "InSynthesizeApp t=" - <> renderType' env t - <> ", e=" - <> renderTerm env e - <> ", n=" - <> fromString (show n) - C.InFunctionCall vs f ft es -> - "InFunctionCall vs=[" - <> commas renderVar vs - <> "]" - <> ", f=" - <> renderTerm env f - <> ", ft=" - <> renderType' env ft - <> ", es=[" - <> commas (renderTerm env) es - <> "]" - C.InIfCond -> "InIfCond" - C.InIfBody loc -> "InIfBody thenBody=" <> annotatedToEnglish loc - C.InAndApp -> "InAndApp" - C.InOrApp -> "InOrApp" - C.InVectorApp loc -> "InVectorApp firstTerm=" <> annotatedToEnglish loc - C.InMatch loc -> "InMatch firstBody=" <> annotatedToEnglish loc - C.InMatchGuard -> "InMatchGuard" - C.InMatchBody -> "InMatchBody" +-- simplePath :: C.PathElement v loc -> Pretty ColorText +-- simplePath e = " " <> simplePath' e <> "\n" +-- simplePath' :: C.PathElement v loc -> Pretty ColorText +-- simplePath' = \case +-- C.InSynthesize e -> "InSynthesize e=" <> renderTerm env e +-- C.InEquate t1 t2 -> +-- "InEquate t1=" <> renderType' env t1 <> +-- ", t2=" <> renderType' env t2 +-- C.InSubtype t1 t2 -> +-- "InSubtype t1=" <> renderType' env t1 <> ", t2=" <> renderType' env t2 +-- C.InCheck e t -> +-- "InCheck e=" <> renderTerm env e <> "," <> " t=" <> renderType' env t +-- C.InInstantiateL v t -> +-- "InInstantiateL v=" <> renderVar v <> ", t=" <> renderType' env t +-- C.InInstantiateR t v -> +-- "InInstantiateR t=" <> renderType' env t <> " v=" <> renderVar v +-- C.InSynthesizeApp t e n -> +-- "InSynthesizeApp t=" +-- <> renderType' env t +-- <> ", e=" +-- <> renderTerm env e +-- <> ", n=" +-- <> fromString (show n) +-- C.InFunctionCall vs f ft es -> +-- "InFunctionCall vs=[" +-- <> commas renderVar vs +-- <> "]" +-- <> ", f=" +-- <> renderTerm env f +-- <> ", ft=" +-- <> renderType' env ft +-- <> ", es=[" +-- <> commas (renderTerm env) es +-- <> "]" +-- C.InIfCond -> "InIfCond" +-- C.InIfBody loc -> "InIfBody thenBody=" <> annotatedToEnglish loc +-- C.InAndApp -> "InAndApp" +-- C.InOrApp -> "InOrApp" +-- C.InVectorApp loc -> "InVectorApp firstTerm=" <> annotatedToEnglish loc +-- C.InMatch loc -> "InMatch firstBody=" <> annotatedToEnglish loc +-- C.InMatchGuard -> "InMatchGuard" +-- C.InMatchBody -> "InMatchBody" simpleCause :: C.Cause v loc -> Pretty ColorText simpleCause = \case C.TypeMismatch c -> diff --git a/parser-typechecker/src/Unison/Runtime/ANF.hs b/parser-typechecker/src/Unison/Runtime/ANF.hs index 6afcd6b37e..55536aa0e2 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF.hs @@ -372,9 +372,11 @@ defaultCaseVisitor func m@(Match' scrut cases) a = ABT.annotation m v = Var.freshIn mempty $ typed Var.Blank txt = "pattern match failure in function `" <> func <> "`" + msg = text a $ Data.Text.pack txt + bu = ref a (Builtin "bug") dflt = MatchCase (P.Var a) Nothing . ABT.abs' a v - $ apps' (placeholder a txt) [var a v] + $ apps bu [(a, Ty.tupleTerm [msg, var a v])] defaultCaseVisitor _ _ = Nothing inlineAlias :: Var v => Monoid a => Term v a -> Term v a diff --git a/parser-typechecker/src/Unison/Runtime/Builtin.hs b/parser-typechecker/src/Unison/Runtime/Builtin.hs index de3fc3acfd..93f73a87f4 100644 --- a/parser-typechecker/src/Unison/Runtime/Builtin.hs +++ b/parser-typechecker/src/Unison/Runtime/Builtin.hs @@ -866,24 +866,39 @@ set'buffering instr $ outIoFailUnit s1 s2 s3 u f r (handle,bmode,tag,n,w,s1,s2,s3,u,f,r) = fresh11 -get'buffering'output :: forall v. Var v => v -> v -> v -> ANormal v -get'buffering'output bu n w = - TMatch bu . MatchSum $ mapFromList - [ no'buf --> [] --> TCon Ty.bufferModeRef no'buf [] - , line'buf --> [] --> TCon Ty.bufferModeRef line'buf [] - , block'buf --> [] --> TCon Ty.bufferModeRef block'buf [] - , sblock'buf --> [UN] --> - TAbs w - . TLetD n BX (TCon Ty.natRef 0 [w]) - $ TCon Ty.bufferModeRef sblock'buf [n] - ] +get'buffering'output :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v +get'buffering'output eitherResult stack1 stack2 resultTag failVar successVar = + TMatch eitherResult . MatchSum $ mapFromList + [ (0, ([BX, BX],) + . TAbss [stack1, stack2] + . TLetD failVar BX (TCon Ty.failureRef 0 [stack1, stack2]) + $ left failVar) + , (1, ([UN],) + . TAbs resultTag + . TMatch resultTag . MatchSum $ mapFromList + [ no'buf --> [] --> + TLetD successVar BX (TCon Ty.bufferModeRef no'buf []) + $ right successVar + , line'buf --> [] --> + TLetD successVar BX (TCon Ty.bufferModeRef line'buf []) + $ right successVar + , block'buf --> [] --> + TLetD successVar BX (TCon Ty.bufferModeRef block'buf []) + $ right successVar + , sblock'buf --> [UN] --> + TAbs stack1 + . TLetD stack2 BX (TCon Ty.natRef 0 [stack1]) + . TLetD successVar BX (TCon Ty.bufferModeRef sblock'buf [stack2]) + $ right successVar + ]) + ] get'buffering :: ForeignOp get'buffering - = inBx arg1 result - $ get'buffering'output result n n2 + = inBx arg1 eitherResult + $ get'buffering'output eitherResult n n2 resultTag failVar successVar where - (arg1, result, n, n2) = fresh4 + (arg1, eitherResult, n, n2, resultTag, failVar, successVar) = fresh7 crypto'hash :: ForeignOp crypto'hash instr diff --git a/parser-typechecker/src/Unison/Runtime/Foreign.hs b/parser-typechecker/src/Unison/Runtime/Foreign.hs index a57102554f..a4400984f9 100644 --- a/parser-typechecker/src/Unison/Runtime/Foreign.hs +++ b/parser-typechecker/src/Unison/Runtime/Foreign.hs @@ -40,11 +40,27 @@ data Foreign where promote :: (a -> a -> r) -> b -> c -> r promote (~~) x y = unsafeCoerce x ~~ unsafeCoerce y +tylEq :: Reference -> Reference -> Bool +tylEq r l = r == l +{-# noinline tylEq #-} + +tmlEq :: Referent -> Referent -> Bool +tmlEq r l = r == l +{-# noinline tmlEq #-} + +tylCmp :: Reference -> Reference -> Ordering +tylCmp r l = compare r l +{-# noinline tylCmp #-} + +tmlCmp :: Referent -> Referent -> Ordering +tmlCmp r l = compare r l +{-# noinline tmlCmp #-} + ref2eq :: Reference -> Maybe (a -> b -> Bool) ref2eq r | r == Ty.textRef = Just $ promote ((==) @Text) - | r == Ty.termLinkRef = Just $ promote ((==) @Referent) - | r == Ty.typeLinkRef = Just $ promote ((==) @Reference) + | r == Ty.termLinkRef = Just $ promote tmlEq + | r == Ty.typeLinkRef = Just $ promote tylEq | r == Ty.bytesRef = Just $ promote ((==) @Bytes) -- Note: MVar equality is just reference equality, so it shouldn't -- matter what type the MVar holds. @@ -57,8 +73,8 @@ ref2eq r ref2cmp :: Reference -> Maybe (a -> b -> Ordering) ref2cmp r | r == Ty.textRef = Just $ promote (compare @Text) - | r == Ty.termLinkRef = Just $ promote (compare @Referent) - | r == Ty.typeLinkRef = Just $ promote (compare @Reference) + | r == Ty.termLinkRef = Just $ promote tmlCmp + | r == Ty.typeLinkRef = Just $ promote tylCmp | r == Ty.bytesRef = Just $ promote (compare @Bytes) | r == Ty.threadIdRef = Just $ promote (compare @ThreadId) | otherwise = Nothing diff --git a/parser-typechecker/src/Unison/Runtime/IOSource.hs b/parser-typechecker/src/Unison/Runtime/IOSource.hs index 67ac47906f..60ce2bd484 100644 --- a/parser-typechecker/src/Unison/Runtime/IOSource.hs +++ b/parser-typechecker/src/Unison/Runtime/IOSource.hs @@ -311,8 +311,8 @@ d1 Doc.++ d2 = use Doc2 match (d1,d2) with (Join ds, Join ds2) -> Join (ds List.++ ds2) - (Join ds, _) -> Join (ds `List.snoc` d2) - (_, Join ds) -> Join (d1 `List.cons` ds) + (Join ds, _) -> Join (List.snoc ds d2) + (_, Join ds) -> Join (List.cons d1 ds) _ -> Join [d1,d2] unique[q1905679b27a97a4098bc965574da880c1074183a2c55ff1d481619c7fb8a1e1] type diff --git a/parser-typechecker/src/Unison/Runtime/Interface.hs b/parser-typechecker/src/Unison/Runtime/Interface.hs index f7e1a2f55c..d0d2ddfe2d 100644 --- a/parser-typechecker/src/Unison/Runtime/Interface.hs +++ b/parser-typechecker/src/Unison/Runtime/Interface.hs @@ -280,15 +280,17 @@ intermediateTerm -> Term Symbol -> (SuperGroup Symbol, Map.Map Word64 (Term Symbol)) intermediateTerm ppe ref ctx tm - = final + = first ( superNormalize + . splitPatterns (dspec ctx) + . addDefaultCases tmName + ) + . memorize . lamLift - . splitPatterns (dspec ctx) - . addDefaultCases tmName . saturate (uncurryDspec $ dspec ctx) . inlineAlias $ tm where - final (ll, dcmp) = (superNormalize ll, backrefLifted ll dcmp) + memorize (ll, dcmp) = (ll, backrefLifted ll dcmp) tmName = HQ.toString . termName ppe $ RF.Ref ref prepareEvaluation @@ -362,9 +364,16 @@ executeMainComb -> CCache -> IO () executeMainComb init cc - = eval0 cc Nothing + = flip catch (putStrLn . toANSI 50 <=< handler) + . eval0 cc Nothing . Ins (Pack RF.unitRef 0 ZArgs) $ Call True init (BArg1 0) + where + handler (PE _ msg) = pure msg + handler (BU nm c) = do + crs <- readTVarIO (combRefs cc) + let decom = decompile (backReferenceTm crs (decompTm $ cacheContext cc)) + pure . either id (bugMsg mempty nm) $ decom c bugMsg :: PrettyPrintEnv -> Text -> Term Symbol -> Pretty ColorText @@ -393,6 +402,19 @@ bugMsg ppe name tm , "" , P.indentN 2 $ pretty ppe tm ] + | name == "builtin.bug" + , RF.TupleTerm' [Tm.Text' msg, x] <- tm + , "pattern match failure" `isPrefixOf` msg + = P.callout icon . P.lines $ + [ P.wrap ("I've encountered a" <> P.red (P.text msg) + <> "while scrutinizing:") + , "" + , P.indentN 2 $ pretty ppe x + , "" + , "This happens when calling a function that doesn't handle all \ + \possible inputs" + , sorryMsg + ] bugMsg ppe name tm = P.callout icon . P.lines $ [ P.wrap ("I've encountered a call to" <> P.red (P.text name) <> "with the following value:") diff --git a/parser-typechecker/src/Unison/Runtime/Machine.hs b/parser-typechecker/src/Unison/Runtime/Machine.hs index 0e6430fc60..b501f9f09c 100644 --- a/parser-typechecker/src/Unison/Runtime/Machine.hs +++ b/parser-typechecker/src/Unison/Runtime/Machine.hs @@ -146,7 +146,9 @@ eval0 :: CCache -> ActiveThreads -> Section -> IO () eval0 !env !activeThreads !co = do ustk <- alloc bstk <- alloc - eval env mempty activeThreads ustk bstk KE co + (denv, k) <- + topDEnv <$> readTVarIO (refTy env) <*> readTVarIO (refTm env) + eval env denv activeThreads ustk bstk (k KE) co topDEnv :: M.Map Reference Word64 diff --git a/parser-typechecker/src/Unison/Runtime/Pattern.hs b/parser-typechecker/src/Unison/Runtime/Pattern.hs index dc559293e8..7162d27c02 100644 --- a/parser-typechecker/src/Unison/Runtime/Pattern.hs +++ b/parser-typechecker/src/Unison/Runtime/Pattern.hs @@ -584,7 +584,9 @@ lookupAbil rf (Map.lookup rf -> Just econs) lookupAbil rf _ = Left $ "unknown ability reference: " ++ show rf compile :: Var v => DataSpec -> Ctx v -> PatternMatrix v -> Term v -compile _ _ (PM []) = placeholder () "pattern match failure" +compile _ _ (PM []) = apps' bu [text () "pattern match failure"] + where + bu = ref () (Builtin "bug") compile spec ctx m@(PM (r:rs)) | rowIrrefutable r = case guard r of diff --git a/parser-typechecker/src/Unison/TermParser.hs b/parser-typechecker/src/Unison/TermParser.hs index e1b42df841..7f9c7c1ea8 100644 --- a/parser-typechecker/src/Unison/TermParser.hs +++ b/parser-typechecker/src/Unison/TermParser.hs @@ -910,8 +910,7 @@ destructuringBind = do binding :: forall v. Var v => P v ((Ann, v), Term v Ann) binding = label "binding" $ do typ <- optional typedecl - -- a ++ b = ... OR - -- foo `mappend` bar = ... + -- a ++ b = ... let infixLhs = do (arg1, op) <- P.try $ (,) <$> prefixDefinitionName <*> infixDefinitionName diff --git a/parser-typechecker/src/Unison/TermPrinter.hs b/parser-typechecker/src/Unison/TermPrinter.hs index f2b39ed57b..d10721868f 100644 --- a/parser-typechecker/src/Unison/TermPrinter.hs +++ b/parser-typechecker/src/Unison/TermPrinter.hs @@ -277,18 +277,6 @@ pretty0 pf = branch f branch tm = let (im', uses) = calcImports im tm in uses $ [pretty0 n (ac 0 Block im' doc) tm] - And' x y -> - paren (p >= 10) $ PP.spaced [ - pretty0 n (ac 10 Normal im doc) x, - fmt S.ControlKeyword "&&", - pretty0 n (ac 10 Normal im doc) y - ] - Or' x y -> - paren (p >= 10) $ PP.spaced [ - pretty0 n (ac 10 Normal im doc) x, - fmt S.ControlKeyword "||", - pretty0 n (ac 10 Normal im doc) y - ] LetBlock bs e -> let (im', uses) = calcImports im term in printLet elideUnit bc bs e im' uses @@ -356,7 +344,18 @@ pretty0 fmt S.BytesLiteral "0xs" <> (PP.shown $ Bytes.fromWord8s (map fromIntegral bs)) BinaryAppsPred' apps lastArg -> paren (p >= 3) $ binaryApps apps (pretty0 n (ac 3 Normal im doc) lastArg) + -- Note that && and || are at the same precedence, which can cause + -- confusion, so for clarity we do not want to elide the parentheses in a + -- case like `(x || y) && z`. + (Ands' xs lastArg, _) -> paren (p >= 10) $ + booleanOps (fmt S.ControlKeyword "&&") xs (pretty0 n (ac 10 Normal im doc) lastArg) + (Ors' xs lastArg, _) -> paren (p >= 10) $ + booleanOps (fmt S.ControlKeyword "||") xs (pretty0 n (ac 10 Normal im doc) lastArg) _ -> case (term, nonForcePred) of + OverappliedBinaryAppPred' f a b r | binaryOpsPred f -> + -- Special case for overapplied binary op + paren True (binaryApps [(f, a)] (pretty0 n (ac 3 Normal im doc) b) `PP.hang` + PP.spacedMap (pretty0 n (ac 10 Normal im doc)) r) AppsPred' f args -> paren (p >= 10) $ pretty0 n (ac 10 Normal im doc) f `PP.hang` PP.spacedMap (pretty0 n (ac 10 Normal im doc)) args @@ -397,13 +396,12 @@ pretty0 Normal -> \x -> (fmt S.ControlKeyword "let") `PP.hang` x -- This predicate controls which binary functions we render as infix - -- operators. At the moment the policy is just to render symbolic - -- operators as infix - not 'wordy' function names. So we produce - -- "x + y" and "foo x y" but not "x `foo` y". + -- operators. At the moment the policy is just to render symbolic + -- operators as infix. binaryOpsPred :: Var v => Term3 v PrintAnnotation -> Bool binaryOpsPred = \case - Ref' r | isSymbolic (PrettyPrintEnv.termName n (Referent.Ref r)) -> True - Var' v | isSymbolic (HQ.unsafeFromVar v) -> True + Ref' r -> isSymbolic $ PrettyPrintEnv.termName n (Referent.Ref r) + Var' v -> isSymbolic $ HQ.unsafeFromVar v _ -> False nonForcePred :: Term3 v PrintAnnotation -> Bool @@ -440,6 +438,30 @@ pretty0 , pretty0 n (AmbientContext 10 Normal Infix im doc False) f ] + -- Render sequence of infix &&s or ||s, like [x2, x1], + -- meaning (x1 && x2) && (x3 rendered by the caller), producing + -- "x1 && x2 &&". The result is built from the right. + booleanOps + :: Var v + => Pretty SyntaxText + -> [Term3 v PrintAnnotation] + -> Pretty SyntaxText + -> Pretty SyntaxText + booleanOps op xs last = unbroken `PP.orElse` broken + where + unbroken = PP.spaced (ps ++ [last]) + broken = PP.hang (head ps) . PP.column2 . psCols $ (tail ps ++ [last]) + psCols ps = case take 2 ps of + [x, y] -> (x, y) : psCols (drop 2 ps) + [x] -> [(x, "")] + [] -> [] + _ -> undefined + ps = r =<< reverse xs + r a = + [ pretty0 n (ac (if isBlock a then 12 else 10) Normal im doc) a + , op + ] + prettyPattern :: forall v loc . Var v => PrettyPrintEnv diff --git a/parser-typechecker/src/Unison/Typechecker/Context.hs b/parser-typechecker/src/Unison/Typechecker/Context.hs index 9c7f1ffb81..4867bf1ee3 100644 --- a/parser-typechecker/src/Unison/Typechecker/Context.hs +++ b/parser-typechecker/src/Unison/Typechecker/Context.hs @@ -43,12 +43,13 @@ import Unison.Prelude import Control.Lens (over, _2) import qualified Control.Monad.Fail as MonadFail -import Control.Monad.Reader.Class import Control.Monad.State ( get + , gets , put , StateT , runStateT , evalState + , MonadState ) import Data.Bifunctor ( first , second @@ -126,16 +127,17 @@ instance (Ord loc, Var v) => Eq (Element v loc) where Marker v == Marker v2 = v == v2 _ == _ = False +-- The typechecking state data Env v loc = Env { freshId :: Word64, ctx :: Context v loc } type DataDeclarations v loc = Map Reference (DataDeclaration v loc) type EffectDeclarations v loc = Map Reference (EffectDeclaration v loc) -data Result v loc a = Success (Seq (InfoNote v loc)) a - | TypeError (NESeq (ErrorNote v loc)) (Seq (InfoNote v loc)) - | CompilerBug (CompilerBug v loc) - (Seq (ErrorNote v loc)) -- type errors before hitting the bug - (Seq (InfoNote v loc)) -- info notes before hitting the bug +data Result v loc a = Success !(Seq (InfoNote v loc)) !a + | TypeError !(NESeq (ErrorNote v loc)) !(Seq (InfoNote v loc)) + | CompilerBug !(CompilerBug v loc) + !(Seq (ErrorNote v loc)) -- type errors before hitting the bug + !(Seq (InfoNote v loc)) -- info notes before hitting the bug deriving (Functor) instance Applicative (Result v loc) where @@ -145,11 +147,13 @@ instance Applicative (Result v loc) where TypeError es is <*> r' = TypeError (es NESeq.|>< (typeErrors r')) (is <> infoNotes r') Success is _ <*> TypeError es' is' = TypeError es' (is <> is') Success is f <*> Success is' a = Success (is <> is') (f a) + {-# INLINE (<*>) #-} instance Monad (Result v loc) where s@(Success _ a) >>= f = s *> f a TypeError es is >>= _ = TypeError es is CompilerBug bug es is >>= _ = CompilerBug bug es is + {-# INLINE (>>=) #-} btw' :: InfoNote v loc -> Result v loc () btw' note = Success (Seq.singleton note) () @@ -179,8 +183,14 @@ mapErrors f r = case r of s@(Success _ _) -> s newtype MT v loc f a = MT { - runM :: MEnv v loc -> f (a, Env v loc) -} + runM :: + -- Data declarations in scope + DataDeclarations v loc -> + -- Effect declarations in scope + EffectDeclarations v loc -> + Env v loc -> + f (a, Env v loc) +} deriving stock (Functor) -- | Typechecking monad type M v loc = MT v loc (Result v loc) @@ -190,10 +200,10 @@ type M v loc = MT v loc (Result v loc) type TotalM v loc = MT v loc (Either (CompilerBug v loc)) liftResult :: Result v loc a -> M v loc a -liftResult r = MT (\m -> (, env m) <$> r) +liftResult r = MT (\_ _ env -> (, env) <$> r) liftTotalM :: TotalM v loc a -> M v loc a -liftTotalM (MT m) = MT $ \menv -> case m menv of +liftTotalM (MT m) = MT $ \datas effects env -> case m datas effects env of Left bug -> CompilerBug bug mempty mempty Right a -> Success mempty a @@ -207,7 +217,7 @@ modEnv :: (Env v loc -> Env v loc) -> M v loc () modEnv f = modEnv' $ ((), ) . f modEnv' :: (Env v loc -> (a, Env v loc)) -> M v loc a -modEnv' f = MT (\menv -> pure . f $ env menv) +modEnv' f = MT (\_ _ env -> pure . f $ env) data Unknown = Data | Effect deriving Show @@ -360,14 +370,7 @@ scope' p (ErrorNote cause path) = ErrorNote cause (path `mappend` pure p) -- Add `p` onto the end of the `path` of any `ErrorNote`s emitted by the action scope :: PathElement v loc -> M v loc a -> M v loc a -scope p (MT m) = MT (mapErrors (scope' p) . m) - --- | The typechecking environment -data MEnv v loc = MEnv { - env :: Env v loc, -- The typechecking state - dataDecls :: DataDeclarations v loc, -- Data declarations in scope - effectDecls :: EffectDeclarations v loc -- Effect declarations in scope -} +scope p (MT m) = MT \datas effects env -> mapErrors (scope' p) (m datas effects env) newtype Context v loc = Context [(Element v loc, Info v loc)] @@ -500,11 +503,8 @@ _logContext msg = when debugEnabled $ do usedVars :: Ord v => Context v loc -> Set v usedVars = allVars . info -fromMEnv :: (MEnv v loc -> a) -> M v loc a -fromMEnv f = f <$> ask - getContext :: M v loc (Context v loc) -getContext = fromMEnv $ ctx . env +getContext = gets ctx setContext :: Context v loc -> M v loc () setContext ctx = modEnv (\e -> e { ctx = ctx }) @@ -527,8 +527,9 @@ extendContext e = isReserved (varOf e) >>= \case " That means `freshenVar` is allowed to return it as a fresh variable, which would be wrong." replaceContext :: (Var v, Ord loc) => Element v loc -> [Element v loc] -> M v loc () -replaceContext elem replacement = - fromMEnv (\menv -> find (not . (`isReservedIn` env menv) . varOf) replacement) >>= \case +replaceContext elem replacement = do + env <- get + case find (not . (`isReservedIn` env) . varOf) replacement of Nothing -> modifyContext (replace elem replacement) Just e -> getContext >>= \ctx -> compilerCrash $ IllegalContextExtension ctx e $ @@ -542,7 +543,7 @@ varOf (Ann v _) = v varOf (Marker v) = v isReserved :: Var v => v -> M v loc Bool -isReserved v = fromMEnv $ (v `isReservedIn`) . env +isReserved v = (v `isReservedIn`) <$> get isReservedIn :: Var v => v -> Env v loc -> Bool isReservedIn v e = freshId e > Var.freshId v @@ -659,7 +660,7 @@ extendN ctx es = foldM (flip extend) ctx es -- | doesn't combine notes orElse :: M v loc a -> M v loc a -> M v loc a orElse m1 m2 = MT go where - go menv = runM m1 menv <|> runM m2 menv + go datas effects env = runM m1 datas effects env <|> runM m2 datas effects env s@(Success _ _) <|> _ = s TypeError _ _ <|> r = r CompilerBug _ _ _ <|> r = r -- swallowing bugs for now: when checking whether a type annotation @@ -673,10 +674,10 @@ orElse m1 m2 = MT go where -- hoistMaybe f (Result es is a) = Result es is (f a) getDataDeclarations :: M v loc (DataDeclarations v loc) -getDataDeclarations = fromMEnv dataDecls +getDataDeclarations = MT \datas _ env -> pure (datas, env) getEffectDeclarations :: M v loc (EffectDeclarations v loc) -getEffectDeclarations = fromMEnv effectDecls +getEffectDeclarations = MT \_ effects env -> pure (effects, env) compilerCrash :: CompilerBug v loc -> M v loc a compilerCrash bug = liftResult $ compilerBug bug @@ -2638,8 +2639,8 @@ run -> f a run datas effects m = fmap fst - . runM m - $ MEnv (Env 1 context0) datas effects + . runM m datas effects + $ Env 1 context0 synthesizeClosed' :: (Var v, Ord loc) => [Type v loc] @@ -2660,12 +2661,12 @@ synthesizeClosed' abilities term = do -- Check if the given typechecking action succeeds. succeeds :: M v loc a -> TotalM v loc Bool -succeeds m = do - e <- ask - case runM m e of - Success _ _ -> pure True - TypeError _ _ -> pure False - CompilerBug bug _ _ -> MT (\_ -> Left bug) +succeeds m = + MT \datas effects env -> + case runM m datas effects env of + Success _ _ -> Right (True, env) + TypeError _ _ -> Right (False, env) + CompilerBug bug _ _ -> Left bug -- Check if `t1` is a subtype of `t2`. Doesn't update the typechecking context. isSubtype' :: (Var v, Ord loc) => Type v loc -> Type v loc -> TotalM v loc Bool @@ -2734,24 +2735,19 @@ instance (Ord loc, Var v) => Show (Context v loc) where showElem ctx (Ann v t) = Text.unpack (Var.name v) ++ " : " ++ TP.prettyStr Nothing mempty (apply ctx t) showElem _ (Marker v) = "|"++Text.unpack (Var.name v)++"|" --- MEnv v loc -> (Seq (ErrorNote v loc), (a, Env v loc)) instance Monad f => Monad (MT v loc f) where - return a = MT (\menv -> pure (a, env menv)) - m >>= f = MT go where - go menv = do - (a, env1) <- runM m menv - runM (f a) (menv { env = env1 }) + return = pure + m >>= f = MT \datas effects env0 -> do + (a, env1) <- runM m datas effects env0 + runM (f a) datas effects $! env1 instance Monad f => MonadFail.MonadFail (MT v loc f) where fail = error instance Monad f => Applicative (MT v loc f) where - pure a = MT (\menv -> pure (a, env menv)) + pure a = MT (\_ _ env -> pure (a, env)) (<*>) = ap -instance Functor f => Functor (MT v loc f) where - fmap f (MT m) = MT (\menv -> fmap (first f) (m menv)) - -instance Monad f => MonadReader (MEnv v loc) (MT v loc f) where - ask = MT (\e -> pure (e, env e)) - local f m = MT $ runM m . f +instance Monad f => MonadState (Env v loc) (MT v loc f) where + get = MT \_ _ env -> pure (env, env) + put env = MT \_ _ _ -> pure ((), env) diff --git a/parser-typechecker/src/Unison/UnisonFile.hs b/parser-typechecker/src/Unison/UnisonFile.hs index 0febb11b43..f7a7034e1e 100644 --- a/parser-typechecker/src/Unison/UnisonFile.hs +++ b/parser-typechecker/src/Unison/UnisonFile.hs @@ -135,9 +135,13 @@ indexByReference uf = (tms, tys) tms = Map.fromList [ (r, (tm,ty)) | (Reference.DerivedId r, _wk, tm, ty) <- toList (hashTerms uf) ] +-- | A mapping of all terms in the file by their var name. +-- The returned terms refer to other definitions in the file by their +-- var, not by reference. +-- Includes test watches. allTerms :: Ord v => TypecheckedUnisonFile v a -> Map v (Term v a) allTerms uf = - Map.fromList [ (v, t) | (v, t, _) <- join $ topLevelComponents' uf ] + Map.fromList [ (v, t) | (v, t, _) <- join $ topLevelComponents uf ] -- |the top level components (no watches) plus test watches. topLevelComponents :: TypecheckedUnisonFile v a diff --git a/parser-typechecker/src/Unison/Util/Star3.hs b/parser-typechecker/src/Unison/Util/Star3.hs index d95fec1800..628ffe1435 100644 --- a/parser-typechecker/src/Unison/Util/Star3.hs +++ b/parser-typechecker/src/Unison/Util/Star3.hs @@ -165,16 +165,27 @@ deleteD3 :: (Ord fact, Ord d1, Ord d2, Ord d3) => (fact, d3) -> Star3 fact d1 d2 d3 -> Star3 fact d1 d2 d3 -deleteD3 (f, x) s = Star3 (fact s) (d1 s) (d2 s) d3' where +deleteD3 (f, x) s = garbageCollect f (Star3 (fact s) (d1 s) (d2 s) d3') where d3' = R.delete f x (d3 s) deleteD2 :: (Ord fact, Ord d1, Ord d2, Ord d3) => (fact, d2) -> Star3 fact d1 d2 d3 -> Star3 fact d1 d2 d3 -deleteD2 (f, x) s = Star3 (fact s) (d1 s) d2' (d3 s) where +deleteD2 (f, x) s = garbageCollect f (Star3 (fact s) (d1 s) d2' (d3 s)) where d2' = R.delete f x (d2 s) +-- | Given a possibly-invalid Star3, which may contain the given fact in its fact set that are not related to any d1, +-- d2, or d3, return a valid Star3, with this fact possibly removed. +garbageCollect :: Ord fact => fact -> Star3 fact d1 d2 d3 -> Star3 fact d1 d2 d3 +garbageCollect f star = + star + { fact = + if R.memberDom f (d1 star) || R.memberDom f (d2 star) || R.memberDom f (d3 star) + then fact star + else Set.delete f (fact star) + } + deleteFact :: (Ord fact, Ord d1, Ord d2, Ord d3) => Set fact -> Star3 fact d1 d2 d3 -> Star3 fact d1 d2 d3 deleteFact facts Star3{..} = diff --git a/parser-typechecker/tests/Unison/Test/MCode.hs b/parser-typechecker/tests/Unison/Test/MCode.hs index 84c34408da..34d873cd3f 100644 --- a/parser-typechecker/tests/Unison/Test/MCode.hs +++ b/parser-typechecker/tests/Unison/Test/MCode.hs @@ -72,7 +72,7 @@ env m = mapInsert (bit 24) m asrt :: Section asrt = Ins (Unpack Nothing 0) $ Match 0 - $ Test1 1 (Yield ZArgs) + $ Test1 1 (Yield (BArg1 0)) (Die "assertion failed") multRec :: String diff --git a/parser-typechecker/tests/Unison/Test/TermPrinter.hs b/parser-typechecker/tests/Unison/Test/TermPrinter.hs index 1b3be8df45..373c8d382c 100644 --- a/parser-typechecker/tests/Unison/Test/TermPrinter.hs +++ b/parser-typechecker/tests/Unison/Test/TermPrinter.hs @@ -314,7 +314,6 @@ test = scope "termprinter" $ tests \ x + 1" -- TODO parser looks like lambda body should be a block, but we hit 'unexpected =' , tc "x + y" , tc "x ~ y" - , tcDiff "x `foo` y" "foo x y" , tc "x + (y + z)" , tc "x + y + z" , tc "x + y * z" -- i.e. (x + y) * z ! diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index abb3b0ba08..bc78a73312 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -228,8 +228,6 @@ loop = do eval . Eval $ Branch.getPatch seg (Branch.head b) withFile ambient sourceName lexed@(text, tokens) k = do let getHQ = \case - L.Backticks s (Just sh) -> - Just (HQ.HashQualified (Name.unsafeFromString s) sh) L.WordyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) L.SymbolyId s (Just sh) -> @@ -1820,7 +1818,7 @@ handleUpdate input maybePatchPath hqs = do eval . Eval $ Branch.getPatch seg (Branch.head b) let patchPath = fromMaybe defaultPatchPath maybePatchPath slurpCheckNames <- slurpResultNames - currentPathNames <- currentPathNames + let currentPathNames = slurpCheckNames let sr :: SlurpResult v sr = applySelection hqs uf @@ -3179,7 +3177,6 @@ lexedSource :: Monad m => SourceName -> Source -> Action' m v (NamesWithHistory, lexedSource name src = do let tokens = L.lexer (Text.unpack name) (Text.unpack src) getHQ = \case - L.Backticks s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) L.WordyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) L.SymbolyId s (Just sh) -> Just (HQ.HashQualified (Name.unsafeFromString s) sh) L.Hash sh -> Just (HQ.HashOnly sh) diff --git a/unison-cli/src/Unison/Codebase/Editor/Propagate.hs b/unison-cli/src/Unison/Codebase/Editor/Propagate.hs index 212a31694d..38d5fd23c5 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Propagate.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Propagate.hs @@ -41,6 +41,7 @@ import Unison.Term ( Term ) import Unison.Util.Free ( Free , eval ) +import Unison.Util.Monoid ( foldMapM ) import qualified Unison.Util.Relation as R import Unison.Util.TransitiveClosure ( transitiveClosure ) import Unison.Var ( Var ) @@ -262,7 +263,7 @@ propagate rootNames patch b = case validatePatch patch of [] -> Referent.toString r n : _ -> show n - initialDirty <- R.dom <$> computeFrontier (eval . GetDependents) patch (Names.contains names0) + initialDirty <- computeDirty (eval . GetDependents) patch (Names.contains names0) let initialTypeReplacements = Map.mapMaybe TypeEdit.toReference initialTypeEdits -- TODO: once patches can directly contain constructor replacements, this @@ -632,6 +633,31 @@ applyPropagate patch Edits {..} = do -- typePreservingTermEdits Patch {..} = Patch termEdits mempty -- where termEdits = R.filterRan TermEdit.isTypePreserving _termEdits +-- | Compute the set of "dirty" references. They each: +-- +-- 1. Depend directly on some reference that was edited in the given patch +-- 2. Have a name in the current namespace (the given Names) +-- 3. Are not themselves edited in the given patch. +-- +-- Note: computeDirty a b c = R.dom <$> computeFrontier a b c +computeDirty + :: forall m + . Monad m + => (Reference -> m (Set Reference)) -- eg Codebase.dependents codebase + -> Patch + -> (Reference -> Bool) + -> m (Set Reference) +computeDirty getDependents patch shouldUpdate = + foldMapM (\ref -> keepDirtyDependents <$> getDependents ref) edited + where + -- Given a set of dependent references (satisfying 1. above), keep only the dirty ones (per 2. and 3. above) + keepDirtyDependents :: Set Reference -> Set Reference + keepDirtyDependents = + (`Set.difference` edited) . Set.filter shouldUpdate + + edited :: Set Reference + edited = R.dom (Patch._termEdits patch) <> R.dom (Patch._typeEdits patch) + -- (d, f) when d is "dirty" (needs update), -- f is in the frontier (an edited dependency of d), -- and d depends on f diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs index 04139bf4e4..9a03644fc1 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs @@ -1,88 +1,102 @@ -{- ORMOLU_DISABLE -} -- Remove this when the file is ready to be auto-formatted {-# LANGUAGE PatternSynonyms #-} module Unison.Codebase.Editor.SlurpComponent where -import Unison.Prelude - -import Data.Tuple (swap) -import Unison.Reference ( Reference ) -import Unison.UnisonFile (TypecheckedUnisonFile) import qualified Data.Map as Map import qualified Data.Set as Set +import Data.Tuple (swap) import qualified Unison.DataDeclaration as DD +import Unison.Prelude +import Unison.Reference (Reference) import qualified Unison.Term as Term +import Unison.UnisonFile (TypecheckedUnisonFile) import qualified Unison.UnisonFile as UF -data SlurpComponent v = - SlurpComponent { types :: Set v, terms :: Set v } - deriving (Eq,Ord,Show) +data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v} + deriving (Eq, Ord, Show) isEmpty :: SlurpComponent v -> Bool isEmpty sc = Set.null (types sc) && Set.null (terms sc) empty :: Ord v => SlurpComponent v -empty = SlurpComponent mempty mempty +empty = SlurpComponent {types = mempty, terms = mempty} difference :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v -difference c1 c2 = SlurpComponent types' terms' where - types' = types c1 `Set.difference` types c2 - terms' = terms c1 `Set.difference` terms c2 +difference c1 c2 = SlurpComponent {types = types', terms = terms'} + where + types' = types c1 `Set.difference` types c2 + terms' = terms c1 `Set.difference` terms c2 intersection :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v -intersection c1 c2 = SlurpComponent types' terms' where - types' = types c1 `Set.intersection` types c2 - terms' = terms c1 `Set.intersection` terms c2 +intersection c1 c2 = SlurpComponent {types = types', terms = terms'} + where + types' = types c1 `Set.intersection` types c2 + terms' = terms c1 `Set.intersection` terms c2 instance Ord v => Semigroup (SlurpComponent v) where (<>) = mappend -instance Ord v => Monoid (SlurpComponent v) where - mempty = SlurpComponent mempty mempty - c1 `mappend` c2 = SlurpComponent (types c1 <> types c2) - (terms c1 <> terms c2) +instance Ord v => Monoid (SlurpComponent v) where + mempty = SlurpComponent {types = mempty, terms = mempty} + c1 `mappend` c2 = + SlurpComponent + { types = types c1 <> types c2, + terms = terms c1 <> terms c2 + } -- I'm calling this `closeWithDependencies` because it doesn't just compute -- the dependencies of the inputs, it mixes them together. Make sure this -- is what you want. -closeWithDependencies :: forall v a. Ord v - => TypecheckedUnisonFile v a -> SlurpComponent v -> SlurpComponent v -closeWithDependencies uf inputs = seenDefns where - seenDefns = foldl' termDeps (SlurpComponent mempty seenTypes) (terms inputs) - seenTypes = foldl' typeDeps mempty (types inputs) - - termDeps :: SlurpComponent v -> v -> SlurpComponent v - termDeps seen v | Set.member v (terms seen) = seen - termDeps seen v = fromMaybe seen $ do - term <- findTerm v - let -- get the `v`s for the transitive dependency types - -- (the ones for terms are just the `freeVars below`) - -- although this isn't how you'd do it for a term that's already in codebase - tdeps :: [v] - tdeps = resolveTypes $ Term.dependencies term - seenTypes :: Set v - seenTypes = foldl' typeDeps (types seen) tdeps - seenTerms = Set.insert v (terms seen) - pure $ foldl' termDeps (seen { types = seenTypes - , terms = seenTerms}) - (Term.freeVars term) - - typeDeps :: Set v -> v -> Set v - typeDeps seen v | Set.member v seen = seen - typeDeps seen v = fromMaybe seen $ do - dd <- fmap snd (Map.lookup v (UF.dataDeclarations' uf)) <|> - fmap (DD.toDataDecl . snd) (Map.lookup v (UF.effectDeclarations' uf)) - pure $ foldl' typeDeps (Set.insert v seen) (resolveTypes $ DD.dependencies dd) - - resolveTypes :: Set Reference -> [v] - resolveTypes rs = [ v | r <- Set.toList rs, Just v <- [Map.lookup r typeNames]] - - findTerm :: v -> Maybe (Term.Term v a) - findTerm v = Map.lookup v allTerms - - allTerms = UF.allTerms uf - - typeNames :: Map Reference v - typeNames = invert (fst <$> UF.dataDeclarations' uf) <> invert (fst <$> UF.effectDeclarations' uf) - - invert :: forall k v . Ord k => Ord v => Map k v -> Map v k - invert m = Map.fromList (swap <$> Map.toList m) +closeWithDependencies :: + forall v a. + Ord v => + TypecheckedUnisonFile v a -> + SlurpComponent v -> + SlurpComponent v +closeWithDependencies uf inputs = seenDefns + where + seenDefns = foldl' termDeps (SlurpComponent {types = seenTypes, terms = mempty}) (terms inputs) + seenTypes = foldl' typeDeps mempty (types inputs) + + termDeps :: SlurpComponent v -> v -> SlurpComponent v + termDeps seen v | Set.member v (terms seen) = seen + termDeps seen v = fromMaybe seen $ do + term <- findTerm v + let -- get the `v`s for the transitive dependency types + -- (the ones for terms are just the `freeVars below`) + -- although this isn't how you'd do it for a term that's already in codebase + tdeps :: [v] + tdeps = resolveTypes $ Term.dependencies term + seenTypes :: Set v + seenTypes = foldl' typeDeps (types seen) tdeps + seenTerms = Set.insert v (terms seen) + pure $ + foldl' + termDeps + ( seen + { types = seenTypes, + terms = seenTerms + } + ) + (Term.freeVars term) + + typeDeps :: Set v -> v -> Set v + typeDeps seen v | Set.member v seen = seen + typeDeps seen v = fromMaybe seen $ do + dd <- + fmap snd (Map.lookup v (UF.dataDeclarations' uf)) + <|> fmap (DD.toDataDecl . snd) (Map.lookup v (UF.effectDeclarations' uf)) + pure $ foldl' typeDeps (Set.insert v seen) (resolveTypes $ DD.dependencies dd) + + resolveTypes :: Set Reference -> [v] + resolveTypes rs = [v | r <- Set.toList rs, Just v <- [Map.lookup r typeNames]] + + findTerm :: v -> Maybe (Term.Term v a) + findTerm v = Map.lookup v allTerms + + allTerms = UF.allTerms uf + + typeNames :: Map Reference v + typeNames = invert (fst <$> UF.dataDeclarations' uf) <> invert (fst <$> UF.effectDeclarations' uf) + + invert :: forall k v. Ord k => Ord v => Map k v -> Map v k + invert m = Map.fromList (swap <$> Map.toList m) diff --git a/unison-cli/src/Unison/Codebase/TranscriptParser.hs b/unison-cli/src/Unison/Codebase/TranscriptParser.hs index 084a569663..70d2a6c826 100644 --- a/unison-cli/src/Unison/Codebase/TranscriptParser.hs +++ b/unison-cli/src/Unison/Codebase/TranscriptParser.hs @@ -11,7 +11,7 @@ module Unison.Codebase.TranscriptParser ExpectingError, Hidden, Err, - UcmCommand (..), + UcmLine (..), run, parse, parseFile, @@ -72,16 +72,19 @@ type FenceType = Text data Hidden = Shown | HideOutput | HideAll deriving (Eq, Show) -data UcmCommand = UcmCommand Path.Absolute Text +data UcmLine = + UcmCommand Path.Absolute Text + | UcmComment Text -- Text does not include the '--' prefix. data Stanza - = Ucm Hidden ExpectingError [UcmCommand] + = Ucm Hidden ExpectingError [UcmLine] | Unison Hidden ExpectingError (Maybe ScratchFileName) Text | UnprocessedFence FenceType Text | Unfenced Text -instance Show UcmCommand where +instance Show UcmLine where show (UcmCommand path txt) = show path <> ">" <> Text.unpack txt + show (UcmComment txt) = "--" ++ Text.unpack txt instance Show Stanza where show s = case s of @@ -182,21 +185,26 @@ run version dir configFile stanzas codebase = do dieUnexpectedSuccess awaitInput -- ucm command to run - Just (Just p@(UcmCommand path lineTxt)) -> do - curPath <- readIORef pathRef - if curPath /= path then do - atomically $ Q.undequeue cmdQueue (Just p) - pure $ Right (SwitchBranchI $ Just (Path.absoluteToPath' path)) - else case words . Text.unpack $ lineTxt of - [] -> awaitInput - args -> do - output ("\n" <> show p <> "\n") - numberedArgs <- readIORef numberedArgsRef - currentRoot <- Branch.head <$> readIORef rootBranchRef - case parseInput currentRoot curPath numberedArgs patternMap args of - -- invalid command is treated as a failure - Left msg -> dieWithMsg $ P.toPlain terminalWidth msg - Right input -> pure $ Right input + Just (Just ucmLine) -> do + case ucmLine of + p@(UcmComment {}) -> do + output ("\n" <> show p) + awaitInput + p@(UcmCommand path lineTxt) -> do + curPath <- readIORef pathRef + if curPath /= path then do + atomically $ Q.undequeue cmdQueue (Just p) + pure $ Right (SwitchBranchI $ Just (Path.absoluteToPath' path)) + else case words . Text.unpack $ lineTxt of + [] -> awaitInput + args -> do + output ("\n" <> show p <> "\n") + numberedArgs <- readIORef numberedArgsRef + currentRoot <- Branch.head <$> readIORef rootBranchRef + case parseInput currentRoot curPath numberedArgs patternMap args of + -- invalid command is treated as a failure + Left msg -> dieWithMsg $ P.toPlain terminalWidth msg + Right input -> pure $ Right input Nothing -> do dieUnexpectedSuccess @@ -345,17 +353,26 @@ type P = P.Parsec () Text stanzas :: P [Stanza] stanzas = P.many (fenced <|> unfenced) -ucmCommand :: P UcmCommand -ucmCommand = do - P.lookAhead (word ".") - path <- P.takeWhile1P Nothing (/= '>') - void $ word ">" - line <- P.takeWhileP Nothing (/= '\n') <* spaces - path <- case Path.parsePath' (Text.unpack path) of - Right (Path.unPath' -> Left abs) -> pure abs - Right _ -> fail "expected absolute path" - Left e -> fail e - pure $ UcmCommand path line +ucmLine :: P UcmLine +ucmLine = ucmCommand <|> ucmComment + where + ucmCommand :: P UcmLine + ucmCommand = do + P.lookAhead (word ".") + path <- P.takeWhile1P Nothing (/= '>') + void $ word ">" + line <- P.takeWhileP Nothing (/= '\n') <* spaces + path <- case Path.parsePath' (Text.unpack path) of + Right (Path.unPath' -> Left abs) -> pure abs + Right _ -> fail "expected absolute path" + Left e -> fail e + pure $ UcmCommand path line + + ucmComment :: P UcmLine + ucmComment = do + word "--" + line <- P.takeWhileP Nothing (/= '\n') <* spaces + pure $ UcmComment line fenced :: P Stanza fenced = do @@ -366,7 +383,7 @@ fenced = do hide <- hidden err <- expectingError _ <- spaces - cmds <- many ucmCommand + cmds <- many ucmLine pure $ Ucm hide err cmds else if fenceType == "unison" then do -- todo: this has to be more interesting diff --git a/unison-cli/src/Unison/CommandLine/Globbing.hs b/unison-cli/src/Unison/CommandLine/Globbing.hs index b0ba76784b..67d5c3a017 100644 --- a/unison-cli/src/Unison/CommandLine/Globbing.hs +++ b/unison-cli/src/Unison/CommandLine/Globbing.hs @@ -1,28 +1,27 @@ -{- ORMOLU_DISABLE -} -- Remove this when the file is ready to be auto-formatted -{-# LANGUAGE RankNTypes #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} {-# LANGUAGE ViewPatterns #-} -{-| Provides Globbing for selecting types, terms and namespaces using wildcards. -} +-- | Provides Globbing for selecting types, terms and namespaces using wildcards. module Unison.CommandLine.Globbing - ( expandGlobs - , TargetType(..) - ) where -import Unison.NameSegment (NameSegment (NameSegment)) -import Unison.Codebase.Branch (Branch0) -import qualified Unison.Codebase.Path as Path -import Data.Text (Text) + ( expandGlobs, + TargetType (..), + ) +where + import Control.Lens as Lens hiding (noneOf) +import qualified Data.Either as Either +import qualified Data.Set as Set +import qualified Data.Text as Text +import Unison.Codebase.Branch (Branch0) import qualified Unison.Codebase.Branch as Branch +import qualified Unison.Codebase.Path as Path +import Unison.NameSegment (NameSegment (NameSegment)) import qualified Unison.NameSegment as NameSegment -import qualified Data.Text as Text -import qualified Unison.Util.Star3 as Star3 -import qualified Unison.Util.Relation as Relation -import qualified Data.Set as Set -import Data.Set (Set) +import Unison.Prelude import qualified Unison.Util.Monoid as Monoid -import qualified Data.Either as Either -import Control.Monad (when) +import qualified Unison.Util.Relation as Relation +import qualified Unison.Util.Star3 as Star3 -- | Possible targets which a glob may select. data TargetType @@ -37,9 +36,10 @@ type GlobPath = [Either NameSegment GlobArg] -- | Represents a name segment containing a glob pattern -- e.g. start?end -> GlobArg "start" "end" data GlobArg = GlobArg - { namespacePrefix :: Text - , namespaceSuffix :: Text - } deriving (Show) + { namespacePrefix :: Text, + namespaceSuffix :: Text + } + deriving (Show) -- | Constructs a namespace "matcher" from a 'GlobArg' globPredicate :: Either NameSegment GlobArg -> (NameSegment -> Bool) @@ -61,8 +61,8 @@ expandGlobToNameSegments targets branch globPath = [] -> [] -- If we're at the end of the path, add any targets which match. [segment] -> - Monoid.whenM (Set.member Term targets) matchingTerms - <> Monoid.whenM (Set.member Type targets) matchingTypes + Monoid.whenM (Set.member Term targets) matchingTerms + <> Monoid.whenM (Set.member Type targets) matchingTypes <> Monoid.whenM (Set.member Namespace targets) matchingNamespaces where predicate :: NameSegment -> Bool @@ -74,26 +74,24 @@ expandGlobToNameSegments targets branch globPath = matchingNamesInStar :: (NameSegment -> Bool) -> Branch.Star a NameSegment -> [[NameSegment]] matchingNamesInStar predicate star = star & Star3.d1 - & Relation.ran - & Set.toList - & filter predicate - & fmap (pure @[]) -- Embed each name segment into a path. - -- If we have multiple remaining segments, descend into any children matching the current - -- segment, then keep matching on the remainder of the path. - (segment:rest) -> recursiveMatches + & Relation.ran + & Set.toList + & filter predicate + & fmap (pure @[]) -- Embed each name segment into a path. + -- If we have multiple remaining segments, descend into any children matching the current + -- segment, then keep matching on the remainder of the path. + (segment : rest) -> recursiveMatches where nextBranches :: [(NameSegment, (Branch0 m))] nextBranches = branch ^@.. matchingChildBranches (globPredicate segment) recursiveMatches :: [[NameSegment]] recursiveMatches = - foldMap (\(ns, b) -> (ns:) <$> expandGlobToNameSegments targets b rest) nextBranches + foldMap (\(ns, b) -> (ns :) <$> expandGlobToNameSegments targets b rest) nextBranches -- | Find all child branches whose name matches a predicate. matchingChildBranches :: (NameSegment -> Bool) -> IndexedTraversal' NameSegment (Branch0 m) (Branch0 m) matchingChildBranches keyPredicate = Branch.children0 . indices keyPredicate -data GlobFailure = NoGlobs | NoTargets - -- | Expand a single glob pattern into all matching targets of the specified types. expandGlobs :: forall m. @@ -107,11 +105,10 @@ expandGlobs :: -- | Nothing if arg was not a glob. -- otherwise, fully expanded, absolute paths. E.g. [".base.List.map"] Maybe [String] -expandGlobs targets rootBranch currentPath s = either recover Just $ do +expandGlobs targets rootBranch currentPath s = do + guard (not . null $ targets) let (isAbsolute, globPath) = globbedPathParser (Text.pack s) - -- If we don't have any actual globs, we can fail to fall back to the original argument. - when (not . any Either.isRight $ globPath) (Left NoGlobs) - when (null targets) (Left NoTargets) + guard (any Either.isRight $ globPath) let currentBranch :: Branch0 m currentBranch | isAbsolute = rootBranch @@ -121,10 +118,6 @@ expandGlobs targets rootBranch currentPath s = either recover Just $ do | isAbsolute = (Path.Absolute . Path.unrelative) <$> paths | otherwise = Path.resolve currentPath <$> paths pure (Path.convert <$> relocatedPaths) - where - recover = \case - NoGlobs -> Nothing - NoTargets -> Just [] -- | Parses a single name segment into a GlobArg or a bare segment according to whether -- there's a glob. @@ -133,11 +126,11 @@ expandGlobs targets rootBranch currentPath s = either recover Just $ do -- "to?" -> Left (GlobArg "to" "") -- We unintuitively use '?' for glob patterns right now since they're not valid in names. globbedPathParser :: Text -> (Bool, GlobPath) -globbedPathParser txt = - let (isAbsolute, segments) = +globbedPathParser txt = + let (isAbsolute, segments) = case Text.split (== '.') txt of -- An initial '.' creates an empty split - ("":segments) -> (True, segments) + ("" : segments) -> (True, segments) (segments) -> (False, segments) in (isAbsolute, fmap globArgParser segments) diff --git a/unison-cli/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs index 747de5b60f..b572e3b39b 100644 --- a/unison-cli/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -11,7 +11,7 @@ import Data.String.Here.Interpolated (i) import qualified Data.Text as Text import EasyTest import Shellmet () -import System.Directory (removeDirectoryRecursive) +import System.Directory (removeDirectoryRecursive, doesFileExist) import System.FilePath (()) import qualified System.IO.Temp as Temp import qualified Unison.Codebase as Codebase @@ -22,6 +22,8 @@ import Unison.Symbol (Symbol) import Unison.Test.Ucm (CodebaseFormat, Transcript) import qualified Unison.Test.Ucm as Ucm import Unison.WatchKind (pattern TestWatch) +import qualified Data.Text.IO as Text +import Unison.Codebase.Editor.Git (gitCacheDir) transcriptOutputFile :: String -> FilePath transcriptOutputFile name = @@ -37,6 +39,8 @@ test :: Test () test = scope "gitsync22" . tests $ fastForwardPush : nonFastForwardPush : + localStatePersistence : + localStateUpdate : destroyedRemote : flip map [(Ucm.CodebaseFormat2, "sc")] \(fmt, name) -> scope name $ tests [ @@ -618,6 +622,75 @@ fastForwardPush = scope "fastforward-push" do |] ok +localStatePersistence :: Test () +localStatePersistence = scope "local-state-persistence" do + repo <- io initGitRepo + cachedRepoDir <- io $ gitCacheDir (Text.pack repo) + -- Create some local state in the cached git repo. + let someFilePath = cachedRepoDir "myfile.txt" + let someText = "SOME TEXT" + io $ do + codebase <- Ucm.initCodebase Ucm.CodebaseFormat2 + -- Push some state the remote codebase + -- Then pull to ensure we have a non-empty local git repo. + void $ Ucm.runTranscript codebase [i| + ```ucm + .lib> alias.type ##Nat Nat + .lib> push.create ${repo} + .lib> pull ${repo} + ``` + |] + -- Write a file to our local git cache to represent some changes we may have made to our + -- codebase, e.g. a migration. + Text.writeFile someFilePath someText + void $ Ucm.runTranscript codebase [i| + ```ucm + .lib> pull ${repo} + .lib> push ${repo} + ``` + |] + -- We expect the state in the cached git repo to remain untouched iff the remote + -- hasn't changed. This is helpful for when we need to migrate a remote codebase, + -- we don't want to re-migrate if nothing has changed. + txt <- io $ Text.readFile someFilePath + expectEqual someText txt + +-- | Any untracked files in the cache directory should be wiped out if there's a change +-- on the remote. +localStateUpdate :: Test () +localStateUpdate = scope "local-state-update" do + repo <- io initGitRepo + cachedRepoDir <- io $ gitCacheDir (Text.pack repo) + -- Create some local state in the cached git repo. + let someFilePath = cachedRepoDir "myfile.txt" + let someText = "SOME TEXT" + io $ do + codebase <- Ucm.initCodebase Ucm.CodebaseFormat2 + -- Push some state the remote codebase + -- Then pull to ensure we have a non-empty local git repo. + void $ Ucm.runTranscript codebase [i| + ```ucm + .lib> alias.type ##Nat Nat + .lib> push.create ${repo} + .lib> pull ${repo} + ``` + |] + -- Write a file to our local git cache to represent some changes we may have made to our + -- codebase, e.g. a migration. + Text.writeFile someFilePath someText + -- Push the new change, then pull the new remote. + void $ Ucm.runTranscript codebase [i| + ```ucm + .lib> alias.type ##Nat Nat2 + .lib> push ${repo} + .lib> pull ${repo} + ``` + |] + -- We expect the state in the cached git repo to be wiped out, since there's a new commit on + -- the remote. + fileExists <- io $ doesFileExist someFilePath + expect (not fileExists) + nonFastForwardPush :: Test () nonFastForwardPush = scope "non-fastforward-push" do io do diff --git a/unison-core/src/Unison/HashQualified.hs b/unison-core/src/Unison/HashQualified.hs index a4735caee4..a97af4d77f 100644 --- a/unison-core/src/Unison/HashQualified.hs +++ b/unison-core/src/Unison/HashQualified.hs @@ -28,6 +28,7 @@ data HashQualified n deriving (Eq, Foldable, Traversable, Functor, Show, Generic) stripNamespace :: Text -> HashQualified Name -> HashQualified Name +stripNamespace "" hq = hq stripNamespace namespace hq = case hq of NameOnly name -> NameOnly $ strip name HashQualified name sh -> HashQualified (strip name) sh diff --git a/unison-core/src/Unison/Reference.hs b/unison-core/src/Unison/Reference.hs index 81cfddf183..2f4dbbb8a1 100644 --- a/unison-core/src/Unison/Reference.hs +++ b/unison-core/src/Unison/Reference.hs @@ -126,8 +126,9 @@ showSuffix = Text.pack . show readSuffix :: Text -> Either String Pos readSuffix = \case - pos | Text.all isDigit pos -> Right (read (Text.unpack pos)) - t -> Left ("suffix decoding error: " ++ show t) + pos | Text.all isDigit pos, + Just pos' <- readMaybe (Text.unpack pos) -> Right pos' + t -> Left $ "Invalid reference suffix: " <> show t isPrefixOf :: ShortHash -> Reference -> Bool isPrefixOf sh r = SH.isPrefixOf sh (toShortHash r) diff --git a/unison-core/src/Unison/Referent.hs b/unison-core/src/Unison/Referent.hs index 5d063eac8d..7f5bc47f34 100644 --- a/unison-core/src/Unison/Referent.hs +++ b/unison-core/src/Unison/Referent.hs @@ -115,11 +115,13 @@ fromText t = either (const Nothing) Just $ -- if the string has just one hash at the start, it's just a reference if Text.length refPart == 1 then Ref <$> R.fromText t - else if Text.all Char.isDigit cidPart then do + else if Text.all Char.isDigit cidPart && (not . Text.null) cidPart then do r <- R.fromText (Text.dropEnd 1 refPart) ctorType <- ctorType - let cid = read (Text.unpack cidPart) - pure $ Con (ConstructorReference r cid) ctorType + let maybeCid = readMaybe (Text.unpack cidPart) + case maybeCid of + Nothing -> Left ("invalid constructor id: " <> Text.unpack cidPart) + Just cid -> Right $ Con (ConstructorReference r cid) ctorType else Left ("invalid constructor id: " <> Text.unpack cidPart) where diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index d3875b34c0..31cadf58eb 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -462,10 +462,14 @@ pattern Or' x y <- (ABT.out -> ABT.Tm (Or x y)) pattern Handle' h body <- (ABT.out -> ABT.Tm (Handle h body)) pattern Apps' f args <- (unApps -> Just (f, args)) -- begin pretty-printer helper patterns +pattern Ands' ands lastArg <- (unAnds -> Just (ands, lastArg)) +pattern Ors' ors lastArg <- (unOrs -> Just (ors, lastArg)) pattern AppsPred' f args <- (unAppsPred -> Just (f, args)) pattern BinaryApp' f arg1 arg2 <- (unBinaryApp -> Just (f, arg1, arg2)) pattern BinaryApps' apps lastArg <- (unBinaryApps -> Just (apps, lastArg)) pattern BinaryAppsPred' apps lastArg <- (unBinaryAppsPred -> Just (apps, lastArg)) +pattern OverappliedBinaryAppPred' f arg1 arg2 rest <- + (unOverappliedBinaryAppPred -> Just (f, arg1, arg2, rest)) -- end pretty-printer helper patterns pattern Ann' x t <- (ABT.out -> ABT.Tm (Ann x t)) pattern List' xs <- (ABT.out -> ABT.Tm (List xs)) @@ -795,6 +799,30 @@ unLetRec (unLetRecNamed -> Just (isTop, bs, e)) = Just ) unLetRec _ = Nothing +unAnds + :: Term2 vt at ap v a + -> Maybe + ( [Term2 vt at ap v a] + , Term2 vt at ap v a + ) +unAnds t = case t of + And' i o -> case unAnds i of + Just (as, xLast) -> Just (xLast:as, o) + Nothing -> Just ([i], o) + _ -> Nothing + +unOrs + :: Term2 vt at ap v a + -> Maybe + ( [Term2 vt at ap v a] + , Term2 vt at ap v a + ) +unOrs t = case t of + Or' i o -> case unOrs i of + Just (as, xLast) -> Just (xLast:as, o) + Nothing -> Just ([i], o) + _ -> Nothing + unApps :: Term2 vt at ap v a -> Maybe (Term2 vt at ap v a, [Term2 vt at ap v a]) @@ -817,6 +845,19 @@ unBinaryApp t = case unApps t of Just (f, [arg1, arg2]) -> Just (f, arg1, arg2) _ -> Nothing +-- Special case for overapplied binary operators +unOverappliedBinaryAppPred + :: (Term2 vt at ap v a, Term2 vt at ap v a -> Bool) + -> Maybe + ( Term2 vt at ap v a + , Term2 vt at ap v a + , Term2 vt at ap v a + , [Term2 vt at ap v a] + ) +unOverappliedBinaryAppPred (t, pred) = case unApps t of + Just (f, arg1 : arg2 : rest) | pred f -> Just (f, arg1, arg2, rest) + _ -> Nothing + -- "((a1 `f1` a2) `f2` a3)" becomes "Just ([(a2, f2), (a1, f1)], a3)" unBinaryApps :: Term2 vt at ap v a diff --git a/unison-src/tests/324.u b/unison-src/tests/324.u index 8856a76cf3..72e98d867a 100644 --- a/unison-src/tests/324.u +++ b/unison-src/tests/324.u @@ -1,5 +1,5 @@ foo a b = - if a `Text.eq` "" then + if Text.eq a "" then match Text.size b with 1 -> false _ -> true diff --git a/unison-src/tests/cce.u b/unison-src/tests/cce.u index de53c56965..b26d14c785 100644 --- a/unison-src/tests/cce.u +++ b/unison-src/tests/cce.u @@ -48,7 +48,7 @@ List.map : (a ->{e} b) -> [a] ->{e} [b] List.map f as = go f acc as i = match List.at i as with None -> acc - Some a -> go f (acc `snoc` f a) as (i + 1) + Some a -> go f (snoc acc (f a)) as (i + 1) go f [] as 0 structural type Monoid a = Monoid (a -> a -> a) a @@ -68,8 +68,8 @@ merge lte a b = Some hd1 -> match at 0 b with None -> acc ++ a Some hd2 -> - if hd1 `lte` hd2 then go (acc `snoc` hd1) (drop 1 a) b - else go (acc `snoc` hd2) a (drop 1 b) + if lte hd1 hd2 then go (snoc acc hd1) (drop 1 a) b + else go (snoc acc hd2) a (drop 1 b) go [] a b dmap : (a ->{Remote} b) -> [a] ->{Remote} [b] diff --git a/unison-src/tests/compose-inference.u b/unison-src/tests/compose-inference.u index 984abc6467..29c8e50ba1 100644 --- a/unison-src/tests/compose-inference.u +++ b/unison-src/tests/compose-inference.u @@ -1,4 +1,4 @@ -f `compose` g = x -> f (g x) +compose f g = x -> f (g x) > compose diff --git a/unison-src/tests/handler-stacking.u b/unison-src/tests/handler-stacking.u index 46c2d5c456..456f785087 100644 --- a/unison-src/tests/handler-stacking.u +++ b/unison-src/tests/handler-stacking.u @@ -11,9 +11,9 @@ main = '(tell get) replicate : Nat -> '{e} () -> {e} () replicate n x = - if n `Nat.eq` 0 then () else + if Nat.eq n 0 then () else !x - replicate (n `drop` 1) x + replicate (drop n 1) x structural ability State a where get : {State a} a @@ -30,5 +30,5 @@ stateHandler s = cases writerHandler : [w] -> Request {Writer w} a -> ([w], a) writerHandler ww = cases - { Writer.tell w -> k } -> handle k () with writerHandler (ww `snoc` w) + { Writer.tell w -> k } -> handle k () with writerHandler (snoc ww w) { a } -> (ww, a) diff --git a/unison-src/tests/hang.u b/unison-src/tests/hang.u index 49cd4210af..83038aa2ab 100644 --- a/unison-src/tests/hang.u +++ b/unison-src/tests/hang.u @@ -47,7 +47,7 @@ List.map : (a ->{e} b) -> [a] ->{e} [b] List.map f as = go f acc as i = match List.at i as with None -> acc - Some a -> go f (acc `snoc` f a) as (i + 1) + Some a -> go f (snoc acc (f a)) as (i + 1) go f [] as 0 merge : (a -> a -> Boolean) -> [a] -> [a] -> [a] @@ -58,8 +58,8 @@ merge lte a b = Some hd1 -> match at 0 b with None -> acc ++ a Some hd2 -> - if hd1 `lte` hd2 then go (acc `snoc` hd1) (drop 1 a) b - else go (acc `snoc` hd2) a (drop 1 b) + if lte hd1 hd2 then go (snoc acc hd1) (drop 1 a) b + else go (snoc acc hd2) a (drop 1 b) go [] a b dsort2 : (a -> a -> Boolean) -> [a] ->{Remote} [a] diff --git a/unison-src/tests/inner-lambda1.u b/unison-src/tests/inner-lambda1.u index fb91d0d964..708a59e780 100644 --- a/unison-src/tests/inner-lambda1.u +++ b/unison-src/tests/inner-lambda1.u @@ -10,6 +10,6 @@ search hit bot top = mid = (bot + top) / 2 match hit mid with +0 -> Some mid - -1 -> go bot (mid `drop` 1) + -1 -> go bot (drop mid 1) +1 -> go (mid + 1) top go bot top diff --git a/unison-src/tests/inner-lambda2.u b/unison-src/tests/inner-lambda2.u index 0c8dc80e55..6900e8fd3f 100644 --- a/unison-src/tests/inner-lambda2.u +++ b/unison-src/tests/inner-lambda2.u @@ -11,6 +11,6 @@ search hit bot top = mid = (bot + top) / 2 match hit mid with +0 -> Some mid - -1 -> go bot (mid `drop` 1) + -1 -> go bot (drop mid 1) +1 -> go (mid + 1) top go bot top diff --git a/unison-src/tests/lambda-closing-over-effectful-fn.u b/unison-src/tests/lambda-closing-over-effectful-fn.u index 25db1a0354..fc377d5fb7 100644 --- a/unison-src/tests/lambda-closing-over-effectful-fn.u +++ b/unison-src/tests/lambda-closing-over-effectful-fn.u @@ -4,7 +4,7 @@ unfold : s -> (s ->{z} Optional (a, s)) ->{z} [a] unfold s f = go s acc = match f s with None -> acc - Some (hd, s) -> go s (acc `List.snoc` hd) + Some (hd, s) -> go s (List.snoc acc hd) go s [] > unfold 0 (n -> if n < 5 then Some (n, n + 1) else None) diff --git a/unison-src/tests/mergesort.u b/unison-src/tests/mergesort.u index 1f46d7ba26..2d143f9250 100644 --- a/unison-src/tests/mergesort.u +++ b/unison-src/tests/mergesort.u @@ -19,8 +19,8 @@ merge lte a b = Some hd1 -> match at 0 b with None -> acc ++ a Some hd2 -> - if hd1 `lte` hd2 then - go (acc `snoc` hd1) (drop 1 a) b + if lte hd1 hd2 then + go (snoc acc hd1) (drop 1 a) b else - go (acc `snoc` hd2) a (drop 1 b) + go (snoc acc hd2) a (drop 1 b) go [] a b diff --git a/unison-src/tests/methodical/cycle-minimize.u b/unison-src/tests/methodical/cycle-minimize.u index 837bb58ca2..4028c8b3c6 100644 --- a/unison-src/tests/methodical/cycle-minimize.u +++ b/unison-src/tests/methodical/cycle-minimize.u @@ -7,5 +7,5 @@ structural ability SpaceAttack where ex x = ping x = pong (x + 1) launchMissiles "saturn" - pong x = ping (x `Nat.drop` 1) + pong x = ping (Nat.drop x 1) launchMissiles "neptune" diff --git a/unison-src/tests/methodical/loop.u b/unison-src/tests/methodical/loop.u index 7d5a2484f4..acf06b7c09 100644 --- a/unison-src/tests/methodical/loop.u +++ b/unison-src/tests/methodical/loop.u @@ -3,6 +3,6 @@ use Universal == loop acc n = if n == 0 then acc - else loop (acc + n) (n `drop` 1) + else loop (acc + n) (drop n 1) > loop 0 10000 diff --git a/unison-src/tests/methodical/nat.u b/unison-src/tests/methodical/nat.u index baa4a3c95f..256f022378 100644 --- a/unison-src/tests/methodical/nat.u +++ b/unison-src/tests/methodical/nat.u @@ -20,12 +20,12 @@ withDefault opt d = match opt with trailingZeros 0, trailingZeros 1, trailingZeros 8, - 7 `and` 10, - 7 `or` 10, - 7 `xor` 14, + and 7 10, + or 7 10, + xor 7 14, complement 0, sub 3 2, toFloat 3, toInt 3, toText 3) - \ No newline at end of file + diff --git a/unison-src/tests/methodical/overapply-ability.u b/unison-src/tests/methodical/overapply-ability.u index bb6dfab74d..9157dbb4c1 100644 --- a/unison-src/tests/methodical/overapply-ability.u +++ b/unison-src/tests/methodical/overapply-ability.u @@ -8,7 +8,7 @@ structural ability Zing where unzing = cases {a} -> a - {Zing.zing n -> k} -> handle k (x -> x `drop` n) with unzing + {Zing.zing n -> k} -> handle k (x -> drop x n) with unzing {Zing.zing2 n1 n2 -> k} -> handle k (n3 n4 -> [n1, n2, n3, n4]) with unzing exacth = handle diff --git a/unison-src/tests/methodical/scopedtypevars.u b/unison-src/tests/methodical/scopedtypevars.u index aaf8904bde..990f1c93c2 100644 --- a/unison-src/tests/methodical/scopedtypevars.u +++ b/unison-src/tests/methodical/scopedtypevars.u @@ -22,8 +22,8 @@ merge lte a b = Some hd1 -> match at 0 b with None -> acc ++ a Some hd2 -> - if hd1 `lte` hd2 then - go (acc `snoc` hd1) (drop 1 a) b + if lte hd1 hd2 then + go (snoc acc hd1) (drop 1 a) b else - go (acc `snoc` hd2) a (drop 1 b) + go (snoc acc hd2) a (drop 1 b) go [] a b diff --git a/unison-src/tests/methodical/universals.u b/unison-src/tests/methodical/universals.u index 384cca961a..56979935b3 100644 --- a/unison-src/tests/methodical/universals.u +++ b/unison-src/tests/methodical/universals.u @@ -1,9 +1,9 @@ use Universal == < > <= >= compare -> ([1,2,3] `compare` [1,2,3], - [1,2,3] `compare` [1], - [1] `compare` [1,2,3], - ?a `compare` ?b, +> (compare [1,2,3] [1,2,3], + compare [1,2,3] [1], + compare [1] [1,2,3], + compare ?a ?b, ("hi", "there") == ("hi", "there"), 1 < 1, 1 < 2, diff --git a/unison-src/tests/soe.u b/unison-src/tests/soe.u index 0ce0392ee1..fe640f04e4 100644 --- a/unison-src/tests/soe.u +++ b/unison-src/tests/soe.u @@ -47,7 +47,7 @@ List.map : (a ->{e} b) -> [a] ->{e} [b] List.map f as = go f acc as i = match List.at i as with None -> acc - Some a -> go f (acc `snoc` f a) as (i + 1) + Some a -> go f (snoc acc (f a)) as (i + 1) go f [] as 0 structural type Monoid a = Monoid (a -> a -> a) a @@ -67,8 +67,8 @@ merge lte a b = Some hd1 -> match at 0 b with None -> acc ++ a Some hd2 -> - if hd1 `lte` hd2 then go (acc `snoc` hd1) (drop 1 a) b - else go (acc `snoc` hd2) a (drop 1 b) + if lte hd1 hd2 then go (snoc acc hd1) (drop 1 a) b + else go (snoc acc hd2) a (drop 1 b) go [] a b dmap : (a ->{Remote} b) -> [a] ->{Remote} [b] diff --git a/unison-src/tests/soe2.u b/unison-src/tests/soe2.u index 86001e78f0..9d2011fc10 100644 --- a/unison-src/tests/soe2.u +++ b/unison-src/tests/soe2.u @@ -12,8 +12,8 @@ merge lte a b = (None, _) -> acc ++ b (_, None) -> acc ++ a (Some (h1,t1), Some (h2,t2)) -> - if h1 `lte` h2 then go (acc `snoc` h1) (drop 1 a) b - else go (acc `snoc` h2) a (drop 1 b) + if lte h1 h2 then go (snoc acc h1) (drop 1 a) b + else go (snoc acc h2) a (drop 1 b) go [] a b -- let's make sure it works diff --git a/unison-src/tests/spurious-ability-fail.u b/unison-src/tests/spurious-ability-fail.u index 4bee905a42..9d9372c49e 100644 --- a/unison-src/tests/spurious-ability-fail.u +++ b/unison-src/tests/spurious-ability-fail.u @@ -1,14 +1,14 @@ --The expression in red needs the {𝛆} ability, but this location only has access to the {𝛆} ability, -- --- 8 | odd n = if n == 1 then true else even2 (n `drop` 1) +-- 8 | odd n = if n == 1 then true else even2 (drop n 1) use Universal == even : Nat -> Boolean -even n = if n == 0 then true else odd (n `drop` 1) +even n = if n == 0 then true else odd (drop n 1) odd : Nat -> Boolean -odd n = if n == 1 then true else even2 (n `drop` 1) +odd n = if n == 1 then true else even2 (drop n 1) even2 = even diff --git a/unison-src/tests/stream.u b/unison-src/tests/stream.u index bd170a7042..ea17192b61 100644 --- a/unison-src/tests/stream.u +++ b/unison-src/tests/stream.u @@ -33,7 +33,7 @@ take n s = if n == 0 then () else Emit.emit a - handle k () with step (n `drop` 1) + handle k () with step (drop n 1) {r} -> () Stream ' handle run s with step n @@ -49,7 +49,7 @@ map f s = -- toSeq : Stream {e} a r ->{e} [a] toSeq s = step acc = cases - {Emit.emit a -> k} -> handle k () with step (acc `snoc` a) + {Emit.emit a -> k} -> handle k () with step (snoc acc a) {_} -> acc handle run s with step [] diff --git a/unison-src/tests/tictactoe.u b/unison-src/tests/tictactoe.u index 390b69f33a..584f39544d 100644 --- a/unison-src/tests/tictactoe.u +++ b/unison-src/tests/tictactoe.u @@ -7,6 +7,8 @@ use Board Board use P O X E use Optional Some None +a |> f = f a + orElse a b = match a with None -> b @@ -25,9 +27,9 @@ isWin board = Board a b c d e f g h i -> - (same a b c `orElse` same d e f `orElse` same g h i `orElse` - same a d g `orElse` same b e h `orElse` same c f i `orElse` - same a e i `orElse` same g e c) + (same a b c |> orElse (same d e f) |> orElse (same g h i) + |> orElse (same a d g) |> orElse (same b e h) |> orElse (same c f i) + |> orElse (same a e i) |> orElse (same g e c)) > isWin (Board X O X O X X diff --git a/unison-src/tests/underscore-parsing.u b/unison-src/tests/underscore-parsing.u index 928cebb1a2..2490593b58 100644 --- a/unison-src/tests/underscore-parsing.u +++ b/unison-src/tests/underscore-parsing.u @@ -1,7 +1,3 @@ _prefix = 1 prefix_ _x = _x _prefix_ _ = 2 - -_x `_infix` y_ = (_x, y_) -x_ `infix_` _y = (x_, _y) -_ `_infix_` _ = () diff --git a/unison-src/transcripts-using-base/doc.md.files/syntax.u b/unison-src/transcripts-using-base/doc.md.files/syntax.u index a34ac8d503..3c77d03411 100644 --- a/unison-src/transcripts-using-base/doc.md.files/syntax.u +++ b/unison-src/transcripts-using-base/doc.md.files/syntax.u @@ -73,6 +73,14 @@ id x = x id (sqr 10) ``` +also: + +``` +match 1 with + 1 -> "hi" + _ -> "goodbye" +``` + To include a typechecked snippet of code without evaluating it, you can do: @typecheck ``` diff --git a/unison-src/transcripts-using-base/doc.output.md b/unison-src/transcripts-using-base/doc.output.md index 5905af1fd7..cd9415d268 100644 --- a/unison-src/transcripts-using-base/doc.output.md +++ b/unison-src/transcripts-using-base/doc.output.md @@ -257,6 +257,14 @@ and the rendered output using `display`: id (sqr 10) ``` + also: + + ``` + match 1 with + 1 -> "hi" + _ -> "goodbye" + ``` + To include a typechecked snippet of code without evaluating it, you can do: @@ -281,6 +289,14 @@ and the rendered output using `display`: ⧨ 100 + also: + + match 1 with + 1 -> "hi" + _ -> "goodbye" + ⧨ + "hi" + To include a typechecked snippet of code without evaluating it, you can do: @@ -628,6 +644,14 @@ Lastly, it's common to build longer documents including subdocuments via `{{ sub ⧨ 100 + also: + + match 1 with + 1 -> "hi" + _ -> "goodbye" + ⧨ + "hi" + To include a typechecked snippet of code without evaluating it, you can do: diff --git a/unison-src/transcripts-using-base/test-watch-dependencies.md b/unison-src/transcripts-using-base/test-watch-dependencies.md new file mode 100644 index 0000000000..4d12bfac08 --- /dev/null +++ b/unison-src/transcripts-using-base/test-watch-dependencies.md @@ -0,0 +1,43 @@ +# Ensure test watch dependencies are properly considered. + +https://github.com/unisonweb/unison/issues/2195 + +```ucm:hide +.> builtins.merge +``` + +We add a simple definition. + +```unison:hide +x = 999 +``` + +```ucm:hide +.> add +``` + +Now, we update that definition and define a test-watch which depends on it. + +```unison +x = 1000 +test> mytest = checks [x + 1 == 1001] +``` + +We expect this 'add' to fail because the test is blocked by the update to `x`. + +```ucm:error +.> add +``` + +--- + +```unison +y = 42 +test> useY = checks [y + 1 == 43] +``` + +This should correctly identify `y` as a dependency and add that too. + +```ucm +.> add useY +``` diff --git a/unison-src/transcripts-using-base/test-watch-dependencies.output.md b/unison-src/transcripts-using-base/test-watch-dependencies.output.md new file mode 100644 index 0000000000..62f015c7e8 --- /dev/null +++ b/unison-src/transcripts-using-base/test-watch-dependencies.output.md @@ -0,0 +1,91 @@ +# Ensure test watch dependencies are properly considered. + +https://github.com/unisonweb/unison/issues/2195 + +We add a simple definition. + +```unison +x = 999 +``` + +Now, we update that definition and define a test-watch which depends on it. + +```unison +x = 1000 +test> mytest = checks [x + 1 == 1001] +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + mytest : [Result] + + ⍟ These names already exist. You can `update` them to your + new definition: + + x : Nat + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. + + 2 | test> mytest = checks [x + 1 == 1001] + + ✅ Passed Passed + +``` +We expect this 'add' to fail because the test is blocked by the update to `x`. + +```ucm +.> add + + x These definitions failed: + + Reason + needs update x : Nat + blocked mytest : [Result] + + Tip: Use `help filestatus` to learn more. + +``` +--- + +```unison +y = 42 +test> useY = checks [y + 1 == 43] +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + useY : [Result] + y : Nat + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. + + 2 | test> useY = checks [y + 1 == 43] + + ✅ Passed Passed + +``` +This should correctly identify `y` as a dependency and add that too. + +```ucm +.> add useY + + ⍟ I've added these definitions: + + useY : [Result] + y : Nat + +``` diff --git a/unison-src/transcripts/blocks.md b/unison-src/transcripts/blocks.md index f767391012..bac7ef1874 100644 --- a/unison-src/transcripts/blocks.md +++ b/unison-src/transcripts/blocks.md @@ -68,7 +68,7 @@ sumTo n = -- A recursive function, defined inside a block go acc n = if n == 0 then acc - else go (acc + n) (n `drop` 1) + else go (acc + n) (drop n 1) go 0 n ex n = diff --git a/unison-src/transcripts/blocks.output.md b/unison-src/transcripts/blocks.output.md index 28628efede..458c08efa4 100644 --- a/unison-src/transcripts/blocks.output.md +++ b/unison-src/transcripts/blocks.output.md @@ -136,7 +136,7 @@ sumTo n = -- A recursive function, defined inside a block go acc n = if n == 0 then acc - else go (acc + n) (n `drop` 1) + else go (acc + n) (drop n 1) go 0 n ex n = diff --git a/unison-src/transcripts/boolean-op-pretty-print-2819.md b/unison-src/transcripts/boolean-op-pretty-print-2819.md new file mode 100644 index 0000000000..efdf493e9f --- /dev/null +++ b/unison-src/transcripts/boolean-op-pretty-print-2819.md @@ -0,0 +1,18 @@ +Regression test for https://github.com/unisonweb/unison/pull/2819 + +```ucm:hide +.> builtins.merge +``` + +```unison +hangExample : Boolean +hangExample = + ("a long piece of text to hang the line" == "") + && ("a long piece of text to hang the line" == "") +``` + +```ucm +.> add +.> view hangExample +``` + diff --git a/unison-src/transcripts/boolean-op-pretty-print-2819.output.md b/unison-src/transcripts/boolean-op-pretty-print-2819.output.md new file mode 100644 index 0000000000..c952f4de1e --- /dev/null +++ b/unison-src/transcripts/boolean-op-pretty-print-2819.output.md @@ -0,0 +1,35 @@ +Regression test for https://github.com/unisonweb/unison/pull/2819 + +```unison +hangExample : Boolean +hangExample = + ("a long piece of text to hang the line" == "") + && ("a long piece of text to hang the line" == "") +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + hangExample : Boolean + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + hangExample : Boolean + +.> view hangExample + + hangExample : Boolean + hangExample = + ("a long piece of text to hang the line" == "") + && ("a long piece of text to hang the line" == "") + +``` diff --git a/unison-src/transcripts/bug-strange-closure.output.md b/unison-src/transcripts/bug-strange-closure.output.md index 9e71e2ecc7..e4f8d58675 100644 --- a/unison-src/transcripts/bug-strange-closure.output.md +++ b/unison-src/transcripts/bug-strange-closure.output.md @@ -77,6 +77,14 @@ We can display the guide before and after adding it to the codebase: ⧨ 100 + also: + + match 1 with + 1 -> "hi" + _ -> "goodbye" + ⧨ + "hi" + To include a typechecked snippet of code without evaluating it, you can do: @@ -280,6 +288,14 @@ We can display the guide before and after adding it to the codebase: ⧨ 100 + also: + + match 1 with + 1 -> "hi" + _ -> "goodbye" + ⧨ + "hi" + To include a typechecked snippet of code without evaluating it, you can do: @@ -489,6 +505,14 @@ rendered = Pretty.get (docFormatConsole doc.guide) ⧨ 100 + also: + + match 1 with + 1 -> "hi" + _ -> "goodbye" + ⧨ + "hi" + To include a typechecked snippet of code without evaluating it, you can do: @@ -685,6 +709,14 @@ rendered = Pretty.get (docFormatConsole doc.guide) ⧨ 100 + also: + + match 1 with + 1 -> "hi" + _ -> "goodbye" + ⧨ + "hi" + To include a typechecked snippet of code without evaluating it, you can do: @@ -1803,6 +1835,29 @@ rendered = Pretty.get (docFormatConsole doc.guide) !Indent (!Lit (Right (Plain " "))) (!Lit (Right (Plain " "))) + (!Annotated.Group + (!Wrap + (!Lit (Right (Plain "also:"))))), + !Lit (Right (Plain "\n")), + !Lit (Right (Plain "\n")), + !Indent + (!Lit (Right (Plain " "))) + (!Lit (Right (Plain " "))) + (!Annotated.Group + (!Lit + (Left + (Eval + (Term.Term + (Any + (_ -> + (match 1 with + 1 -> "hi" + _ -> "goodbye")))))))), + !Lit (Right (Plain "\n")), + !Lit (Right (Plain "\n")), + !Indent + (!Lit (Right (Plain " "))) + (!Lit (Right (Plain " "))) (!Annotated.Group (!Wrap (!Annotated.Append diff --git a/unison-src/transcripts/builtins.md b/unison-src/transcripts/builtins.md index 698fc683d2..db1aaeef3a 100644 --- a/unison-src/transcripts/builtins.md +++ b/unison-src/transcripts/builtins.md @@ -18,18 +18,18 @@ use Int -- want to be able to tell which one is failing test> Int.tests.arithmetic = checks [ - +1 + +1 `eq` +2, + eq (+1 + +1) +2, +10 - +4 == +6, - +11 * +6 `eq` +66, - +11 * +6 `eq` +66, + eq (+11 * +6) +66, + eq (+11 * +6) +66, +10 / +3 == +3, +10 / +5 == +2, - +10 `mod` +3 == +1, - +10 `mod` +2 == +0, - -13 `mod` +3 == +2, - -13 `mod` -3 == -1, - -13 `mod` -5 == -3, - -13 `mod` +5 == +2, + mod +10 +3 == +1, + mod +10 +2 == +0, + mod -13 +3 == +2, + mod -13 -3 == -1, + mod -13 -5 == -3, + mod -13 +5 == +2, negate +99 == -99, increment +99 == +100, not (isEven +99), @@ -39,20 +39,20 @@ test> Int.tests.arithmetic = signum +99 == +1, signum -3949 == -1, signum +0 == +0, - +42 `gt` -1, - +42 `lt` +1000, - +43 `lteq` +43, - +43 `lteq` +44, - +43 `gteq` +43, - +43 `gteq` +41 + gt +42 -1, + lt +42 +1000, + lteq +43 +43, + lteq +43 +44, + gteq +43 +43, + gteq +43 +41 ] test> Int.tests.bitTwiddling = checks [ - +5 `and` +4 == +4, - +5 `and` +1 == +1, - +4 `or` +1 == +5, - +5 `xor` +1 == +4, + and +5 +4 == +4, + and +5 +1 == +1, + or +4 +1 == +5, + xor +5 +1 == +4, complement -1 == +0, popCount +1 == 1, popCount +2 == 1, @@ -94,35 +94,35 @@ use Nat test> Nat.tests.arithmetic = checks [ - 1 + 1 `eq` 2, - 10 `drop` 4 == 6, - 10 `sub` 12 == -2, - 11 * 6 `eq` 66, + eq (1 + 1) 2, + drop 10 4 == 6, + sub 10 12 == -2, + eq (11 * 6) 66, 10 / 3 == 3, 10 / 5 == 2, - 10 `mod` 3 == 1, - 10 `mod` 2 == 0, + mod 10 3 == 1, + mod 10 2 == 0, 18446744073709551615 / 2 == 9223372036854775807, - 18446744073709551615 `mod` 2 == 1, + mod 18446744073709551615 2 == 1, increment 99 == 100, not (isEven 99), isEven 100, isOdd 105, not (isOdd 108), - 42 `gt` 1, - 42 `lt` 1000, - 43 `lteq` 43, - 43 `lteq` 44, - 43 `gteq` 43, - 43 `gteq` 41, + gt 42 1, + lt 42 1000, + lteq 43 43, + lteq 43 44, + gteq 43 43, + gteq 43 41, ] test> Nat.tests.bitTwiddling = checks [ - 5 `and` 4 == 4, - 5 `and` 1 == 1, - 4 `or` 1 == 5, - 5 `xor` 1 == 4, + and 5 4 == 4, + and 5 1 == 1, + or 4 1 == 5, + xor 5 1 == 4, complement (complement 0) == 0, popCount 1 == 1, popCount 2 == 1, diff --git a/unison-src/transcripts/builtins.output.md b/unison-src/transcripts/builtins.output.md index 40fa1adcf1..2b6006e00c 100644 --- a/unison-src/transcripts/builtins.output.md +++ b/unison-src/transcripts/builtins.output.md @@ -11,18 +11,18 @@ use Int -- want to be able to tell which one is failing test> Int.tests.arithmetic = checks [ - +1 + +1 `eq` +2, + eq (+1 + +1) +2, +10 - +4 == +6, - +11 * +6 `eq` +66, - +11 * +6 `eq` +66, + eq (+11 * +6) +66, + eq (+11 * +6) +66, +10 / +3 == +3, +10 / +5 == +2, - +10 `mod` +3 == +1, - +10 `mod` +2 == +0, - -13 `mod` +3 == +2, - -13 `mod` -3 == -1, - -13 `mod` -5 == -3, - -13 `mod` +5 == +2, + mod +10 +3 == +1, + mod +10 +2 == +0, + mod -13 +3 == +2, + mod -13 -3 == -1, + mod -13 -5 == -3, + mod -13 +5 == +2, negate +99 == -99, increment +99 == +100, not (isEven +99), @@ -32,20 +32,20 @@ test> Int.tests.arithmetic = signum +99 == +1, signum -3949 == -1, signum +0 == +0, - +42 `gt` -1, - +42 `lt` +1000, - +43 `lteq` +43, - +43 `lteq` +44, - +43 `gteq` +43, - +43 `gteq` +41 + gt +42 -1, + lt +42 +1000, + lteq +43 +43, + lteq +43 +44, + gteq +43 +43, + gteq +43 +41 ] test> Int.tests.bitTwiddling = checks [ - +5 `and` +4 == +4, - +5 `and` +1 == +1, - +4 `or` +1 == +5, - +5 `xor` +1 == +4, + and +5 +4 == +4, + and +5 +1 == +1, + or +4 +1 == +5, + xor +5 +1 == +4, complement -1 == +0, popCount +1 == 1, popCount +2 == 1, @@ -83,35 +83,35 @@ use Nat test> Nat.tests.arithmetic = checks [ - 1 + 1 `eq` 2, - 10 `drop` 4 == 6, - 10 `sub` 12 == -2, - 11 * 6 `eq` 66, + eq (1 + 1) 2, + drop 10 4 == 6, + sub 10 12 == -2, + eq (11 * 6) 66, 10 / 3 == 3, 10 / 5 == 2, - 10 `mod` 3 == 1, - 10 `mod` 2 == 0, + mod 10 3 == 1, + mod 10 2 == 0, 18446744073709551615 / 2 == 9223372036854775807, - 18446744073709551615 `mod` 2 == 1, + mod 18446744073709551615 2 == 1, increment 99 == 100, not (isEven 99), isEven 100, isOdd 105, not (isOdd 108), - 42 `gt` 1, - 42 `lt` 1000, - 43 `lteq` 43, - 43 `lteq` 44, - 43 `gteq` 43, - 43 `gteq` 41, + gt 42 1, + lt 42 1000, + lteq 43 43, + lteq 43 44, + gteq 43 43, + gteq 43 41, ] test> Nat.tests.bitTwiddling = checks [ - 5 `and` 4 == 4, - 5 `and` 1 == 1, - 4 `or` 1 == 5, - 5 `xor` 1 == 4, + and 5 4 == 4, + and 5 1 == 1, + or 4 1 == 5, + xor 5 1 == 4, complement (complement 0) == 0, popCount 1 == 1, popCount 2 == 1, diff --git a/unison-src/transcripts/duplicate-term-detection.md b/unison-src/transcripts/duplicate-term-detection.md new file mode 100644 index 0000000000..61b2a8ebf1 --- /dev/null +++ b/unison-src/transcripts/duplicate-term-detection.md @@ -0,0 +1,42 @@ +# Duplicate Term Detection + +```ucm:hide +.> builtins.merge +``` + + +Trivial duplicate terms should be detected: + +```unison:error +x = 1 +x = 2 +``` + +Equivalent duplicate terms should be detected: + +```unison:error +x = 1 +x = 1 +``` + +Duplicates from record accessors/setters should be detected + +```unison:error +structural type Record = {x: Nat, y: Nat} +Record.x = 1 +Record.x.set = 2 +Record.x.modify = 2 +``` + +Duplicate terms and constructors should be detected: + +```unison:error +structural type SumType = X + +SumType.X = 1 + +structural ability AnAbility where + thing : Nat -> () + +AnAbility.thing = 2 +``` diff --git a/unison-src/transcripts/duplicate-term-detection.output.md b/unison-src/transcripts/duplicate-term-detection.output.md new file mode 100644 index 0000000000..c48aa9592e --- /dev/null +++ b/unison-src/transcripts/duplicate-term-detection.output.md @@ -0,0 +1,98 @@ +# Duplicate Term Detection + +Trivial duplicate terms should be detected: + +```unison +x = 1 +x = 2 +``` + +```ucm + + ❗️ + + I found multiple bindings with the name x: + 1 | x = 1 + 2 | x = 2 + + +``` +Equivalent duplicate terms should be detected: + +```unison +x = 1 +x = 1 +``` + +```ucm + + ❗️ + + I found multiple bindings with the name x: + 1 | x = 1 + 2 | x = 1 + + +``` +Duplicates from record accessors/setters should be detected + +```unison +structural type Record = {x: Nat, y: Nat} +Record.x = 1 +Record.x.set = 2 +Record.x.modify = 2 +``` + +```ucm + + ❗️ + + I found multiple bindings with the name Record.x: + 1 | structural type Record = {x: Nat, y: Nat} + 2 | Record.x = 1 + + + I found multiple bindings with the name Record.x.modify: + 1 | structural type Record = {x: Nat, y: Nat} + 2 | Record.x = 1 + 3 | Record.x.set = 2 + 4 | Record.x.modify = 2 + + + I found multiple bindings with the name Record.x.set: + 1 | structural type Record = {x: Nat, y: Nat} + 2 | Record.x = 1 + 3 | Record.x.set = 2 + + +``` +Duplicate terms and constructors should be detected: + +```unison +structural type SumType = X + +SumType.X = 1 + +structural ability AnAbility where + thing : Nat -> () + +AnAbility.thing = 2 +``` + +```ucm + + ❗️ + + I found multiple bindings with the name AnAbility.thing: + 6 | thing : Nat -> () + 7 | + 8 | AnAbility.thing = 2 + + + I found multiple bindings with the name SumType.X: + 1 | structural type SumType = X + 2 | + 3 | SumType.X = 1 + + +``` diff --git a/unison-src/transcripts/fix1578.md b/unison-src/transcripts/fix1578.md index 7e825348d4..8e2e7958f3 100644 --- a/unison-src/transcripts/fix1578.md +++ b/unison-src/transcripts/fix1578.md @@ -9,7 +9,7 @@ This transcript shows how suffix-based name resolution works when definitions in As setup, we'll add a data type `Day` and a definition `foo.bar : Nat`. ```unison:hide -unique type Day = Sun | Mon | Tue | Wed | Thu | Fri | Sat | Sun +unique type Day = Sun | Mon | Tue | Wed | Thu | Fri | Sat foo.bar : Nat foo.bar = 23 diff --git a/unison-src/transcripts/fix1578.output.md b/unison-src/transcripts/fix1578.output.md index 9ced24641d..1b57bcabd6 100644 --- a/unison-src/transcripts/fix1578.output.md +++ b/unison-src/transcripts/fix1578.output.md @@ -5,7 +5,7 @@ This transcript shows how suffix-based name resolution works when definitions in As setup, we'll add a data type `Day` and a definition `foo.bar : Nat`. ```unison -unique type Day = Sun | Mon | Tue | Wed | Thu | Fri | Sat | Sun +unique type Day = Sun | Mon | Tue | Wed | Thu | Fri | Sat foo.bar : Nat foo.bar = 23 diff --git a/unison-src/transcripts/fix2053.output.md b/unison-src/transcripts/fix2053.output.md index 60fe87aa41..d311f85f82 100644 --- a/unison-src/transcripts/fix2053.output.md +++ b/unison-src/transcripts/fix2053.output.md @@ -2,12 +2,11 @@ .> display List.map go f i as acc = - _pattern = List.at i as - match _pattern with - None -> acc - Some _pattern1 -> + match List.at i as with + None -> acc + Some a -> use Nat + - go f (i + 1) as (acc :+ f _pattern) + go f (i + 1) as (acc :+ f a) f a -> go f 0 a [] ``` diff --git a/unison-src/transcripts/io.md b/unison-src/transcripts/io.md index 965c5fe229..3a27a4001e 100644 --- a/unison-src/transcripts/io.md +++ b/unison-src/transcripts/io.md @@ -76,6 +76,7 @@ testOpenClose _ = handle1 = openFile fooFile FileMode.Write check "file should be open" (isFileOpen handle1) setBuffering handle1 (SizedBlockBuffering 1024) + check "file handle buffering should match what we just set." (getBuffering handle1 == SizedBlockBuffering 1024) setBuffering handle1 (getBuffering handle1) putBytes handle1 0xs01 setBuffering handle1 NoBuffering @@ -107,10 +108,8 @@ testOpenClose _ = ```ucm .> add -``` --- getBuffering.impl is currently broken on trunk (https://github.com/unisonweb/unison/issues/2767) --- Add this back to the ucm block once that's fixed. .> io.test testOpenClose +``` ### Seeking in open files diff --git a/unison-src/transcripts/io.output.md b/unison-src/transcripts/io.output.md index 7c10234a2a..4b0f4098a5 100644 --- a/unison-src/transcripts/io.output.md +++ b/unison-src/transcripts/io.output.md @@ -94,6 +94,7 @@ testOpenClose _ = handle1 = openFile fooFile FileMode.Write check "file should be open" (isFileOpen handle1) setBuffering handle1 (SizedBlockBuffering 1024) + check "file handle buffering should match what we just set." (getBuffering handle1 == SizedBlockBuffering 1024) setBuffering handle1 (getBuffering handle1) putBytes handle1 0xs01 setBuffering handle1 NoBuffering @@ -141,11 +142,22 @@ testOpenClose _ = testOpenClose : '{IO} [Result] -``` --- getBuffering.impl is currently broken on trunk (https://github.com/unisonweb/unison/issues/2767) --- Add this back to the ucm block once that's fixed. .> io.test testOpenClose + New test results: + + ◉ testOpenClose file should be open + ◉ testOpenClose file handle buffering should match what we just set. + ◉ testOpenClose file should be closed + ◉ testOpenClose bytes have been written + ◉ testOpenClose bytes have been written + ◉ testOpenClose file should be closed + + ✅ 6 test(s) passing + + Tip: Use view testOpenClose to view the source of a test. + +``` ### Seeking in open files Tests: openFile diff --git a/unison-src/transcripts/type-deps.md b/unison-src/transcripts/type-deps.md new file mode 100644 index 0000000000..142265c786 --- /dev/null +++ b/unison-src/transcripts/type-deps.md @@ -0,0 +1,32 @@ +# Ensure type dependencies are properly considered in slurping + +https://github.com/unisonweb/unison/pull/2821 + +```ucm:hide +.> builtins.merge +``` + + +Define a type. + +```unison:hide +structural type Y = Y +``` + +```ucm:hide +.> add +``` + +Now, we update `Y`, and add a new type `Z` which depends on it. + +```unison +structural type Z = Z Y +structural type Y = Y Nat +``` + +Adding should fail for BOTH definitions, `Y` needs an update and `Z` is blocked by `Y`. +```ucm:error +.> add +-- This shouldn't exist, because it should've been blocked. +.> view Z +``` diff --git a/unison-src/transcripts/type-deps.output.md b/unison-src/transcripts/type-deps.output.md new file mode 100644 index 0000000000..92f11d6d8a --- /dev/null +++ b/unison-src/transcripts/type-deps.output.md @@ -0,0 +1,56 @@ +# Ensure type dependencies are properly considered in slurping + +https://github.com/unisonweb/unison/pull/2821 + +Define a type. + +```unison +structural type Y = Y +``` + +Now, we update `Y`, and add a new type `Z` which depends on it. + +```unison +structural type Z = Z Y +structural type Y = Y Nat +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + structural type Z + + ⍟ These names already exist. You can `update` them to your + new definition: + + structural type Y + (The old definition is also named builtin.Unit. I'll + update this name too.) + +``` +Adding should fail for BOTH definitions, `Y` needs an update and `Z` is blocked by `Y`. +```ucm +.> add + + x These definitions failed: + + Reason + needs update structural type Y + blocked structural type Z + + Tip: Use `help filestatus` to learn more. + +-- This shouldn't exist, because it should've been blocked. +.> view Z + + ⚠️ + + The following names were not found in the codebase. Check your spelling. + Z + +``` diff --git a/unison-src/transcripts/universal-cmp.md b/unison-src/transcripts/universal-cmp.md index fc5611a10f..c0d555847b 100644 --- a/unison-src/transcripts/universal-cmp.md +++ b/unison-src/transcripts/universal-cmp.md @@ -7,6 +7,8 @@ cases exist for built-in types. Just making sure they don't crash. ``` ```unison +unique type A = A + threadEyeDeez _ = t1 = forkComp '() t2 = forkComp '() @@ -16,5 +18,13 @@ threadEyeDeez _ = ``` ```ucm +.> add .> run threadEyeDeez ``` + +```unison +> typeLink A == typeLink A +> typeLink Text == typeLink Text +> typeLink Text == typeLink A +> termLink threadEyeDeez == termLink threadEyeDeez +``` diff --git a/unison-src/transcripts/universal-cmp.output.md b/unison-src/transcripts/universal-cmp.output.md index afa0b30b79..ddb2d975c7 100644 --- a/unison-src/transcripts/universal-cmp.output.md +++ b/unison-src/transcripts/universal-cmp.output.md @@ -3,6 +3,8 @@ File for test cases making sure that universal equality/comparison cases exist for built-in types. Just making sure they don't crash. ```unison +unique type A = A + threadEyeDeez _ = t1 = forkComp '() t2 = forkComp '() @@ -19,10 +21,51 @@ threadEyeDeez _ = ⍟ These new definitions are ok to `add`: + unique type A threadEyeDeez : ∀ _. _ ->{IO} () ``` ```ucm +.> add + + ⍟ I've added these definitions: + + unique type A + threadEyeDeez : ∀ _. _ ->{IO} () + .> run threadEyeDeez ``` +```unison +> typeLink A == typeLink A +> typeLink Text == typeLink Text +> typeLink Text == typeLink A +> termLink threadEyeDeez == termLink threadEyeDeez +``` + +```ucm + + ✅ + + scratch.u changed. + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. + + 1 | > typeLink A == typeLink A + ⧩ + true + + 2 | > typeLink Text == typeLink Text + ⧩ + true + + 3 | > typeLink Text == typeLink A + ⧩ + false + + 4 | > termLink threadEyeDeez == termLink threadEyeDeez + ⧩ + true + +``` From 805ef567d8ac62231513ac72a9cb7810048510e3 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 24 Jan 2022 14:38:26 -0600 Subject: [PATCH 214/297] Fix redundant imports after merge --- .../src/Unison/Codebase/SqliteCodebase/Conversions.hs | 2 -- 1 file changed, 2 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index 191746ff96..f9921c6977 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -4,8 +4,6 @@ module Unison.Codebase.SqliteCodebase.Conversions where import Data.Bifunctor (Bifunctor (bimap)) -import Data.Bitraversable (Bitraversable (bitraverse)) -import Data.Either (fromRight) import Data.Foldable (Foldable (foldl', toList)) import Data.Map (Map) import qualified Data.Map as Map From c2b1d9c7b9b39cb92038335eb3cb8091879c3f6c Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 28 Jan 2022 14:42:59 -0600 Subject: [PATCH 215/297] Apply Pull Request feedback --- .../src/Unison/Codebase/Editor/Slurp.hs | 76 ++++++++++--------- 1 file changed, 40 insertions(+), 36 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 459965ed54..22fab90103 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -27,6 +27,7 @@ import qualified Unison.Referent as Referent import qualified Unison.Referent' as Referent import qualified Unison.UnisonFile as UF import qualified Unison.UnisonFile.Names as UF +import qualified Unison.Util.Map as Map import qualified Unison.Util.Relation as Rel import qualified Unison.Util.Set as Set import Unison.Var (Var) @@ -99,7 +100,9 @@ pickPriorityStatus a b = (_, Ok Updated) -> Ok Updated (NeedsUpdate v, _) -> NeedsUpdate v (_, NeedsUpdate v) -> NeedsUpdate v - -- 'New' definitions take precedence over duplicated dependencies + -- 'New' definitions take precedence over duplicated dependencies when reporting status. + -- E.g. if a definition has dependencies which are duplicated, but it is itself a new + -- definition, we report it as New. (Ok New, _) -> Ok New (_, Ok New) -> Ok New (Ok Duplicated, _) -> Ok Duplicated @@ -116,6 +119,9 @@ slurpFile :: SR.SlurpResult v slurpFile uf defsToConsider slurpOp unalteredCodebaseNames = let -- A mapping of all vars in the file to their references. + -- TypeVars are keyed to Type references + -- TermVars are keyed to Term references + -- ConstructorVars are keyed to Constructor references varReferences :: Map (TaggedVar v) LD.LabeledDependency varReferences = buildVarReferences uf -- All variables which were either: @@ -300,6 +306,8 @@ computeInvolvedVars uf defsToConsider varReferences requestedVarsWhichActuallyExist = Set.fromList $ do v <- Set.toList defsToConsider -- We don't know whether each var is a type or term, so we try both. + -- We don't test ConstructorVar because you can't request to add/update a Constructor in + -- ucm, you add/update the type instead. tv <- [TypeVar v, TermVar v] guard (Map.member tv varReferences) pure tv @@ -312,15 +320,12 @@ computeVarDeps :: Set (TaggedVar v) -> Map (TaggedVar v) (Set (TaggedVar v)) computeVarDeps uf allInvolvedVars = - let depMap :: (Map (TaggedVar v) (Set (TaggedVar v))) - depMap = - allInvolvedVars - & Set.toList - & fmap - ( \tv -> (tv, Set.delete tv $ varClosure uf (Set.singleton tv)) - ) - & Map.fromListWith (<>) - in depMap + allInvolvedVars + & Set.toList + & fmap + ( \tv -> (tv, Set.delete tv $ varClosure uf (Set.singleton tv)) + ) + & Map.fromAscList -- | Compute the closure of all vars which the provided vars depend on. -- A type depends on its constructors. @@ -337,56 +342,55 @@ buildVarReferences uf = terms :: Map (TaggedVar v) LD.LabeledDependency terms = UF.hashTermsId uf - & Map.toList - -- TODO: ensure we handle watches with assignments correctly. - -- Filter out watches - & mapMaybe + -- Filter out non-test watch expressions + & Map.filter ( \case - (v, (refId, w, _, _)) - | w == Just TestWatch || w == Nothing -> - Just (TermVar v, LD.derivedTerm refId) - _ -> Nothing + (_, w, _, _) + | w == Just TestWatch || w == Nothing -> True + | otherwise -> False ) - & Map.fromList + & Map.bimap + TermVar + (\(refId, _, _, _) -> LD.derivedTerm refId) decls :: Map (TaggedVar v) LD.LabeledDependency decls = UF.dataDeclarationsId' uf - & Map.toList - & fmap (\(v, (refId, _)) -> (TypeVar v, LD.derivedType refId)) - & Map.fromList + & Map.bimap + TypeVar + (\(refId, _) -> LD.derivedType refId) effects :: Map (TaggedVar v) LD.LabeledDependency effects = UF.effectDeclarationsId' uf - & Map.toList - & fmap (\(v, (refId, _)) -> (TypeVar v, (LD.derivedType refId))) - & Map.fromList + & Map.bimap + TypeVar + (\(refId, _) -> LD.derivedType refId) constructors :: Map (TaggedVar v) LD.LabeledDependency constructors = let effectConstructors :: Map (TaggedVar v) LD.LabeledDependency effectConstructors = Map.fromList $ do - (_, (typeRefId, effect)) <- Map.toList (UF.effectDeclarationsId' uf) + (_, (typeRefId, effect)) <- Map.toList (UF.effectDeclarations' uf) let decl = DD.toDataDecl effect (conId, constructorV) <- zip (DD.constructorIds decl) (DD.constructorVars decl) - pure $ (ConstructorVar constructorV, LD.effectConstructor (CR.ConstructorReference (Ref.fromId typeRefId) conId)) + pure $ (ConstructorVar constructorV, LD.effectConstructor (CR.ConstructorReference typeRefId conId)) dataConstructors :: Map (TaggedVar v) LD.LabeledDependency dataConstructors = Map.fromList $ do - (_, (typeRefId, decl)) <- Map.toList (UF.dataDeclarationsId' uf) + (_, (typeRefId, decl)) <- Map.toList (UF.dataDeclarations' uf) (conId, constructorV) <- zip (DD.constructorIds decl) (DD.constructorVars decl) - pure $ (ConstructorVar constructorV, LD.dataConstructor (CR.ConstructorReference (Ref.fromId typeRefId) conId)) + pure $ (ConstructorVar constructorV, LD.dataConstructor (CR.ConstructorReference typeRefId conId)) in effectConstructors <> dataConstructors -- A helper type just used by 'toSlurpResult' for partitioning results. data SlurpingSummary v = SlurpingSummary - { adds :: SlurpComponent v, - duplicates :: SlurpComponent v, - updates :: SlurpComponent v, - termCtorColl :: SlurpComponent v, - ctorTermColl :: SlurpComponent v, - blocked :: SlurpComponent v, - conflicts :: SlurpComponent v + { adds :: !(SlurpComponent v), + duplicates :: !(SlurpComponent v), + updates :: !(SlurpComponent v), + termCtorColl :: !(SlurpComponent v), + ctorTermColl :: !(SlurpComponent v), + blocked :: !(SlurpComponent v), + conflicts :: !(SlurpComponent v) } instance (Ord v) => Semigroup (SlurpingSummary v) where From 2f615ee6d1ac8f468812c485b222337d67e917e3 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 28 Jan 2022 15:12:22 -0600 Subject: [PATCH 216/297] Reduce redundancy between Status types --- .../src/Unison/Codebase/Editor/Slurp.hs | 71 ++++++++++--------- 1 file changed, 39 insertions(+), 32 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 22fab90103..22fcacf792 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -21,7 +21,6 @@ import Unison.Names (Names) import qualified Unison.Names as Names import Unison.Parser.Ann (Ann) import Unison.Prelude -import qualified Unison.Reference as Ref import Unison.Referent (Referent) import qualified Unison.Referent as Referent import qualified Unison.Referent' as Referent @@ -73,14 +72,13 @@ data SlurpErr data DefnStatus = DefOk SlurpOk | DefErr SlurpErr - deriving (Show) + deriving (Eq, Ord, Show) -- | A definition's final status, incorporating the statuses of all of its dependencies. data SummarizedStatus v - = Ok SlurpOk - | NeedsUpdate (TaggedVar v) - | ErrFrom (TaggedVar v) SlurpErr - | SelfErr SlurpErr + = SelfStatus DefnStatus + | -- Dependency Status + DepStatus (TaggedVar v) DefnStatus deriving (Eq, Ord, Show) -- | Ideally we would display all available information about each var to the end-user, @@ -90,22 +88,25 @@ pickPriorityStatus :: SummarizedStatus v -> SummarizedStatus v -> SummarizedStat pickPriorityStatus a b = case (a, b) of -- If the definition has its own error, that takes highest priority. - (SelfErr err, _) -> SelfErr err - (_, SelfErr err) -> SelfErr err + (SelfStatus (DefErr err), _) -> SelfStatus (DefErr err) + (_, SelfStatus (DefErr err)) -> SelfStatus (DefErr err) -- Next we care if a dependency has an error - (ErrFrom v err, _) -> ErrFrom v err - (_, ErrFrom v err) -> ErrFrom v err + (DepStatus v (DefErr err), _) -> DepStatus v (DefErr err) + (_, DepStatus v (DefErr err)) -> DepStatus v (DefErr err) -- If our definition needs its own update then we don't care if dependencies need updates. - (Ok Updated, _) -> Ok Updated - (_, Ok Updated) -> Ok Updated - (NeedsUpdate v, _) -> NeedsUpdate v - (_, NeedsUpdate v) -> NeedsUpdate v + (SelfStatus (DefOk Updated), _) -> SelfStatus (DefOk Updated) + (_, SelfStatus (DefOk Updated)) -> SelfStatus (DefOk Updated) + (DepStatus v (DefOk Updated), _) -> DepStatus v (DefOk Updated) + (_, DepStatus v (DefOk Updated)) -> DepStatus v (DefOk Updated) + -- Any other 'ok' dependency status doesn't meaningfully affect anything the summary. + (DepStatus _ (DefOk _), x) -> x + (x, DepStatus _ (DefOk _)) -> x -- 'New' definitions take precedence over duplicated dependencies when reporting status. -- E.g. if a definition has dependencies which are duplicated, but it is itself a new -- definition, we report it as New. - (Ok New, _) -> Ok New - (_, Ok New) -> Ok New - (Ok Duplicated, _) -> Ok Duplicated + (SelfStatus (DefOk New), _) -> SelfStatus (DefOk New) + (_, SelfStatus (DefOk New)) -> SelfStatus (DefOk New) + (SelfStatus (DefOk Duplicated), _) -> SelfStatus (DefOk Duplicated) -- | Analyze a file and determine the status of all of its definitions with respect to a set -- of vars to analyze and an operation you wish to perform. @@ -277,13 +278,13 @@ summarizeTransitiveStatus statuses deps = toSummary :: (Ord v, Show v) => Bool -> DefnStatus -> TaggedVar v -> SummarizedStatus v toSummary isDep defNotes tv = case defNotes of - DefOk Updated -> if isDep then NeedsUpdate tv else Ok Updated + DefOk Updated -> if isDep then DepStatus tv (DefOk Updated) else SelfStatus (DefOk Updated) DefErr err -> if isDep - then ErrFrom tv err - else SelfErr err - DefOk New -> Ok New - DefOk Duplicated -> Ok Duplicated + then DepStatus tv (DefErr err) + else SelfStatus (DefErr err) + DefOk New -> SelfStatus (DefOk New) + DefOk Duplicated -> SelfStatus (DefOk Duplicated) -- | Determine all variables which should be considered in analysis. -- I.e. any variable requested by the user and all of their dependencies, @@ -453,10 +454,10 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames summarize ( \tv status -> let sc = scFromTaggedVar tv in case status of - Ok New -> mempty {adds = sc} - Ok Duplicated -> mempty {duplicates = sc} - Ok Updated -> mempty {updates = sc} - NeedsUpdate _ -> + SelfStatus (DefOk New) -> mempty {adds = sc} + SelfStatus (DefOk Duplicated) -> mempty {duplicates = sc} + SelfStatus (DefOk Updated) -> mempty {updates = sc} + DepStatus _ (DefOk Updated) -> case op of AddOp -> mempty {blocked = sc} @@ -464,12 +465,18 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames summarize mempty {adds = sc} CheckOp -> mempty {adds = sc} - ErrFrom _ TermCtorCollision -> mempty {blocked = sc} - ErrFrom _ CtorTermCollision -> mempty {blocked = sc} - ErrFrom _ Conflict -> mempty {blocked = sc} - SelfErr TermCtorCollision -> mempty {termCtorColl = sc} - SelfErr CtorTermCollision -> mempty {ctorTermColl = sc} - SelfErr Conflict -> mempty {conflicts = sc} + -- It shouldn't be possible for the two following cases to occur, + -- since a 'SelfStatus' would take priority when summarizing. + DepStatus _ (DefOk New) -> + error $ "Unexpected summary status for " <> show tv <> ": " <> show status + DepStatus _ (DefOk Duplicated) -> + error $ "Unexpected summary status for " <> show tv <> ": " <> show status + DepStatus _ (DefErr TermCtorCollision) -> mempty {blocked = sc} + DepStatus _ (DefErr CtorTermCollision) -> mempty {blocked = sc} + DepStatus _ (DefErr Conflict) -> mempty {blocked = sc} + SelfStatus (DefErr TermCtorCollision) -> mempty {termCtorColl = sc} + SelfStatus (DefErr CtorTermCollision) -> mempty {ctorTermColl = sc} + SelfStatus (DefErr Conflict) -> mempty {conflicts = sc} ) scFromTaggedVar :: TaggedVar v -> SlurpComponent v From 4e25775c0b0dbc03a9f884d416af237c1801476a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Mon, 31 Jan 2022 10:15:21 -0600 Subject: [PATCH 217/297] Cleanup --- unison-cli/src/Unison/Codebase/Editor/Slurp.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 22fcacf792..79fc56a283 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -298,7 +298,7 @@ computeInvolvedVars :: Set (TaggedVar v) computeInvolvedVars uf defsToConsider varReferences -- If nothing was specified, consider every var in the file. - | Set.null defsToConsider = Set.fromList $ Map.keys varReferences + | Set.null defsToConsider = Map.keysSet varReferences | otherwise = varClosure uf requestedVarsWhichActuallyExist where -- The user specifies _untyped_ names, which may not even exist in the file. From eef7cbcd5a9cb5cc496e2c7a0d069bb0c49ff34d Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 2 Feb 2022 14:10:56 -0600 Subject: [PATCH 218/297] Recover from missing branch objects by replacing with empty branches. (#2844) * Replace missing objects with the empty object when migrating. * Add warning message when causals are missing branch objects. --- .../U/Codebase/Sqlite/Branch/Full.hs | 4 ++ .../U/Codebase/Sqlite/Queries.hs | 13 ++++++ .../SqliteCodebase/MigrateSchema12.hs | 46 +++++++++++++++---- 3 files changed, 55 insertions(+), 8 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs index ae1cf4037b..184d023c6a 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Branch/Full.hs @@ -18,6 +18,7 @@ import U.Codebase.Sqlite.LocalIds (LocalBranchChildId, LocalDefnId, LocalPatchOb import qualified Unison.Util.Set as Set import Unison.Prelude import qualified Unison.Util.Map as Map +import qualified Data.Map as Map -- | -- @ @@ -51,6 +52,9 @@ data Branch' t h p c = Branch } deriving (Show, Generic) +emptyBranch :: Branch' t h p c +emptyBranch = Branch Map.empty Map.empty Map.empty Map.empty + branchHashes_ :: (Ord h', Ord t, Ord h) => Traversal (Branch' t h p c) (Branch' t h' p c) h h' branchHashes_ f Branch {..} = do newTerms <- for terms (Map.bitraversed both metadataSetFormatReferences_ . Reference.h_ %%~ f) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index fff34f834c..2e173b1ead 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -116,6 +116,7 @@ module U.Codebase.Sqlite.Queries ( countObjects, countCausals, countWatches, + getCausalsWithoutBranchObjects, -- * db misc createSchema, @@ -853,6 +854,18 @@ namespaceHashIdByBase32Prefix prefix = queryAtoms sql (Only $ prefix <> "%") whe INNER JOIN hash ON id = value_hash_id WHERE base32 LIKE ? |] + +-- | Finds all causals that refer to a branch for which we don't have an object stored. +-- Although there are plans to support this in the future, currently all such cases +-- are the result of database inconsistencies and are unexpected. +getCausalsWithoutBranchObjects :: DB m => m [CausalHashId] +getCausalsWithoutBranchObjects = queryAtoms_ sql + where sql = [here| + SELECT self_hash_id from causal + WHERE value_hash_id NOT IN (SELECT hash_id FROM hash_object) +|] + + {- ORMOLU_ENABLE -} before :: DB m => CausalHashId -> CausalHashId -> m Bool diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs index 46c7a36d83..18460522b7 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/MigrateSchema12.hs @@ -115,9 +115,18 @@ migrateSchema12 :: forall a m v. (MonadUnliftIO m, Var v) => Connection -> Codeb migrateSchema12 conn codebase = do withinSavepoint "MIGRATESCHEMA12" $ do liftIO $ putStrLn $ "Starting codebase migration. This may take a while, it's a good time to make some tea ☕️" + corruptedCausals <- runDB conn (liftQ Q.getCausalsWithoutBranchObjects) + when (not . null $ corruptedCausals) $ do + liftIO $ putStrLn $ "⚠️ I detected " <> show (length corruptedCausals) <> " corrupted namespace(s) in the history of the codebase." + liftIO $ putStrLn $ "This is due to a bug in a previous version of ucm." + liftIO $ putStrLn $ "This only affects the history of your codebase, the most up-to-date iteration will remain intact." + liftIO $ putStrLn $ "I'll go ahead with the migration, but will replace any corrupted namespaces with empty ones." + + liftIO $ putStrLn $ "Updating Namespace Root..." rootCausalHashId <- runDB conn (liftQ Q.loadNamespaceRoot) numEntitiesToMigrate <- runDB conn . liftQ $ do sum <$> sequenceA [Q.countObjects, Q.countCausals, Q.countWatches] + v2EmptyBranchHashInfo <- saveV2EmptyBranch conn watches <- foldMapM (\watchKind -> map (W watchKind) <$> Codebase.watches codebase (Cv.watchKind2to1 watchKind)) @@ -125,7 +134,7 @@ migrateSchema12 conn codebase = do migrationState <- (Sync.sync @_ @Entity migrationSync (progress numEntitiesToMigrate) (CausalE rootCausalHashId : watches)) `runReaderT` Env {db = conn, codebase} - `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty 0 + `execStateT` MigrationState Map.empty Map.empty Map.empty Set.empty 0 v2EmptyBranchHashInfo let (_, newRootCausalHashId) = causalMapping migrationState ^?! ix rootCausalHashId liftIO $ putStrLn $ "Updating Namespace Root..." runDB conn . liftQ $ Q.setNamespaceRoot newRootCausalHashId @@ -188,7 +197,8 @@ data MigrationState = MigrationState objLookup :: Map (Old ObjectId) (New ObjectId, New HashId, New Hash, Old Hash), -- Remember the hashes of term/decls that we have already migrated to avoid migrating them twice. migratedDefnHashes :: Set (Old Hash), - numMigrated :: Int + numMigrated :: Int, + v2EmptyBranchHashInfo :: (BranchHashId, Hash) } deriving (Generic) @@ -242,13 +252,15 @@ migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do oldBranchHashId <- runDB conn . liftQ $ Q.loadCausalValueHashId oldCausalHashId oldCausalParentHashIds <- runDB conn . liftQ $ Q.loadCausalParents oldCausalHashId - branchObjId <- runDB conn . liftQ $ Q.expectObjectIdForAnyHashId (unBranchHashId oldBranchHashId) + maybeOldBranchObjId <- + runDB conn . liftQ $ + Q.maybeObjectIdForAnyHashId (unBranchHashId oldBranchHashId) migratedObjIds <- gets objLookup -- If the branch for this causal hasn't been migrated, migrate it first. let unmigratedBranch = - if (branchObjId `Map.notMember` migratedObjIds) - then [BranchE branchObjId] - else [] + case maybeOldBranchObjId of + Just branchObjId | branchObjId `Map.notMember` migratedObjIds -> [BranchE branchObjId] + _ -> [] migratedCausals <- gets causalMapping let unmigratedParents = @@ -258,7 +270,14 @@ migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do let unmigratedEntities = unmigratedBranch <> unmigratedParents when (not . null $ unmigratedParents <> unmigratedBranch) (throwE $ Sync.Missing unmigratedEntities) - (_, _, newBranchHash, _) <- gets (\MigrationState {objLookup} -> objLookup ^?! ix branchObjId) + (newBranchHashId, newBranchHash) <- case maybeOldBranchObjId of + -- Some codebases are corrupted, likely due to interrupted save operations. + -- It's unfortunate, but rather than fail the whole migration we'll just replace them + -- with an empty branch. + Nothing -> use (field @"v2EmptyBranchHashInfo") + Just branchObjId -> do + let (_, newBranchHashId, newBranchHash, _) = migratedObjIds ^?! ix branchObjId + pure (BranchHashId newBranchHashId, newBranchHash) let (newParentHashes, newParentHashIds) = oldCausalParentHashIds @@ -280,7 +299,7 @@ migrateCausal conn oldCausalHashId = fmap (either id id) . runExceptT $ do let newCausal = DbCausal { selfHash = newCausalHashId, - valueHash = BranchHashId $ view _2 (migratedObjIds ^?! ix branchObjId), + valueHash = newBranchHashId, parents = newParentHashIds } runDB conn do @@ -887,3 +906,14 @@ someReferenceIdToEntity = \case foldSetter :: LensLike (Writer [a]) s t a a -> s -> [a] foldSetter t s = execWriter (s & t %%~ \a -> tell [a] *> pure a) + +-- | Save an empty branch and get its new hash to use when replacing +-- branches which are missing due to database corruption. +saveV2EmptyBranch :: MonadIO m => Connection -> m (BranchHashId, Hash) +saveV2EmptyBranch conn = do + let branch = S.emptyBranch + let (localBranchIds, localBranch) = S.LocalizeObject.localizeBranch branch + newHash <- runDB conn (Ops.liftQ (Hashing.dbBranchHash branch)) + newHashId <- runDB conn (Ops.liftQ (Q.saveBranchHash (BranchHash (Cv.hash1to2 newHash)))) + _ <- runDB conn (Ops.saveBranchObject newHashId localBranchIds localBranch) + pure (newHashId, newHash) From 33d455ec241c33a9d463759b480aff85300a7a8b Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 9 Feb 2022 14:15:26 -0600 Subject: [PATCH 219/297] Don't crash when typechecking higher-rank fields (#2866) --- parser-typechecker/src/Unison/DeclPrinter.hs | 103 ++++++++++--------- unison-src/transcripts/higher-rank.md | 15 ++- unison-src/transcripts/higher-rank.output.md | 22 ++++ 3 files changed, 90 insertions(+), 50 deletions(-) diff --git a/parser-typechecker/src/Unison/DeclPrinter.hs b/parser-typechecker/src/Unison/DeclPrinter.hs index bf1fcb053a..611d20875c 100644 --- a/parser-typechecker/src/Unison/DeclPrinter.hs +++ b/parser-typechecker/src/Unison/DeclPrinter.hs @@ -133,55 +133,60 @@ prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd = -- -- This function bails with `Nothing` if the names aren't an exact match for -- the expected record naming convention. -fieldNames - :: forall v a . Var v - => PrettyPrintEnv - -> Reference - -> HashQualified Name - -> DataDeclaration v a - -> Maybe [HashQualified Name] -fieldNames env r name dd = case DD.constructors dd of - [(_, typ)] -> let - vars :: [v] - vars = [ Var.freshenId (fromIntegral n) (Var.named "_") | n <- [0..Type.arity typ - 1]] - accessors :: [(v, Term.Term v ())] - accessors = DD.generateRecordAccessors (map (,()) vars) (HQ.toVar name) r - accessorsWithTypes :: [(v, Term.Term v (), Type.Type v ())] - accessorsWithTypes = accessors <&> \(v, trm) -> - case Result.result (Typechecker.synthesize typecheckingEnv trm) of - Nothing -> error $ "Failed to typecheck record field: " <> show v - Just typ -> (v, trm, typ) - typeLookup :: TypeLookup v () - typeLookup = - TypeLookup - { TypeLookup.typeOfTerms = mempty, - TypeLookup.dataDecls = Map.singleton r (void dd), - TypeLookup.effectDecls = mempty - } - typecheckingEnv :: Typechecker.Env v () - typecheckingEnv = - Typechecker.Env - { Typechecker._ambientAbilities = mempty, - Typechecker._typeLookup = typeLookup, - Typechecker._termsByShortname = mempty - } - hashes = Hashing.hashTermComponents (Map.fromList . fmap (\(v, trm, typ) -> (v, (trm, typ))) $ accessorsWithTypes) - names = [ (r, HQ.toString . PPE.termName env . Referent.Ref $ DerivedId r) - | r <- (\(refId, _trm, _typ) -> refId) <$> Map.elems hashes ] - fieldNames = Map.fromList - [ (r, f) | (r, n) <- names - , typename <- pure (HQ.toString name) - , typename `isPrefixOf` n - , rest <- pure $ drop (length typename + 1) n - , (f, rest) <- pure $ span (/= '.') rest - , rest `elem` ["",".set",".modify"] ] - in if Map.size fieldNames == length names then - Just [ HQ.unsafeFromString name - | v <- vars - , Just (ref, _, _) <- [Map.lookup (Var.namespaced [HQ.toVar name, v]) hashes] - , Just name <- [Map.lookup ref fieldNames] ] - else Nothing - _ -> Nothing +fieldNames :: + forall v a. + Var v => + PrettyPrintEnv -> + Reference -> + HashQualified Name -> + DataDeclaration v a -> + Maybe [HashQualified Name] +fieldNames env r name dd = do + typ <- case DD.constructors dd of + [(_, typ)] -> Just typ + _ -> Nothing + let vars :: [v] + vars = [Var.freshenId (fromIntegral n) (Var.named "_") | n <- [0 .. Type.arity typ - 1]] + let accessors :: [(v, Term.Term v ())] + accessors = DD.generateRecordAccessors (map (,()) vars) (HQ.toVar name) r + let typeLookup :: TypeLookup v () + typeLookup = + TypeLookup + { TypeLookup.typeOfTerms = mempty, + TypeLookup.dataDecls = Map.singleton r (void dd), + TypeLookup.effectDecls = mempty + } + let typecheckingEnv :: Typechecker.Env v () + typecheckingEnv = + Typechecker.Env + { Typechecker._ambientAbilities = mempty, + Typechecker._typeLookup = typeLookup, + Typechecker._termsByShortname = mempty + } + accessorsWithTypes :: [(v, Term.Term v (), Type.Type v ())] + <- for accessors \(v, trm) -> + case Result.result (Typechecker.synthesize typecheckingEnv trm) of + Nothing -> Nothing + Just typ -> Just (v, trm, typ) + let hashes = Hashing.hashTermComponents (Map.fromList . fmap (\(v, trm, typ) -> (v, (trm, typ))) $ accessorsWithTypes) + let names = + [ (r, HQ.toString . PPE.termName env . Referent.Ref $ DerivedId r) + | r <- (\(refId, _trm, _typ) -> refId) <$> Map.elems hashes + ] + let fieldNames = + Map.fromList + [ (r, f) | (r, n) <- names, typename <- pure (HQ.toString name), typename `isPrefixOf` n, rest <- pure $ drop (length typename + 1) n, (f, rest) <- pure $ span (/= '.') rest, rest `elem` ["", ".set", ".modify"] + ] + + if Map.size fieldNames == length names + then + Just + [ HQ.unsafeFromString name + | v <- vars, + Just (ref, _, _) <- [Map.lookup (Var.namespaced [HQ.toVar name, v]) hashes], + Just name <- [Map.lookup ref fieldNames] + ] + else Nothing prettyModifier :: DD.Modifier -> Pretty SyntaxText prettyModifier DD.Structural = fmt S.DataTypeModifier "structural" diff --git a/unison-src/transcripts/higher-rank.md b/unison-src/transcripts/higher-rank.md index ee303a8a45..87b720f4f5 100644 --- a/unison-src/transcripts/higher-rank.md +++ b/unison-src/transcripts/higher-rank.md @@ -66,4 +66,17 @@ Loc.transform2 nt = cases Loc f -> f' : forall t a . '{Remote t} a ->{Remote t} t a f' a = f (nt a) Loc f' -``` \ No newline at end of file +``` + +## Types with polymorphic fields + +```unison:hide +structural type HigherRanked = HigherRanked (forall a. a -> a) +``` + +We should be able to add and view records with higher-rank fields. + +```ucm +.higher_ranked> add +.higher_ranked> view HigherRanked +``` diff --git a/unison-src/transcripts/higher-rank.output.md b/unison-src/transcripts/higher-rank.output.md index 5570cddbed..d66a35e528 100644 --- a/unison-src/transcripts/higher-rank.output.md +++ b/unison-src/transcripts/higher-rank.output.md @@ -124,3 +124,25 @@ Loc.transform2 nt = cases Loc f -> -> Loc ``` +## Types with polymorphic fields + +```unison +structural type HigherRanked = HigherRanked (forall a. a -> a) +``` + +We should be able to add and view records with higher-rank fields. + +```ucm + ☝️ The namespace .higher_ranked is empty. + +.higher_ranked> add + + ⍟ I've added these definitions: + + structural type HigherRanked + +.higher_ranked> view HigherRanked + + structural type HigherRanked = HigherRanked (∀ a. a -> a) + +``` From a14f69c04c611ddcda810f760a06a76911342c8a Mon Sep 17 00:00:00 2001 From: Phil de Joux Date: Thu, 10 Feb 2022 12:31:38 -0500 Subject: [PATCH 220/297] Add Phil de Joux as contributor. --- CONTRIBUTORS.markdown | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTORS.markdown b/CONTRIBUTORS.markdown index 645326f20e..42095b9611 100644 --- a/CONTRIBUTORS.markdown +++ b/CONTRIBUTORS.markdown @@ -66,3 +66,4 @@ The format for this list: name, GitHub handle, and then optional blurb about wha * Sameer Kolhar (@kolharsam) * Nicole Prindle (@nprindle) * Harald Gliebe (@hagl) +* Phil de Joux (@philderbeast) - floating point, pretty printing and parsing From 2d701af053ef7d52861be93a168ffa45aec3ee7f Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 11 Feb 2022 10:54:54 -0600 Subject: [PATCH 221/297] PR feedback --- parser-typechecker/src/Unison/UnisonFile.hs | 8 ++++---- unison-cli/src/Unison/Codebase/Editor/HandleInput.hs | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/parser-typechecker/src/Unison/UnisonFile.hs b/parser-typechecker/src/Unison/UnisonFile.hs index f961b70d6b..117e7667a3 100644 --- a/parser-typechecker/src/Unison/UnisonFile.hs +++ b/parser-typechecker/src/Unison/UnisonFile.hs @@ -23,7 +23,7 @@ module Unison.UnisonFile discardTypes, effectDeclarations', hashConstructors, - constructorsForTypeVars, + constructorsForDecls, hashTerms, indexByReference, lookupDecl, @@ -206,9 +206,9 @@ hashConstructors file = [ (v, Referent.ConId (ConstructorReference ref i) CT.Effect) | (v,i) <- DD.constructorVars (DD.toDataDecl dd) `zip` [0 ..] ] in Map.fromList (ctors1 ++ ctors2) --- | Returns the set of constructor names for type names in the given `Set`. -constructorsForTypeVars :: Ord v => Set v -> TypecheckedUnisonFile v a -> Set v -constructorsForTypeVars types uf = +-- | Returns the set of constructor names for decls whose names in the given Set. +constructorsForDecls :: Ord v => Set v -> TypecheckedUnisonFile v a -> Set v +constructorsForDecls types uf = let dataConstructors = dataDeclarationsId' uf & Map.filterWithKey (\k _ -> Set.member k types) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index f9b79b70c4..37858b9e60 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1852,9 +1852,9 @@ handleUpdate input maybePatchPath requestedNames = do ([old], [new]) -> (n, (old, new)) actual -> error $ - "Expected unique matches for " + "Expected unique matches for var \"" ++ Var.nameStr v - ++ " but got: " + ++ "\" but got: " ++ show actual where n = Name.unsafeFromVar v @@ -2871,7 +2871,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions) typeActions = map doType . toList $ SC.types slurp termActions = map doTerm . toList $ - SC.terms slurp <> UF.constructorsForTypeVars (SC.types slurp) uf + SC.terms slurp <> UF.constructorsForDecls (SC.types slurp) uf names = UF.typecheckedToNames uf tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf) (isTestType, isTestValue) = isTest From f3b558cb19c683699e8559252501098628d8467a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Fri, 11 Feb 2022 11:06:17 -0600 Subject: [PATCH 222/297] Fix bad rename --- unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs index f7ec548dbe..54a43342ab 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs @@ -61,7 +61,7 @@ closeWithDependencies uf inputs = seenDefns {ctors = constructorDeps} seenTypes = foldl' typeDeps mempty (types inputs) constructorDeps :: Set v - constructorDeps = UF.constructorsForTypeVars seenTypes uf + constructorDeps = UF.constructorsForDecls seenTypes uf termDeps :: SlurpComponent v -> v -> SlurpComponent v termDeps seen v | Set.member v (terms seen) = seen From 1b81304f44e4385442a41052a377e143bf8849ad Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Fri, 11 Feb 2022 12:21:21 -0500 Subject: [PATCH 223/297] Tweak ability variables for better inference This inserts some processing when ungeneralizing types that should lead to better inference with the newer ability algorithm. It rewrites types like: (a ->{e} b ->{e} c) ->{e} d to: (a ->{e1} b ->{e2} c) ->{e1,e2} d which behave a bit better. --- .../src/Unison/Typechecker/Context.hs | 80 +++++++++++++++++-- 1 file changed, 75 insertions(+), 5 deletions(-) diff --git a/parser-typechecker/src/Unison/Typechecker/Context.hs b/parser-typechecker/src/Unison/Typechecker/Context.hs index 1b5e2d1fdb..13b69d109d 100644 --- a/parser-typechecker/src/Unison/Typechecker/Context.hs +++ b/parser-typechecker/src/Unison/Typechecker/Context.hs @@ -1513,13 +1513,83 @@ ungeneralize :: (Var v, Ord loc) => Type v loc -> M v loc (Type v loc) ungeneralize t = snd <$> ungeneralize' t ungeneralize' :: (Var v, Ord loc) => Type v loc -> M v loc ([v], Type v loc) -ungeneralize' (Type.Forall' t) = do - v <- ABT.freshen t freshenTypeVar - appendContext [existential v] - t <- pure $ ABT.bindInheritAnnotation t (existential' () B.Blank v) - first (v:) <$> ungeneralize' t +ungeneralize' (Type.ForallNamed' v t) = do + (vs, t) <- tweakEffects v t + first (vs++) <$> ungeneralize' t ungeneralize' t = pure ([], t) +-- Tries to massage types like: +-- +-- (a ->{e} b ->{e} c) ->{e} d +-- +-- by rewriting them into: +-- +-- (a ->{e1} b ->{e2} c) ->{e1,e2} d +-- +-- The strategy is to find all negative occurrences of `e` and +-- introduce a new variable for each, and then replace any +-- non-negative occurrences with the row of all negative +-- variables. The reason this is valid is that `e` can be +-- instantiated with the entire row, and then all the negative +-- rows can be pared down to the single variable via subtyping. +-- +-- This is meant to occur when a polymorphic type is +-- de-generalized, and replaces simple freshening of the +-- polymorphic variable. +tweakEffects + :: Var v + => Ord loc + => TypeVar v loc + -> Type v loc + -> M v loc ([v], Type v loc) +tweakEffects v0 t0 + | isEffectVar v0 t0 = rewrite (Just False) t0 >>= \case + ([], ty) -> + freshenTypeVar v0 >>= \out -> finish [out] ty + (vs, ty) -> finish vs ty + | otherwise + = freshenTypeVar v0 >>= \out -> finish [out] t0 + where + negative = fromMaybe False + + typ [v] = existential' () B.Blank v + typ vs = Type.effects () $ existential' () B.Blank <$> vs + + finish vs ty = do + appendContext (existential <$> vs) + pure (vs, ABT.substInheritAnnotation v0 (typ vs) ty) + + rewrite p ty@(Type.ForallNamed' v t) + | v0 /= v = second (Type.forall a v) <$> rewrite p t + where a = loc ty + rewrite p ty@(Type.Arrow'' i es o) = do + (vis, i) <- rewrite (not <$> p) i + (vos, o) <- rewrite p o + ess <- traverse (rewrite p) es + let es = snd <$> ess ; ves = fst =<< ess + pure (vis ++ ves ++ vos, Type.arrow a i (Type.effect a es o)) + where a = loc ty + rewrite p ty@(Type.Var' v) + | v0 == v && negative p = do + u <- freshenTypeVar v0 + pure ([u], existential' (loc ty) B.Blank u) + rewrite p ty@(Type.App' f x) = do + (vfs, f) <- rewrite p f + (vxs, x) <- rewrite Nothing x + pure (vfs ++ vxs, Type.app (loc ty) f x) + rewrite _ ty = pure ([], ty) + +isEffectVar :: Var v => TypeVar v loc -> Type v loc -> Bool +isEffectVar u (Type.ForallNamed' v t) + | u == v = False + | otherwise = isEffectVar u t +isEffectVar u (Type.Arrow'' i es o) = + any p es || isEffectVar u i || isEffectVar u o + where + p (Type.Var' v) = v == u + p _ = False +isEffectVar _ _ = False + skolemize :: Var v => Ord loc From 63b3515030ed7bf7a4c1d19fc7f81d9243ea38d1 Mon Sep 17 00:00:00 2001 From: Paul Chiusano Date: Fri, 11 Feb 2022 13:27:07 -0600 Subject: [PATCH 224/297] fix indentation for multiline lists with composite expressions that span multiple lines --- parser-typechecker/src/Unison/TermPrinter.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/TermPrinter.hs b/parser-typechecker/src/Unison/TermPrinter.hs index fa89b45cc8..4acb0ceefa 100644 --- a/parser-typechecker/src/Unison/TermPrinter.hs +++ b/parser-typechecker/src/Unison/TermPrinter.hs @@ -258,7 +258,7 @@ pretty0 List' xs -> PP.group $ (fmt S.DelimiterChar $ l "[") <> optSpace <> intercalateMap ((fmt S.DelimiterChar $ l ",") <> PP.softbreak <> optSpace <> optSpace) - (pretty0 n (ac 0 Normal im doc)) + (PP.indentNAfterNewline 2 . pretty0 n (ac 0 Normal im doc)) xs <> optSpace <> (fmt S.DelimiterChar $ l "]") where optSpace = PP.orElse "" " " From ad2c2ac2648111381184ddc11839fc515f96cb2e Mon Sep 17 00:00:00 2001 From: Paul Chiusano Date: Fri, 11 Feb 2022 13:42:03 -0600 Subject: [PATCH 225/297] Don't use ! syntax when it's just unit being passed to a constructor or Any --- parser-typechecker/src/Unison/TermPrinter.hs | 5 + .../transcripts/bug-strange-closure.output.md | 5317 +++++++++++------ .../constructor-applied-to-unit.md | 11 + .../constructor-applied-to-unit.output.md | 52 + 4 files changed, 3499 insertions(+), 1886 deletions(-) create mode 100644 unison-src/transcripts/constructor-applied-to-unit.md create mode 100644 unison-src/transcripts/constructor-applied-to-unit.output.md diff --git a/parser-typechecker/src/Unison/TermPrinter.hs b/parser-typechecker/src/Unison/TermPrinter.hs index 4acb0ceefa..6c4a2ed62b 100644 --- a/parser-typechecker/src/Unison/TermPrinter.hs +++ b/parser-typechecker/src/Unison/TermPrinter.hs @@ -327,12 +327,17 @@ pretty0 t -> l "error: " <> l (show t) where + goNormal prec tm = pretty0 n (ac prec Normal im doc) tm specialCases term _go | Just p <- prettyDoc2 n a term = p specialCases term go = case (term, binaryOpsPred) of (DD.Doc, _) | doc == MaybeDoc -> if isDocLiteral term then prettyDoc n im term else pretty0 n (a {docContext = NoDoc}) term + (App' f@(Builtin' "Any.Any") arg, _) -> + paren (p >= 10) $ goNormal 9 f `PP.hang` goNormal 10 arg + (Apps' f@(Constructor' _) args, _) -> + paren (p >= 10) $ goNormal 9 f `PP.hang` PP.spacedMap (goNormal 10) args (TupleTerm' [x], _) -> let conRef = DD.pairCtorRef diff --git a/unison-src/transcripts/bug-strange-closure.output.md b/unison-src/transcripts/bug-strange-closure.output.md index bca2e46d30..f31e80c898 100644 --- a/unison-src/transcripts/bug-strange-closure.output.md +++ b/unison-src/transcripts/bug-strange-closure.output.md @@ -860,1954 +860,3499 @@ rendered = Pretty.get (docFormatConsole doc.guide) 3 | > rendered ⧩ - !Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (ConsoleText.Bold (Plain "Unison"))), - !Lit - (Right - (ConsoleText.Bold (Plain "computable"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "documentation"))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (ConsoleText.Bold - (Plain "Basic"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "formatting"))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right (Plain "Paragraphs")), - !Lit (Right (Plain "are")), - !Lit - (Right (Plain "separated")), - !Lit (Right (Plain "by")), - !Lit (Right (Plain "one")), - !Lit (Right (Plain "or")), - !Lit (Right (Plain "more")), - !Lit - (Right (Plain "blanklines.")), - !Lit - (Right (Plain "Sections")), - !Lit (Right (Plain "have")), - !Lit (Right (Plain "a")), - !Lit (Right (Plain "title")), - !Lit (Right (Plain "and")), - !Lit (Right (Plain "0")), - !Lit (Right (Plain "or")), - !Lit (Right (Plain "more")), - !Lit - (Right (Plain "paragraphs")), - !Lit (Right (Plain "or")), - !Lit (Right (Plain "other")), - !Lit (Right (Plain "section")), - !Lit - (Right (Plain "elements.")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "Text")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "be")), - !Annotated.Group - (!Annotated.Append - [ !Wrap - (!Lit + Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit () (Right (Plain "# "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (ConsoleText.Bold (Plain "Unison"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain "computable"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain "documentation"))) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit () (Right (Plain "# "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (ConsoleText.Bold - (Plain "bold")))), - !Lit (Right (Plain ",")) ]), - !Annotated.Group - (!Annotated.Append - [ !Annotated.Group - (!Annotated.Append - [ !Lit - (Right (Plain "*")), - !Wrap - (!Lit - (Right - (Plain - "italicized"))), - !Lit - (Right (Plain "*")) ]), - !Lit (Right (Plain ",")) ]), - !Annotated.Group - (!Annotated.Append - [ !Annotated.Group - (!Annotated.Append - [ !Lit - (Right (Plain "~~")), - !Wrap - (!Lit - (Right - (Plain - "strikethrough"))), - !Lit - (Right (Plain "~~")) ]), - !Lit (Right (Plain ",")) ]), - !Lit (Right (Plain "or")), - !Annotated.Group - (!Annotated.Append - [ !Annotated.Group - (!Annotated.Append - [ !Lit - (Right (Plain "`")), - !Lit - (Right - (Plain "monospaced")), - !Lit - (Right (Plain "`")) ]), - !Lit (Right (Plain ".")) ]) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "You")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "link")), - !Lit (Right (Plain "to")), - !Lit (Right (Plain "Unison")), - !Lit (Right (Plain "terms,")), - !Lit (Right (Plain "types,")), - !Lit (Right (Plain "and")), - !Lit - (Right (Plain "external")), - !Lit (Right (Plain "URLs:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "* "))) - (!Lit (Right (Plain " "))) - (!Wrap - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Underline - (Plain "An"))), - !Lit - (Right - (Underline - (Plain "external"))), - !Lit - (Right - (Underline - (Plain "url"))) ]))), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain "* "))) - (!Lit (Right (Plain " "))) - (!Wrap - (!Annotated.Append - [ !Lit + (Plain "Basic"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain "formatting"))) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain "Paragraphs")), + Lit + () (Right (Plain "are")), + Lit + () + (Right + (Plain "separated")), + Lit + () (Right (Plain "by")), + Lit + () (Right (Plain "one")), + Lit + () (Right (Plain "or")), + Lit + () + (Right (Plain "more")), + Lit + () + (Right + (Plain "blanklines.")), + Lit + () + (Right + (Plain "Sections")), + Lit + () + (Right (Plain "have")), + Lit () (Right (Plain "a")), + Lit + () + (Right (Plain "title")), + Lit + () (Right (Plain "and")), + Lit () (Right (Plain "0")), + Lit + () (Right (Plain "or")), + Lit + () + (Right (Plain "more")), + Lit + () + (Right + (Plain "paragraphs")), + Lit + () (Right (Plain "or")), + Lit + () + (Right (Plain "other")), + Lit + () + (Right (Plain "section")), + Lit + () + (Right + (Plain "elements.")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "Text")), + Lit + () (Right (Plain "can")), + Lit + () (Right (Plain "be")), + Annotated.Group + () + (Annotated.Append + () + [ Wrap + () + (Lit + () + (Right + (ConsoleText.Bold + (Plain + "bold")))), + Lit + () + (Right (Plain ",")) ]), + Annotated.Group + () + (Annotated.Append + () + [ Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain "*")), + Wrap + () + (Lit + () + (Right + (Plain + "italicized"))), + Lit + () + (Right + (Plain "*")) ]), + Lit + () + (Right (Plain ",")) ]), + Annotated.Group + () + (Annotated.Append + () + [ Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "~~")), + Wrap + () + (Lit + () + (Right + (Plain + "strikethrough"))), + Lit + () + (Right + (Plain + "~~")) ]), + Lit + () + (Right (Plain ",")) ]), + Lit + () (Right (Plain "or")), + Annotated.Group + () + (Annotated.Append + () + [ Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain "`")), + Lit + () + (Right + (Plain + "monospaced")), + Lit + () + (Right + (Plain "`")) ]), + Lit + () + (Right (Plain ".")) ]) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "You")), + Lit + () (Right (Plain "can")), + Lit + () + (Right (Plain "link")), + Lit + () (Right (Plain "to")), + Lit + () + (Right (Plain "Unison")), + Lit + () + (Right (Plain "terms,")), + Lit + () + (Right (Plain "types,")), + Lit + () (Right (Plain "and")), + Lit + () + (Right + (Plain "external")), + Lit + () + (Right (Plain "URLs:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right (Plain "* "))) + (Lit + () + (Right (Plain " "))) + (Wrap + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Underline + (Plain + "An"))), + Lit + () + (Right + (Underline + (Plain + "external"))), + Lit + () + (Right + (Underline + (Plain + "url"))) ]))), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain "* "))) + (Lit + () + (Right (Plain " "))) + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Left + (SpecialForm.Link + (Right + (Term.Term + (Any + (_ -> + Some)))))), + Lit + () + (Right + (Plain "is")), + Lit + () + (Right + (Plain "a")), + Lit + () + (Right + (Plain "term")), + Lit + () + (Right + (Plain "link;")), + Lit + () + (Left + (SpecialForm.Link + (Left + (typeLink Optional)))), + Lit + () + (Right + (Plain "is")), + Lit + () + (Right + (Plain "a")), + Lit + () + (Right + (Plain "type")), + Lit + () + (Right + (Plain "link")) ])), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain "* "))) + (Lit + () + (Right (Plain " "))) + (Wrap + () + (Annotated.Append + () + [ Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Underline + (Plain + "A"))), + Lit + () + (Right + (Underline + (Plain + "named"))), + Lit + () + (Right + (Underline + (Plain + "type"))), + Lit + () + (Right + (Underline + (Plain + "link"))) ]), + Lit + () + (Right + (Plain "and")), + Annotated.Group + () + (Annotated.Append + () + [ Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Underline + (Plain + "a"))), + Lit + () + (Right + (Underline + (Plain + "named"))), + Lit + () + (Right + (Underline + (Plain + "term"))), + Lit + () + (Right + (Underline + (Plain + "link"))) ]), + Lit + () + (Right + (Plain + ".")) ]), + Lit + () + (Right + (Plain "Term")), + Lit + () + (Right + (Plain "links")), + Lit + () + (Right + (Plain "are")), + Lit + () + (Right + (Plain "handy")), + Lit + () + (Right + (Plain "for")), + Lit + () + (Right + (Plain + "linking")), + Lit + () + (Right + (Plain "to")), + Lit + () + (Right + (Plain "other")), + Lit + () + (Right + (Plain + "documents!")) ])) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "You")), + Lit + () (Right (Plain "can")), + Lit + () (Right (Plain "use")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "`")), + Lit + () + (Right + (Plain + "{{ .. }}")), + Lit + () + (Right (Plain "`")) ]), + Lit + () (Right (Plain "to")), + Lit + () + (Right (Plain "escape")), + Lit + () (Right (Plain "out")), + Lit + () (Right (Plain "to")), + Lit + () + (Right (Plain "regular")), + Lit + () + (Right (Plain "Unison")), + Lit + () + (Right (Plain "syntax,")), + Lit + () (Right (Plain "for")), + Lit + () + (Right + (Plain "instance")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "__not bold__")), + Lit + () + (Right (Plain ".")) ]), + Lit + () + (Right (Plain "This")), + Lit + () (Right (Plain "is")), + Lit + () + (Right (Plain "useful")), + Lit + () (Right (Plain "for")), + Lit + () + (Right + (Plain "creating")), + Lit + () + (Right + (Plain "documents")), + Lit + () + (Right + (Plain + "programmatically")), + Lit + () (Right (Plain "or")), + Lit + () + (Right (Plain "just")), + Lit + () + (Right + (Plain "including")), + Lit + () + (Right (Plain "other")), + Lit + () + (Right + (Plain "documents.")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "*")), + Lit + () + (Right + (Plain "Next")) ]), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain "up:")), + Lit + () + (Right (Plain "*")) ]), + Lit + () (Left (SpecialForm.Link (Right (Term.Term - (Any (_ -> Some)))))), - !Lit - (Right (Plain "is")), - !Lit (Right (Plain "a")), - !Lit - (Right (Plain "term")), - !Lit - (Right (Plain "link;")), - !Lit - (Left - (SpecialForm.Link - (Left - (typeLink Optional)))), - !Lit - (Right (Plain "is")), - !Lit (Right (Plain "a")), - !Lit - (Right (Plain "type")), - !Lit - (Right (Plain "link")) ])), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain "* "))) - (!Lit (Right (Plain " "))) - (!Wrap - (!Annotated.Append - [ !Wrap - (!Annotated.Append - [ !Lit - (Right - (Underline - (Plain "A"))), - !Lit - (Right - (Underline - (Plain "named"))), - !Lit - (Right - (Underline - (Plain "type"))), - !Lit - (Right - (Underline - (Plain "link"))) ]), - !Lit - (Right (Plain "and")), - !Annotated.Group - (!Annotated.Append - [ !Wrap - (!Annotated.Append - [ !Lit - (Right - (Underline - (Plain "a"))), - !Lit - (Right - (Underline + (Any + (_ -> lists)))))) ]))) ])))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit () (Right (Plain "# "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Lit + () + (Right + (ConsoleText.Bold + (Plain "Lists")))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right (Plain "# "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (ConsoleText.Bold + (Plain + "Bulleted"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain + "lists"))) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right (Plain - "named"))), - !Lit - (Right - (Underline + "Bulleted")), + Lit + () + (Right (Plain - "term"))), - !Lit - (Right - (Underline + "lists")), + Lit + () + (Right + (Plain "can")), + Lit + () + (Right + (Plain "use")), + Annotated.Group + () + (Annotated.Append + () + [ Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "`")), + Lit + () + (Right + (Plain + "+")), + Lit + () + (Right + (Plain + "`")) ]), + Lit + () + (Right + (Plain + ",")) ]), + Annotated.Group + () + (Annotated.Append + () + [ Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "`")), + Lit + () + (Right + (Plain + "-")), + Lit + () + (Right + (Plain + "`")) ]), + Lit + () + (Right + (Plain + ",")) ]), + Lit + () + (Right + (Plain "or")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "`")), + Lit + () + (Right + (Plain + "*")), + Lit + () + (Right + (Plain + "`")) ]), + Lit + () + (Right + (Plain "for")), + Lit + () + (Right + (Plain "the")), + Lit + () + (Right (Plain - "link"))) ]), - !Lit - (Right (Plain ".")) ]), - !Lit - (Right (Plain "Term")), - !Lit - (Right (Plain "links")), - !Lit - (Right (Plain "are")), - !Lit - (Right (Plain "handy")), - !Lit - (Right (Plain "for")), - !Lit - (Right (Plain "linking")), - !Lit - (Right (Plain "to")), - !Lit - (Right (Plain "other")), - !Lit + "bullets")), + Lit + () + (Right + (Plain + "(though")), + Lit + () + (Right + (Plain "the")), + Lit + () + (Right + (Plain + "choice")), + Lit + () + (Right + (Plain + "will")), + Lit + () + (Right + (Plain "be")), + Lit + () + (Right + (Plain + "normalized")), + Lit + () + (Right + (Plain + "away")), + Lit + () + (Right + (Plain "by")), + Lit + () + (Right + (Plain "the")), + Lit + () + (Right + (Plain + "pretty-printer).")), + Lit + () + (Right + (Plain + "They")), + Lit + () + (Right + (Plain "can")), + Lit + () + (Right + (Plain "be")), + Lit + () + (Right + (Plain + "nested,")), + Lit + () + (Right + (Plain "to")), + Lit + () + (Right + (Plain "any")), + Lit + () + (Right + (Plain + "depth:")) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right + (Plain + "* "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "A")))), + Lit + () + (Right + (Plain "\n")), + Indent + () + (Lit + () + (Right + (Plain + "* "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "B")))), + Lit + () + (Right + (Plain "\n")), + Indent + () + (Lit + () + (Right + (Plain + "* "))) + (Lit + () + (Right + (Plain + " "))) + (Annotated.Append + () + [ Wrap + () + (Lit + () + (Right + (Plain + "C"))), + Lit + () + (Right + (Plain + "\n")), + Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right + (Plain + "* "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "C1")))), + Lit + () + (Right + (Plain + "\n")), + Indent + () + (Lit + () + (Right + (Plain + "* "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "C2")))) ]) ]) ]))) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right (Plain "# "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (ConsoleText.Bold + (Plain + "Numbered"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain + "lists"))) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right + (Plain + "1. "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "A")))), + Lit + () + (Right + (Plain "\n")), + Indent + () + (Lit + () + (Right + (Plain + "2. "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "B")))), + Lit + () + (Right + (Plain "\n")), + Indent + () + (Lit + () + (Right + (Plain + "3. "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "C")))) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain "The")), + Lit + () + (Right + (Plain + "first")), + Lit + () + (Right + (Plain + "number")), + Lit + () + (Right + (Plain "of")), + Lit + () + (Right + (Plain "the")), + Lit + () + (Right + (Plain + "list")), + Lit + () + (Right + (Plain + "determines")), + Lit + () + (Right + (Plain "the")), + Lit + () + (Right + (Plain + "starting")), + Lit + () + (Right + (Plain + "number")), + Lit + () + (Right + (Plain "in")), + Lit + () + (Right + (Plain "the")), + Lit + () + (Right + (Plain + "rendered")), + Lit + () + (Right + (Plain + "output.")), + Lit + () + (Right + (Plain "The")), + Lit + () + (Right + (Plain + "other")), + Lit + () + (Right + (Plain + "numbers")), + Lit + () + (Right + (Plain "are")), + Lit + () + (Right + (Plain + "ignored:")) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right + (Plain + "10. "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "A")))), + Lit + () + (Right + (Plain "\n")), + Indent + () + (Lit + () + (Right + (Plain + "11. "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "B")))), + Lit + () + (Right + (Plain "\n")), + Indent + () + (Lit + () + (Right + (Plain + "12. "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Lit + () + (Right + (Plain + "C")))) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "Numbered")), + Lit + () + (Right + (Plain + "lists")), + Lit + () + (Right + (Plain "can")), + Lit + () + (Right + (Plain "be")), + Lit + () + (Right + (Plain + "nested")), + Lit + () + (Right + (Plain "as")), + Lit + () + (Right + (Plain + "well,")), + Lit + () + (Right + (Plain "and")), + Lit + () + (Right + (Plain + "combined")), + Lit + () + (Right + (Plain + "with")), + Lit + () + (Right + (Plain + "bulleted")), + Lit + () + (Right + (Plain + "lists:")) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right + (Plain + "1. "))) + (Lit + () + (Right + (Plain + " "))) + (Annotated.Append + () + [ Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "Wake")), + Lit + () + (Right + (Plain + "up.")) ]), + Lit + () + (Right + (Plain + "\n")), + Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right + (Plain + "* "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "What")), + Lit + () + (Right + (Plain + "am")), + Lit + () + (Right + (Plain + "I")), + Lit + () + (Right + (Plain + "doing")), + Lit + () + (Right + (Plain + "here?")) ])), + Lit + () + (Right + (Plain + "\n")), + Indent + () + (Lit + () + (Right + (Plain + "* "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "In")), + Lit + () + (Right + (Plain + "this")), + Lit + () + (Right + (Plain + "nested")), + Lit + () + (Right + (Plain + "list.")) ])) ]) ]), + Lit + () + (Right + (Plain "\n")), + Indent + () + (Lit + () + (Right + (Plain + "2. "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "Take")), + Lit + () + (Right + (Plain + "shower.")) ])), + Lit + () + (Right + (Plain "\n")), + Indent + () + (Lit + () + (Right + (Plain + "3. "))) + (Lit + () + (Right + (Plain + " "))) + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "Get")), + Lit + () + (Right + (Plain + "dressed.")) ])) ]))) ]))) ])))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit () (Right (Plain "# "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Lit + () + (Right + (ConsoleText.Bold + (Plain "Evaluation")))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right - (Plain "documents!")) ])) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "You")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "use")), - !Annotated.Group - (!Annotated.Append - [ !Lit (Right (Plain "`")), - !Lit - (Right (Plain "{{ .. }}")), - !Lit (Right (Plain "`")) ]), - !Lit (Right (Plain "to")), - !Lit (Right (Plain "escape")), - !Lit (Right (Plain "out")), - !Lit (Right (Plain "to")), - !Lit (Right (Plain "regular")), - !Lit (Right (Plain "Unison")), - !Lit (Right (Plain "syntax,")), - !Lit (Right (Plain "for")), - !Lit - (Right (Plain "instance")), - !Annotated.Group - (!Annotated.Append - [ !Lit - (Right - (Plain "__not bold__")), - !Lit (Right (Plain ".")) ]), - !Lit (Right (Plain "This")), - !Lit (Right (Plain "is")), - !Lit (Right (Plain "useful")), - !Lit (Right (Plain "for")), - !Lit - (Right (Plain "creating")), - !Lit - (Right (Plain "documents")), - !Lit - (Right - (Plain "programmatically")), - !Lit (Right (Plain "or")), - !Lit (Right (Plain "just")), - !Lit - (Right (Plain "including")), - !Lit (Right (Plain "other")), - !Lit - (Right (Plain "documents.")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Annotated.Group - (!Annotated.Append - [ !Lit (Right (Plain "*")), - !Lit - (Right (Plain "Next")) ]), - !Annotated.Group - (!Annotated.Append - [ !Lit (Right (Plain "up:")), - !Lit (Right (Plain "*")) ]), - !Lit + (Plain "Expressions")), + Lit + () (Right (Plain "can")), + Lit + () (Right (Plain "be")), + Lit + () + (Right + (Plain "evaluated")), + Lit + () + (Right (Plain "inline,")), + Lit + () (Right (Plain "for")), + Lit + () + (Right + (Plain "instance")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Left + (EvalInline + (Term.Term + (Any + (_ -> + 1 + Nat.+ 1))))), + Lit + () + (Right (Plain ".")) ]) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "Blocks")), + Lit + () (Right (Plain "of")), + Lit + () + (Right (Plain "code")), + Lit + () (Right (Plain "can")), + Lit + () (Right (Plain "be")), + Lit + () + (Right + (Plain "evaluated")), + Lit + () (Right (Plain "as")), + Lit + () + (Right (Plain "well,")), + Lit + () (Right (Plain "for")), + Lit + () + (Right + (Plain "instance:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () (Lit + () (Left + (Eval + (Term.Term + (Any + (_ -> + id x = x + id (sqr 10)))))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Lit + () (Right (Plain "also:"))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Lit + () (Left - (SpecialForm.Link - (Right - (Term.Term - (Any (_ -> lists)))))) ]))) ])))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Lit - (Right - (ConsoleText.Bold - (Plain "Lists")))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (ConsoleText.Bold - (Plain "Bulleted"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "lists"))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain "Bulleted")), - !Lit - (Right (Plain "lists")), - !Lit - (Right (Plain "can")), - !Lit - (Right (Plain "use")), - !Annotated.Group - (!Annotated.Append - [ !Annotated.Group - (!Annotated.Append - [ !Lit - (Right - (Plain "`")), - !Lit - (Right - (Plain "+")), - !Lit - (Right - (Plain "`")) ]), - !Lit + (Eval + (Term.Term + (Any + (_ -> + (match 1 with + 1 -> "hi" + _ -> "goodbye")))))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "To")), + Lit + () + (Right (Plain "include")), + Lit () (Right (Plain "a")), + Lit + () + (Right + (Plain "typechecked")), + Lit + () + (Right (Plain "snippet")), + Lit + () (Right (Plain "of")), + Lit + () + (Right (Plain "code")), + Lit + () + (Right (Plain "without")), + Lit + () + (Right + (Plain "evaluating")), + Lit + () (Right (Plain "it,")), + Lit + () (Right (Plain "you")), + Lit + () (Right (Plain "can")), + Lit + () (Right (Plain "do:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () (Lit + () (Left + (ExampleBlock + 0 (Term.Term + (Any + (_ -> + cube : Nat -> Nat + cube x = + use Nat * + x * x * x + ()))))))) ])))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit () (Right (Plain "# "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (ConsoleText.Bold + (Plain "Including"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain "Unison"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain "source"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain "code"))) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "Unison")), + Lit + () + (Right + (Plain "definitions")), + Lit + () (Right (Plain "can")), + Lit + () (Right (Plain "be")), + Lit + () + (Right + (Plain "included")), + Lit + () (Right (Plain "in")), + Lit + () + (Right (Plain "docs.")), + Lit + () (Right (Plain "For")), + Lit + () + (Right + (Plain "instance:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Lit + () + (Left + (SpecialForm.Source + [ Cons + (Left + (typeLink Optional)) + (Cons [] ()), + Cons + (Right + (Term.Term + (Any (_ -> sqr)))) + (Cons [] ()) ]))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "Some")), + Lit + () + (Right + (Plain "rendering")), + Lit + () + (Right (Plain "targets")), + Lit + () + (Right (Plain "also")), + Lit + () + (Right (Plain "support")), + Lit + () + (Right (Plain "folded")), + Lit + () + (Right (Plain "source:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Lit + () + (Left + (FoldedSource + [ Cons + (Left + (typeLink Optional)) + (Cons [] ()), + Cons + (Right + (Term.Term + (Any (_ -> sqr)))) + (Cons [] ()) ]))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "You")), + Lit + () (Right (Plain "can")), + Lit + () + (Right (Plain "also")), + Lit + () + (Right (Plain "include")), + Lit + () + (Right (Plain "just")), + Lit () (Right (Plain "a")), + Lit + () + (Right + (Plain "signature,")), + Lit + () + (Right (Plain "inline,")), + Lit + () + (Right (Plain "with")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Left + (SignatureInline + (Term.Term + (Any + (_ -> sqr))))), + Lit + () (Right (Plain ",")) ]), - !Annotated.Group - (!Annotated.Append - [ !Annotated.Group - (!Annotated.Append - [ !Lit + Lit + () (Right (Plain "or")), + Lit + () (Right (Plain "you")), + Lit + () (Right (Plain "can")), + Lit + () + (Right (Plain "include")), + Lit + () (Right (Plain "one")), + Lit + () (Right (Plain "or")), + Lit + () + (Right (Plain "more")), + Lit + () + (Right + (Plain "signatures")), + Lit + () (Right (Plain "as")), + Lit () (Right (Plain "a")), + Lit + () + (Right (Plain "block:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Lit + () + (Left + (SpecialForm.Signature + [ Term.Term + (Any (_ -> sqr)), + Term.Term + (Any (_ -> (Nat.+))) ]))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "Or")), + Lit + () + (Right + (Plain "alternately:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Lit + () + (Left + (SpecialForm.Signature + [ Term.Term + (Any (_ -> List.map)) ]))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () + (Right (Plain "# "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right - (Plain "`")), - !Lit + (ConsoleText.Bold + (Plain + "Inline"))), + Lit + () (Right - (Plain "-")), - !Lit + (ConsoleText.Bold + (Plain + "snippets"))) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right - (Plain "`")) ]), - !Lit - (Right (Plain ",")) ]), - !Lit - (Right (Plain "or")), - !Annotated.Group - (!Annotated.Append - [ !Lit - (Right (Plain "`")), - !Lit - (Right (Plain "*")), - !Lit - (Right (Plain "`")) ]), - !Lit - (Right (Plain "for")), - !Lit - (Right (Plain "the")), - !Lit - (Right - (Plain "bullets")), - !Lit - (Right - (Plain "(though")), - !Lit - (Right (Plain "the")), - !Lit - (Right - (Plain "choice")), - !Lit - (Right (Plain "will")), - !Lit - (Right (Plain "be")), - !Lit - (Right - (Plain "normalized")), - !Lit - (Right (Plain "away")), - !Lit - (Right (Plain "by")), - !Lit - (Right (Plain "the")), - !Lit - (Right - (Plain - "pretty-printer).")), - !Lit - (Right (Plain "They")), - !Lit - (Right (Plain "can")), - !Lit - (Right (Plain "be")), - !Lit - (Right - (Plain "nested,")), - !Lit - (Right (Plain "to")), - !Lit - (Right (Plain "any")), - !Lit - (Right - (Plain "depth:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit - (Right (Plain "* "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Lit - (Right (Plain "A")))), - !Lit - (Right (Plain "\n")), - !Indent - (!Lit - (Right (Plain "* "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Lit - (Right (Plain "B")))), - !Lit - (Right (Plain "\n")), - !Indent - (!Lit - (Right (Plain "* "))) - (!Lit - (Right (Plain " "))) - (!Annotated.Append - [ !Wrap - (!Lit - (Right - (Plain "C"))), - !Lit - (Right - (Plain "\n")), - !Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit + (Plain "You")), + Lit + () + (Right + (Plain "can")), + Lit + () + (Right + (Plain + "include")), + Lit + () + (Right + (Plain + "typechecked")), + Lit + () + (Right + (Plain + "code")), + Lit + () + (Right + (Plain + "snippets")), + Lit + () + (Right + (Plain + "inline,")), + Lit + () + (Right + (Plain "for")), + Lit + () + (Right + (Plain + "instance:")) ]))), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "\n")), + Indent + () + (Lit + () + (Right (Plain " "))) + (Lit + () + (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit + () (Right (Plain "* "))) - (!Lit + (Lit + () (Right (Plain " "))) - (!Wrap - (!Lit - (Right - (Plain - "C1")))), - !Lit + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Left + (Example + 2 + (Term.Term + (Any + '(f + x -> + f + x + Nat.+ sqr + 1))))), + Lit + () + (Right + (Plain + "-")), + Lit + () + (Right + (Plain + "the")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "`")), + Lit + () + (Right + (Plain + "2")), + Lit + () + (Right + (Plain + "`")) ]), + Lit + () + (Right + (Plain + "says")), + Lit + () + (Right + (Plain + "to")), + Lit + () + (Right + (Plain + "ignore")), + Lit + () + (Right + (Plain + "the")), + Lit + () + (Right + (Plain + "first")), + Lit + () + (Right + (Plain + "two")), + Lit + () + (Right + (Plain + "arguments")), + Lit + () + (Right + (Plain + "when")), + Lit + () + (Right + (Plain + "rendering.")), + Lit + () + (Right + (Plain + "In")), + Lit + () + (Right + (Plain + "richer")), + Lit + () + (Right + (Plain + "renderers,")), + Lit + () + (Right + (Plain + "the")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "`")), + Lit + () + (Right + (Plain + "sqr")), + Lit + () + (Right + (Plain + "`")) ]), + Lit + () + (Right + (Plain + "link")), + Lit + () + (Right + (Plain + "will")), + Lit + () + (Right + (Plain + "be")), + Lit + () + (Right + (Plain + "clickable.")) ])), + Lit + () (Right (Plain "\n")), - !Indent - (!Lit + Indent + () + (Lit + () (Right (Plain "* "))) - (!Lit + (Lit + () (Right (Plain " "))) - (!Wrap - (!Lit - (Right - (Plain - "C2")))) ]) ]) ]))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (ConsoleText.Bold - (Plain "Numbered"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "lists"))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit - (Right (Plain "1. "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Lit - (Right (Plain "A")))), - !Lit - (Right (Plain "\n")), - !Indent - (!Lit - (Right (Plain "2. "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Lit - (Right (Plain "B")))), - !Lit + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "If")), + Lit + () + (Right + (Plain + "your")), + Lit + () + (Right + (Plain + "snippet")), + Lit + () + (Right + (Plain + "expression")), + Lit + () + (Right + (Plain + "is")), + Lit + () + (Right + (Plain + "just")), + Lit + () + (Right + (Plain + "a")), + Lit + () + (Right + (Plain + "single")), + Lit + () + (Right + (Plain + "function")), + Lit + () + (Right + (Plain + "application,")), + Lit + () + (Right + (Plain + "you")), + Lit + () + (Right + (Plain + "can")), + Lit + () + (Right + (Plain + "put")), + Lit + () + (Right + (Plain + "it")), + Lit + () + (Right + (Plain + "in")), + Lit + () + (Right + (Plain + "double")), + Lit + () + (Right + (Plain + "backticks,")), + Lit + () + (Right + (Plain + "like")), + Lit + () + (Right + (Plain + "so:")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Left + (Example + 1 + (Term.Term + (Any + (_ + x -> + sqr + x))))), + Lit + () + (Right + (Plain + ".")) ]), + Lit + () + (Right + (Plain + "This")), + Lit + () + (Right + (Plain + "is")), + Lit + () + (Right + (Plain + "equivalent")), + Lit + () + (Right + (Plain + "to")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Left + (Example + 1 + (Term.Term + (Any + '(x -> + sqr + x))))), + Lit + () + (Right + (Plain + ".")) ]) ])) ]))) ]))) ])))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Annotated.Append + () + [ Indent + () + (Lit () (Right (Plain "# "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (ConsoleText.Bold + (Plain "Non-Unison"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain "code"))), + Lit + () + (Right + (ConsoleText.Bold + (Plain "blocks"))) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "Use")), + Lit + () + (Right (Plain "three")), + Lit + () (Right (Plain "or")), + Lit + () + (Right (Plain "more")), + Lit + () + (Right (Plain "single")), + Lit + () + (Right (Plain "quotes")), + Lit + () (Right (Plain "to")), + Lit + () + (Right (Plain "start")), + Lit () (Right (Plain "a")), + Lit + () + (Right (Plain "block")), + Lit + () + (Right (Plain "with")), + Lit + () (Right (Plain "no")), + Lit + () + (Right (Plain "syntax")), + Lit + () + (Right + (Plain "highlighting:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "``` ")), + Annotated.Group + () + (Lit + () + (Right (Plain "raw"))), + Lit + () (Right (Plain "\n")), - !Indent - (!Lit - (Right (Plain "3. "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Lit - (Right (Plain "C")))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right (Plain "The")), - !Lit - (Right (Plain "first")), - !Lit - (Right - (Plain "number")), - !Lit - (Right (Plain "of")), - !Lit - (Right (Plain "the")), - !Lit - (Right (Plain "list")), - !Lit - (Right - (Plain "determines")), - !Lit - (Right (Plain "the")), - !Lit - (Right - (Plain "starting")), - !Lit + Lit + () (Right - (Plain "number")), - !Lit - (Right (Plain "in")), - !Lit - (Right (Plain "the")), - !Lit - (Right - (Plain "rendered")), - !Lit - (Right - (Plain "output.")), - !Lit - (Right (Plain "The")), - !Lit - (Right (Plain "other")), - !Lit - (Right - (Plain "numbers")), - !Lit - (Right (Plain "are")), - !Lit + (Plain + " _____ _ \n | | |___|_|___ ___ ___ \n | | | | |_ -| . | |\n |_____|_|_|_|___|___|_|_|\n ")), + Lit + () + (Right (Plain "\n")), + Lit + () + (Right (Plain "```")) ])))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "You")), + Lit + () (Right (Plain "can")), + Lit + () (Right (Plain "use")), + Lit + () + (Right (Plain "three")), + Lit + () (Right (Plain "or")), + Lit + () + (Right (Plain "more")), + Lit + () + (Right + (Plain "backticks")), + Lit + () + (Right (Plain "plus")), + Lit () (Right (Plain "a")), + Lit + () + (Right + (Plain "language")), + Lit + () + (Right (Plain "name")), + Lit + () (Right (Plain "for")), + Lit + () + (Right (Plain "blocks")), + Lit + () + (Right (Plain "with")), + Lit + () + (Right (Plain "syntax")), + Lit + () + (Right + (Plain "highlighting:")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "``` ")), + Annotated.Group + () + (Lit + () (Right - (Plain "ignored:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit + (Plain "Haskell"))), + Lit + () (Right (Plain "\n")), + Lit + () + (Right + (Plain + "-- A fenced code block which isn't parsed by Unison\nreverse = foldl (flip (:)) []")), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "```")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "``` ")), + Annotated.Group + () + (Lit + () + (Right (Plain "Scala"))), + Lit + () (Right (Plain "\n")), + Lit + () + (Right + (Plain + "// A fenced code block which isn't parsed by Unison\ndef reverse[A](xs: List[A]) = \n xs.foldLeft(Nil : List[A])((acc,a) => a +: acc)")), + Lit + () (Right (Plain "\n")), + Lit + () (Right (Plain "```")) ]))) ])))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Indent + () + (Lit () (Right (Plain " "))) + (Lit () (Right (Plain " "))) + (Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Annotated.Append + () + [ Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "There")), + Lit () (Right (Plain "are")), + Lit + () (Right (Plain "also")), + Lit + () + (Right (Plain "asides,")), + Lit + () + (Right (Plain "callouts,")), + Lit + () + (Right (Plain "tables,")), + Lit + () + (Right (Plain "tooltips,")), + Lit () (Right (Plain "and")), + Lit + () (Right (Plain "more.")), + Lit + () (Right (Plain "These")), + Lit + () (Right (Plain "don't")), + Lit + () + (Right (Plain "currently")), + Lit + () (Right (Plain "have")), + Lit + () + (Right (Plain "special")), + Lit + () + (Right (Plain "syntax;")), + Lit + () (Right (Plain "just")), + Lit () (Right (Plain "use")), + Lit () (Right (Plain "the")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "`")), + Lit + () (Right - (Plain "10. "))) - (!Lit + (Plain "{{ }}")), + Lit + () + (Right (Plain "`")) ]), + Lit + () + (Right (Plain "syntax")), + Lit () (Right (Plain "to")), + Lit + () (Right (Plain "call")), + Lit + () (Right (Plain "these")), + Lit + () + (Right (Plain "functions")), + Lit + () + (Right (Plain "directly.")) ])), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Annotated.Group + () + (Wrap + () + (Lit + () + (Left + (SpecialForm.Signature + [ Term.Term + (Any (_ -> docAside)), + Term.Term + (Any (_ -> docCallout)), + Term.Term + (Any + (_ -> docBlockquote)), + Term.Term + (Any (_ -> docTooltip)), + Term.Term + (Any (_ -> docTable)) ])))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right (Plain "This")), + Lit () (Right (Plain "is")), + Lit () (Right (Plain "an")), + Lit + () + (Right (Plain "aside.")), + Lit + () + (Right + (Foreground + BrightBlack + (Plain "("))), + Wrap + () + (Annotated.Append + () + [ Lit + () (Right - (Plain " "))) - (!Wrap - (!Lit - (Right (Plain "A")))), - !Lit - (Right (Plain "\n")), - !Indent - (!Lit + (Foreground + BrightBlack + (Plain "Some"))), + Lit + () (Right - (Plain "11. "))) - (!Lit + (Foreground + BrightBlack + (Plain "extra"))), + Lit + () (Right - (Plain " "))) - (!Wrap - (!Lit - (Right (Plain "B")))), - !Lit - (Right (Plain "\n")), - !Indent - (!Lit + (Foreground + BrightBlack + (Plain "detail"))), + Lit + () (Right - (Plain "12. "))) - (!Lit + (Foreground + BrightBlack + (Plain "that"))), + Lit + () (Right - (Plain " "))) - (!Wrap - (!Lit - (Right (Plain "C")))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain "Numbered")), - !Lit - (Right (Plain "lists")), - !Lit - (Right (Plain "can")), - !Lit - (Right (Plain "be")), - !Lit - (Right - (Plain "nested")), - !Lit - (Right (Plain "as")), - !Lit - (Right (Plain "well,")), - !Lit - (Right (Plain "and")), - !Lit - (Right - (Plain "combined")), - !Lit - (Right (Plain "with")), - !Lit - (Right - (Plain "bulleted")), - !Lit - (Right - (Plain "lists:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit - (Right (Plain "1. "))) - (!Lit - (Right (Plain " "))) - (!Annotated.Append - [ !Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain - "Wake")), - !Lit - (Right - (Plain "up.")) ]), - !Lit - (Right - (Plain "\n")), - !Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit - (Right - (Plain - "* "))) - (!Lit - (Right - (Plain - " "))) - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain - "What")), - !Lit - (Right - (Plain - "am")), - !Lit - (Right - (Plain - "I")), - !Lit - (Right - (Plain - "doing")), - !Lit - (Right - (Plain - "here?")) ])), - !Lit - (Right - (Plain "\n")), - !Indent - (!Lit - (Right - (Plain - "* "))) - (!Lit - (Right - (Plain - " "))) - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain - "In")), - !Lit - (Right - (Plain - "this")), - !Lit - (Right - (Plain - "nested")), - !Lit - (Right - (Plain - "list.")) ])) ]) ]), - !Lit + (Foreground + BrightBlack + (Plain "doesn't"))), + Lit + () + (Right + (Foreground + BrightBlack + (Plain "belong"))), + Lit + () + (Right + (Foreground + BrightBlack + (Plain "in"))), + Lit + () + (Right + (Foreground + BrightBlack + (Plain "main"))), + Lit + () + (Right + (Foreground + BrightBlack + (Plain "text."))) ]), + Lit + () + (Right + (Foreground + BrightBlack + (Plain ")"))) ])), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Indent + () + (Lit + () (Right (Plain " | "))) + (Lit + () (Right (Plain " | "))) + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain "This")), + Lit + () + (Right (Plain "is")), + Lit + () + (Right (Plain "an")), + Lit + () + (Right + (Plain "important")), + Lit + () + (Right + (Plain "callout,")), + Lit + () + (Right + (Plain "with")), + Lit + () + (Right (Plain "no")), + Lit + () + (Right + (Plain "icon.")) ]))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Indent + () + (Lit + () (Right (Plain " | "))) + (Lit + () (Right (Plain " | "))) + (Annotated.Append + () + [ Wrap + () + (Lit + () + (Right + (ConsoleText.Bold + (Plain "🌻")))), + Lit + () (Right (Plain "\n")), - !Indent - (!Lit - (Right (Plain "2. "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain "Take")), - !Lit - (Right - (Plain - "shower.")) ])), - !Lit + Lit + () (Right (Plain "")), + Lit + () (Right (Plain "\n")), - !Indent - (!Lit - (Right (Plain "3. "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain "Get")), - !Lit + Wrap + () + (Annotated.Append + () + [ Lit + () (Right - (Plain - "dressed.")) ])) ]))) ]))) ])))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Lit - (Right - (ConsoleText.Bold - (Plain "Evaluation")))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right (Plain "Expressions")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "be")), - !Lit - (Right (Plain "evaluated")), - !Lit (Right (Plain "inline,")), - !Lit (Right (Plain "for")), - !Lit - (Right (Plain "instance")), - !Annotated.Group - (!Annotated.Append - [ !Lit - (Left - (EvalInline - (Term.Term - (Any - (_ -> 1 Nat.+ 1))))), - !Lit (Right (Plain ".")) ]) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "Blocks")), - !Lit (Right (Plain "of")), - !Lit (Right (Plain "code")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "be")), - !Lit - (Right (Plain "evaluated")), - !Lit (Right (Plain "as")), - !Lit (Right (Plain "well,")), - !Lit (Right (Plain "for")), - !Lit - (Right (Plain "instance:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Lit - (Left - (Eval - (Term.Term - (Any - (_ -> - id x = x - id (sqr 10)))))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Lit (Right (Plain "also:"))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Lit - (Left - (Eval - (Term.Term - (Any - (_ -> - (match 1 with - 1 -> "hi" - _ -> "goodbye")))))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "To")), - !Lit (Right (Plain "include")), - !Lit (Right (Plain "a")), - !Lit - (Right (Plain "typechecked")), - !Lit (Right (Plain "snippet")), - !Lit (Right (Plain "of")), - !Lit (Right (Plain "code")), - !Lit (Right (Plain "without")), - !Lit - (Right (Plain "evaluating")), - !Lit (Right (Plain "it,")), - !Lit (Right (Plain "you")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "do:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Lit - (Left - (ExampleBlock - 0 (Term.Term - (Any - (_ -> - cube : Nat -> Nat - cube x = - use Nat * - x * x * x - ()))))))) ])))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (ConsoleText.Bold - (Plain "Including"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "Unison"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "source"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "code"))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "Unison")), - !Lit - (Right (Plain "definitions")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "be")), - !Lit - (Right (Plain "included")), - !Lit (Right (Plain "in")), - !Lit (Right (Plain "docs.")), - !Lit (Right (Plain "For")), - !Lit - (Right (Plain "instance:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Lit - (Left - (SpecialForm.Source - [ (Left (typeLink Optional), - []), - (Right - (Term.Term - (Any (_ -> sqr))), - []) ]))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "Some")), - !Lit - (Right (Plain "rendering")), - !Lit (Right (Plain "targets")), - !Lit (Right (Plain "also")), - !Lit (Right (Plain "support")), - !Lit (Right (Plain "folded")), - !Lit (Right (Plain "source:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Lit - (Left - (FoldedSource - [ (Left (typeLink Optional), - []), - (Right - (Term.Term - (Any (_ -> sqr))), - []) ]))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "You")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "also")), - !Lit (Right (Plain "include")), - !Lit (Right (Plain "just")), - !Lit (Right (Plain "a")), - !Lit - (Right (Plain "signature,")), - !Lit (Right (Plain "inline,")), - !Lit (Right (Plain "with")), - !Annotated.Group - (!Annotated.Append - [ !Lit - (Left - (SignatureInline - (Term.Term - (Any (_ -> sqr))))), - !Lit (Right (Plain ",")) ]), - !Lit (Right (Plain "or")), - !Lit (Right (Plain "you")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "include")), - !Lit (Right (Plain "one")), - !Lit (Right (Plain "or")), - !Lit (Right (Plain "more")), - !Lit - (Right (Plain "signatures")), - !Lit (Right (Plain "as")), - !Lit (Right (Plain "a")), - !Lit (Right (Plain "block:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Lit - (Left - (SpecialForm.Signature - [ Term.Term (Any (_ -> sqr)), - Term.Term - (Any (_ -> (Nat.+))) ]))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "Or")), - !Lit - (Right (Plain "alternately:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Lit - (Left - (SpecialForm.Signature - [ Term.Term - (Any (_ -> List.map)) ]))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (ConsoleText.Bold - (Plain "Inline"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "snippets"))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right (Plain "You")), - !Lit - (Right (Plain "can")), - !Lit - (Right - (Plain "include")), - !Lit - (Right - (Plain "typechecked")), - !Lit - (Right (Plain "code")), - !Lit - (Right - (Plain "snippets")), - !Lit - (Right - (Plain "inline,")), - !Lit - (Right (Plain "for")), - !Lit - (Right - (Plain "instance:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit - (Right (Plain "* "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Annotated.Append - [ !Lit - (Left - (Example - 2 - (Term.Term - (Any - '(f x -> - f x - Nat.+ sqr - 1))))), - !Lit - (Right - (Plain "-")), - !Lit + (Plain "This")), + Lit + () (Right - (Plain "the")), - !Annotated.Group - (!Annotated.Append - [ !Lit - (Right - (Plain "`")), - !Lit - (Right - (Plain "2")), - !Lit - (Right - (Plain "`")) ]), - !Lit + (Plain "is")), + Lit + () (Right - (Plain "says")), - !Lit + (Plain "an")), + Lit + () (Right - (Plain "to")), - !Lit + (Plain + "important")), + Lit + () (Right (Plain - "ignore")), - !Lit + "callout,")), + Lit + () (Right - (Plain "the")), - !Lit + (Plain "with")), + Lit + () (Right - (Plain "first")), - !Lit + (Plain "an")), + Lit + () (Right - (Plain "two")), - !Lit + (Plain "icon.")), + Lit + () (Right - (Plain - "arguments")), - !Lit + (Plain "The")), + Lit + () (Right - (Plain "when")), - !Lit + (Plain "text")), + Lit + () (Right - (Plain - "rendering.")), - !Lit + (Plain "wraps")), + Lit + () (Right - (Plain "In")), - !Lit + (Plain "onto")), + Lit + () (Right (Plain - "richer")), - !Lit + "multiple")), + Lit + () (Right (Plain - "renderers,")), - !Lit - (Right - (Plain "the")), - !Annotated.Group - (!Annotated.Append - [ !Lit + "lines.")) ]) ])))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Annotated.Group + () + (Wrap + () + (Annotated.Group + () + (Indent + () + (Lit () (Right (Plain "> "))) + (Lit () (Right (Plain "> "))) + (Annotated.Group + () + (Annotated.Append + () + [ Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Lit + () (Right - (Plain "`")), - !Lit + (Plain + "\"And")), + Lit + () (Right (Plain - "sqr")), - !Lit + "what")), + Lit + () (Right - (Plain "`")) ]), - !Lit - (Right - (Plain "link")), - !Lit - (Right - (Plain "will")), - !Lit - (Right - (Plain "be")), - !Lit - (Right - (Plain - "clickable.")) ])), - !Lit - (Right (Plain "\n")), - !Indent - (!Lit - (Right (Plain "* "))) - (!Lit - (Right (Plain " "))) - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain "If")), - !Lit - (Right - (Plain "your")), - !Lit - (Right - (Plain - "snippet")), - !Lit - (Right - (Plain - "expression")), - !Lit - (Right - (Plain "is")), - !Lit - (Right - (Plain "just")), - !Lit - (Right - (Plain "a")), - !Lit - (Right - (Plain - "single")), - !Lit - (Right - (Plain - "function")), - !Lit - (Right - (Plain - "application,")), - !Lit - (Right - (Plain "you")), - !Lit - (Right - (Plain "can")), - !Lit - (Right - (Plain "put")), - !Lit - (Right - (Plain "it")), - !Lit - (Right - (Plain "in")), - !Lit - (Right - (Plain - "double")), - !Lit - (Right - (Plain - "backticks,")), - !Lit - (Right - (Plain "like")), - !Lit - (Right - (Plain "so:")), - !Annotated.Group - (!Annotated.Append - [ !Lit - (Left - (Example - 1 - (Term.Term - (Any - (_ - x -> - sqr - x))))), - !Lit + (Plain + "is")), + Lit + () (Right - (Plain ".")) ]), - !Lit - (Right - (Plain "This")), - !Lit - (Right - (Plain "is")), - !Lit - (Right - (Plain - "equivalent")), - !Lit - (Right - (Plain "to")), - !Annotated.Group - (!Annotated.Append - [ !Lit - (Left - (Example - 1 - (Term.Term - (Any - '(x -> - sqr - x))))), - !Lit + (Plain + "the")), + Lit + () (Right - (Plain ".")) ]) ])) ]))) ]))) ])))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Group - (!Annotated.Append - [ !Indent - (!Lit (Right (Plain "# "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (ConsoleText.Bold - (Plain "Non-Unison"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "code"))), - !Lit - (Right - (ConsoleText.Bold - (Plain "blocks"))) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "Use")), - !Lit (Right (Plain "three")), - !Lit (Right (Plain "or")), - !Lit (Right (Plain "more")), - !Lit (Right (Plain "single")), - !Lit (Right (Plain "quotes")), - !Lit (Right (Plain "to")), - !Lit (Right (Plain "start")), - !Lit (Right (Plain "a")), - !Lit (Right (Plain "block")), - !Lit (Right (Plain "with")), - !Lit (Right (Plain "no")), - !Lit (Right (Plain "syntax")), - !Lit - (Right (Plain "highlighting:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Group - (!Annotated.Append - [ !Lit (Right (Plain "``` ")), - !Annotated.Group - (!Lit (Right (Plain "raw"))), - !Lit (Right (Plain "\n")), - !Lit - (Right - (Plain - " _____ _ \n | | |___|_|___ ___ ___ \n | | | | |_ -| . | |\n |_____|_|_|_|___|___|_|_|\n ")), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "```")) ])))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "You")), - !Lit (Right (Plain "can")), - !Lit (Right (Plain "use")), - !Lit (Right (Plain "three")), - !Lit (Right (Plain "or")), - !Lit (Right (Plain "more")), - !Lit - (Right (Plain "backticks")), - !Lit (Right (Plain "plus")), - !Lit (Right (Plain "a")), - !Lit - (Right (Plain "language")), - !Lit (Right (Plain "name")), - !Lit (Right (Plain "for")), - !Lit (Right (Plain "blocks")), - !Lit (Right (Plain "with")), - !Lit (Right (Plain "syntax")), - !Lit - (Right (Plain "highlighting:")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Lit (Right (Plain "``` ")), - !Annotated.Group - (!Lit - (Right (Plain "Haskell"))), - !Lit (Right (Plain "\n")), - !Lit - (Right - (Plain - "-- A fenced code block which isn't parsed by Unison\nreverse = foldl (flip (:)) []")), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "```")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Annotated.Group - (!Annotated.Append - [ !Lit (Right (Plain "``` ")), - !Annotated.Group - (!Lit (Right (Plain "Scala"))), - !Lit (Right (Plain "\n")), - !Lit - (Right - (Plain - "// A fenced code block which isn't parsed by Unison\ndef reverse[A](xs: List[A]) = \n xs.foldLeft(Nil : List[A])((acc,a) => a +: acc)")), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "```")) ]))) ])))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Indent - (!Lit (Right (Plain " "))) - (!Lit (Right (Plain " "))) - (!Annotated.Group - (!Wrap - (!Annotated.Group - (!Annotated.Append - [ !Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "There")), - !Lit (Right (Plain "are")), - !Lit (Right (Plain "also")), - !Lit (Right (Plain "asides,")), - !Lit (Right (Plain "callouts,")), - !Lit (Right (Plain "tables,")), - !Lit (Right (Plain "tooltips,")), - !Lit (Right (Plain "and")), - !Lit (Right (Plain "more.")), - !Lit (Right (Plain "These")), - !Lit (Right (Plain "don't")), - !Lit (Right (Plain "currently")), - !Lit (Right (Plain "have")), - !Lit (Right (Plain "special")), - !Lit (Right (Plain "syntax;")), - !Lit (Right (Plain "just")), - !Lit (Right (Plain "use")), - !Lit (Right (Plain "the")), - !Annotated.Group - (!Annotated.Append - [ !Lit (Right (Plain "`")), - !Lit (Right (Plain "{{ }}")), - !Lit (Right (Plain "`")) ]), - !Lit (Right (Plain "syntax")), - !Lit (Right (Plain "to")), - !Lit (Right (Plain "call")), - !Lit (Right (Plain "these")), - !Lit (Right (Plain "functions")), - !Lit (Right (Plain "directly.")) ])), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Annotated.Group - (!Wrap - (!Lit - (Left - (SpecialForm.Signature - [ Term.Term - (Any (_ -> docAside)), - Term.Term - (Any (_ -> docCallout)), - Term.Term - (Any (_ -> docBlockquote)), - Term.Term - (Any (_ -> docTooltip)), - Term.Term - (Any (_ -> docTable)) ])))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "This")), - !Lit (Right (Plain "is")), - !Lit (Right (Plain "an")), - !Lit (Right (Plain "aside.")), - !Lit - (Right - (Foreground - BrightBlack (Plain "("))), - !Wrap - (!Annotated.Append - [ !Lit - (Right - (Foreground - BrightBlack - (Plain "Some"))), - !Lit - (Right - (Foreground - BrightBlack - (Plain "extra"))), - !Lit - (Right - (Foreground - BrightBlack - (Plain "detail"))), - !Lit - (Right - (Foreground - BrightBlack - (Plain "that"))), - !Lit - (Right - (Foreground - BrightBlack - (Plain "doesn't"))), - !Lit - (Right - (Foreground - BrightBlack - (Plain "belong"))), - !Lit - (Right - (Foreground - BrightBlack (Plain "in"))), - !Lit - (Right - (Foreground - BrightBlack - (Plain "main"))), - !Lit - (Right - (Foreground - BrightBlack - (Plain "text."))) ]), - !Lit - (Right - (Foreground - BrightBlack (Plain ")"))) ])), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Annotated.Group - (!Wrap - (!Annotated.Group - (!Indent - (!Lit (Right (Plain " | "))) - (!Lit (Right (Plain " | "))) - (!Wrap - (!Annotated.Append - [ !Lit - (Right (Plain "This")), - !Lit (Right (Plain "is")), - !Lit (Right (Plain "an")), - !Lit - (Right (Plain "important")), - !Lit - (Right (Plain "callout,")), - !Lit - (Right (Plain "with")), - !Lit (Right (Plain "no")), - !Lit - (Right (Plain "icon.")) ]))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Annotated.Group - (!Wrap - (!Annotated.Group - (!Indent - (!Lit (Right (Plain " | "))) - (!Lit (Right (Plain " | "))) - (!Annotated.Append - [ !Wrap - (!Lit - (Right - (ConsoleText.Bold - (Plain "🌻")))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "")), - !Lit (Right (Plain "\n")), - !Wrap - (!Annotated.Append - [ !Lit - (Right (Plain "This")), - !Lit - (Right (Plain "is")), - !Lit - (Right (Plain "an")), - !Lit - (Right - (Plain "important")), - !Lit - (Right - (Plain "callout,")), - !Lit - (Right (Plain "with")), - !Lit - (Right (Plain "an")), - !Lit - (Right (Plain "icon.")), - !Lit - (Right (Plain "The")), - !Lit - (Right (Plain "text")), - !Lit - (Right (Plain "wraps")), - !Lit - (Right (Plain "onto")), - !Lit - (Right - (Plain "multiple")), - !Lit - (Right (Plain "lines.")) ]) ])))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Annotated.Group - (!Wrap - (!Annotated.Group - (!Indent - (!Lit (Right (Plain "> "))) - (!Lit (Right (Plain "> "))) - (!Annotated.Group - (!Annotated.Append - [ !Annotated.Group - (!Wrap - (!Annotated.Append - [ !Lit - (Right - (Plain "\"And")), - !Lit - (Right - (Plain "what")), - !Lit - (Right (Plain "is")), - !Lit - (Right (Plain "the")), - !Lit - (Right (Plain "use")), - !Lit - (Right (Plain "of")), - !Lit - (Right (Plain "a")), - !Lit - (Right - (Plain "book,\"")), - !Lit - (Right - (Plain "thought")), - !Lit - (Right - (Plain "Alice,")), - !Lit - (Right - (Plain "\"without")), - !Lit - (Right - (Plain "pictures")), - !Lit - (Right (Plain "or")), - !Lit - (Right - (Plain - "conversation?\"")) ])), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Annotated.Group - (!Wrap - (!Annotated.Append - [ !Annotated.Group - (!Annotated.Append - [ !Lit - (Right - (Plain "*")), - !Lit - (Right - (Plain "Lewis")) ]), - !Lit - (Right - (Plain "Carroll,")), - !Lit - (Right - (Plain "Alice's")), - !Lit - (Right - (Plain - "Adventures")), - !Lit - (Right (Plain "in")), - !Annotated.Group - (!Annotated.Append - [ !Lit - (Right - (Plain - "Wonderland")), - !Lit - (Right - (Plain "*")) ]) ])) ]))))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Annotated.Group - (!Wrap - (!Wrap - (!Annotated.Append - [ !Lit (Right (Plain "Hover")), - !Lit (Right (Plain "over")), - !Lit (Right (Plain "me")) ]))), - !Lit (Right (Plain "\n")), - !Lit (Right (Plain "\n")), - !Annotated.Group - (!Wrap - (!Annotated.Table - [ [ !Wrap - (!Lit (Right (Plain "a"))), - !Wrap (!Lit (Right (Plain "b"))), - !Wrap - (!Annotated.Append - [ !Lit (Right (Plain "A")), - !Lit - (Right (Plain "longer")), - !Lit - (Right (Plain "paragraph")), - !Lit (Right (Plain "that")), - !Lit (Right (Plain "will")), - !Lit (Right (Plain "split")), - !Lit (Right (Plain "onto")), - !Lit - (Right (Plain "multiple")), - !Lit - (Right (Plain "lines,")), - !Lit (Right (Plain "such")), - !Lit (Right (Plain "that")), - !Lit (Right (Plain "this")), - !Lit (Right (Plain "row")), - !Lit - (Right (Plain "occupies")), - !Lit - (Right (Plain "multiple")), - !Lit (Right (Plain "lines")), - !Lit (Right (Plain "in")), - !Lit (Right (Plain "the")), - !Lit - (Right (Plain "rendered")), - !Lit - (Right (Plain "table.")) ]) ], - [ !Wrap - (!Annotated.Append - [ !Lit (Right (Plain "Some")), - !Lit (Right (Plain "text")) ]), - !Wrap - (!Annotated.Append - [ !Lit (Right (Plain "More")), - !Lit (Right (Plain "text")) ]), - !Wrap - (!Lit (Right (Plain "Zounds!"))) ] ])) ])))) ]) + (Plain + "use")), + Lit + () + (Right + (Plain + "of")), + Lit + () + (Right + (Plain "a")), + Lit + () + (Right + (Plain + "book,\"")), + Lit + () + (Right + (Plain + "thought")), + Lit + () + (Right + (Plain + "Alice,")), + Lit + () + (Right + (Plain + "\"without")), + Lit + () + (Right + (Plain + "pictures")), + Lit + () + (Right + (Plain + "or")), + Lit + () + (Right + (Plain + "conversation?\"")) ])), + Lit + () + (Right (Plain "\n")), + Lit + () + (Right (Plain "\n")), + Annotated.Group + () + (Wrap + () + (Annotated.Append + () + [ Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "*")), + Lit + () + (Right + (Plain + "Lewis")) ]), + Lit + () + (Right + (Plain + "Carroll,")), + Lit + () + (Right + (Plain + "Alice's")), + Lit + () + (Right + (Plain + "Adventures")), + Lit + () + (Right + (Plain + "in")), + Annotated.Group + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain + "Wonderland")), + Lit + () + (Right + (Plain + "*")) ]) ])) ]))))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Annotated.Group + () + (Wrap + () + (Wrap + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "Hover")), + Lit + () + (Right (Plain "over")), + Lit + () (Right (Plain "me")) ]))), + Lit () (Right (Plain "\n")), + Lit () (Right (Plain "\n")), + Annotated.Group + () + (Wrap + () + (Annotated.Table + () + [ [ Wrap + () + (Lit + () (Right (Plain "a"))), + Wrap + () + (Lit + () (Right (Plain "b"))), + Wrap + () + (Annotated.Append + () + [ Lit + () + (Right (Plain "A")), + Lit + () + (Right + (Plain "longer")), + Lit + () + (Right + (Plain + "paragraph")), + Lit + () + (Right + (Plain "that")), + Lit + () + (Right + (Plain "will")), + Lit + () + (Right + (Plain "split")), + Lit + () + (Right + (Plain "onto")), + Lit + () + (Right + (Plain + "multiple")), + Lit + () + (Right + (Plain "lines,")), + Lit + () + (Right + (Plain "such")), + Lit + () + (Right + (Plain "that")), + Lit + () + (Right + (Plain "this")), + Lit + () + (Right + (Plain "row")), + Lit + () + (Right + (Plain + "occupies")), + Lit + () + (Right + (Plain + "multiple")), + Lit + () + (Right + (Plain "lines")), + Lit + () + (Right + (Plain "in")), + Lit + () + (Right + (Plain "the")), + Lit + () + (Right + (Plain + "rendered")), + Lit + () + (Right + (Plain "table.")) ]) ], + [ Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain "Some")), + Lit + () + (Right + (Plain "text")) ]), + Wrap + () + (Annotated.Append + () + [ Lit + () + (Right + (Plain "More")), + Lit + () + (Right + (Plain "text")) ]), + Wrap + () + (Lit + () + (Right + (Plain "Zounds!"))) ] ])) ])))) ]) ``` diff --git a/unison-src/transcripts/constructor-applied-to-unit.md b/unison-src/transcripts/constructor-applied-to-unit.md new file mode 100644 index 0000000000..df1341aa5c --- /dev/null +++ b/unison-src/transcripts/constructor-applied-to-unit.md @@ -0,0 +1,11 @@ +```ucm:hide +.> alias.type ##Nat Nat +.> alias.term ##Any.Any Any +``` + +```unison +structural type Zoink a b c = Zoink a b c + +> Any () +> [ Zoink [0,1,2,3,4,5] [6,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,3] () ] +``` diff --git a/unison-src/transcripts/constructor-applied-to-unit.output.md b/unison-src/transcripts/constructor-applied-to-unit.output.md new file mode 100644 index 0000000000..feca7924d7 --- /dev/null +++ b/unison-src/transcripts/constructor-applied-to-unit.output.md @@ -0,0 +1,52 @@ +```unison +structural type Zoink a b c = Zoink a b c + +> Any () +> [ Zoink [0,1,2,3,4,5] [6,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,3] () ] +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + structural type Zoink a b c + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. + + 3 | > Any () + ⧩ + Any () + + 4 | > [ Zoink [0,1,2,3,4,5] [6,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,3] () ] + ⧩ + [ Zoink + [0, 1, 2, 3, 4, 5] + [ 6, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 4, + 4, + 4, + 4, + 4, + 4, + 4, + 4, + 4, + 3 ] + () ] + +``` From d649ad4b3dd9c3bd7d4c37212231193f5cf7e74b Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Fri, 11 Feb 2022 17:33:41 -0500 Subject: [PATCH 226/297] Test cases. --- unison-src/transcripts/fix2423.md | 31 +++++++++++++ unison-src/transcripts/fix2423.output.md | 46 +++++++++++++++++++ unison-src/transcripts/old-fold-right.md | 17 +++++++ .../transcripts/old-fold-right.output.md | 25 ++++++++++ 4 files changed, 119 insertions(+) create mode 100644 unison-src/transcripts/fix2423.md create mode 100644 unison-src/transcripts/fix2423.output.md create mode 100644 unison-src/transcripts/old-fold-right.md create mode 100644 unison-src/transcripts/old-fold-right.output.md diff --git a/unison-src/transcripts/fix2423.md b/unison-src/transcripts/fix2423.md new file mode 100644 index 0000000000..4f5d073c0a --- /dev/null +++ b/unison-src/transcripts/fix2423.md @@ -0,0 +1,31 @@ +```ucm:hide +.> builtins.merge +``` + +```unison +structural ability Split where + skip! : x + both : a -> a -> a + +Split.append : '{Split, g} a -> '{Split, g} a -> '{Split, g} a +Split.append s1 s2 _ = force (both s1 s2) + +force a = !a + +Split.zipSame : '{Split, g} a -> '{Split, g} b -> '{Split, g} (a, b) +Split.zipSame sa sb _ = + go : '{Split,g2} y -> Request {Split} x ->{Split,g2} (x,y) + go sb = cases + { a } -> (a, !sb) + { skip! -> _ } -> skip! + { both la ra -> k } -> + handle !sb with cases + { _ } -> skip! + { skip! -> k } -> skip! + { both lb rb -> k2 } -> + force (Split.append + (zipSame '(k la) '(k2 lb)) + (zipSame '(k ra) '(k2 rb))) + + handle !sa with go sb +``` diff --git a/unison-src/transcripts/fix2423.output.md b/unison-src/transcripts/fix2423.output.md new file mode 100644 index 0000000000..a51c1a32fa --- /dev/null +++ b/unison-src/transcripts/fix2423.output.md @@ -0,0 +1,46 @@ +```unison +structural ability Split where + skip! : x + both : a -> a -> a + +Split.append : '{Split, g} a -> '{Split, g} a -> '{Split, g} a +Split.append s1 s2 _ = force (both s1 s2) + +force a = !a + +Split.zipSame : '{Split, g} a -> '{Split, g} b -> '{Split, g} (a, b) +Split.zipSame sa sb _ = + go : '{Split,g2} y -> Request {Split} x ->{Split,g2} (x,y) + go sb = cases + { a } -> (a, !sb) + { skip! -> _ } -> skip! + { both la ra -> k } -> + handle !sb with cases + { _ } -> skip! + { skip! -> k } -> skip! + { both lb rb -> k2 } -> + force (Split.append + (zipSame '(k la) '(k2 lb)) + (zipSame '(k ra) '(k2 rb))) + + handle !sa with go sb +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + structural ability Split + Split.append : '{g, Split} a + -> '{g, Split} a + -> '{g, Split} a + Split.zipSame : '{g, Split} a + -> '{g, Split} b + -> '{g, Split} (a, b) + force : '{g} o ->{g} o + +``` diff --git a/unison-src/transcripts/old-fold-right.md b/unison-src/transcripts/old-fold-right.md new file mode 100644 index 0000000000..f3c01d5d01 --- /dev/null +++ b/unison-src/transcripts/old-fold-right.md @@ -0,0 +1,17 @@ +```ucm:hide +.> builtins.merge +``` + +```unison +oldRight: (b ->{e} a ->{e} b) -> [a] ->{e} [b] +oldRight f la = bug "out" + +pecan: '{} [Text] +pecan = 'let + la = [1, 2, 3] + f: Text -> Nat -> Text + f = bug "out" + + oldRight f la +``` + diff --git a/unison-src/transcripts/old-fold-right.output.md b/unison-src/transcripts/old-fold-right.output.md new file mode 100644 index 0000000000..a9c04ea204 --- /dev/null +++ b/unison-src/transcripts/old-fold-right.output.md @@ -0,0 +1,25 @@ +```unison +oldRight: (b ->{e} a ->{e} b) -> [a] ->{e} [b] +oldRight f la = bug "out" + +pecan: '{} [Text] +pecan = 'let + la = [1, 2, 3] + f: Text -> Nat -> Text + f = bug "out" + + oldRight f la +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + oldRight : (b ->{e} a ->{e} b) -> [a] ->{e} [b] + pecan : '[Text] + +``` From be901d5dabc92dacfabe5cb971917ae868ee2fbc Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Fri, 11 Feb 2022 17:42:39 -0500 Subject: [PATCH 227/297] More test cases. --- unison-src/transcripts/fix2712.md | 38 +++++++++ unison-src/transcripts/fix2712.output.md | 103 +++++++++++++++++++++++ 2 files changed, 141 insertions(+) create mode 100644 unison-src/transcripts/fix2712.md create mode 100644 unison-src/transcripts/fix2712.output.md diff --git a/unison-src/transcripts/fix2712.md b/unison-src/transcripts/fix2712.md new file mode 100644 index 0000000000..bd7869b079 --- /dev/null +++ b/unison-src/transcripts/fix2712.md @@ -0,0 +1,38 @@ +```ucm:hide +.> builtins.merge +``` + +```unison +unique type Map k v = Tip | Bin Nat k v (Map k v) (Map k v) + +mapWithKey : (k ->{e} a ->{e} b) -> Map k a ->{e} Map k b +mapWithKey f m = Tip +``` + +```ucm +.> add +``` + +```unison + +naiomi = + susan: Nat ->{} Nat -> () + susan a b = () + + pam: Map Nat Nat + pam = Tip + + mapWithKey susan pam + +``` + +```ucm +.> add +.> edit naiomi +.> undo +``` + +```ucm +.> load scratch.u +.> add +``` diff --git a/unison-src/transcripts/fix2712.output.md b/unison-src/transcripts/fix2712.output.md new file mode 100644 index 0000000000..3c07c93948 --- /dev/null +++ b/unison-src/transcripts/fix2712.output.md @@ -0,0 +1,103 @@ +```unison +unique type Map k v = Tip | Bin Nat k v (Map k v) (Map k v) + +mapWithKey : (k ->{e} a ->{e} b) -> Map k a ->{e} Map k b +mapWithKey f m = Tip +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type Map k v + mapWithKey : (k ->{e} a ->{e} b) -> Map k a ->{e} Map k b + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + unique type Map k v + mapWithKey : (k ->{e} a ->{e} b) -> Map k a ->{e} Map k b + +``` +```unison +naiomi = + susan: Nat ->{} Nat -> () + susan a b = () + + pam: Map Nat Nat + pam = Tip + + mapWithKey susan pam + +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + naiomi : Map Nat () + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + naiomi : Map Nat () + +.> edit naiomi + + ☝️ + + I added these definitions to the top of + /home/dolio/Programming/unison/bugfixing/scratch.u + + naiomi : Map Nat () + naiomi = + susan : Nat -> Nat -> () + susan a b = () + pam : Map Nat Nat + pam = Tip + mapWithKey susan pam + + You can edit them there, then do `update` to replace the + definitions currently in this namespace. + +.> undo + + Here are the changes I undid + + Added definitions: + + 1. naiomi : Map Nat () + +``` +```ucm +.> load scratch.u + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + naiomi : Map Nat () + +.> add + + ⍟ I've added these definitions: + + naiomi : Map Nat () + +``` From 8a46348ea272f495956d337f9af5872c1c17fd35 Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Fri, 11 Feb 2022 19:16:07 -0500 Subject: [PATCH 228/297] Fix the 2712 test case to work for CI --- unison-src/transcripts/fix2712.md | 13 +----- unison-src/transcripts/fix2712.output.md | 54 +----------------------- 2 files changed, 2 insertions(+), 65 deletions(-) diff --git a/unison-src/transcripts/fix2712.md b/unison-src/transcripts/fix2712.md index bd7869b079..fce7511665 100644 --- a/unison-src/transcripts/fix2712.md +++ b/unison-src/transcripts/fix2712.md @@ -16,7 +16,7 @@ mapWithKey f m = Tip ```unison naiomi = - susan: Nat ->{} Nat -> () + susan: Nat -> Nat -> () susan a b = () pam: Map Nat Nat @@ -25,14 +25,3 @@ naiomi = mapWithKey susan pam ``` - -```ucm -.> add -.> edit naiomi -.> undo -``` - -```ucm -.> load scratch.u -.> add -``` diff --git a/unison-src/transcripts/fix2712.output.md b/unison-src/transcripts/fix2712.output.md index 3c07c93948..29f9d8f7bb 100644 --- a/unison-src/transcripts/fix2712.output.md +++ b/unison-src/transcripts/fix2712.output.md @@ -28,7 +28,7 @@ mapWithKey f m = Tip ``` ```unison naiomi = - susan: Nat ->{} Nat -> () + susan: Nat -> Nat -> () susan a b = () pam: Map Nat Nat @@ -49,55 +49,3 @@ naiomi = naiomi : Map Nat () ``` -```ucm -.> add - - ⍟ I've added these definitions: - - naiomi : Map Nat () - -.> edit naiomi - - ☝️ - - I added these definitions to the top of - /home/dolio/Programming/unison/bugfixing/scratch.u - - naiomi : Map Nat () - naiomi = - susan : Nat -> Nat -> () - susan a b = () - pam : Map Nat Nat - pam = Tip - mapWithKey susan pam - - You can edit them there, then do `update` to replace the - definitions currently in this namespace. - -.> undo - - Here are the changes I undid - - Added definitions: - - 1. naiomi : Map Nat () - -``` -```ucm -.> load scratch.u - - I found and typechecked these definitions in scratch.u. If you - do an `add` or `update`, here's how your codebase would - change: - - ⍟ These new definitions are ok to `add`: - - naiomi : Map Nat () - -.> add - - ⍟ I've added these definitions: - - naiomi : Map Nat () - -``` From 8477483bcd29f1a83a82406729ae7a6f107c2beb Mon Sep 17 00:00:00 2001 From: Paul Chiusano Date: Sat, 12 Feb 2022 19:18:02 -0600 Subject: [PATCH 229/297] add round trip regression test --- unison-src/transcripts-round-trip/main.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/unison-src/transcripts-round-trip/main.md b/unison-src/transcripts-round-trip/main.md index f48df53336..81da4fa582 100644 --- a/unison-src/transcripts-round-trip/main.md +++ b/unison-src/transcripts-round-trip/main.md @@ -354,3 +354,22 @@ foo = let .> load unison-src/transcripts-round-trip/nested.u ``` +## Multiline expressions in multiliine lists + +```unison:hide +foo a b c d e f g h i j = 42 + +use Nat + +x = [ 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + , foo 12939233 2102020 329292 429292 522020 62929292 72020202 820202 920202 1020202 ] +``` + +```ucm +.> add +.> edit foo x +.> undo +``` + +```ucm +.> load scratch.u +``` \ No newline at end of file From cf38f0dc639bc140aa7d9e713f9297b83c1dee4f Mon Sep 17 00:00:00 2001 From: Paul Chiusano Date: Sat, 12 Feb 2022 22:56:15 -0600 Subject: [PATCH 230/297] fix tuple printing --- parser-typechecker/src/Unison/TermPrinter.hs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/parser-typechecker/src/Unison/TermPrinter.hs b/parser-typechecker/src/Unison/TermPrinter.hs index 6c4a2ed62b..5b7f028c54 100644 --- a/parser-typechecker/src/Unison/TermPrinter.hs +++ b/parser-typechecker/src/Unison/TermPrinter.hs @@ -334,10 +334,6 @@ pretty0 if isDocLiteral term then prettyDoc n im term else pretty0 n (a {docContext = NoDoc}) term - (App' f@(Builtin' "Any.Any") arg, _) -> - paren (p >= 10) $ goNormal 9 f `PP.hang` goNormal 10 arg - (Apps' f@(Constructor' _) args, _) -> - paren (p >= 10) $ goNormal 9 f `PP.hang` PP.spacedMap (goNormal 10) args (TupleTerm' [x], _) -> let conRef = DD.pairCtorRef @@ -350,7 +346,10 @@ pretty0 (TupleTerm' xs, _) -> let tupleLink p = fmt (S.TypeReference DD.unitRef) p in PP.group (tupleLink "(" <> commaList xs <> tupleLink ")") - + (App' f@(Builtin' "Any.Any") arg, _) -> + paren (p >= 10) $ goNormal 9 f `PP.hang` goNormal 10 arg + (Apps' f@(Constructor' _) args, _) -> + paren (p >= 10) $ goNormal 9 f `PP.hang` PP.spacedMap (goNormal 10) args (Bytes' bs, _) -> fmt S.BytesLiteral "0xs" <> (PP.shown $ Bytes.fromWord8s (map fromIntegral bs)) BinaryAppsPred' apps lastArg -> paren (p >= 3) $ From b3997ea97a508b5bd85801708689507de6e28ecb Mon Sep 17 00:00:00 2001 From: Paul Chiusano Date: Sat, 12 Feb 2022 23:20:06 -0600 Subject: [PATCH 231/297] transcript refresh --- .../transcripts-using-base/doc.output.md | 20 ++++++------ .../transcripts-using-base/fix2027.output.md | 2 +- .../transcripts/bug-strange-closure.output.md | 32 ++++++++----------- unison-src/transcripts/io.output.md | 8 ++--- .../top-level-exceptions.output.md | 2 +- 5 files changed, 30 insertions(+), 34 deletions(-) diff --git a/unison-src/transcripts-using-base/doc.output.md b/unison-src/transcripts-using-base/doc.output.md index ae8444735f..2b9a7e5596 100644 --- a/unison-src/transcripts-using-base/doc.output.md +++ b/unison-src/transcripts-using-base/doc.output.md @@ -494,16 +494,16 @@ and the rendered output using `display`: {{ docTable [ [ {{ - a - }}, - {{ - b - }}, - {{ - A longer paragraph that will split onto multiple lines, - such that this row occupies multiple lines in the rendered - table. - }} ], + a + }}, + {{ + b + }}, + {{ + A longer paragraph that will split onto multiple + lines, such that this row occupies multiple lines in + the rendered table. + }} ], [{{ Some text }}, {{ More text }}, {{ Zounds! }}] ] }} }} diff --git a/unison-src/transcripts-using-base/fix2027.output.md b/unison-src/transcripts-using-base/fix2027.output.md index 76bb3405dc..5acfd8b77e 100644 --- a/unison-src/transcripts-using-base/fix2027.output.md +++ b/unison-src/transcripts-using-base/fix2027.output.md @@ -90,7 +90,7 @@ myServer = unsafeRun! '(hello "127.0.0.1" "0") I've encountered a call to builtin.bug with the following value: - Failure (typeLink IOFailure) "problem" !Any + Failure (typeLink IOFailure) "problem" (Any ()) I'm sorry this message doesn't have more detail about the location of the failure. My makers plan to fix this in a diff --git a/unison-src/transcripts/bug-strange-closure.output.md b/unison-src/transcripts/bug-strange-closure.output.md index f31e80c898..ffeb147b87 100644 --- a/unison-src/transcripts/bug-strange-closure.output.md +++ b/unison-src/transcripts/bug-strange-closure.output.md @@ -2827,15 +2827,13 @@ rendered = Pretty.get (docFormatConsole doc.guide) () (Left (SpecialForm.Source - [ Cons - (Left - (typeLink Optional)) - (Cons [] ()), - Cons - (Right - (Term.Term - (Any (_ -> sqr)))) - (Cons [] ()) ]))))), + [ (Left + (typeLink Optional), + []), + (Right + (Term.Term + (Any (_ -> sqr))), + []) ]))))), Lit () (Right (Plain "\n")), Lit () (Right (Plain "\n")), Indent @@ -2884,15 +2882,13 @@ rendered = Pretty.get (docFormatConsole doc.guide) () (Left (FoldedSource - [ Cons - (Left - (typeLink Optional)) - (Cons [] ()), - Cons - (Right - (Term.Term - (Any (_ -> sqr)))) - (Cons [] ()) ]))))), + [ (Left + (typeLink Optional), + []), + (Right + (Term.Term + (Any (_ -> sqr))), + []) ]))))), Lit () (Right (Plain "\n")), Lit () (Right (Plain "\n")), Indent diff --git a/unison-src/transcripts/io.output.md b/unison-src/transcripts/io.output.md index 4b0f4098a5..7713bf0d61 100644 --- a/unison-src/transcripts/io.output.md +++ b/unison-src/transcripts/io.output.md @@ -507,7 +507,7 @@ Calling our examples with the wrong number of args will error. The program halted with an unhandled exception: - Failure (typeLink IOFailure) "called with args" !Any + Failure (typeLink IOFailure) "called with args" (Any ()) ``` ```ucm @@ -517,7 +517,7 @@ Calling our examples with the wrong number of args will error. The program halted with an unhandled exception: - Failure (typeLink IOFailure) "called with no args" !Any + Failure (typeLink IOFailure) "called with no args" (Any ()) ``` ```ucm @@ -528,7 +528,7 @@ Calling our examples with the wrong number of args will error. The program halted with an unhandled exception: Failure - (typeLink IOFailure) "called with too many args" !Any + (typeLink IOFailure) "called with too many args" (Any ()) ``` ```ucm @@ -538,6 +538,6 @@ Calling our examples with the wrong number of args will error. The program halted with an unhandled exception: - Failure (typeLink IOFailure) "called with no args" !Any + Failure (typeLink IOFailure) "called with no args" (Any ()) ``` diff --git a/unison-src/transcripts/top-level-exceptions.output.md b/unison-src/transcripts/top-level-exceptions.output.md index 5471c0c461..49b5014e80 100644 --- a/unison-src/transcripts/top-level-exceptions.output.md +++ b/unison-src/transcripts/top-level-exceptions.output.md @@ -90,6 +90,6 @@ unique type RuntimeError = The program halted with an unhandled exception: - Failure (typeLink RuntimeError) "oh noes!" !Any + Failure (typeLink RuntimeError) "oh noes!" (Any ()) ``` From c77288c430acedf730485120e846f434f93f7ef2 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sun, 13 Feb 2022 13:17:00 -0700 Subject: [PATCH 232/297] stop using Shellmet in Git.hs --- .../src/Unison/Codebase/Editor/Git.hs | 36 ++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs index 6ac22e2dbc..d1ab8e71ff 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs @@ -24,7 +24,6 @@ import Control.Monad.Except (MonadError, throwError) import qualified Data.ByteString.Base16 as ByteString import qualified Data.Char as Char import qualified Data.Text as Text -import Shellmet (($?), ($^), ($|)) import System.Exit (ExitCode (ExitSuccess)) import System.FilePath (()) import System.IO.Unsafe (unsafePerformIO) @@ -307,3 +306,38 @@ gitTextIn :: MonadIO m => GitRepo -> [Text] -> m Text gitTextIn localPath args = do when debugGit $ traceM (Text.unpack . Text.unwords $ ["$ git"] <> setupGitDir localPath <> args) liftIO $ "git" $| setupGitDir localPath <> args + +-- copying the Shellmet API for now; we can rename or change it up later + +{- | This operator runs shell command with given options but doesn't print the +command itself. +>>> "echo" $^ ["Foo", "Bar"] +Foo Bar +-} +infix 5 $^ +($^) :: MonadIO m => FilePath -> [Text] -> m () +cmd $^ args = UnliftIO.callProcess cmd (map Text.unpack args) + +{- | Run shell command with given options and return stripped stdout of the +executed command. +>>> "echo" $| ["Foo", "Bar"] +"Foo Bar" +-} +infix 5 $| +($|) :: MonadIO m => FilePath -> [Text] -> m Text +cmd $| args = post <$> UnliftIO.readProcess cmd (map Text.unpack args) stdin + where + stdin = "" + post = Text.strip . Text.pack + +{- | Do some IO actions when process failed with 'IOError'. +>>> "exit" ["0"] $? putStrLn "Command failed" +⚙ exit 0 +>>> "exit" ["1"] $? putStrLn "Command failed" +⚙ exit 1 +Command failed +-} +infixl 4 $? +($?) :: IO a -> IO a -> IO a +action $? handler = action `UnliftIO.catch` \(_ :: IOError) -> handler +{-# INLINE ($?) #-} From 5eaa9e447a6eac62ab5ab68e3eb385410381d0be Mon Sep 17 00:00:00 2001 From: Phil de Joux Date: Mon, 14 Feb 2022 06:25:03 -0500 Subject: [PATCH 233/297] Don't say what's been contributed. Co-authored-by: Paul Chiusano --- CONTRIBUTORS.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTORS.markdown b/CONTRIBUTORS.markdown index 42095b9611..5bb084fd2c 100644 --- a/CONTRIBUTORS.markdown +++ b/CONTRIBUTORS.markdown @@ -66,4 +66,4 @@ The format for this list: name, GitHub handle, and then optional blurb about wha * Sameer Kolhar (@kolharsam) * Nicole Prindle (@nprindle) * Harald Gliebe (@hagl) -* Phil de Joux (@philderbeast) - floating point, pretty printing and parsing +* Phil de Joux (@philderbeast) From 782a50148445f22b30f424409e9d9d2cc302e33b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20H=C3=B8jberg?= Date: Mon, 14 Feb 2022 15:11:45 -0500 Subject: [PATCH 234/297] docs.to-html headings should have an id To enable deep-linking to sections docs, construct an html id based on the heading text. --- .../src/Unison/Server/Doc/AsHtml.hs | 26 ++++++++++++------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/parser-typechecker/src/Unison/Server/Doc/AsHtml.hs b/parser-typechecker/src/Unison/Server/Doc/AsHtml.hs index 8206ce357a..ff883efe06 100644 --- a/parser-typechecker/src/Unison/Server/Doc/AsHtml.hs +++ b/parser-typechecker/src/Unison/Server/Doc/AsHtml.hs @@ -10,6 +10,7 @@ import Control.Monad.Trans.State (evalStateT) import Control.Monad.Writer.Class (MonadWriter) import qualified Control.Monad.Writer.Class as Writer import Control.Monad.Writer.Lazy (runWriterT) +import qualified Data.Char as Char import Data.Foldable import Data.Map (Map) import qualified Data.Map as Map @@ -348,8 +349,13 @@ toHtml docNamesByRef document = in ol_ [start_ $ Text.pack $ show startNum] <$> renderSequence itemToHtml (mergeWords " " items) Section title docs -> do + let sectionId = + Text.toLower $ + Text.filter (\c -> c == '-' || Char.isAlphaNum c) $ + toText "-" title + titleEl <- - h sectionLevel <$> currentSectionLevelToHtml title + h sectionLevel sectionId <$> currentSectionLevelToHtml title docs' <- renderSequence (sectionContentToHtml (toHtml_ (sectionLevel + 1))) docs @@ -454,16 +460,16 @@ toHtml docNamesByRef document = -- | Unison Doc allows endlessly deep section nesting with -- titles, but HTML only supports to h1-h6, so we clamp -- the sectionLevel when converting -h :: Nat -> (Html () -> Html ()) -h n = +h :: Nat -> Text -> (Html () -> Html ()) +h n anchorId = case n of - 1 -> h1_ - 2 -> h2_ - 3 -> h3_ - 4 -> h4_ - 5 -> h5_ - 6 -> h6_ - _ -> h6_ + 1 -> h1_ [id_ anchorId] + 2 -> h2_ [id_ anchorId] + 3 -> h3_ [id_ anchorId] + 4 -> h4_ [id_ anchorId] + 5 -> h5_ [id_ anchorId] + 6 -> h6_ [id_ anchorId] + _ -> h6_ [id_ anchorId] badge :: Html () -> Html () badge = From 094f71b7097c8943f473b52680b75cef9a7315d1 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 15 Feb 2022 10:41:14 -0600 Subject: [PATCH 235/297] Use 'v2' branch for auto-pull of base on topic/rehash-codebase (#2902) * Use 'v2' branch of base to avoid needing to migrate base on every pull. This also allows us to keep v1 and v2 versions of base around during the transition period. * Fix version parsing tests. Co-authored-by: Arya Irani <538571+aryairani@users.noreply.github.com> --- unison-cli/src/Unison/Codebase/Editor/VersionParser.hs | 8 +++++++- unison-cli/tests/Unison/Test/VersionParser.hs | 3 ++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs b/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs index 293617b2d8..23a6c48b6b 100644 --- a/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs +++ b/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs @@ -25,6 +25,12 @@ defaultBaseLib = fmap makeNS $ latest <|> release version = do Text.pack <$> some (alphaNumChar <|> ('_' <$ oneOf ['.', '_', '-'])) makeNS :: Text -> ReadRemoteNamespace - makeNS t = ( ReadGitRepo {url="https://github.com/unisonweb/base",ref=Nothing} + makeNS t = ( ReadGitRepo { url="https://github.com/unisonweb/base" + -- Use the 'v2' branch of base for now. + -- We can revert back to the main branch once enough people have upgraded ucm and + -- we're okay with pushing the v2 base codebase to main (perhaps by the next ucm + -- release). + , ref=Just "v2" + } , Nothing , Path.fromText t) diff --git a/unison-cli/tests/Unison/Test/VersionParser.hs b/unison-cli/tests/Unison/Test/VersionParser.hs index 66cfc2b4eb..9ad029b277 100644 --- a/unison-cli/tests/Unison/Test/VersionParser.hs +++ b/unison-cli/tests/Unison/Test/VersionParser.hs @@ -24,6 +24,7 @@ makeTest (version, path) = scope (unpack version) $ expectEqual (rightMay $ runParser defaultBaseLib "versionparser" version) (Just - ( ReadGitRepo "https://github.com/unisonweb/base" Nothing + -- We've hard-coded the v2 branch for base for now. See 'defaultBaseLib' + ( ReadGitRepo "https://github.com/unisonweb/base" (Just "v2") , Nothing , Path.fromText path )) From 74fd82fe0e91ed9cc11c4c766cb85a1aa21e7d12 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Tue, 15 Feb 2022 11:45:21 -0600 Subject: [PATCH 236/297] Don't sync ALL test watches. (#2904) * Only sync hashes and watch expressions which actually match the requested watch type * Don't sync all test watch expressions indiscriminately. --- .../U/Codebase/Sqlite/Sync22.hs | 31 +++++++++++-------- .../src/Unison/Codebase/SqliteCodebase.hs | 5 --- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Sync22.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Sync22.hs index 1965a97f0d..205859178c 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Sync22.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Sync22.hs @@ -361,22 +361,27 @@ trySync tCache hCache oCache cCache = \case srcParents <- runSrc $ Q.loadCausalParents chId traverse syncCausal srcParents + -- Sync any watches of the given kinds to the dest if and only if watches of those kinds + -- exist in the src. syncWatch :: WK.WatchKind -> Sqlite.Reference.IdH -> m (TrySyncResult Entity) syncWatch wk r | debug && trace ("Sync22.syncWatch " ++ show wk ++ " " ++ show r) False = undefined syncWatch wk r = do - r' <- traverse syncHashLiteral r - doneKinds <- runDest (Q.loadWatchKindsByReference r') - if (notElem wk doneKinds) then do - runSrc (Q.loadWatch wk r) >>= traverse \blob -> do - TL.SyncWatchResult li body <- - either (throwError . DecodeError ErrWatchResult blob) pure $ runGetS S.decomposeWatchFormat blob - li' <- bitraverse syncTextLiteral syncHashLiteral li - when debug $ traceM $ "LocalIds for Source watch result " ++ show r ++ ": " ++ show li - when debug $ traceM $ "LocalIds for Dest watch result " ++ show r' ++ ": " ++ show li' - let blob' = runPutS $ S.recomposeWatchFormat (TL.SyncWatchResult li' body) - runDest (Q.saveWatch wk r' blob') - pure Sync.Done - else pure Sync.PreviouslyDone + runSrc (Q.loadWatch wk r) >>= \case + Nothing -> pure Sync.Done + Just blob -> do + r' <- traverse syncHashLiteral r + doneKinds <- runDest (Q.loadWatchKindsByReference r') + if (elem wk doneKinds) + then pure Sync.PreviouslyDone + else do + TL.SyncWatchResult li body <- + either (throwError . DecodeError ErrWatchResult blob) pure $ runGetS S.decomposeWatchFormat blob + li' <- bitraverse syncTextLiteral syncHashLiteral li + when debug $ traceM $ "LocalIds for Source watch result " ++ show r ++ ": " ++ show li + when debug $ traceM $ "LocalIds for Dest watch result " ++ show r' ++ ": " ++ show li' + let blob' = runPutS $ S.recomposeWatchFormat (TL.SyncWatchResult li' body) + runDest (Q.saveWatch wk r' blob') + pure Sync.Done syncSecondaryHashes oId oId' = runSrc (Q.hashIdWithVersionForObject oId) >>= traverse_ (go oId') diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 045225edcc..7a8dbb3dc6 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -49,7 +49,6 @@ import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Sync22 as Sync22 import qualified U.Codebase.Sync as Sync -import qualified U.Codebase.WatchKind as WK import qualified U.Util.Cache as Cache import qualified U.Util.Hash as H2 import qualified U.Util.Monoid as Monoid @@ -962,10 +961,6 @@ syncInternal progress srcConn destConn b = time "syncInternal" do let progress' = Sync.transformProgress (lift . lift) progress bHash = Branch.headHash b se $ time "SyncInternal.processBranches" $ processBranches sync progress' [B bHash (pure b)] - testWatchRefs <- time "SyncInternal enumerate testWatches" $ - lift . fmap concat $ for [WK.TestWatch] \wk -> - fmap (Sync22.W wk) <$> flip runReaderT srcConn (Q.loadWatchesByWatchKind wk) - se . r $ Sync.sync sync progress' testWatchRefs let onSuccess a = runDB destConn (Q.release "sync") *> pure a onFailure e = do if debugCommitFailedTransaction From 40e88ffc61534036003c3ad0c6d7b317c6e6b44a Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Wed, 16 Feb 2022 02:20:36 -0500 Subject: [PATCH 237/297] Fix an issue with effect variable linearity tweaking The rewriting was introducing empty effect lists, but this is a problem for effect signatures in an ability, which rely on there not being any effect ascriptions except the one for the ability being defined on the arrow spine. E.G. a -> b -> c ->{A} d is okay, but: a ->{} b ->{} c ->{A} d appears like the main effect happens on the first arrow to the checker. --- .../src/Unison/Typechecker/Context.hs | 41 +++++++++++-------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/parser-typechecker/src/Unison/Typechecker/Context.hs b/parser-typechecker/src/Unison/Typechecker/Context.hs index 13b69d109d..1c887be7e9 100644 --- a/parser-typechecker/src/Unison/Typechecker/Context.hs +++ b/parser-typechecker/src/Unison/Typechecker/Context.hs @@ -1559,25 +1559,32 @@ tweakEffects v0 t0 appendContext (existential <$> vs) pure (vs, ABT.substInheritAnnotation v0 (typ vs) ty) - rewrite p ty@(Type.ForallNamed' v t) - | v0 /= v = second (Type.forall a v) <$> rewrite p t - where a = loc ty - rewrite p ty@(Type.Arrow'' i es o) = do - (vis, i) <- rewrite (not <$> p) i - (vos, o) <- rewrite p o - ess <- traverse (rewrite p) es - let es = snd <$> ess ; ves = fst =<< ess - pure (vis ++ ves ++ vos, Type.arrow a i (Type.effect a es o)) - where a = loc ty - rewrite p ty@(Type.Var' v) - | v0 == v && negative p = do + rewrite p ty + | Type.ForallNamed' v t <- ty + , v0 /= v + = second (Type.forall a v) <$> rewrite p t + | Type.Arrow' i o <- ty = do + (vis, i) <- rewrite (not <$> p) i + (vos, o) <- rewrite p o + pure (vis ++ vos, Type.arrow a i o) + | Type.Effect1' e t <- ty = do + (ves, e) <- rewrite p e + (vts, t) <- rewrite p t + pure (ves ++ vts, Type.effect1 a e t) + | Type.Effects' es <- ty = do + ess <- traverse (rewrite p) es + let es = snd <$> ess ; ves = fst =<< ess + pure (ves, Type.effects a es) + | Type.Var' v <- ty + , v0 == v && negative p = do u <- freshenTypeVar v0 pure ([u], existential' (loc ty) B.Blank u) - rewrite p ty@(Type.App' f x) = do - (vfs, f) <- rewrite p f - (vxs, x) <- rewrite Nothing x - pure (vfs ++ vxs, Type.app (loc ty) f x) - rewrite _ ty = pure ([], ty) + | Type.App' f x <- ty = do + (vfs, f) <- rewrite p f + (vxs, x) <- rewrite Nothing x + pure (vfs ++ vxs, Type.app (loc ty) f x) + | otherwise = pure ([], ty) + where a = loc ty isEffectVar :: Var v => TypeVar v loc -> Type v loc -> Bool isEffectVar u (Type.ForallNamed' v t) From 09ebb94a95eb79b4d918bf268a2f35952d1d973a Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Wed, 16 Feb 2022 03:39:21 -0500 Subject: [PATCH 238/297] Test case. --- unison-src/transcripts/abilities.md | 27 +++++++++++++++ unison-src/transcripts/abilities.output.md | 40 ++++++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 unison-src/transcripts/abilities.md create mode 100644 unison-src/transcripts/abilities.output.md diff --git a/unison-src/transcripts/abilities.md b/unison-src/transcripts/abilities.md new file mode 100644 index 0000000000..3bf6c47ec1 --- /dev/null +++ b/unison-src/transcripts/abilities.md @@ -0,0 +1,27 @@ + +```ucm:hide +.> builtins.merge +``` + +Some random ability stuff to ensure things work. + +```unison + +unique ability A where + one : Nat ->{A} Nat + two : Nat -> Nat ->{A} Nat + three : Nat -> Nat -> Nat ->{A} Nat + four : Nat ->{A} (Nat -> Nat -> Nat -> Nat) + +ha : Request {A} r -> r +ha = cases + { x } -> x + { one i -> c } -> handle c (i+1) with ha + { two i j -> c } -> handle c (i+j) with ha + { three i j k -> c } -> handle c (i+j+k) with ha + { four i -> c } -> handle c (j k l -> i+j+k+l) with ha +``` + +```ucm +.> add +``` diff --git a/unison-src/transcripts/abilities.output.md b/unison-src/transcripts/abilities.output.md new file mode 100644 index 0000000000..274edece3c --- /dev/null +++ b/unison-src/transcripts/abilities.output.md @@ -0,0 +1,40 @@ + +Some random ability stuff to ensure things work. + +```unison +unique ability A where + one : Nat ->{A} Nat + two : Nat -> Nat ->{A} Nat + three : Nat -> Nat -> Nat ->{A} Nat + four : Nat ->{A} (Nat -> Nat -> Nat -> Nat) + +ha : Request {A} r -> r +ha = cases + { x } -> x + { one i -> c } -> handle c (i+1) with ha + { two i j -> c } -> handle c (i+j) with ha + { three i j k -> c } -> handle c (i+j+k) with ha + { four i -> c } -> handle c (j k l -> i+j+k+l) with ha +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique ability A + ha : Request {A} r -> r + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + unique ability A + ha : Request {A} r -> r + +``` From 4b57560b85ec6315f248da9fcbb57617c3b58a24 Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Wed, 16 Feb 2022 14:40:04 -0500 Subject: [PATCH 239/297] Fix an issue computing sandbox info The sandboxed dependencies for combinators were being computed just from the previously known combinators. So, if multiple new, interrelated combinators were introduced simultaneously, their full transitive dependencies wouldn't take into account anything in the new set. To fix this, just iterate the dependency inference until a fixed point is reached. --- .../src/Unison/Runtime/Machine.hs | 16 +++++++++++---- unison-src/transcripts/builtins.md | 11 +++++++++- unison-src/transcripts/builtins.output.md | 20 +++++++++++++++---- 3 files changed, 38 insertions(+), 9 deletions(-) diff --git a/parser-typechecker/src/Unison/Runtime/Machine.hs b/parser-typechecker/src/Unison/Runtime/Machine.hs index 1be178bb79..b643cc04dc 100644 --- a/parser-typechecker/src/Unison/Runtime/Machine.hs +++ b/parser-typechecker/src/Unison/Runtime/Machine.hs @@ -1650,15 +1650,23 @@ expandSandbox :: Map Reference (Set Reference) -> [(Reference, SuperGroup Symbol)] -> [(Reference, Set Reference)] -expandSandbox sand = mapMaybe h +expandSandbox sand0 groups = fixed mempty where - f False r = fromMaybe mempty $ M.lookup r sand - f True _ = mempty + f sand False r = fromMaybe mempty $ M.lookup r sand + f _ True _ = mempty - h (r, groupLinks f -> s) + h sand (r, groupLinks (f sand) -> s) | S.null s = Nothing | otherwise = Just (r, s) + fixed extra + | extra == extra' = new + | otherwise = fixed extra' + where + new = mapMaybe (h $ extra <> sand0) groups + extra' = M.fromList new + + cacheAdd :: [(Reference, SuperGroup Symbol)] -> CCache diff --git a/unison-src/transcripts/builtins.md b/unison-src/transcripts/builtins.md index a1e7ceb4f4..dbfe406d77 100644 --- a/unison-src/transcripts/builtins.md +++ b/unison-src/transcripts/builtins.md @@ -272,8 +272,17 @@ test> Any.test2 = checks [(not (Any "hi" == Any 42))] ## Sandboxing functions ```unison +openFile1 t = openFile t +openFile2 t = openFile1 t + +openFiles = + [ not (validateSandboxed [] openFile) + , not (validateSandboxed [] openFile1) + , not (validateSandboxed [] openFile2) + ] + test> Sandbox.test1 = checks [validateSandboxed [] "hello"] -test> Sandbox.test2 = checks [not (validateSandboxed [] openFile)] +test> Sandbox.test2 = checks openFiles test> Sandbox.test3 = checks [validateSandboxed [termLink openFile.impl] openFile] ``` diff --git a/unison-src/transcripts/builtins.output.md b/unison-src/transcripts/builtins.output.md index 145c574f07..fe6f9076d0 100644 --- a/unison-src/transcripts/builtins.output.md +++ b/unison-src/transcripts/builtins.output.md @@ -268,8 +268,17 @@ test> Any.test2 = checks [(not (Any "hi" == Any 42))] ## Sandboxing functions ```unison +openFile1 t = openFile t +openFile2 t = openFile1 t + +openFiles = + [ not (validateSandboxed [] openFile) + , not (validateSandboxed [] openFile1) + , not (validateSandboxed [] openFile2) + ] + test> Sandbox.test1 = checks [validateSandboxed [] "hello"] -test> Sandbox.test2 = checks [not (validateSandboxed [] openFile)] +test> Sandbox.test2 = checks openFiles test> Sandbox.test3 = checks [validateSandboxed [termLink openFile.impl] openFile] ``` @@ -285,19 +294,22 @@ openFile] Sandbox.test1 : [Result] Sandbox.test2 : [Result] Sandbox.test3 : [Result] + openFile1 : Text -> FileMode ->{IO, Exception} Handle + openFile2 : Text -> FileMode ->{IO, Exception} Handle + openFiles : [Boolean] Now evaluating any watch expressions (lines starting with `>`)... Ctrl+C cancels. - 1 | test> Sandbox.test1 = checks [validateSandboxed [] "hello"] + 10 | test> Sandbox.test1 = checks [validateSandboxed [] "hello"] ✅ Passed Passed - 2 | test> Sandbox.test2 = checks [not (validateSandboxed [] openFile)] + 11 | test> Sandbox.test2 = checks openFiles ✅ Passed Passed - 3 | test> Sandbox.test3 = checks [validateSandboxed [termLink openFile.impl] + 12 | test> Sandbox.test3 = checks [validateSandboxed [termLink openFile.impl] ✅ Passed Passed From 8334fbd970043adede5221560c3e8501d198dc31 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 16 Feb 2022 13:52:18 -0600 Subject: [PATCH 240/297] Allow pushing to v2 codebases --- .../src/Unison/Codebase/Init.hs | 8 --- .../Codebase/Init/CreateCodebaseError.hs | 1 - .../Unison/Codebase/Init/OpenCodebaseError.hs | 6 +- .../src/Unison/Codebase/SqliteCodebase.hs | 60 ++++++++++--------- .../src/Unison/Codebase/Type.hs | 12 +++- unison-cli/tests/Unison/Test/Ucm.hs | 1 - unison-cli/unison/Main.hs | 3 - 7 files changed, 45 insertions(+), 46 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Init.hs b/parser-typechecker/src/Unison/Codebase/Init.hs index d48278a1b8..9fd5b122fb 100644 --- a/parser-typechecker/src/Unison/Codebase/Init.hs +++ b/parser-typechecker/src/Unison/Codebase/Init.hs @@ -112,7 +112,6 @@ withOpenOrCreateCodebase cbInit debugName initOptions action = do CreateWhenMissing dir -> createCodebaseWithResult cbInit debugName dir (\codebase -> action (CreatedCodebase, dir, codebase)) Left err@OpenCodebaseUnknownSchemaVersion{} -> pure (Left (resolvedPath, InitErrorOpen err)) - Left err@OpenCodebaseOther{} -> pure (Left (resolvedPath, InitErrorOpen err)) createCodebase :: MonadIO m => Init m v a -> DebugName -> CodebasePath -> (Codebase m v a -> m r) -> m (Either Pretty r) createCodebase cbInit debugName path action = do @@ -122,13 +121,6 @@ createCodebase cbInit debugName path action = do P.wrap $ "It looks like there's already a codebase in: " <> prettyDir - CreateCodebaseOther message -> - P.wrap ("I ran into an error when creating the codebase in: " <> prettyDir) - <> P.newline - <> P.newline - <> "The error was:" - <> P.newline - <> P.indentN 2 message -- * compatibility stuff diff --git a/parser-typechecker/src/Unison/Codebase/Init/CreateCodebaseError.hs b/parser-typechecker/src/Unison/Codebase/Init/CreateCodebaseError.hs index 560005610c..a2b4adf132 100644 --- a/parser-typechecker/src/Unison/Codebase/Init/CreateCodebaseError.hs +++ b/parser-typechecker/src/Unison/Codebase/Init/CreateCodebaseError.hs @@ -10,4 +10,3 @@ type Pretty = P.Pretty P.ColorText data CreateCodebaseError = CreateCodebaseAlreadyExists - | CreateCodebaseOther Pretty diff --git a/parser-typechecker/src/Unison/Codebase/Init/OpenCodebaseError.hs b/parser-typechecker/src/Unison/Codebase/Init/OpenCodebaseError.hs index a2eee580e6..0065a2425f 100644 --- a/parser-typechecker/src/Unison/Codebase/Init/OpenCodebaseError.hs +++ b/parser-typechecker/src/Unison/Codebase/Init/OpenCodebaseError.hs @@ -5,13 +5,11 @@ module Unison.Codebase.Init.OpenCodebaseError where import Unison.Prelude -import Unison.Util.Pretty (ColorText, Pretty) -- | An error that can occur when attempting to open a codebase. data OpenCodebaseError = -- | The codebase doesn't exist. OpenCodebaseDoesntExist - -- | The codebase exists, but its schema version is unknown to this application. - | OpenCodebaseUnknownSchemaVersion Word64 - | OpenCodebaseOther (Pretty ColorText) + | -- | The codebase exists, but its schema version is unknown to this application. + OpenCodebaseUnknownSchemaVersion Word64 deriving stock (Show) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 7a8dbb3dc6..8c8e225641 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -126,15 +126,31 @@ makeCodebaseDirPath root = root ".unison" "v2" init :: HasCallStack => (MonadUnliftIO m) => Codebase.Init m Symbol Ann init = Codebase.Init { withOpenCodebase=withCodebaseOrError - , withCreatedCodebase=createCodebaseOrError + , withCreatedCodebase=withCreatedCodebase' , codebasePath=makeCodebaseDirPath } + where + withCreatedCodebase' debugName path action = + createCodebaseOrError debugName path (action . fst) + +withOpenOrCreateCodebase :: + MonadUnliftIO m => + Codebase.DebugName -> + CodebasePath -> + LocalOrRemote -> + ((Codebase m Symbol Ann, Connection) -> m r) -> + m (Either Codebase1.OpenCodebaseError r) +withOpenOrCreateCodebase debugName codebasePath localOrRemote action = do + createCodebaseOrError debugName codebasePath action >>= \case + Left (Codebase1.CreateCodebaseAlreadyExists) -> do + sqliteCodebase debugName codebasePath localOrRemote action + Right r -> pure (Right r) createCodebaseOrError :: (MonadUnliftIO m) => Codebase.DebugName -> CodebasePath -> - (Codebase m Symbol Ann -> m r) -> + ((Codebase m Symbol Ann, Connection) -> m r) -> m (Either Codebase1.CreateCodebaseError r) createCodebaseOrError debugName path action = do ifM @@ -153,18 +169,6 @@ createCodebaseOrError debugName path action = do Left schemaVersion -> error ("Failed to open codebase with schema version: " ++ show schemaVersion ++ ", which is unexpected because I just created this codebase.") Right result -> pure (Right result) -withOpenOrCreateCodebaseConnection :: - (MonadUnliftIO m) => - Codebase.DebugName -> - FilePath -> - (Connection -> m r) -> - m r -withOpenOrCreateCodebaseConnection debugName path action = do - unlessM - (doesFileExist $ makeCodebasePath path) - (initSchemaIfNotExist path) - withConnection debugName path action - -- | Use the codebase in the provided path. -- The codebase is automatically closed when the action completes or throws an exception. withCodebaseOrError :: @@ -178,7 +182,7 @@ withCodebaseOrError debugName dir action = do doesFileExist (makeCodebasePath dir) >>= \case False -> pure (Left Codebase1.OpenCodebaseDoesntExist) True -> - sqliteCodebase debugName dir Local action <&> mapLeft \(SchemaVersion n) -> Codebase1.OpenCodebaseUnknownSchemaVersion n + sqliteCodebase debugName dir Local (action . fst) initSchemaIfNotExist :: MonadIO m => FilePath -> m () initSchemaIfNotExist path = liftIO do @@ -285,8 +289,8 @@ sqliteCodebase :: CodebasePath -> -- | When local, back up the existing codebase before migrating, in case there's a catastrophic bug in the migration. LocalOrRemote -> - (Codebase m Symbol Ann -> m r) -> - m (Either SchemaVersion r) + ((Codebase m Symbol Ann, Connection) -> m r) -> + m (Either Codebase1.OpenCodebaseError r) sqliteCodebase debugName root localOrRemote action = do Monad.when debug $ traceM $ "sqliteCodebase " ++ debugName ++ " " ++ root withConnection debugName root $ \conn -> do @@ -835,7 +839,7 @@ sqliteCodebase debugName root localOrRemote action = do -- Migrate if necessary. (`finally` finalizer) $ runReaderT Q.schemaVersion conn >>= \case - SchemaVersion 2 -> Right <$> action codebase + SchemaVersion 2 -> Right <$> action (codebase, conn) SchemaVersion 1 -> do liftIO $ putStrLn ("Migrating from schema version 1 -> 2.") case localOrRemote of @@ -851,8 +855,8 @@ sqliteCodebase debugName root localOrRemote action = do Remote -> pure () migrateSchema12 conn codebase -- it's ok to pass codebase along; whatever it cached during the migration won't break anything - Right <$> action codebase - v -> pure $ Left v + Right <$> action (codebase, conn) + v -> pure . Left $ Codebase1.OpenCodebaseUnknownSchemaVersion (fromIntegral v) -- well one or the other. :zany_face: the thinking being that they wouldn't hash-collide termExists', declExists' :: MonadIO m => Hash -> ReaderT Connection (ExceptT Ops.Error m) Bool @@ -1100,7 +1104,7 @@ viewRemoteBranch' (repo, sbh, path) gitBranchBehavior action = UnliftIO.try $ do -- Unexpected error from sqlite _ -> throwIO sqlError - result <- sqliteCodebase "viewRemoteBranch.gitCache" remotePath Remote \codebase -> do + result <- sqliteCodebase "viewRemoteBranch.gitCache" remotePath Remote \(codebase, _conn) -> do -- try to load the requested branch from it branch <- time "Git fetch (sbh)" $ case sbh of -- no sub-branch was specified, so use the root. @@ -1127,7 +1131,7 @@ viewRemoteBranch' (repo, sbh, path) gitBranchBehavior action = UnliftIO.try $ do Just b -> action (b, remotePath) Nothing -> throwIO . C.GitCodebaseError $ GitError.CouldntFindRemoteBranch repo path case result of - Left schemaVersion -> throwIO . C.GitSqliteCodebaseError $ GitError.UnrecognizedSchemaVersion repo remotePath schemaVersion + Left err -> throwIO . C.GitSqliteCodebaseError $ C.gitErrorFromOpenCodebaseError remotePath repo err Right inner -> pure inner -- Push a branch to a repo. Optionally attempt to set the branch as the new root, which fails if the branch is not after @@ -1140,7 +1144,7 @@ pushGitBranch :: WriteRepo -> PushGitBranchOpts -> m (Either C.GitError ()) -pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = mapLeft C.GitProtocolError <$> do +pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = UnliftIO.try do -- Pull the latest remote into our git cache -- Use a local git clone to copy this git repo into a temp-dir -- Delete the codebase in our temp-dir @@ -1155,10 +1159,12 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = mapLef -- Delete the temp-dir. -- -- set up the cache dir - withRepo readRepo Git.CreateBranchIfMissing $ \pushStaging -> do - withOpenOrCreateCodebaseConnection @m "push.dest" (Git.gitDirToPath pushStaging) $ \destConn -> do - flip runReaderT destConn $ Q.withSavepoint_ @(ReaderT _ m) "push" $ do - throwExceptT $ doSync (Git.gitDirToPath pushStaging) srcConn destConn + throwEitherMWith C.GitProtocolError . withRepo readRepo Git.CreateBranchIfMissing $ \pushStaging -> do + throwEitherMWith (C.GitSqliteCodebaseError . C.gitErrorFromOpenCodebaseError (Git.gitDirToPath pushStaging) readRepo) + + . withOpenOrCreateCodebase "push.dest" (Git.gitDirToPath pushStaging) Remote $ \(_destCodebase, destConn) -> do + flip runReaderT destConn $ Q.withSavepoint_ @(ReaderT _ m) "push" $ do + throwExceptT $ doSync (Git.gitDirToPath pushStaging) srcConn destConn void $ push pushStaging repo where readRepo :: ReadRepo diff --git a/parser-typechecker/src/Unison/Codebase/Type.hs b/parser-typechecker/src/Unison/Codebase/Type.hs index 42fae8474f..e60051800c 100644 --- a/parser-typechecker/src/Unison/Codebase/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Type.hs @@ -9,17 +9,18 @@ module Unison.Codebase.Type GitError (..), GetRootBranchError (..), SyncToDir, + gitErrorFromOpenCodebaseError, ) where import Unison.Codebase.Branch (Branch) import qualified Unison.Codebase.Branch as Branch -import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo, ReadRepo) import Unison.Codebase.GitError (GitCodebaseError, GitProtocolError) import Unison.Codebase.Patch (Patch) import qualified Unison.Codebase.Reflog as Reflog import Unison.Codebase.ShortBranchHash (ShortBranchHash) -import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError) +import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError (..)) import Unison.Codebase.SyncMode (SyncMode) import Unison.CodebasePath (CodebasePath) import Unison.DataDeclaration (Decl) @@ -33,6 +34,7 @@ import Unison.Term (Term) import Unison.Type (Type) import qualified Unison.WatchKind as WK import qualified Unison.Codebase.Editor.Git as Git +import Unison.Codebase.Init.OpenCodebaseError (OpenCodebaseError(..)) type SyncToDir m = CodebasePath -> -- dest codebase @@ -177,3 +179,9 @@ data GitError deriving (Show) instance Exception GitError + +gitErrorFromOpenCodebaseError :: CodebasePath -> ReadRepo -> OpenCodebaseError -> GitSqliteCodebaseError +gitErrorFromOpenCodebaseError path repo = \case + OpenCodebaseDoesntExist -> NoDatabaseFile repo path + OpenCodebaseUnknownSchemaVersion v -> + UnrecognizedSchemaVersion repo path (fromIntegral v) diff --git a/unison-cli/tests/Unison/Test/Ucm.hs b/unison-cli/tests/Unison/Test/Ucm.hs index 57c7b9ee0f..4150c2ff68 100644 --- a/unison-cli/tests/Unison/Test/Ucm.hs +++ b/unison-cli/tests/Unison/Test/Ucm.hs @@ -57,7 +57,6 @@ initCodebase fmt = do result <- Codebase.Init.withCreatedCodebase cbInit "ucm-test" tmp (const $ pure ()) case result of Left CreateCodebaseAlreadyExists -> fail $ P.toANSI 80 "Codebase already exists" - Left (CreateCodebaseOther p) -> fail $ P.toANSI 80 p Right _ -> pure $ Codebase tmp fmt deleteCodebase :: Codebase -> IO () diff --git a/unison-cli/unison/Main.hs b/unison-cli/unison/Main.hs index ad2d08f896..8a6cf00920 100644 --- a/unison-cli/unison/Main.hs +++ b/unison-cli/unison/Main.hs @@ -386,9 +386,6 @@ getCodebaseOrExit codebasePathOption action = do , "Please upgrade your version of UCM." ]) - InitErrorOpen (OpenCodebaseOther errMessage) -> - pure errMessage - FoundV1Codebase -> pure (P.lines [ "Found a v1 codebase at " <> pDir <> ".", From c47594800ce8a437077f76a26da225a3eb661a2c Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 16 Feb 2022 14:33:29 -0600 Subject: [PATCH 241/297] Docs --- parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 8c8e225641..3cfdcbec2e 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -133,6 +133,7 @@ init = Codebase.Init withCreatedCodebase' debugName path action = createCodebaseOrError debugName path (action . fst) +-- | Open the codebase at the given location, or create it if one doesn't already exist. withOpenOrCreateCodebase :: MonadUnliftIO m => Codebase.DebugName -> @@ -146,6 +147,7 @@ withOpenOrCreateCodebase debugName codebasePath localOrRemote action = do sqliteCodebase debugName codebasePath localOrRemote action Right r -> pure (Right r) +-- | Create a codebase at the given location. createCodebaseOrError :: (MonadUnliftIO m) => Codebase.DebugName -> @@ -1161,7 +1163,6 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = Unlift -- set up the cache dir throwEitherMWith C.GitProtocolError . withRepo readRepo Git.CreateBranchIfMissing $ \pushStaging -> do throwEitherMWith (C.GitSqliteCodebaseError . C.gitErrorFromOpenCodebaseError (Git.gitDirToPath pushStaging) readRepo) - . withOpenOrCreateCodebase "push.dest" (Git.gitDirToPath pushStaging) Remote $ \(_destCodebase, destConn) -> do flip runReaderT destConn $ Q.withSavepoint_ @(ReaderT _ m) "push" $ do throwExceptT $ doSync (Git.gitDirToPath pushStaging) srcConn destConn From 50f7d045b5e3832c2ca45b7925dd5da94daef8b3 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 16 Feb 2022 16:24:23 -0600 Subject: [PATCH 242/297] Fix case where we create a new codebase --- .../src/Unison/Codebase/SqliteCodebase.hs | 66 +++++++++++-------- 1 file changed, 38 insertions(+), 28 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 3cfdcbec2e..b734a4cc4a 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -133,19 +133,26 @@ init = Codebase.Init withCreatedCodebase' debugName path action = createCodebaseOrError debugName path (action . fst) +data CodebaseStatus = + ExistingCodebase + | CreatedCodebase + deriving (Eq) + -- | Open the codebase at the given location, or create it if one doesn't already exist. withOpenOrCreateCodebase :: MonadUnliftIO m => Codebase.DebugName -> CodebasePath -> LocalOrRemote -> - ((Codebase m Symbol Ann, Connection) -> m r) -> + ((CodebaseStatus, Codebase m Symbol Ann, Connection) -> m r) -> m (Either Codebase1.OpenCodebaseError r) withOpenOrCreateCodebase debugName codebasePath localOrRemote action = do - createCodebaseOrError debugName codebasePath action >>= \case + createCodebaseOrError debugName codebasePath (action' CreatedCodebase) >>= \case Left (Codebase1.CreateCodebaseAlreadyExists) -> do - sqliteCodebase debugName codebasePath localOrRemote action + sqliteCodebase debugName codebasePath localOrRemote (action' ExistingCodebase) Right r -> pure (Right r) + where + action' openOrCreate (codebase, conn) = action (openOrCreate, codebase, conn) -- | Create a codebase at the given location. createCodebaseOrError :: @@ -1163,40 +1170,43 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = Unlift -- set up the cache dir throwEitherMWith C.GitProtocolError . withRepo readRepo Git.CreateBranchIfMissing $ \pushStaging -> do throwEitherMWith (C.GitSqliteCodebaseError . C.gitErrorFromOpenCodebaseError (Git.gitDirToPath pushStaging) readRepo) - . withOpenOrCreateCodebase "push.dest" (Git.gitDirToPath pushStaging) Remote $ \(_destCodebase, destConn) -> do + . withOpenOrCreateCodebase "push.dest" (Git.gitDirToPath pushStaging) Remote $ \(codebaseStatus, _destCodebase, destConn) -> do flip runReaderT destConn $ Q.withSavepoint_ @(ReaderT _ m) "push" $ do - throwExceptT $ doSync (Git.gitDirToPath pushStaging) srcConn destConn + throwExceptT $ doSync codebaseStatus (Git.gitDirToPath pushStaging) srcConn destConn void $ push pushStaging repo where readRepo :: ReadRepo readRepo = writeToRead repo - doSync :: FilePath -> Connection -> Connection -> ExceptT C.GitError (ReaderT Connection m) () - doSync remotePath srcConn destConn = do + doSync :: CodebaseStatus -> FilePath -> Connection -> Connection -> ExceptT C.GitError (ReaderT Connection m) () + doSync codebaseStatus remotePath srcConn destConn = do _ <- flip State.execStateT emptySyncProgressState $ syncInternal syncProgress srcConn destConn (Branch.transform (lift . lift . lift) branch) - when setRoot $ overwriteRoot remotePath destConn - overwriteRoot :: forall m. MonadIO m => FilePath -> Connection -> ExceptT C.GitError m () - overwriteRoot remotePath destConn = do + when setRoot $ overwriteRoot codebaseStatus remotePath destConn + overwriteRoot :: forall m. MonadIO m => CodebaseStatus -> FilePath -> Connection -> ExceptT C.GitError m () + overwriteRoot codebaseStatus remotePath destConn = do let newRootHash = Branch.headHash branch - -- the call to runDB "handles" the possible DB error by bombing - maybeOldRootHash <- fmap Cv.branchHash2to1 <$> runDB destConn Ops.loadMaybeRootCausalHash - case maybeOldRootHash of - Nothing -> runDB destConn $ do - setRepoRoot newRootHash - (Just oldRootHash) -> runDB destConn $ do - before oldRootHash newRootHash >>= \case - Nothing -> - error $ - "I couldn't find the hash " ++ show newRootHash - ++ " that I just synced to the cached copy of " - ++ repoString - ++ " in " - ++ show remotePath - ++ "." - Just False -> do - lift . lift . throwError . C.GitProtocolError $ GitError.PushDestinationHasNewStuff repo - Just True -> do + case codebaseStatus of + ExistingCodebase -> do + -- the call to runDB "handles" the possible DB error by bombing + maybeOldRootHash <- fmap Cv.branchHash2to1 <$> runDB destConn Ops.loadMaybeRootCausalHash + case maybeOldRootHash of + Nothing -> runDB destConn $ do setRepoRoot newRootHash + (Just oldRootHash) -> runDB destConn $ do + before oldRootHash newRootHash >>= \case + Nothing -> + error $ + "I couldn't find the hash " ++ show newRootHash + ++ " that I just synced to the cached copy of " + ++ repoString + ++ " in " + ++ show remotePath + ++ "." + Just False -> do + lift . lift . throwError . C.GitProtocolError $ GitError.PushDestinationHasNewStuff repo + Just True -> pure () + CreatedCodebase -> pure () + runDB destConn $ setRepoRoot newRootHash repoString = Text.unpack $ printWriteRepo repo setRepoRoot :: forall m. Q.DB m => Branch.Hash -> m () From 103cb56a5ef9c0d7797487578213d032e01c5248 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 17 Feb 2022 00:21:56 -0700 Subject: [PATCH 243/297] Revert "stop using Shellmet in Git.hs" This reverts commit c77288c430acedf730485120e846f434f93f7ef2. --- .../src/Unison/Codebase/Editor/Git.hs | 36 +------------------ 1 file changed, 1 insertion(+), 35 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs index 0a02a8dc6f..e6398a93fc 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs @@ -24,6 +24,7 @@ import Control.Monad.Except (MonadError, throwError) import qualified Data.ByteString.Base16 as ByteString import qualified Data.Char as Char import qualified Data.Text as Text +import Shellmet (($?), ($^), ($|)) import System.Exit (ExitCode (ExitSuccess)) import System.FilePath (()) import System.IO.Unsafe (unsafePerformIO) @@ -300,38 +301,3 @@ gitTextIn :: MonadIO m => GitRepo -> [Text] -> m Text gitTextIn localPath args = do when debugGit $ traceM (Text.unpack . Text.unwords $ ["$ git"] <> setupGitDir localPath <> args) liftIO $ "git" $| setupGitDir localPath <> args - --- copying the Shellmet API for now; we can rename or change it up later - -{- | This operator runs shell command with given options but doesn't print the -command itself. ->>> "echo" $^ ["Foo", "Bar"] -Foo Bar --} -infix 5 $^ -($^) :: MonadIO m => FilePath -> [Text] -> m () -cmd $^ args = UnliftIO.callProcess cmd (map Text.unpack args) - -{- | Run shell command with given options and return stripped stdout of the -executed command. ->>> "echo" $| ["Foo", "Bar"] -"Foo Bar" --} -infix 5 $| -($|) :: MonadIO m => FilePath -> [Text] -> m Text -cmd $| args = post <$> UnliftIO.readProcess cmd (map Text.unpack args) stdin - where - stdin = "" - post = Text.strip . Text.pack - -{- | Do some IO actions when process failed with 'IOError'. ->>> "exit" ["0"] $? putStrLn "Command failed" -⚙ exit 0 ->>> "exit" ["1"] $? putStrLn "Command failed" -⚙ exit 1 -Command failed --} -infixl 4 $? -($?) :: IO a -> IO a -> IO a -action $? handler = action `UnliftIO.catch` \(_ :: IOError) -> handler -{-# INLINE ($?) #-} From b82aca00bcf821bbcb1b7a3ce69b10fa5925f45e Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 17 Feb 2022 00:30:06 -0700 Subject: [PATCH 244/297] fork and patch Shellmet --- stack.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/stack.yaml b/stack.yaml index d2dbcf83ff..9214388c91 100644 --- a/stack.yaml +++ b/stack.yaml @@ -35,6 +35,8 @@ extra-deps: commit: 2944b11d19ee034c48276edc991736105c9d6143 - github: unisonweb/megaparsec commit: c4463124c578e8d1074c04518779b5ce5957af6b +- github: unisonweb/shellmet + commit: 2fd348592c8f51bb4c0ca6ba4bc8e38668913746 - guid-0.1.0@sha256:a7c975be473f6f142d5cc1b39bc807a99043d20b1bb0873fdfe7a3ce84d2faf1,1078 - prelude-extras-0.4.0.3@sha256:1c10b0123ea13a6423d74a8fcbaeb2d5249b472588abde418a36b47b7c4f48c8,1163 - sandi-0.5@sha256:b278d072ca717706ea38f9bd646e023f7f2576a778fb43565b434f93638849aa,3010 From c61cdd615f70571e8a0e59f52fbbf616e69e2bc4 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 17 Feb 2022 09:43:23 -0700 Subject: [PATCH 245/297] add integration-tests to hie.yaml --- hie.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/hie.yaml b/hie.yaml index 5fbf4e0894..3716118d52 100644 --- a/hie.yaml +++ b/hie.yaml @@ -51,6 +51,9 @@ cradle: - path: "unison-cli/src" component: "unison-cli:lib" + - path: "unison-cli/integration-tests" + component: "unison-cli:exe:integration-tests" + - path: "unison-cli/tests" component: "unison-cli:test:tests" From 953e372fa801f382cdf745b1e17968a835fef6f2 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 17 Feb 2022 09:36:58 -0700 Subject: [PATCH 246/297] delete broken runtime benchmark per discussion with @dolio --- hie.yaml | 3 - parser-typechecker/benchmarks/runtime/Main.hs | 287 ------------------ parser-typechecker/package.yaml | 11 - .../unison-parser-typechecker.cabal | 39 --- 4 files changed, 340 deletions(-) delete mode 100644 parser-typechecker/benchmarks/runtime/Main.hs diff --git a/hie.yaml b/hie.yaml index 5fbf4e0894..6a61667d76 100644 --- a/hie.yaml +++ b/hie.yaml @@ -60,9 +60,6 @@ cradle: - path: "unison-cli/unison" component: "unison-cli:exe:unison" - - path: "parser-typechecker/benchmarks/runtime" - component: "unison-parser-typechecker:bench:runtime" - - path: "unison-core/src" component: "unison-core1:lib" diff --git a/parser-typechecker/benchmarks/runtime/Main.hs b/parser-typechecker/benchmarks/runtime/Main.hs deleted file mode 100644 index 41ba89c8cd..0000000000 --- a/parser-typechecker/benchmarks/runtime/Main.hs +++ /dev/null @@ -1,287 +0,0 @@ -{- ORMOLU_DISABLE -} -- Remove this when the file is ready to be auto-formatted -{-# language PatternSynonyms #-} - -module Main(main) where - -import Criterion.Main - -import Data.Word - -import Unison.Runtime.MCode -import Unison.Runtime.Machine - -import Unison.Util.EnumContainers - -infixr 0 $$ -($$) :: Instr -> Section -> Section -($$) = Ins - -loop :: Section -loop = Match 0 $ Test1 0 (Yield $ UArg1 1) rec - where - rec = Prim2 ADDI 0 1 - $$ Prim1 DECI 1 - $$ App False (Env 0) (UArg2 0 1) - --- Boxed version of loop to see how fast we are without --- worker/wrapper. -sloop :: Section -sloop = Unpack 1 $$ Unpack 0 $$ body - where - body = Match 1 $ Test1 - 0 (Pack 0 (UArg1 3) $$ Yield (BArg1 0)) - {-else-} rec - rec = Prim2 ADDI 1 3 - $$ Prim1 DECI 2 - $$ Pack 0 (UArg1 1) - $$ Pack 0 (UArg1 0) - $$ App False (Env 1) (BArg2 0 1) - --- loop with fast path optimization -oloop :: Section -oloop = Match 0 $ Test1 0 (Yield $ UArg1 1) rec - where - rec = Prim2 ADDI 0 1 - $$ Prim1 DECI 1 - $$ Call False 7 (UArg2 0 1) - --- sloop with fast path optimization -soloop :: Section -soloop = Unpack 1 $$ Unpack 0 $$ body - where - body = Match 1 $ Test1 - 0 (Pack 0 (UArg1 3) $$ Yield (BArg1 0)) - {-else-} rec - rec = Prim2 ADDI 1 3 - $$ Prim1 DECI 2 - $$ Pack 0 (UArg1 1) - $$ Pack 0 (UArg1 0) - $$ Call False 8 (BArg2 0 1) - -konst :: Section -konst = Yield (BArg1 0) - -add :: Section -add = Unpack 1 - $$ Unpack 0 - $$ Prim2 ADDI 1 3 - $$ Pack 0 (UArg1 0) - $$ Yield (BArg1 0) - --- get = shift $ \k s -> k s s --- put s = shift $ \k _ -> k () s --- loop :: Int -> Int -> Int --- loop n s0 = reset (body n) s0 --- where --- body m | m == 0 = x = get ; f _ = x ; f --- | otherwise = x = get ; put (x+m) ; body (m-1) - --- k s => (k s) s -- k continuation -diag :: Section -diag = Let (Reset (setSingleton 0) $$ Jump 0 (BArg1 1)) - $ App False (Stk 0) (BArg1 2) - --- => shift k. diag k -get :: Section -get = Capture 0 - $$ App False (Env 12) (BArg1 0) - --- k s _ => (k) s -kid :: Section -kid = Let (Reset (setSingleton 0) $$ Jump 0 ZArgs) - $ App False (Stk 0) (BArg1 2) - --- s => shift k. kid k s -put :: Section -put = Capture 0 - $$ App False (Env 15) (BArg2 0 1) - --- m => ... -kloopb :: Section -kloopb = - Match 0 $ Test1 - 0 (Let (App False (Env 13) ZArgs) $ App False (Env 10) (BArg1 0)) - {-else-} $ rec - where - rec = Let (App False (Env 13) ZArgs) -- get - $ Pack 0 (UArg1 0) - $$ Let (App False (Env 11) (BArg2 0 1)) -- add - $ Let (App False (Env 14) (BArg1 0)) -- put - $ Prim1 DECI 0 - $$ App False (Env 5) (UArg1 0) - --- m a => f = reset (kloopb m) ; y = f (I# a) ; print y -kloop :: Section -kloop = Let (Reset (setSingleton 0) $$ App False (Env 5) (UArg1 0)) - $ Pack 0 (UArg1 1) - $$ App False (Stk 1) (BArg1 0) - --- s0 0 => s0 --- s0 1 s => tinst s setDyn 0 (teff s) -teff :: Section -teff - = Match 0 $ Test1 - 0 (Yield $ BArg1 0) - $ {-else-} Call True 21 ZArgs - --- s => setDyn 0 (teff s) -tinst :: Section -tinst - = Name 20 (BArg1 0) - $$ SetDyn 0 0 - $$ Yield ZArgs - --- m => ... -tloopb :: Section -tloopb = - Match 0 $ Test1 - 0 (Lit 0 $$ App True (Dyn 0) (UArg1 0)) -- get - {-else-} rec - where - rec = Let (Lit 0 $$ App False (Dyn 0) (UArg1 0)) -- get - $ Pack 0 (UArg1 0) -- I# m - $$ Let (App False (Env 11) (BArg2 0 1)) -- add - $ Let (Lit 1 $$ App False (Dyn 0) (UArg1 0)) -- put - $ Prim1 DECI 0 - $$ Call False 25 (UArg1 0) - --- m s => reset (tinst (I# s) ; tloopb m) -tloop :: Section -tloop = Reset (setSingleton 0) - $$ Pack 0 (UArg1 1) - $$ Let (Call True 21 $ BArg1 0) - $ Call True 25 $ UArg1 0 - -fib :: Section -fib = Match 0 $ Test2 - 0 (Lit 0 $$ Yield $ UArg1 0) - 1 (Lit 1 $$ Yield $ UArg1 0) - {-else-} rec - where - rec = Prim1 DECI 0 - $$ Prim1 DECI 0 - $$ Let (App False (Env 2) (UArg1 1)) - $ Let (App False (Env 2) (UArg1 1)) - $ Prim2 ADDI 0 1 $$ Yield (UArg1 0) - -ofib :: Section -ofib = Match 0 $ Test2 - 0 (Lit 0 $$ Yield $ UArg1 0) - 1 (Lit 1 $$ Yield $ UArg1 0) - {-else-} rec - where - rec = Prim1 DECI 0 - $$ Prim1 DECI 0 - $$ Let (Call True 9 (UArg1 1)) - $ Let (Call True 9 (UArg1 1)) - $ Prim2 ADDI 0 1 $$ Yield (UArg1 0) - -stackEater :: Section -stackEater - = Match 0 $ Test1 - 0 (Yield ZArgs) - $ Prim1 DECI 0 - $$ Let (App False (Env 4) (UArg1 0)) - $ Yield ZArgs - -testEnv :: Word64 -> Comb -testEnv 0 = Lam 2 0 4 0 loop -testEnv 1 = Lam 0 2 6 4 sloop -testEnv 2 = Lam 1 0 6 0 fib -testEnv 4 = Lam 1 0 1 0 stackEater -testEnv 5 = Lam 1 0 2 3 kloopb -testEnv 6 = Lam 2 0 2 2 kloop -testEnv 7 = Lam 2 0 4 0 oloop -testEnv 8 = Lam 0 2 6 4 soloop -testEnv 9 = Lam 1 0 6 0 ofib -testEnv 10 = Lam 0 2 0 2 konst -testEnv 11 = Lam 0 2 5 3 add -testEnv 12 = Lam 0 2 0 2 diag -testEnv 13 = Lam 0 0 0 1 get -testEnv 14 = Lam 0 1 0 2 put -testEnv 15 = Lam 0 3 0 3 kid -testEnv 20 = Lam 1 1 1 2 teff -testEnv 21 = Lam 0 1 0 2 tinst -testEnv 25 = Lam 1 0 4 3 tloopb -testEnv 26 = Lam 1 0 4 3 tloop -testEnv _ = error "testEnv" - -setupu1 :: Word64 -> Int -> Section -setupu1 f n = Lit n $$ App False (Env f) (UArg1 0) - -setupu2 :: Word64 -> Int -> Int -> Section -setupu2 f m n = Lit m $$ Lit n $$ App False (Env f) (UArg2 0 1) - -setupb2 :: Word64 -> Int -> Int -> Section -setupb2 f m n - = Lit m $$ Pack 0 (UArg1 0) - $$ Lit n $$ Pack 0 (UArg1 0) - $$ App False (Env f) (BArgR 0 2) - -benchEv :: String -> Section -> Benchmark -benchEv str code = bench str . whnfIO . eval0 testEnv $ code - -main = defaultMain - [ bgroup "loop" - [ benchEv "2500" $ setupu2 0 0 2500 - , benchEv "5000" $ setupu2 0 0 5000 - , benchEv "10000" $ setupu2 0 0 10000 - , benchEv "100000" $ setupu2 0 0 100000 - , benchEv "1000000" $ setupu2 0 0 1000000 - ] - , bgroup "oloop" - [ benchEv "2500" $ setupu2 7 0 2500 - , benchEv "5000" $ setupu2 7 0 5000 - , benchEv "10000" $ setupu2 7 0 10000 - , benchEv "100000" $ setupu2 7 0 100000 - , benchEv "1000000" $ setupu2 7 0 1000000 - ] - , bgroup "sloop" - [ benchEv "2500" $ setupb2 1 0 2500 - , benchEv "5000" $ setupb2 1 0 5000 - , benchEv "10000" $ setupb2 1 0 10000 - , benchEv "100000" $ setupb2 1 0 100000 - , benchEv "1000000" $ setupb2 1 0 1000000 - ] - , bgroup "soloop" - [ benchEv "2500" $ setupb2 8 0 2500 - , benchEv "5000" $ setupb2 8 0 5000 - , benchEv "10000" $ setupb2 8 0 10000 - , benchEv "100000" $ setupb2 8 0 100000 - , benchEv "1000000" $ setupb2 8 0 1000000 - ] - , bgroup "kloop" - [ benchEv "2500" $ setupu2 6 0 2500 - , benchEv "5000" $ setupu2 6 0 5000 - , benchEv "10000" $ setupu2 6 0 10000 - , benchEv "100000" $ setupu2 6 0 100000 - , benchEv "1000000" $ setupu2 6 0 1000000 - ] - , bgroup "tloop" - [ benchEv "2500" $ setupu2 26 0 2500 - , benchEv "5000" $ setupu2 26 0 5000 - , benchEv "10000" $ setupu2 26 0 10000 - , benchEv "100000" $ setupu2 26 0 100000 - , benchEv "1000000" $ setupu2 26 0 1000000 - ] - , bgroup "fib" - [ benchEv "10" $ setupu1 2 10 - , benchEv "15" $ setupu1 2 15 - , benchEv "20" $ setupu1 2 20 - , benchEv "25" $ setupu1 2 25 - , benchEv "30" $ setupu1 2 30 - ] - , bgroup "ofib" - [ benchEv "10" $ setupu1 9 10 - , benchEv "15" $ setupu1 9 15 - , benchEv "20" $ setupu1 9 20 - , benchEv "25" $ setupu1 9 25 - , benchEv "30" $ setupu1 9 30 - ] - , bgroup "stackEater" - [ benchEv "100" $ setupu1 4 100 - , benchEv "1000" $ setupu1 4 1000 - , benchEv "10000" $ setupu1 4 10000 - , benchEv "100000" $ setupu1 4 100000 - ] - ] diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index 2b89f56fee..e074b6aca1 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -185,14 +185,3 @@ executables: - unison-prelude - unison-util - unison-util-relation - -benchmarks: - runtime: - source-dirs: benchmarks/runtime - main: Main.hs - dependencies: - - base - - criterion - - containers - - unison-core1 - - unison-parser-typechecker diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index 1ce06e678b..b6fd862146 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -442,42 +442,3 @@ executable tests if flag(optimized) ghc-options: -funbox-strict-fields -O2 default-language: Haskell2010 - -benchmark runtime - type: exitcode-stdio-1.0 - main-is: Main.hs - other-modules: - Paths_unison_parser_typechecker - hs-source-dirs: - benchmarks/runtime - default-extensions: - ApplicativeDo - BangPatterns - BlockArguments - DeriveFunctor - DeriveGeneric - DerivingStrategies - DoAndIfThenElse - FlexibleContexts - FlexibleInstances - GeneralizedNewtypeDeriving - LambdaCase - MultiParamTypeClasses - NamedFieldPuns - OverloadedStrings - PatternSynonyms - RankNTypes - ScopedTypeVariables - TupleSections - TypeApplications - ViewPatterns - ghc-options: -Wall -O0 -fno-warn-name-shadowing -fno-warn-missing-pattern-synonym-signatures - build-depends: - base - , containers - , criterion - , unison-core1 - , unison-parser-typechecker - if flag(optimized) - ghc-options: -funbox-strict-fields -O2 - default-language: Haskell2010 From fa78af00e0c8c9d896216231c0de8f2ef94744e8 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 17 Feb 2022 12:18:01 -0600 Subject: [PATCH 247/297] Add annotations to reserved keyword, reserved word, and empty matches --- parser-typechecker/src/Unison/Lexer.hs | 15 +++++--- parser-typechecker/src/Unison/Parser.hs | 2 +- parser-typechecker/src/Unison/PrintError.hs | 24 ++++++++---- parser-typechecker/src/Unison/TermParser.hs | 41 +++++++++++---------- 4 files changed, 49 insertions(+), 33 deletions(-) diff --git a/parser-typechecker/src/Unison/Lexer.hs b/parser-typechecker/src/Unison/Lexer.hs index 467f26d019..b2fb1b803b 100644 --- a/parser-typechecker/src/Unison/Lexer.hs +++ b/parser-typechecker/src/Unison/Lexer.hs @@ -78,7 +78,9 @@ parseFailure e = PI.ParsecT $ \s _ _ _ eerr -> eerr e s data Err = InvalidWordyId String + | ReservedWordyId String | InvalidSymbolyId String + | ReservedSymbolyId String | InvalidShortHash String | InvalidBytesLiteral String | InvalidHexLiteral @@ -708,9 +710,12 @@ lexemes' eof = P.optional space >> do symbolyIdSeg :: P String symbolyIdSeg = do + start <- pos id <- P.takeWhile1P (Just symbolMsg) symbolyIdChar - if Set.member id reservedOperators then fail "reserved operator" - else pure id + when (Set.member id reservedOperators) $ do + stop <- pos + P.customFailure (Token (ReservedSymbolyId id) start stop) + pure id wordyIdSeg :: P String -- wordyIdSeg = litSeg <|> (P.try do -- todo @@ -718,10 +723,10 @@ lexemes' eof = P.optional space >> do start <- pos ch <- CP.satisfy wordyIdStartChar rest <- P.many (CP.satisfy wordyIdChar) - when (Set.member (ch : rest) keywords) $ do + let word = ch : rest + when (Set.member word keywords) $ do stop <- pos - let msg = show start <> ": Identifier segment can't be a keyword: " <> (ch:rest) - P.customFailure (Token (Opaque msg) start stop) + P.customFailure (Token (ReservedWordyId word) start stop) pure (ch : rest) {- diff --git a/parser-typechecker/src/Unison/Parser.hs b/parser-typechecker/src/Unison/Parser.hs index 5c4d7d599d..a996949153 100644 --- a/parser-typechecker/src/Unison/Parser.hs +++ b/parser-typechecker/src/Unison/Parser.hs @@ -114,7 +114,7 @@ data Error v | UnknownType (L.Token (HQ.HashQualified Name)) (Set Reference) | UnknownId (L.Token (HQ.HashQualified Name)) (Set Referent) (Set Reference) | ExpectedBlockOpen String (L.Token L.Lexeme) - | EmptyMatch + | EmptyMatch (L.Token ()) | EmptyWatch | UseInvalidPrefixSuffix (Either (L.Token Name) (L.Token Name)) (Maybe [L.Token Name]) | UseEmpty (L.Token String) -- an empty `use` statement diff --git a/parser-typechecker/src/Unison/PrintError.hs b/parser-typechecker/src/Unison/PrintError.hs index 61dd156fff..3908422b0b 100644 --- a/parser-typechecker/src/Unison/PrintError.hs +++ b/parser-typechecker/src/Unison/PrintError.hs @@ -692,7 +692,7 @@ renderTypeError e env src = case e of , ", actual=" , (fromString . show) a , ", reference=" - , showConstructor env r + , showConstructor env r ] C.MalformedEffectBind ctorType ctorResult es -> mconcat [ "MalformedEffectBind: " @@ -1061,11 +1061,17 @@ prettyParseError s = \case excerpt, "Here's a few examples of valid syntax: " <> style Code "abba1', snake_case, Foo.zoink!, 🌻" ] + L.ReservedWordyId _id -> Pr.lines [ + "The identifier used here can't be a reserved keyword: ", "", + excerpt ] L.InvalidSymbolyId _id -> Pr.lines [ "This infix identifier isn't valid syntax: ", "", excerpt, "Here's a few valid examples: " <> style Code "++, Float./, `List.map`" ] + L.ReservedSymbolyId _id -> Pr.lines [ + "This identifier is reserved by Unison and can't be used as an operator: ", "", + excerpt ] L.InvalidBytesLiteral bs -> Pr.lines [ "This bytes literal isn't valid syntax: " <> style ErrorSite (fromString bs), "", excerpt, @@ -1289,13 +1295,17 @@ prettyParseError s = \case , "but there wasn't one. Maybe check your indentation:\n" , tokenAsErrorSite s tok ] - go Parser.EmptyMatch = mconcat - ["I expected some patterns after a " - , style ErrorSite "match" - , "/" - , style ErrorSite "with" - , " but I didn't find any." + go (Parser.EmptyMatch tok) = + Pr.indentN 2 . Pr.callout "😶" $ Pr.lines + [ Pr.wrap ( "I expected some patterns after a " + <> style ErrorSite "match" + <> "/" + <> style ErrorSite "with" + <> " but I didn't find any." + ) + , tokenAsErrorSite s tok ] + go Parser.EmptyWatch = "I expected a non-empty watch expression and not just \">\"" go (Parser.UnknownAbilityConstructor tok _referents) = unknownConstructor "ability" tok diff --git a/parser-typechecker/src/Unison/TermParser.hs b/parser-typechecker/src/Unison/TermParser.hs index 7f9c7c1ea8..780e0ad35c 100644 --- a/parser-typechecker/src/Unison/TermParser.hs +++ b/parser-typechecker/src/Unison/TermParser.hs @@ -51,6 +51,8 @@ import qualified Unison.Typechecker.Components as Components import qualified Unison.Util.Bytes as Bytes import qualified Unison.Var as Var import qualified Unison.NamesWithHistory as NamesWithHistory +import qualified Data.List.NonEmpty as NonEmpty +import Data.List.NonEmpty (NonEmpty) watch :: Show a => String -> a -> a watch msg a = let !_ = trace (msg ++ ": " ++ show a) () in a @@ -134,16 +136,19 @@ match = do _ <- P.try (openBlockWith "with") <|> do t <- anyToken P.customFailure (ExpectedBlockOpen "with" t) - (_arities, cases) <- unzip <$> matchCases - when (null cases) $ P.customFailure EmptyMatch + (_arities, cases) <- NonEmpty.unzip <$> matchCases1 start _ <- closeBlock - pure $ Term.match (ann start <> maybe (ann start) ann (lastMay cases)) + pure $ Term.match (ann start <> ann (NonEmpty.last cases)) scrutinee - cases + (toList cases) -matchCases :: Var v => P v [(Int, Term.MatchCase Ann (Term v Ann))] -matchCases = sepBy1 semi matchCase +matchCases1 :: Var v => L.Token () -> P v (NonEmpty (Int, Term.MatchCase Ann (Term v Ann))) +matchCases1 start = do + cases <- sepBy1 semi matchCase <&> \cases -> [ (n,c) | (n,cs) <- cases, c <- cs ] + case cases of + [] -> P.customFailure (EmptyMatch start) + (c:cs) -> pure (c NonEmpty.:| cs) -- Returns the arity of the pattern and the `MatchCase`. Examples: -- @@ -284,7 +289,7 @@ lam p = label "lambda" $ mkLam <$> P.try (some prefixDefinitionName <* reserved where mkLam vs b = Term.lam' (ann (head vs) <> ann b) (map L.payload vs) b -letBlock, handle, lamCase, ifthen :: Var v => TermP v +letBlock, handle, ifthen :: Var v => TermP v letBlock = label "let" $ block "let" handle = label "handle" $ do @@ -292,21 +297,17 @@ handle = label "handle" $ do handler <- block "with" pure $ Term.handle (ann b) handler b -checkCasesArities :: (Ord v, Annotated a) => [(Int, a)] -> P v (Int, [a]) -checkCasesArities cases = go Nothing cases where - go arity [] = case arity of - Nothing -> fail "empty list of cases" - Just a -> pure (a, map snd cases) - go Nothing ((i,_):t) = go (Just i) t - go (Just i) ((j,a):t) = - if i == j then go (Just i) t - else P.customFailure $ PatternArityMismatch i j (ann a) +checkCasesArities :: (Ord v, Annotated a) => NonEmpty (Int, a) -> P v (Int, NonEmpty a) +checkCasesArities cases@((i,_) NonEmpty.:| rest) = + case List.find (\(j, _) -> j /= i) rest of + Nothing -> pure (i, snd <$> cases) + Just (j, a) -> P.customFailure $ PatternArityMismatch i j (ann a) +lamCase :: Var v => TermP v lamCase = do start <- openBlockWith "cases" - cases <- matchCases + cases <- matchCases1 start (arity, cases) <- checkCasesArities cases - when (null cases) (P.customFailure EmptyMatch) _ <- closeBlock lamvars <- replicateM arity (Parser.uniqueName 10) let vars = @@ -317,8 +318,8 @@ lamCase = do lamvarTerm = case lamvarTerms of [e] -> e es -> DD.tupleTerm es - anns = ann start <> maybe (ann start) ann (lastMay cases) - matchTerm = Term.match anns lamvarTerm cases + anns = ann start <> ann (NonEmpty.last cases) + matchTerm = Term.match anns lamvarTerm (toList cases) pure $ Term.lam' anns vars matchTerm ifthen = label "if" $ do start <- peekAny From 1a8c2701dfb23d5a681abf0a2abb6c8c75824b8e Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 17 Feb 2022 12:29:24 -0600 Subject: [PATCH 248/297] Add error messages to transcript --- parser-typechecker/src/Unison/PrintError.hs | 2 +- unison-src/transcripts/error-messages.md | 17 ++++++ .../transcripts/error-messages.output.md | 52 +++++++++++++++++++ 3 files changed, 70 insertions(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/PrintError.hs b/parser-typechecker/src/Unison/PrintError.hs index 3908422b0b..e97d39b36e 100644 --- a/parser-typechecker/src/Unison/PrintError.hs +++ b/parser-typechecker/src/Unison/PrintError.hs @@ -1062,7 +1062,7 @@ prettyParseError s = \case "Here's a few examples of valid syntax: " <> style Code "abba1', snake_case, Foo.zoink!, 🌻" ] L.ReservedWordyId _id -> Pr.lines [ - "The identifier used here can't be a reserved keyword: ", "", + "The identifier used here isn't allowed to be a reserved keyword: ", "", excerpt ] L.InvalidSymbolyId _id -> Pr.lines [ "This infix identifier isn't valid syntax: ", "", diff --git a/unison-src/transcripts/error-messages.md b/unison-src/transcripts/error-messages.md index e50a3f9a04..68c52909d4 100644 --- a/unison-src/transcripts/error-messages.md +++ b/unison-src/transcripts/error-messages.md @@ -58,3 +58,20 @@ foo = then -- unclosed ```unison:error foo = with -- unclosed ``` + +```unison:error +foo = match 1 with + 2 -- no right-hand-side +``` + +### Keywords + +```unison:error +use.keyword.in.namespace = 1 +``` + + +```unison:error +-- reserved operator +a ! b = 1 +``` diff --git a/unison-src/transcripts/error-messages.output.md b/unison-src/transcripts/error-messages.output.md index 672ac458d4..7a66623f3c 100644 --- a/unison-src/transcripts/error-messages.output.md +++ b/unison-src/transcripts/error-messages.output.md @@ -158,4 +158,56 @@ foo = with -- unclosed 1 | foo = with -- unclosed +``` +```unison +foo = match 1 with + 2 -- no right-hand-side +``` + +```ucm + + 😶 + + I expected some patterns after a match / with but I didn't + find any. + 1 | foo = match 1 with + + +``` +### Keywords + +```unison +use.keyword.in.namespace = 1 +``` + +```ucm + + The identifier used here isn't allowed to be a reserved keyword: + + 1 | use.keyword.in.namespace = 1 + + +``` +```unison +-- reserved operator +a ! b = 1 +``` + +```ucm + + This looks like the start of an expression here + + 2 | a ! b = 1 + + but at the file top-level, I expect one of the following: + + - A binding, like a = 42 OR + a : Nat + a = 42 + - A watch expression, like > a + 1 + - An `ability` declaration, like ability Foo where ... + - A `type` declaration, like type Optional a = None | Some a + - A `namespace` declaration, like namespace Seq where ... + + ``` From 8af3ad47b02e9a5a7ecf27bb07f60357fdd82149 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 17 Feb 2022 12:44:07 -0600 Subject: [PATCH 249/297] Add annotation to empty watches --- parser-typechecker/src/Unison/FileParser.hs | 14 +++++++------- parser-typechecker/src/Unison/Parser.hs | 2 +- parser-typechecker/src/Unison/PrintError.hs | 8 ++++++-- parser-typechecker/tests/Unison/Test/FileParser.hs | 2 +- 4 files changed, 15 insertions(+), 11 deletions(-) diff --git a/parser-typechecker/src/Unison/FileParser.hs b/parser-typechecker/src/Unison/FileParser.hs index bc8d7cdc1a..d79b8151a0 100644 --- a/parser-typechecker/src/Unison/FileParser.hs +++ b/parser-typechecker/src/Unison/FileParser.hs @@ -205,10 +205,16 @@ stanza = watchExpression <|> unexpectedAction <|> binding P.customFailure $ DidntExpectExpression t t2 watchExpression = do (kind, guid, ann) <- watched - _ <- closed + _ <- guardEmptyWatch ann msum [ WatchBinding kind ann <$> TermParser.binding, WatchExpression kind guid ann <$> TermParser.blockTerm ] + guardEmptyWatch ann = + P.try $ do + op <- optional (L.payload <$> P.lookAhead closeBlock) + case op of Just () -> P.customFailure (EmptyWatch ann) + _ -> pure () + -- binding :: forall v. Var v => P v ((Ann, v), Term v Ann) binding = do @@ -231,12 +237,6 @@ watched = P.try $ do guard $ maybe True (`L.touches` tok) kind pure (maybe UF.RegularWatch L.payload kind, guid, maybe mempty ann kind <> ann tok) -closed :: Var v => P v () -closed = P.try $ do - op <- optional (L.payload <$> P.lookAhead closeBlock) - case op of Just () -> P.customFailure EmptyWatch - _ -> pure () - -- The parsed form of record accessors, as in: -- -- type Additive a = { zero : a, (+) : a -> a -> a } diff --git a/parser-typechecker/src/Unison/Parser.hs b/parser-typechecker/src/Unison/Parser.hs index a996949153..6c6a7be5b0 100644 --- a/parser-typechecker/src/Unison/Parser.hs +++ b/parser-typechecker/src/Unison/Parser.hs @@ -115,7 +115,7 @@ data Error v | UnknownId (L.Token (HQ.HashQualified Name)) (Set Referent) (Set Reference) | ExpectedBlockOpen String (L.Token L.Lexeme) | EmptyMatch (L.Token ()) - | EmptyWatch + | EmptyWatch Ann | UseInvalidPrefixSuffix (Either (L.Token Name) (L.Token Name)) (Maybe [L.Token Name]) | UseEmpty (L.Token String) -- an empty `use` statement | DidntExpectExpression (L.Token L.Lexeme) (Maybe (L.Token L.Lexeme)) diff --git a/parser-typechecker/src/Unison/PrintError.hs b/parser-typechecker/src/Unison/PrintError.hs index e97d39b36e..29d9e2042d 100644 --- a/parser-typechecker/src/Unison/PrintError.hs +++ b/parser-typechecker/src/Unison/PrintError.hs @@ -1303,11 +1303,15 @@ prettyParseError s = \case <> style ErrorSite "with" <> " but I didn't find any." ) + , "" , tokenAsErrorSite s tok ] - go Parser.EmptyWatch = - "I expected a non-empty watch expression and not just \">\"" + go (Parser.EmptyWatch tok) = + Pr.lines [ "I expected a non-empty watch expression and not just \">\"" + , "" + , annotatedAsErrorSite s tok + ] go (Parser.UnknownAbilityConstructor tok _referents) = unknownConstructor "ability" tok go (Parser.UnknownDataConstructor tok _referents) = unknownConstructor "data" tok go (Parser.UnknownId tok referents references) = Pr.lines diff --git a/parser-typechecker/tests/Unison/Test/FileParser.hs b/parser-typechecker/tests/Unison/Test/FileParser.hs index ee114b5b3e..8a9fd6b445 100644 --- a/parser-typechecker/tests/Unison/Test/FileParser.hs +++ b/parser-typechecker/tests/Unison/Test/FileParser.hs @@ -81,7 +81,7 @@ module Unison.Test.FileParser where where expectation :: Var e => P.Error e -> Test () expectation e = case e of - P.EmptyWatch -> ok + P.EmptyWatch _ann -> ok _ -> crash "Error wasn't EmptyWatch" signatureNeedsAccompanyingBodyTest :: Test () From f8a15c1c0552f7f2b3c69e95ba1eec39fbcc1415 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 17 Feb 2022 12:44:22 -0600 Subject: [PATCH 250/297] Transcript for mismatched arities. --- unison-src/transcripts/error-messages.md | 18 ++++++++- .../transcripts/error-messages.output.md | 37 +++++++++++++++++++ 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/unison-src/transcripts/error-messages.md b/unison-src/transcripts/error-messages.md index 68c52909d4..1f11e0de86 100644 --- a/unison-src/transcripts/error-messages.md +++ b/unison-src/transcripts/error-messages.md @@ -59,18 +59,34 @@ foo = then -- unclosed foo = with -- unclosed ``` +### Matching + ```unison:error foo = match 1 with 2 -- no right-hand-side ``` +```unison:error +-- Mismatched arities +foo = cases + 1, 2 -> () + 3 -> () +``` + + +### Watches + +```unison:error +-- Empty watch +> +``` + ### Keywords ```unison:error use.keyword.in.namespace = 1 ``` - ```unison:error -- reserved operator a ! b = 1 diff --git a/unison-src/transcripts/error-messages.output.md b/unison-src/transcripts/error-messages.output.md index 7a66623f3c..3a918212d5 100644 --- a/unison-src/transcripts/error-messages.output.md +++ b/unison-src/transcripts/error-messages.output.md @@ -159,6 +159,8 @@ foo = with -- unclosed ``` +### Matching + ```unison foo = match 1 with 2 -- no right-hand-side @@ -170,9 +172,44 @@ foo = match 1 with I expected some patterns after a match / with but I didn't find any. + 1 | foo = match 1 with +``` +```unison +-- Mismatched arities +foo = cases + 1, 2 -> () + 3 -> () +``` + +```ucm + + 😶 + + Not all the branches of this pattern matching have the same + number of arguments. I was assuming they'd all have 2 + arguments (based on the previous patterns) but this one has + 1 arguments: + 4 | 3 -> () + + +``` +### Watches + +```unison +-- Empty watch +> +``` + +```ucm + + I expected a non-empty watch expression and not just ">" + + 2 | > + + ``` ### Keywords From fc013e7cb6631ad82a26f57a6f6716ee3af9e24a Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 17 Feb 2022 14:00:39 -0600 Subject: [PATCH 251/297] Update transcript for v2 hashing algorithm --- unison-src/transcripts/update-on-conflict.output.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/unison-src/transcripts/update-on-conflict.output.md b/unison-src/transcripts/update-on-conflict.output.md index 2a5948c801..980148a39b 100644 --- a/unison-src/transcripts/update-on-conflict.output.md +++ b/unison-src/transcripts/update-on-conflict.output.md @@ -49,10 +49,10 @@ Cause a conflict: New name conflicts: - 1. x#jk19sm5bf8 : Nat + 1. x#gjmq673r1v : Nat ↓ - 2. ┌ x#0ja1qfpej6 : Nat - 3. └ x#jk19sm5bf8 : Nat + 2. ┌ x#dcgdua2lj6 : Nat + 3. └ x#gjmq673r1v : Nat Tip: You can use `todo` to see if this generated any work to do in this namespace and `test` to run the tests. Or you From efc70b2dc27fe4933364b5c4116e6f0ba6b7e315 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Thu, 17 Feb 2022 14:13:45 -0600 Subject: [PATCH 252/297] Add debugging combinators (#2856) * Add debugging combinators * Allow passing comma separated list of debug flags * Replace sqlite env var with Debug lib * Add debugLogM * Fix empty UNISON_DEBUG statements --- lib/unison-prelude/package.yaml | 1 + lib/unison-prelude/src/Unison/Debug.hs | 89 +++++++++++++++++++ lib/unison-prelude/unison-prelude.cabal | 2 + .../src/Unison/Sqlite/Connection.hs | 63 +++++-------- 4 files changed, 115 insertions(+), 40 deletions(-) create mode 100644 lib/unison-prelude/src/Unison/Debug.hs diff --git a/lib/unison-prelude/package.yaml b/lib/unison-prelude/package.yaml index 8e2734ccf8..e999a79ddd 100644 --- a/lib/unison-prelude/package.yaml +++ b/lib/unison-prelude/package.yaml @@ -18,6 +18,7 @@ dependencies: - lens - vector - unliftio + - pretty-simple ghc-options: -Wall diff --git a/lib/unison-prelude/src/Unison/Debug.hs b/lib/unison-prelude/src/Unison/Debug.hs new file mode 100644 index 0000000000..af6864914b --- /dev/null +++ b/lib/unison-prelude/src/Unison/Debug.hs @@ -0,0 +1,89 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Unison.Debug (debug, debugM, whenDebug, debugLog, debugLogM, DebugFlag (..)) where + +import Control.Applicative (empty) +import Control.Monad (when) +import Data.Set (Set) +import qualified Data.Set as Set +import qualified Data.Text as Text +import Debug.Pretty.Simple (pTrace, pTraceM, pTraceShowId, pTraceShowM) +import System.IO.Unsafe (unsafePerformIO) +import UnliftIO.Environment (lookupEnv) + +data DebugFlag + = Git + | Sqlite + | Codebase + deriving (Eq, Ord, Show, Bounded, Enum) + +debugFlags :: Set DebugFlag +debugFlags = pTraceShowId $ case pTraceShowId $ (unsafePerformIO (lookupEnv "UNISON_DEBUG")) of + Nothing -> Set.empty + -- Enable all debugging flags for bare UNISON_DEBUG declarations like: + -- UNISON_DEBUG= ucm + Just "" -> Set.fromList [minBound .. maxBound] + Just s -> Set.fromList $ do + w <- (Text.splitOn "," . Text.pack $ s) + case Text.toUpper . Text.strip $ w of + "GIT" -> pure Git + "SQLITE" -> pure Sqlite + "CODEBASE" -> pure Codebase + _ -> empty +{-# NOINLINE debugFlags #-} + +debugGit :: Bool +debugGit = Git `Set.member` debugFlags +{-# NOINLINE debugGit #-} + +debugSqlite :: Bool +debugSqlite = Sqlite `Set.member` debugFlags +{-# NOINLINE debugSqlite #-} + +debugCodebase :: Bool +debugCodebase = Codebase `Set.member` debugFlags +{-# NOINLINE debugCodebase #-} + +-- | Use for trace-style selective debugging. +-- E.g. 1 + (debug Git "The second number" 2) +-- +-- Or, use in pattern matching to view arguments. +-- E.g. +-- myFunc (debug Git "argA" -> argA) = ... +debug :: Show a => DebugFlag -> String -> a -> a +debug flag msg a = + if shouldDebug flag + then pTrace (msg <> ":\n") $ pTraceShowId a + else a + +-- | Use for selective debug logging in monadic contexts. +-- E.g. +-- do +-- debugM Git "source repo" srcRepo +-- ... +debugM :: (Show a, Monad m) => DebugFlag -> String -> a -> m () +debugM flag msg a = + when (shouldDebug flag) $ do + pTraceM (msg <> ":\n") + pTraceShowM a + +debugLog :: DebugFlag -> String -> a -> a +debugLog flag msg = + if (shouldDebug flag) + then pTrace msg + else id + +debugLogM :: (Monad m) => DebugFlag -> String -> m () +debugLogM flag msg = + when (shouldDebug flag) $ pTraceM msg + +-- | A 'when' block which is triggered if the given flag is being debugged. +whenDebug :: Monad m => DebugFlag -> m () -> m () +whenDebug flag action = do + when (shouldDebug flag) action + +shouldDebug :: DebugFlag -> Bool +shouldDebug = \case + Git -> debugGit + Sqlite -> debugSqlite + Codebase -> debugCodebase diff --git a/lib/unison-prelude/unison-prelude.cabal b/lib/unison-prelude/unison-prelude.cabal index 1d76dd4ac8..8bb82595b2 100644 --- a/lib/unison-prelude/unison-prelude.cabal +++ b/lib/unison-prelude/unison-prelude.cabal @@ -17,6 +17,7 @@ source-repository head library exposed-modules: + Unison.Debug Unison.Prelude Unison.Util.Map Unison.Util.Set @@ -46,6 +47,7 @@ library , extra , lens , mtl + , pretty-simple , safe , text , transformers diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs b/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs index 313eeef131..f6f48c8c00 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs @@ -58,26 +58,18 @@ module Unison.Sqlite.Connection ) where -import qualified Data.Text as Text -import qualified Data.Text.IO as Text import qualified Database.SQLite.Simple as Sqlite import qualified Database.SQLite.Simple.FromField as Sqlite import qualified Database.SQLite3.Direct as Sqlite (Database (..)) import Debug.RecoverRTTI (anythingToString) -import System.Environment (lookupEnv) -import System.IO (stderr) -import System.IO.Unsafe (unsafePerformIO) +import Unison.Debug (debugLogM, debugM) +import qualified Unison.Debug as Debug import Unison.Prelude import Unison.Sqlite.Exception import Unison.Sqlite.Sql import UnliftIO (MonadUnliftIO) import UnliftIO.Exception -debugTraceQueries :: Bool -debugTraceQueries = - isJust (unsafePerformIO (lookupEnv "UNISON_SQLITE_DEBUG")) -{-# NOINLINE debugTraceQueries #-} - -- | A /non-thread safe/ connection to a SQLite database. data Connection = Connection { name :: String, @@ -131,10 +123,9 @@ closeConnection (Connection _ _ conn) = execute :: Sqlite.ToRow a => Connection -> Sql -> a -> IO () execute conn@(Connection _ _ conn0) s params = do - when debugTraceQueries do - Text.hPutStrLn stderr ("query: " <> coerce s) - Text.hPutStrLn stderr ("params: " <> Text.pack (anythingToString params)) - Text.hPutStrLn stderr "----------" + debugM Debug.Sqlite "query" s + debugM Debug.Sqlite "params" (anythingToString params) + debugLogM Debug.Sqlite "----------" Sqlite.execute conn0 (coerce s) params `catch` \(exception :: Sqlite.SQLError) -> throwSqliteQueryException SqliteQueryExceptionInfo @@ -146,10 +137,9 @@ execute conn@(Connection _ _ conn0) s params = do executeMany :: Sqlite.ToRow a => Connection -> Sql -> [a] -> IO () executeMany conn@(Connection _ _ conn0) s params = do - when debugTraceQueries do - Text.hPutStrLn stderr ("query: " <> coerce s) - Text.hPutStrLn stderr ("params: " <> Text.pack (anythingToString params)) - Text.hPutStrLn stderr "----------" + debugM Debug.Sqlite "query" s + debugM Debug.Sqlite "params" (anythingToString params) + debugLogM Debug.Sqlite "----------" Sqlite.executeMany conn0 (coerce s) params `catch` \(exception :: Sqlite.SQLError) -> throwSqliteQueryException SqliteQueryExceptionInfo @@ -163,9 +153,8 @@ executeMany conn@(Connection _ _ conn0) s params = do execute_ :: Connection -> Sql -> IO () execute_ conn@(Connection _ _ conn0) s = do - when debugTraceQueries do - Text.hPutStrLn stderr ("query: " <> coerce s) - Text.hPutStrLn stderr "----------" + debugM Debug.Sqlite "query" s + debugLogM Debug.Sqlite "----------" Sqlite.execute_ conn0 (coerce s) `catch` \(exception :: Sqlite.SQLError) -> throwSqliteQueryException SqliteQueryExceptionInfo @@ -188,16 +177,13 @@ queryListRow conn@(Connection _ _ conn0) s params = sql = s } where - doQueryListRow = - if debugTraceQueries - then do - Text.hPutStrLn stderr ("query: " <> coerce s) - Text.hPutStrLn stderr ("params: " <> Text.pack (anythingToString params)) - result <- run - Text.hPutStrLn stderr ("result: " <> Text.pack (anythingToString result)) - Text.hPutStrLn stderr "----------" - pure result - else run + doQueryListRow = do + debugM Debug.Sqlite "query" s + debugM Debug.Sqlite "params" (anythingToString params) + result <- run + debugM Debug.Sqlite "result" (anythingToString params) + debugLogM Debug.Sqlite "----------" + pure result run = Sqlite.query conn0 (coerce s) params queryListCol :: forall a b. (Sqlite.FromField b, Sqlite.ToRow a) => Connection -> Sql -> a -> IO [b] @@ -328,15 +314,12 @@ queryListRow_ conn@(Connection _ _ conn0) s = sql = s } where - doQueryListRow_ = - if debugTraceQueries - then do - Text.hPutStrLn stderr ("query: " <> coerce s) - result <- run - Text.hPutStrLn stderr ("result: " <> Text.pack (anythingToString result)) - Text.hPutStrLn stderr "----------" - pure result - else run + doQueryListRow_ = do + debugM Debug.Sqlite "query" s + result <- run + debugM Debug.Sqlite "result" (anythingToString result) + debugLogM Debug.Sqlite "----------" + pure result run = Sqlite.query_ conn0 (coerce s) queryListCol_ :: forall a. Sqlite.FromField a => Connection -> Sql -> IO [a] From 6c3522e35435b3f115aa563b618b41245ba19649 Mon Sep 17 00:00:00 2001 From: Dan Doel Date: Thu, 17 Feb 2022 15:35:06 -0500 Subject: [PATCH 253/297] Traverse type references in ANF functions Previously the references in FCon and FReq didn't matter when loading into the cache, but due to tag packing, they now do. Strictly speaking, they probably still don't matter for FReq, but might as well be safe. --- parser-typechecker/src/Unison/Runtime/ANF.hs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/parser-typechecker/src/Unison/Runtime/ANF.hs b/parser-typechecker/src/Unison/Runtime/ANF.hs index a248912528..e6af62ee87 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF.hs @@ -1278,7 +1278,7 @@ anfFLinks f g (AName er _ e) anfFLinks f g (AMatch _ bs) = branchLinks (f True) g bs anfFLinks f g (AShift r e) = f True r <> g e anfFLinks f g (AHnd rs _ e) = foldMap (f True) rs <> g e -anfFLinks f _ (AApp fu _) = funcLinks (f False) fu +anfFLinks f _ (AApp fu _) = funcLinks f fu anfFLinks _ _ _ = mempty branchLinks @@ -1296,9 +1296,11 @@ tyRefs _ _ = mempty funcLinks :: Monoid a - => (Reference -> a) + => (Bool -> Reference -> a) -> Func v -> a -funcLinks f (FComb r) = f r +funcLinks f (FComb r) = f False r +funcLinks f (FCon r _) = f True r +funcLinks f (FReq r _) = f True r funcLinks _ _ = mempty expandBindings' From 1e3db9ca696f977b25678a5a41d3f730d6b36935 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 17 Feb 2022 13:36:37 -0700 Subject: [PATCH 254/297] use posix paths when matching `git status` output --- .../src/Unison/Codebase/SqliteCodebase.hs | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 7a8dbb3dc6..527bc64c61 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -37,6 +37,8 @@ import qualified Database.SQLite.Simple as Sqlite import qualified System.Console.ANSI as ANSI import System.Directory (copyFile) import System.FilePath (()) +import qualified System.FilePath as FilePath +import qualified System.FilePath.Posix as FilePath.Posix import U.Codebase.HashTags (CausalHash (CausalHash, unCausalHash)) import qualified U.Codebase.Reference as C.Reference import qualified U.Codebase.Referent as C.Referent @@ -1226,14 +1228,18 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = mapLef then Just (hasDeleteWal, hasDeleteShm) else Nothing where + -- `git status` always displays paths using posix forward-slashes, + -- so we have to convert our expected path to test. + posixCodebasePath = + FilePath.Posix.joinPath (FilePath.splitDirectories codebasePath) statusLines = Text.unpack <$> Text.lines status t = dropWhile Char.isSpace - okLine (t -> '?' : '?' : (t -> p)) | p == codebasePath = True - okLine (t -> 'M' : (t -> p)) | p == codebasePath = True + okLine (t -> '?' : '?' : (t -> p)) | p == posixCodebasePath = True + okLine (t -> 'M' : (t -> p)) | p == posixCodebasePath = True okLine line = isWalDelete line || isShmDelete line - isWalDelete (t -> 'D' : (t -> p)) | p == codebasePath ++ "-wal" = True + isWalDelete (t -> 'D' : (t -> p)) | p == posixCodebasePath ++ "-wal" = True isWalDelete _ = False - isShmDelete (t -> 'D' : (t -> p)) | p == codebasePath ++ "-wal" = True + isShmDelete (t -> 'D' : (t -> p)) | p == posixCodebasePath ++ "-wal" = True isShmDelete _ = False hasDeleteWal = any isWalDelete statusLines hasDeleteShm = any isShmDelete statusLines From 87e58af9c538c40656494b4fa5bcbddcf071c6bd Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 17 Feb 2022 09:49:05 -0700 Subject: [PATCH 255/297] switch to Windows code page 65001 in exe:unison and exe:tests I think it's only needed in these two spots, as the other executables just shell out to `stack exec unison`. I'm not actually clear on how this interacts with (or could be replaced by `hSetEncoding`, which we do have sprinkled around already, but not sufficiently to let Windows builds work. If there's a cleaner solution, I'd like to use it. i.e. am I papering over a more correct multi-platform fix by using the Windows-only System.IO.Codepage? --- unison-cli/package.yaml | 2 ++ unison-cli/tests/Main.hs | 3 ++- unison-cli/unison-cli.cabal | 2 ++ unison-cli/unison/Main.hs | 3 ++- 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 50cb2a3b9c..3a3dc35f77 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -51,6 +51,7 @@ library: tests: tests: dependencies: + - code-page - easytest - here - shellmet @@ -65,6 +66,7 @@ executables: main: Main.hs ghc-options: -threaded -rtsopts -with-rtsopts=-I0 -optP-Wno-nonportable-include-path dependencies: + - code-page - optparse-applicative >= 0.16.1.0 - shellmet - template-haskell diff --git a/unison-cli/tests/Main.hs b/unison-cli/tests/Main.hs index 65cd9683fd..80b724b31a 100644 --- a/unison-cli/tests/Main.hs +++ b/unison-cli/tests/Main.hs @@ -3,6 +3,7 @@ module Main where import EasyTest import System.Environment (getArgs) import System.IO +import System.IO.CodePage (withCP65001) import qualified Unison.Test.ClearCache as ClearCache import qualified Unison.Test.CommandLine as CommandLine import qualified Unison.Test.GitSync as GitSync @@ -20,7 +21,7 @@ test = ] main :: IO () -main = do +main = withCP65001 do args <- getArgs mapM_ (`hSetEncoding` utf8) [stdout, stdin, stderr] case args of diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index 8f7aa870d2..0c2228fbc3 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -298,6 +298,7 @@ executable unison , async , base , bytestring + , code-page , configurator , containers >=0.6.3 , cryptonite @@ -376,6 +377,7 @@ test-suite tests , async , base , bytestring + , code-page , configurator , containers >=0.6.3 , cryptonite diff --git a/unison-cli/unison/Main.hs b/unison-cli/unison/Main.hs index 8a6cf00920..5771973913 100644 --- a/unison-cli/unison/Main.hs +++ b/unison-cli/unison/Main.hs @@ -20,6 +20,7 @@ import System.Directory (canonicalizePath, getCurrentDirectory, removeDirectoryR import System.Environment (getProgName, withArgs) import qualified System.Exit as Exit import qualified System.FilePath as FP +import System.IO.CodePage (withCP65001) import System.IO.Error (catchIOError) import qualified System.IO.Temp as Temp import qualified System.Path as Path @@ -67,7 +68,7 @@ import qualified Data.List.NonEmpty as NonEmpty import Unison.CommandLine.Welcome (CodebaseInitStatus(..)) main :: IO () -main = do +main = withCP65001 do interruptHandler <- defaultInterruptHandler withInterruptHandler interruptHandler $ do progName <- getProgName From be690e790ffd85c8258648e49ea62db5ad5187e2 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Thu, 17 Feb 2022 16:14:22 -0700 Subject: [PATCH 256/297] fix up some hard-coded paths in SqliteCodebase.hs --- .../src/Unison/Codebase/SqliteCodebase.hs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 527bc64c61..bf7382d90b 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -1259,7 +1259,9 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = mapLef Nothing -> error $ "An error occurred during push.\n" - <> "I was expecting only to see .unison/v2/unison.sqlite3 modified, but saw:\n\n" + <> "I was expecting only to see " + <> codebasePath + <> " modified, but saw:\n\n" <> Text.unpack status <> "\n\n" <> "Please visit https://github.com/unisonweb/unison/issues/2063\n" @@ -1267,9 +1269,9 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = mapLef Just (hasDeleteWal, hasDeleteShm) -> do -- Only stage files we're expecting; don't `git add --all .` -- which could accidentally commit some garbage - gitIn remotePath ["add", ".unison/v2/unison.sqlite3"] - when hasDeleteWal $ gitIn remotePath ["rm", ".unison/v2/unison.sqlite3-wal"] - when hasDeleteShm $ gitIn remotePath ["rm", ".unison/v2/unison.sqlite3-shm"] + gitIn remotePath ["add", Text.pack codebasePath] + when hasDeleteWal $ gitIn remotePath ["rm", Text.pack $ codebasePath <> "-wal"] + when hasDeleteShm $ gitIn remotePath ["rm", Text.pack $ codebasePath <> "-shm"] gitIn remotePath ["commit", "-q", "-m", "Sync branch " <> Text.pack (show $ Branch.headHash branch)] From c7a4fc3bd1cf9bd18924c0970b12be0cb94c2c40 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=BAnar?= Date: Thu, 17 Feb 2022 23:33:13 -0500 Subject: [PATCH 257/297] Fix === --- parser-typechecker/src/Unison/Runtime/Machine.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parser-typechecker/src/Unison/Runtime/Machine.hs b/parser-typechecker/src/Unison/Runtime/Machine.hs index 1be178bb79..0ec4895839 100644 --- a/parser-typechecker/src/Unison/Runtime/Machine.hs +++ b/parser-typechecker/src/Unison/Runtime/Machine.hs @@ -1292,7 +1292,7 @@ bprim2 !ustk !bstk EQLU i j = do x <- peekOff bstk i y <- peekOff bstk j ustk <- bump ustk - poke ustk $ if x == y then 1 else 0 + poke ustk $ if universalEq (==) x y then 1 else 0 pure (ustk, bstk) bprim2 !ustk !bstk DRPT i j = do n <- peekOff ustk i From 23e13958e6f597e170003a9701cc5a3a674e7952 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=BAnar?= Date: Fri, 18 Feb 2022 09:14:33 -0500 Subject: [PATCH 258/297] Added test --- unison-src/transcripts/builtins.md | 2 ++ unison-src/transcripts/builtins.output.md | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/unison-src/transcripts/builtins.md b/unison-src/transcripts/builtins.md index a1e7ceb4f4..9b48d5a7ee 100644 --- a/unison-src/transcripts/builtins.md +++ b/unison-src/transcripts/builtins.md @@ -213,6 +213,8 @@ test> Text.tests.alignment = Text.alignRightWith 5 ?_ "ababa" == "ababa", Text.alignRightWith 5 ?_ "ab" == "___ab" ] + +test> Text.tests.literalsEq = checks [":)" == ":)"] ``` ```ucm:hide diff --git a/unison-src/transcripts/builtins.output.md b/unison-src/transcripts/builtins.output.md index 145c574f07..bb0c65e988 100644 --- a/unison-src/transcripts/builtins.output.md +++ b/unison-src/transcripts/builtins.output.md @@ -194,6 +194,8 @@ test> Text.tests.alignment = Text.alignRightWith 5 ?_ "ababa" == "ababa", Text.alignRightWith 5 ?_ "ab" == "___ab" ] + +test> Text.tests.literalsEq = checks [":)" == ":)"] ``` ## `Bytes` functions @@ -328,10 +330,11 @@ Now that all the tests have been added to the codebase, let's view the test repo ◉ Sandbox.test2 Passed ◉ Sandbox.test3 Passed ◉ Text.tests.alignment Passed + ◉ Text.tests.literalsEq Passed ◉ Text.tests.repeat Passed ◉ Text.tests.takeDropAppend Passed - ✅ 19 test(s) passing + ✅ 20 test(s) passing Tip: Use view Any.test1 to view the source of a test. From 4097291cfa0aea2eb4fd038de2eda70ce44241e0 Mon Sep 17 00:00:00 2001 From: Cody Allen Date: Fri, 18 Feb 2022 08:21:35 -0800 Subject: [PATCH 259/297] vim-completion: don't include bang Similar to #2842. When triggering auto-completion on something like `!currentTime`, we don't want to include the `!` in the term that we search for. --- editor-support/vim/autoload/unison.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/editor-support/vim/autoload/unison.vim b/editor-support/vim/autoload/unison.vim index 29201213d6..6e2398dd7d 100644 --- a/editor-support/vim/autoload/unison.vim +++ b/editor-support/vim/autoload/unison.vim @@ -62,7 +62,7 @@ function! unison#Complete(findstart, base) abort " " (List.fol " ^ - while start > 0 && line[start - 1] !~ '\v\s|[(){}\[\]]' + while start > 0 && line[start - 1] !~ '\v\s|[!(){}\[\]]' let start -= 1 endwhile return start From f7e69a7dc376627bc2e3ab39cca5e5ade177e19b Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 18 Feb 2022 12:01:24 -0700 Subject: [PATCH 260/297] add windows release (1st attempt) --- .github/workflows/release.yaml | 43 ++++++++++++++++++++++++---------- 1 file changed, 31 insertions(+), 12 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 68cb378b67..850972460f 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -12,6 +12,7 @@ jobs: needs: - build_linux - build_macos + - build_windows steps: - uses: actions/checkout@v2 @@ -48,12 +49,6 @@ jobs: curl -L https://github.com/commercialhaskell/stack/releases/download/v2.5.1/stack-2.5.1-linux-x86_64.tar.gz | tar -xz echo "$HOME/stack-2.5.1-linux-x86_64/" >> $GITHUB_PATH - # One of the transcripts fails if the user's git name hasn't been set. - - name: set git user info - run: | - git config --global user.name "GitHub Actions" - git config --global user.email "actions@github.com" - - name: build run: stack --no-terminal build --flag unison-parser-typechecker:optimized @@ -84,12 +79,6 @@ jobs: curl -L https://github.com/commercialhaskell/stack/releases/download/v2.5.1/stack-2.5.1-osx-x86_64.tar.gz | tar -xz echo "$HOME/stack-2.5.1-osx-x86_64/" >> $GITHUB_PATH - # One of the transcripts fails if the user's git name hasn't been set. - - name: set git user info - run: | - git config --global user.name "GitHub Actions" - git config --global user.email "actions@github.com" - - name: remove ~/.stack/setup-exe-cache on macOS run: rm -rf ~/.stack/setup-exe-cache @@ -111,3 +100,33 @@ jobs: if-no-files-found: error name: build-macos path: ucm-macos.tar.gz + + build_windows: + name: "build_windows" + runs-on: windows-2019 + + steps: + - uses: actions/checkout@v2 + - name: install stack + run: | + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.5.1/stack-2.5.1-osx-x86_64.tar.gz | tar -xz + echo "$HOME/stack-2.5.1-osx-x86_64/" >> $GITHUB_PATH + + - name: build + run: stack --no-terminal build --flag unison-parser-typechecker:optimized + + - name: fetch latest codebase-ui and package with ucm + run: | + mkdir -p /tmp/ucm/ui + UCM=$(stack path | awk '/local-install-root/{print $2}')/bin/unison + cp $UCM /tmp/ucm/ucm + wget -O/tmp/unisonLocal.zip https://github.com/unisonweb/codebase-ui/releases/download/latest/unisonLocal.zip + unzip -d /tmp/ucm/ui /tmp/unisonLocal.zip + tar -c -z -f ucm-windows.tar.gz -C /tmp/ucm . + + - name: Upload windows artifact + uses: actions/upload-artifact@v2 + with: + if-no-files-found: error + name: build-windows + path: ucm-windows.tar.gz From 3386102d0035beccd152c621bd0cdb12fb0b9f92 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 18 Feb 2022 20:33:34 -0700 Subject: [PATCH 261/297] codepage 65001 for parser-typechecker tests too --- parser-typechecker/package.yaml | 1 + parser-typechecker/tests/Suite.hs | 3 ++- parser-typechecker/unison-parser-typechecker.cabal | 1 + 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index e074b6aca1..f107d0aae3 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -159,6 +159,7 @@ executables: - async - base - bytestring + - code-page - containers - directory - easytest diff --git a/parser-typechecker/tests/Suite.hs b/parser-typechecker/tests/Suite.hs index d1fee504e9..5ec98550f4 100644 --- a/parser-typechecker/tests/Suite.hs +++ b/parser-typechecker/tests/Suite.hs @@ -7,6 +7,7 @@ module Main where import EasyTest import System.Environment (getArgs) import System.IO +import System.IO.CodePage (withCP65001) import qualified Unison.Core.Test.Name as Name import qualified Unison.Test.ABT as ABT import qualified Unison.Test.Cache as Cache @@ -73,7 +74,7 @@ test = tests ] main :: IO () -main = do +main = withCP65001 do args <- getArgs mapM_ (`hSetEncoding` utf8) [stdout, stdin, stderr] case args of diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index b6fd862146..641c961e6d 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -413,6 +413,7 @@ executable tests async , base , bytestring + , code-page , containers , directory , easytest From 36b2414bc6f7ee3fbbe741019e5c50dadb60cc20 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 18 Feb 2022 16:48:20 -0700 Subject: [PATCH 262/297] remove `stack exec` --- .../IntegrationTests/ArgumentParsing.hs | 68 +++++++++++-------- unison-cli/package.yaml | 2 + unison-cli/transcripts/Transcripts.hs | 4 +- unison-cli/unison-cli.cabal | 2 + 4 files changed, 44 insertions(+), 32 deletions(-) diff --git a/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs b/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs index f7ae411120..da7ac951d4 100644 --- a/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs +++ b/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs @@ -10,14 +10,21 @@ import Data.Time (getCurrentTime, diffUTCTime) import EasyTest import Shellmet (($|)) import System.Exit (ExitCode (ExitSuccess)) +import System.FilePath (()) import System.Process (readProcessWithExitCode) import Text.Printf -uFile :: String -uFile = "unison-cli/integration-tests/IntegrationTests/print.u" +integrationTestsDir :: FilePath +integrationTestsDir = "unison-cli" "integration-tests" "IntegrationTests" -transcriptFile :: String -transcriptFile = "unison-cli/integration-tests/IntegrationTests/transcript.md" +uFile :: FilePath +uFile = integrationTestsDir "print.u" + +ucFile :: FilePath +ucFile = integrationTestsDir "main.uc" + +transcriptFile :: FilePath +transcriptFile = integrationTestsDir "transcript.md" unisonCmdString :: String unisonCmdString = unlines @@ -25,39 +32,40 @@ unisonCmdString = unlines , "print _ = base.io.printLine \"ok\"" ] -tempCodebase :: String +tempCodebase :: FilePath tempCodebase = "tempcodebase" test :: Test () -test = +test = do + let ucm = "unison" EasyTest.using (pure ()) clearTempCodebase \_ -> scope "argument-parsing" . tests $ - [ expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "-h"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "version", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "init", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "run", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "run.file", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "run.pipe", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "transcript", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "transcript.fork", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "headless", "--help"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "version"] "" - -- , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "run"] "" -- how? - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "run.file", uFile, "print", "--codebase-create", tempCodebase] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "run.pipe", "print", "--codebase-create", tempCodebase] unisonCmdString - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "transcript", transcriptFile, "--codebase-create", tempCodebase] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "transcript.fork", transcriptFile, "--codebase-create", tempCodebase] "" - -- , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "headless"] "" -- ? + [ expectExitCode ExitSuccess ucm ["--help"] "" + , expectExitCode ExitSuccess ucm ["-h"] "" + , expectExitCode ExitSuccess ucm ["version", "--help"] "" + , expectExitCode ExitSuccess ucm ["init", "--help"] "" + , expectExitCode ExitSuccess ucm ["run", "--help"] "" + , expectExitCode ExitSuccess ucm ["run.file", "--help"] "" + , expectExitCode ExitSuccess ucm ["run.pipe", "--help"] "" + , expectExitCode ExitSuccess ucm ["transcript", "--help"] "" + , expectExitCode ExitSuccess ucm ["transcript.fork", "--help"] "" + , expectExitCode ExitSuccess ucm ["headless", "--help"] "" + , expectExitCode ExitSuccess ucm ["version"] "" + -- , expectExitCode ExitSuccess ucm ["run"] "" -- how? + , expectExitCode ExitSuccess ucm ["run.file", uFile, "print", "--codebase-create", tempCodebase] "" + , expectExitCode ExitSuccess ucm ["run.pipe", "print", "--codebase-create", tempCodebase] unisonCmdString + , expectExitCode ExitSuccess ucm ["transcript", transcriptFile, "--codebase-create", tempCodebase] "" + , expectExitCode ExitSuccess ucm ["transcript.fork", transcriptFile, "--codebase-create", tempCodebase] "" + -- , expectExitCode ExitSuccess ucm ["headless"] "" -- ? -- options - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "--port", "8000", "--codebase-create", tempCodebase, "--no-base"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "--host", "localhost", "--codebase-create", tempCodebase, "--no-base"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "--token", "MY_TOKEN", "--codebase-create", tempCodebase, "--no-base"] "" -- ? - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "--codebase-create", tempCodebase, "--no-base"] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "--ui", tempCodebase, "--codebase-create", tempCodebase, "--no-base"] "" + , expectExitCode ExitSuccess ucm ["--port", "8000", "--codebase-create", tempCodebase, "--no-base"] "" + , expectExitCode ExitSuccess ucm ["--host", "localhost", "--codebase-create", tempCodebase, "--no-base"] "" + , expectExitCode ExitSuccess ucm ["--token", "MY_TOKEN", "--codebase-create", tempCodebase, "--no-base"] "" -- ? + , expectExitCode ExitSuccess ucm ["--codebase-create", tempCodebase, "--no-base"] "" + , expectExitCode ExitSuccess ucm ["--ui", tempCodebase, "--codebase-create", tempCodebase, "--no-base"] "" , scope "can compile, then run compiled artifact" $ tests - [ expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "transcript", transcriptFile] "" - , expectExitCode ExitSuccess "stack" ["exec", "--", "unison", "run.compiled", "./unison-cli/integration-tests/IntegrationTests/main.uc"] "" + [ expectExitCode ExitSuccess ucm ["transcript", transcriptFile] "" + , expectExitCode ExitSuccess ucm ["run.compiled", ucFile] "" ] ] diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 3a3dc35f77..5a695739d9 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -93,6 +93,8 @@ executables: - process - shellmet - time + build-tools: + - unison-cli:unison when: - condition: flag(optimized) diff --git a/unison-cli/transcripts/Transcripts.hs b/unison-cli/transcripts/Transcripts.hs index 5e0bac8939..36fbeda67b 100644 --- a/unison-cli/transcripts/Transcripts.hs +++ b/unison-cli/transcripts/Transcripts.hs @@ -12,7 +12,7 @@ import Data.Text unpack, ) import EasyTest -import Shellmet (($|)) +import Shellmet () -- import instance for IsString ([Text] -> IO()) import System.Directory import System.Environment (getArgs) import System.FilePath @@ -79,7 +79,7 @@ buildTests config testBuilder dir = do -- if there is a matchPrefix set, filter non-prelude files by that prefix - or return True & second (filter (\f -> maybe True (`isPrefixOf` f) (matchPrefix config))) - ucm <- io $ unpack <$> "stack" $| ["exec", "--", "which", "unison"] -- todo: what is it in windows? + let ucm = "unison" case length transcripts of 0 -> pure () -- EasyTest exits early with "no test results recorded" diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index 0c2228fbc3..56b709e8c5 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -149,6 +149,8 @@ executable integration-tests TypeApplications ViewPatterns ghc-options: -Wall -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0 + build-tools: + unison build-depends: ListLike , async From f77ec97cb2b8a99286a09750198a5fa60e27170a Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 18 Feb 2022 18:37:28 -0700 Subject: [PATCH 263/297] replace `removeDirectoryRecursive` with `removePathForcibly` in case that helps #2924 also replaces `"rm" $|` with the same. --- .../IntegrationTests/ArgumentParsing.hs | 10 ++++------ unison-cli/tests/Unison/Test/GitSync.hs | 10 +++++----- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs b/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs index f7ae411120..c2e75c2145 100644 --- a/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs +++ b/unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs @@ -5,10 +5,9 @@ module IntegrationTests.ArgumentParsing where import Data.List (intercalate) -import Data.Text (pack) import Data.Time (getCurrentTime, diffUTCTime) import EasyTest -import Shellmet (($|)) +import qualified System.Directory import System.Exit (ExitCode (ExitSuccess)) import System.Process (readProcessWithExitCode) import Text.Printf @@ -73,7 +72,6 @@ expectExitCode expected cmd args stdin = scope (intercalate " " (cmd : args)) do defaultArgs :: [String] defaultArgs = ["--codebase-create", tempCodebase, "--no-base"] -clearTempCodebase :: () -> IO() -clearTempCodebase _ = do - "rm" $| (map pack ["-rf", tempCodebase]) - pure () +clearTempCodebase :: () -> IO () +clearTempCodebase _ = + System.Directory.removePathForcibly tempCodebase diff --git a/unison-cli/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs index a7a094082c..4e235acc6d 100644 --- a/unison-cli/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -11,7 +11,7 @@ import Data.String.Here.Interpolated (i) import qualified Data.Text as Text import EasyTest import Shellmet () -import System.Directory (removeDirectoryRecursive) +import System.Directory (removePathForcibly) import System.FilePath (()) import qualified System.IO.Temp as Temp import qualified Unison.Codebase as Codebase @@ -554,7 +554,7 @@ pushPullTest name fmt authorScript userScript = scope name do (authorOutput <> "\n-------\n" <> userOutput) -- if we haven't crashed, clean up! - removeDirectoryRecursive repo + removePathForcibly repo Ucm.deleteCodebase author Ucm.deleteCodebase user ok @@ -574,7 +574,7 @@ watchPushPullTest name fmt authorScript userScript codebaseCheck = scope name do (authorOutput <> "\n-------\n" <> userOutput) -- if we haven't crashed, clean up! - removeDirectoryRecursive repo + removePathForcibly repo Ucm.deleteCodebase author Ucm.deleteCodebase user ok @@ -720,8 +720,8 @@ destroyedRemote = scope "destroyed-remote" do |] ok where - reinitRepo (Text.pack -> repo) = do - "rm" ["-rf", repo] + reinitRepo repoStr@(Text.pack -> repo) = do + removePathForcibly repoStr "git" ["init", "--bare", repo] From 4d514e81f1dc148976babd0250ba79dbba5eff6e Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Fri, 18 Feb 2022 23:18:22 -0700 Subject: [PATCH 264/297] codepage 65001 for anything with emojis / unicode --- lib/unison-util-relation/package.yaml | 1 + lib/unison-util-relation/test/Main.hs | 3 ++- lib/unison-util-relation/unison-util-relation.cabal | 1 + unison-cli/integration-tests/Suite.hs | 3 ++- unison-cli/package.yaml | 2 ++ unison-cli/transcripts/Transcripts.hs | 3 ++- unison-cli/unison-cli.cabal | 2 ++ 7 files changed, 12 insertions(+), 3 deletions(-) diff --git a/lib/unison-util-relation/package.yaml b/lib/unison-util-relation/package.yaml index e3fe19acfc..10af5f4cad 100644 --- a/lib/unison-util-relation/package.yaml +++ b/lib/unison-util-relation/package.yaml @@ -8,6 +8,7 @@ library: tests: tests: dependencies: + - code-page - easytest - random - unison-util-relation diff --git a/lib/unison-util-relation/test/Main.hs b/lib/unison-util-relation/test/Main.hs index 1e18acbc4c..62ed962f57 100644 --- a/lib/unison-util-relation/test/Main.hs +++ b/lib/unison-util-relation/test/Main.hs @@ -2,6 +2,7 @@ module Main where import EasyTest +import System.IO.CodePage (withCP65001) import System.Random (Random) import qualified Unison.Util.Relation as Relation import Unison.Util.Relation3 (Relation3) @@ -10,7 +11,7 @@ import Unison.Util.Relation4 (Relation4) import qualified Unison.Util.Relation4 as Relation4 main :: IO () -main = +main = withCP65001 $ (run . tests) [ (scope "Relation" . tests) [ scope "mapDom works" do diff --git a/lib/unison-util-relation/unison-util-relation.cabal b/lib/unison-util-relation/unison-util-relation.cabal index e24af6817b..381da4c337 100644 --- a/lib/unison-util-relation/unison-util-relation.cabal +++ b/lib/unison-util-relation/unison-util-relation.cabal @@ -73,6 +73,7 @@ test-suite tests ghc-options: -Wall build-depends: base + , code-page , containers , deepseq , easytest diff --git a/unison-cli/integration-tests/Suite.hs b/unison-cli/integration-tests/Suite.hs index 2b62a929b5..06fe442c0d 100644 --- a/unison-cli/integration-tests/Suite.hs +++ b/unison-cli/integration-tests/Suite.hs @@ -8,6 +8,7 @@ import EasyTest import qualified IntegrationTests.ArgumentParsing as ArgumentParsing import System.Environment (getArgs) import System.IO +import System.IO.CodePage (withCP65001) test :: Test () test = @@ -16,7 +17,7 @@ test = ] main :: IO () -main = do +main = withCP65001 do args <- getArgs mapM_ (`hSetEncoding` utf8) [stdout, stdin, stderr] case args of diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 3a3dc35f77..358c5da99f 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -78,6 +78,7 @@ executables: main: Transcripts.hs ghc-options: -threaded -rtsopts -with-rtsopts=-N -v0 dependencies: + - code-page - easytest - process - shellmet @@ -89,6 +90,7 @@ executables: main: Suite.hs ghc-options: -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0 dependencies: + - code-page - easytest - process - shellmet diff --git a/unison-cli/transcripts/Transcripts.hs b/unison-cli/transcripts/Transcripts.hs index 5e0bac8939..3df70d747d 100644 --- a/unison-cli/transcripts/Transcripts.hs +++ b/unison-cli/transcripts/Transcripts.hs @@ -21,6 +21,7 @@ import System.FilePath takeExtensions, (), ) +import System.IO.CodePage (withCP65001) import System.Process (readProcessWithExitCode) import Unison.Prelude @@ -126,6 +127,6 @@ handleArgs args = in TestConfig matchPrefix main :: IO () -main = do +main = withCP65001 do testConfig <- handleArgs <$> getArgs run (test testConfig) diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index 0c2228fbc3..b910ce7db6 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -154,6 +154,7 @@ executable integration-tests , async , base , bytestring + , code-page , configurator , containers >=0.6.3 , cryptonite @@ -226,6 +227,7 @@ executable transcripts , async , base , bytestring + , code-page , configurator , containers >=0.6.3 , cryptonite From c1d28829320ee39ea1ed0775f5ae1a3c358152c9 Mon Sep 17 00:00:00 2001 From: Arya Irani Date: Sat, 19 Feb 2022 00:27:11 -0700 Subject: [PATCH 265/297] code page 65001 for benchmarks also --- lib/unison-util-relation/benchmarks/relation/Main.hs | 3 ++- lib/unison-util-relation/package.yaml | 1 + lib/unison-util-relation/unison-util-relation.cabal | 1 + 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/unison-util-relation/benchmarks/relation/Main.hs b/lib/unison-util-relation/benchmarks/relation/Main.hs index 8aa7e709d6..0619085dba 100644 --- a/lib/unison-util-relation/benchmarks/relation/Main.hs +++ b/lib/unison-util-relation/benchmarks/relation/Main.hs @@ -2,6 +2,7 @@ module Main where import Control.Monad import qualified Data.Set as Set +import System.IO.CodePage (withCP65001) import System.Random import Test.Tasty.Bench import Unison.Prelude @@ -9,7 +10,7 @@ import Unison.Util.Relation (Relation) import qualified Unison.Util.Relation as R main :: IO () -main = +main = withCP65001 $ defaultMain [ env (genRelations @Char @Char 10000 20) \rs -> bgroup diff --git a/lib/unison-util-relation/package.yaml b/lib/unison-util-relation/package.yaml index 10af5f4cad..346f10b4ea 100644 --- a/lib/unison-util-relation/package.yaml +++ b/lib/unison-util-relation/package.yaml @@ -21,6 +21,7 @@ benchmarks: main: Main.hs dependencies: - base + - code-page - containers - random - tasty-bench diff --git a/lib/unison-util-relation/unison-util-relation.cabal b/lib/unison-util-relation/unison-util-relation.cabal index 381da4c337..0c7e69849b 100644 --- a/lib/unison-util-relation/unison-util-relation.cabal +++ b/lib/unison-util-relation/unison-util-relation.cabal @@ -108,6 +108,7 @@ benchmark relation ghc-options: -Wall build-depends: base + , code-page , containers , deepseq , extra From 9978a50d6489cedb5bcde3c5c622bfbdaa8ddf70 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 23 Feb 2022 13:35:40 -0600 Subject: [PATCH 266/297] Avoid redundant clone (and migration) when pushing. (#2912) --- .../src/Unison/Codebase/GitError.hs | 1 + .../src/Unison/Codebase/SqliteCodebase.hs | 64 +++++++++++-------- .../src/Unison/Codebase/Type.hs | 2 +- .../src/Unison/Codebase/Editor/Command.hs | 2 +- .../Unison/Codebase/Editor/HandleCommand.hs | 4 +- .../src/Unison/Codebase/Editor/HandleInput.hs | 41 ++++++------ .../src/Unison/CommandLine/OutputMessages.hs | 6 ++ 7 files changed, 73 insertions(+), 47 deletions(-) diff --git a/parser-typechecker/src/Unison/Codebase/GitError.hs b/parser-typechecker/src/Unison/Codebase/GitError.hs index 0fbae3bf84..8cccd2819e 100644 --- a/parser-typechecker/src/Unison/Codebase/GitError.hs +++ b/parser-typechecker/src/Unison/Codebase/GitError.hs @@ -31,6 +31,7 @@ data GitCodebaseError h = NoRemoteNamespaceWithHash ReadRepo ShortBranchHash | RemoteNamespaceHashAmbiguous ReadRepo ShortBranchHash (Set h) | CouldntLoadRootBranch ReadRepo h + | CouldntParseRemoteBranch ReadRepo String | CouldntLoadSyncedBranch ReadRemoteNamespace h | CouldntFindRemoteBranch ReadRepo Path deriving (Show) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 493ed2096c..cb74d3feaa 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -818,7 +818,7 @@ sqliteCodebase debugName root localOrRemote action = do syncFromDirectory syncToDirectory viewRemoteBranch' - (\b r opts -> pushGitBranch conn b r opts) + (\r opts action -> pushGitBranch conn r opts action) watches getWatch putWatch @@ -1145,17 +1145,18 @@ viewRemoteBranch' (repo, sbh, path) gitBranchBehavior action = UnliftIO.try $ do Left err -> throwIO . C.GitSqliteCodebaseError $ C.gitErrorFromOpenCodebaseError remotePath repo err Right inner -> pure inner --- Push a branch to a repo. Optionally attempt to set the branch as the new root, which fails if the branch is not after +-- | Push a branch to a repo. Optionally attempt to set the branch as the new root, which fails if the branch is not after -- the existing root. pushGitBranch :: - forall m. + forall m e. (MonadUnliftIO m) => Connection -> - Branch m -> WriteRepo -> PushGitBranchOpts -> - m (Either C.GitError ()) -pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = UnliftIO.try do + -- An action which accepts the current root branch on the remote and computes a new branch. + (Branch m -> m (Either e (Branch m))) -> + m (Either C.GitError (Either e (Branch m))) +pushGitBranch srcConn repo (PushGitBranchOpts setRoot _syncMode) action = UnliftIO.try do -- Pull the latest remote into our git cache -- Use a local git clone to copy this git repo into a temp-dir -- Delete the codebase in our temp-dir @@ -1171,34 +1172,47 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = Unlift -- -- set up the cache dir throwEitherMWith C.GitProtocolError . withRepo readRepo Git.CreateBranchIfMissing $ \pushStaging -> do - throwEitherMWith (C.GitSqliteCodebaseError . C.gitErrorFromOpenCodebaseError (Git.gitDirToPath pushStaging) readRepo) - . withOpenOrCreateCodebase "push.dest" (Git.gitDirToPath pushStaging) Remote $ \(codebaseStatus, _destCodebase, destConn) -> do - flip runReaderT destConn $ Q.withSavepoint_ @(ReaderT _ m) "push" $ do - throwExceptT $ doSync codebaseStatus (Git.gitDirToPath pushStaging) srcConn destConn - void $ push pushStaging repo + newBranchOrErr <- throwEitherMWith (C.GitSqliteCodebaseError . C.gitErrorFromOpenCodebaseError (Git.gitDirToPath pushStaging) readRepo) + . withOpenOrCreateCodebase "push.dest" (Git.gitDirToPath pushStaging) Remote $ \(codebaseStatus, destCodebase, destConn) -> do + currentRootBranch <- C.getRootBranch destCodebase >>= \case + Left err -> case err of + C.NoRootBranch -> pure Branch.empty + C.CouldntParseRootBranch s -> + throwIO . C.GitCodebaseError $ GitError.CouldntParseRemoteBranch readRepo s + C.CouldntLoadRootBranch h -> + throwIO . C.GitCodebaseError $ GitError.CouldntLoadRootBranch readRepo h + Right br -> pure br + action currentRootBranch >>= \case + Left e -> pure $ Left e + Right newBranch -> do + flip runReaderT destConn $ Q.withSavepoint_ @(ReaderT _ m) "push" $ do + throwExceptT $ doSync codebaseStatus (Git.gitDirToPath pushStaging) srcConn destConn newBranch + pure (Right newBranch) + for newBranchOrErr $ push pushStaging repo + pure newBranchOrErr where readRepo :: ReadRepo readRepo = writeToRead repo - doSync :: CodebaseStatus -> FilePath -> Connection -> Connection -> ExceptT C.GitError (ReaderT Connection m) () - doSync codebaseStatus remotePath srcConn destConn = do + doSync :: CodebaseStatus -> FilePath -> Connection -> Connection -> Branch m -> ExceptT C.GitError (ReaderT Connection m) () + doSync codebaseStatus remotePath srcConn destConn newBranch = do _ <- flip State.execStateT emptySyncProgressState $ - syncInternal syncProgress srcConn destConn (Branch.transform (lift . lift . lift) branch) - when setRoot $ overwriteRoot codebaseStatus remotePath destConn - overwriteRoot :: forall m. MonadIO m => CodebaseStatus -> FilePath -> Connection -> ExceptT C.GitError m () - overwriteRoot codebaseStatus remotePath destConn = do - let newRootHash = Branch.headHash branch + syncInternal syncProgress srcConn destConn (Branch.transform (lift . lift . lift) newBranch) + when setRoot $ overwriteRoot codebaseStatus remotePath destConn newBranch + overwriteRoot :: forall n. MonadIO n => CodebaseStatus -> FilePath -> Connection -> Branch m -> ExceptT C.GitError n () + overwriteRoot codebaseStatus remotePath destConn newBranch = do + let newBranchHash = Branch.headHash newBranch case codebaseStatus of ExistingCodebase -> do -- the call to runDB "handles" the possible DB error by bombing maybeOldRootHash <- fmap Cv.branchHash2to1 <$> runDB destConn Ops.loadMaybeRootCausalHash case maybeOldRootHash of Nothing -> runDB destConn $ do - setRepoRoot newRootHash + setRepoRoot newBranchHash (Just oldRootHash) -> runDB destConn $ do - before oldRootHash newRootHash >>= \case + before oldRootHash newBranchHash >>= \case Nothing -> error $ - "I couldn't find the hash " ++ show newRootHash + "I couldn't find the hash " ++ show newBranchHash ++ " that I just synced to the cached copy of " ++ repoString ++ " in " @@ -1208,7 +1222,7 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = Unlift lift . lift . throwError . C.GitProtocolError $ GitError.PushDestinationHasNewStuff repo Just True -> pure () CreatedCodebase -> pure () - runDB destConn $ setRepoRoot newRootHash + runDB destConn $ setRepoRoot newBranchHash repoString = Text.unpack $ printWriteRepo repo setRepoRoot :: forall m. Q.DB m => Branch.Hash -> m () @@ -1262,8 +1276,8 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = Unlift hasDeleteShm = any isShmDelete statusLines -- Commit our changes - push :: forall m. MonadIO m => Git.GitRepo -> WriteRepo -> m Bool -- withIOError needs IO - push remotePath repo@(WriteGitRepo {url'=url, branch=mayGitBranch}) = time "SqliteCodebase.pushGitRootBranch.push" $ do + push :: forall n. MonadIO n => Git.GitRepo -> WriteRepo -> Branch m -> n Bool -- withIOError needs IO + push remotePath repo@(WriteGitRepo {url'=url, branch=mayGitBranch}) newRootBranch = time "SqliteCodebase.pushGitRootBranch.push" $ do -- has anything changed? -- note: -uall recursively shows status for all files in untracked directories -- we want this so that we see @@ -1291,7 +1305,7 @@ pushGitBranch srcConn branch repo (PushGitBranchOpts setRoot _syncMode) = Unlift when hasDeleteShm $ gitIn remotePath ["rm", Text.pack $ codebasePath <> "-shm"] gitIn remotePath - ["commit", "-q", "-m", "Sync branch " <> Text.pack (show $ Branch.headHash branch)] + ["commit", "-q", "-m", "Sync branch " <> Text.pack (show $ Branch.headHash newRootBranch)] -- Push our changes to the repo, silencing all output. -- Even with quiet, the remote (Github) can still send output through, -- so we capture stdout and stderr. diff --git a/parser-typechecker/src/Unison/Codebase/Type.hs b/parser-typechecker/src/Unison/Codebase/Type.hs index e60051800c..b71cb30121 100644 --- a/parser-typechecker/src/Unison/Codebase/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Type.hs @@ -103,7 +103,7 @@ data Codebase m v a = Codebase syncToDirectory :: CodebasePath -> SyncMode -> Branch m -> m (), viewRemoteBranch' :: forall r. ReadRemoteNamespace -> Git.GitBranchBehavior -> ((Branch m, CodebasePath) -> m r) -> m (Either GitError r), -- | Push the given branch to the given repo, and optionally set it as the root branch. - pushGitBranch :: Branch m -> WriteRepo -> PushGitBranchOpts -> m (Either GitError ()), + pushGitBranch :: forall e. WriteRepo -> PushGitBranchOpts -> (Branch m -> m (Either e (Branch m))) -> m (Either GitError (Either e (Branch m))), -- | @watches k@ returns all of the references @r@ that were previously put by a @putWatch k r t@. @t@ can be -- retrieved by @getWatch k r@. watches :: WK.WatchKind -> m [Reference.Id], diff --git a/unison-cli/src/Unison/Codebase/Editor/Command.hs b/unison-cli/src/Unison/Codebase/Editor/Command.hs index 464b71b0d5..dc8b03f31f 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Command.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Command.hs @@ -225,7 +225,7 @@ data Command -- codebase are copied there. SyncLocalRootBranch :: Branch m -> Command m i v () - SyncRemoteBranch :: Branch m -> WriteRepo -> PushGitBranchOpts -> Command m i v (Either GitError ()) + SyncRemoteBranch :: WriteRepo -> PushGitBranchOpts -> (Branch m -> m (Either e (Branch m))) -> Command m i v (Either GitError (Either e (Branch m))) AppendToReflog :: Text -> Branch m -> Branch m -> Command m i v () diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleCommand.hs b/unison-cli/src/Unison/Codebase/Editor/HandleCommand.hs index 550f2205ae..25971c9d7c 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleCommand.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleCommand.hs @@ -157,8 +157,8 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour lift $ Codebase.viewRemoteBranch codebase ns gitBranchBehavior (toIO . Free.fold go . action) ImportRemoteBranch ns syncMode preprocess -> lift $ Codebase.importRemoteBranch codebase ns syncMode preprocess - SyncRemoteBranch branch repo opts -> - lift $ Codebase.pushGitBranch codebase branch repo opts + SyncRemoteBranch repo opts action -> + lift $ Codebase.pushGitBranch codebase repo opts action LoadTerm r -> lift $ Codebase.getTerm codebase r LoadTypeOfTerm r -> lift $ Codebase.getTypeOfTerm codebase r LoadTermComponentWithTypes h -> lift $ Codebase.getTermComponentWithTypes codebase h diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index e4171b4023..0af3e40c67 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -52,7 +52,7 @@ import qualified Unison.Codebase.Editor.Output as Output import qualified Unison.Codebase.Editor.Output.BranchDiff as OBranchDiff import qualified Unison.Codebase.Editor.Output.DumpNamespace as Output.DN import qualified Unison.Codebase.Editor.Propagate as Propagate -import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRemotePath, WriteRepo, printNamespace, writePathToRead, writeToRead) +import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRemotePath, WriteRepo, printNamespace, writePathToRead) import Unison.Codebase.Editor.SlurpComponent (SlurpComponent (..)) import qualified Unison.Codebase.Editor.SlurpComponent as SC import Unison.Codebase.Editor.SlurpResult (SlurpResult (..)) @@ -71,12 +71,11 @@ import qualified Unison.Codebase.PushBehavior as PushBehavior import qualified Unison.Codebase.Reflog as Reflog import Unison.Codebase.ShortBranchHash (ShortBranchHash) import qualified Unison.Codebase.ShortBranchHash as SBH -import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError(NoDatabaseFile)) import qualified Unison.Codebase.SyncMode as SyncMode import Unison.Codebase.TermEdit (TermEdit (..)) import qualified Unison.Codebase.TermEdit as TermEdit import qualified Unison.Codebase.TermEdit.Typing as TermEdit -import Unison.Codebase.Type (GitError(GitSqliteCodebaseError)) +import Unison.Codebase.Type (GitError) import qualified Unison.Codebase.TypeEdit as TypeEdit import qualified Unison.Codebase.Verbosity as Verbosity import qualified Unison.CommandLine.DisplayValues as DisplayValues @@ -1704,25 +1703,23 @@ doPushRemoteBranch repo localPath syncMode remoteTarget = do case remoteTarget of Nothing -> do let opts = PushGitBranchOpts {setRoot = False, syncMode} - syncRemoteBranch sourceBranch repo opts + syncRemoteBranch repo opts (\_remoteRoot -> pure (Right sourceBranch)) sbhLength <- (eval BranchHashLength) respond (GistCreated sbhLength repo (Branch.headHash sourceBranch)) Just (remotePath, pushBehavior) -> do - let withRemoteRoot remoteRoot = do + let withRemoteRoot :: Branch m -> m (Either (Output v) (Branch m)) + withRemoteRoot remoteRoot = do let -- We don't merge `sourceBranch` with `remoteBranch`, we just replace it. This push will be rejected if this -- rewinds time or misses any new updates in the remote branch that aren't in `sourceBranch` already. f remoteBranch = if shouldPushTo pushBehavior remoteBranch then Just sourceBranch else Nothing Branch.modifyAtM remotePath f remoteRoot & \case - Nothing -> respond (RefusedToPush pushBehavior) - Just newRemoteRoot -> do - let opts = PushGitBranchOpts {setRoot = True, syncMode} - runExceptT (syncRemoteBranch newRemoteRoot repo opts) >>= \case - Left gitErr -> respond (Output.GitError gitErr) - Right () -> respond Success - viewRemoteBranch (writeToRead repo, Nothing, Path.empty) Git.CreateBranchIfMissing withRemoteRoot >>= \case - Left (GitSqliteCodebaseError NoDatabaseFile{}) -> withRemoteRoot Branch.empty - Left err -> throwError err - Right () -> pure () + Nothing -> pure (Left $ RefusedToPush pushBehavior) + Just newRemoteRoot -> pure (Right newRemoteRoot) + let opts = PushGitBranchOpts {setRoot = True, syncMode} + runExceptT (syncRemoteBranch repo opts withRemoteRoot) >>= \case + Left gitErr -> respond (Output.GitError gitErr) + Right (Left output) -> respond output + Right (Right _branch) -> respond Success where -- Per `pushBehavior`, we are either: -- @@ -2090,9 +2087,17 @@ viewRemoteBranch :: viewRemoteBranch ns gitBranchBehavior action = do eval $ ViewRemoteBranch ns gitBranchBehavior action -syncRemoteBranch :: MonadCommand n m i v => Branch m -> WriteRepo -> PushGitBranchOpts -> ExceptT GitError n () -syncRemoteBranch b repo opts = - ExceptT . eval $ SyncRemoteBranch b repo opts +-- | Given the current root branch of a remote +-- (or an empty branch if no root branch exists) +-- compute a new branch, which will then be synced and pushed. +syncRemoteBranch :: + MonadCommand n m i v => + WriteRepo -> + PushGitBranchOpts -> + (Branch m -> m (Either e (Branch m))) -> + ExceptT GitError n (Either e (Branch m)) +syncRemoteBranch repo opts action = + ExceptT . eval $ SyncRemoteBranch repo opts action -- todo: compare to `getHQTerms` / `getHQTypes`. Is one universally better? resolveHQToLabeledDependencies :: Functor m => HQ.HashQualified Name -> Action' m v (Set LabeledDependency) diff --git a/unison-cli/src/Unison/CommandLine/OutputMessages.hs b/unison-cli/src/Unison/CommandLine/OutputMessages.hs index 74df221c3a..8d5c5dac88 100644 --- a/unison-cli/src/Unison/CommandLine/OutputMessages.hs +++ b/unison-cli/src/Unison/CommandLine/OutputMessages.hs @@ -1114,6 +1114,12 @@ notifyUser dir o = case o of push = P.group . P.backticked . IP.patternName $ IP.push pull = P.group . P.backticked . IP.patternName $ IP.pull GitCodebaseError e -> case e of + CouldntParseRemoteBranch repo s -> + P.wrap $ + "I couldn't decode the root branch " + <> P.string s + <> "from the repository at" + <> prettyReadRepo repo CouldntLoadRootBranch repo hash -> P.wrap $ "I couldn't load the designated root hash" From 9e9ec0f5fccde75d266f793ae036a4d916f88886 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 23 Feb 2022 13:43:17 -0600 Subject: [PATCH 267/297] Fix transcripts/io.md to work on windows builds. (#2942) * Use PATH so this test works on windows systems * Update transcript output --- unison-src/transcripts/io.md | 10 +++++----- unison-src/transcripts/io.output.md | 18 +++++++++--------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/unison-src/transcripts/io.md b/unison-src/transcripts/io.md index 3a27a4001e..18655cc509 100644 --- a/unison-src/transcripts/io.md +++ b/unison-src/transcripts/io.md @@ -258,11 +258,11 @@ testDirContents _ = ### Read environment variables ```unison:hide -testHomeEnvVar : '{io2.IO} [Result] -testHomeEnvVar _ = +testGetEnv : '{io2.IO} [Result] +testGetEnv _ = test = 'let - home = reraise (getEnv.impl "HOME") - check "HOME environent variable should be set" (size home > 0) + path = reraise (getEnv.impl "PATH") -- PATH exists on windows, mac and linux. + check "PATH environent variable should be set" (size path > 0) match getEnv.impl "DOESNTEXIST" with Right _ -> emit (Fail "env var shouldn't exist") Left _ -> emit (Ok "DOESNTEXIST didn't exist") @@ -270,7 +270,7 @@ testHomeEnvVar _ = ``` ```ucm .> add -.> io.test testHomeEnvVar +.> io.test testGetEnv ``` ### Read command line args diff --git a/unison-src/transcripts/io.output.md b/unison-src/transcripts/io.output.md index 7713bf0d61..e389cf159d 100644 --- a/unison-src/transcripts/io.output.md +++ b/unison-src/transcripts/io.output.md @@ -414,11 +414,11 @@ testDirContents _ = ### Read environment variables ```unison -testHomeEnvVar : '{io2.IO} [Result] -testHomeEnvVar _ = +testGetEnv : '{io2.IO} [Result] +testGetEnv _ = test = 'let - home = reraise (getEnv.impl "HOME") - check "HOME environent variable should be set" (size home > 0) + path = reraise (getEnv.impl "PATH") -- PATH exists on windows, mac and linux. + check "PATH environent variable should be set" (size path > 0) match getEnv.impl "DOESNTEXIST" with Right _ -> emit (Fail "env var shouldn't exist") Left _ -> emit (Ok "DOESNTEXIST didn't exist") @@ -430,18 +430,18 @@ testHomeEnvVar _ = ⍟ I've added these definitions: - testHomeEnvVar : '{IO} [Result] + testGetEnv : '{IO} [Result] -.> io.test testHomeEnvVar +.> io.test testGetEnv New test results: - ◉ testHomeEnvVar HOME environent variable should be set - ◉ testHomeEnvVar DOESNTEXIST didn't exist + ◉ testGetEnv PATH environent variable should be set + ◉ testGetEnv DOESNTEXIST didn't exist ✅ 2 test(s) passing - Tip: Use view testHomeEnvVar to view the source of a test. + Tip: Use view testGetEnv to view the source of a test. ``` ### Read command line args From 4edabef1432499301084ce92aa60d2579f9c92f8 Mon Sep 17 00:00:00 2001 From: Chris Penner Date: Wed, 23 Feb 2022 13:43:31 -0600 Subject: [PATCH 268/297] Add newline conversion to utf8 file helpers (#2943) --- lib/unison-prelude/src/Unison/Prelude.hs | 31 ++++++++++++++++--- parser-typechecker/src/Unison/Parsers.hs | 6 ++-- unison-cli/src/Unison/CommandLine/Main.hs | 4 +-- .../src/Unison/CommandLine/OutputMessages.hs | 10 +++--- 4 files changed, 34 insertions(+), 17 deletions(-) diff --git a/lib/unison-prelude/src/Unison/Prelude.hs b/lib/unison-prelude/src/Unison/Prelude.hs index 4cce3d80aa..d2aa99ee00 100644 --- a/lib/unison-prelude/src/Unison/Prelude.hs +++ b/lib/unison-prelude/src/Unison/Prelude.hs @@ -57,6 +57,8 @@ import Safe as X (atMay, headMay, lastMay, readMay) import Text.Read as X (readMaybe) import Control.Monad.Trans.Except (ExceptT (ExceptT), runExceptT, withExceptT) import qualified UnliftIO +import qualified System.IO as IO +import qualified GHC.IO.Handle as Handle onNothing :: Applicative m => m a -> Maybe a -> m a onNothing x = @@ -85,22 +87,43 @@ throwEitherMWith f action = throwExceptT . withExceptT f $ (ExceptT action) tShow :: Show a => a -> Text tShow = Text.pack . show --- Read an entire file strictly assuming UTF8 +-- | Strictly read an entire file decoding UTF8. +-- Converts \r\n -> \n on windows. readUtf8 :: FilePath -> IO Text -readUtf8 p = decodeUtf8 <$> BS.readFile p +readUtf8 fileName = + UnliftIO.withFile fileName UnliftIO.ReadMode readUtf8Handle +-- | Strictly read from a handle, decoding UTF8, or failing if not valid UTF8 +-- Converts \r\n -> \n on windows. safeReadUtf8 :: FilePath -> IO (Either IOException Text) safeReadUtf8 p = try (readUtf8 p) +-- | Strictly read from a handle, decoding UTF8. +-- Note, this changes the newline-mode of the handle +-- to convert \r\n -> \n on windows. +readUtf8Handle :: IO.Handle -> IO Text +readUtf8Handle handle = do + IO.hSetNewlineMode handle IO.universalNewlineMode + decodeUtf8 <$> BS.hGetContents handle + +-- | Strictly read from stdin, decoding UTF8. +-- Converts \r\n -> \n on windows. safeReadUtf8StdIn :: IO (Either IOException Text) -safeReadUtf8StdIn = try $ decodeUtf8 <$> BS.getContents +safeReadUtf8StdIn = do + handle <- Handle.hDuplicate IO.stdin + try $ readUtf8Handle handle uncurry4 :: (a -> b -> c -> d -> e) -> (a, b, c, d) -> e uncurry4 f (a, b, c, d) = f a b c d +-- | Write a file strictly assuming UTF8 +-- Converts \n -> \r\n on windows. writeUtf8 :: FilePath -> Text -> IO () -writeUtf8 p txt = BS.writeFile p (encodeUtf8 txt) +writeUtf8 fileName txt = do + UnliftIO.withFile fileName UnliftIO.WriteMode $ \handle -> do + IO.hSetNewlineMode handle IO.universalNewlineMode + BS.hPut handle (encodeUtf8 txt) reportBug :: String -> String -> String reportBug bugId msg = diff --git a/parser-typechecker/src/Unison/Parsers.hs b/parser-typechecker/src/Unison/Parsers.hs index c532db18a6..72af211bd4 100644 --- a/parser-typechecker/src/Unison/Parsers.hs +++ b/parser-typechecker/src/Unison/Parsers.hs @@ -4,8 +4,6 @@ module Unison.Parsers where import Unison.Prelude import qualified Data.Text as Text -import Data.Text.IO ( readFile ) -import Prelude hiding ( readFile ) import qualified Unison.NamesWithHistory as Names import qualified Unison.Builtin as Builtin import qualified Unison.FileParser as FileParser @@ -62,7 +60,7 @@ readAndParseFile -> FilePath -> IO (Either (Parser.Err v) (UnisonFile v Ann)) readAndParseFile penv fileName = do - txt <- readFile fileName + txt <- readUtf8 fileName let src = Text.unpack txt pure $ parseFile fileName src penv @@ -72,7 +70,7 @@ unsafeParseTerm s = fmap (unsafeGetRightFrom s) . parseTerm $ s unsafeReadAndParseFile :: Parser.ParsingEnv -> FilePath -> IO (UnisonFile Symbol Ann) unsafeReadAndParseFile penv fileName = do - txt <- readFile fileName + txt <- readUtf8 fileName let str = Text.unpack txt pure . unsafeGetRightFrom str $ parseFile fileName str penv diff --git a/unison-cli/src/Unison/CommandLine/Main.hs b/unison-cli/src/Unison/CommandLine/Main.hs index 1bedb0a017..a949cd76cf 100644 --- a/unison-cli/src/Unison/CommandLine/Main.hs +++ b/unison-cli/src/Unison/CommandLine/Main.hs @@ -14,7 +14,6 @@ import Control.Exception (finally, catch) import Control.Monad.State (runStateT) import Data.Configurator.Types (Config) import Data.IORef -import Prelude hiding (readFile, writeFile) import System.IO.Error (isDoesNotExistError) import Unison.Codebase.Branch (Branch) import qualified Unison.Codebase.Branch as Branch @@ -33,7 +32,6 @@ import Unison.Symbol (Symbol) import qualified Control.Concurrent.Async as Async import qualified Data.Map as Map import qualified Data.Text as Text -import qualified Data.Text.IO import qualified System.Console.Haskeline as Line import qualified Crypto.Random as Random import qualified Unison.Codebase.Path as Path @@ -148,7 +146,7 @@ main dir welcome initialPath (config, cancelConfig) initialInputs runtime codeba _ | isDoesNotExistError e -> return InvalidSourceNameError _ -> return LoadError go = do - contents <- Data.Text.IO.readFile $ Text.unpack fname + contents <- readUtf8 $ Text.unpack fname return $ LoadSuccess contents in catch go handle else return InvalidSourceNameError diff --git a/unison-cli/src/Unison/CommandLine/OutputMessages.hs b/unison-cli/src/Unison/CommandLine/OutputMessages.hs index 8d5c5dac88..0d31c69022 100644 --- a/unison-cli/src/Unison/CommandLine/OutputMessages.hs +++ b/unison-cli/src/Unison/CommandLine/OutputMessages.hs @@ -19,7 +19,6 @@ import qualified Data.Sequence as Seq import qualified Data.Set as Set import Data.Set.NonEmpty (NESet) import qualified Data.Text as Text -import Data.Text.IO (readFile, writeFile) import Data.Tuple (swap) import Data.Tuple.Extra (dupe, uncurry3) import System.Directory @@ -125,7 +124,6 @@ import qualified Unison.Util.Relation as R import Unison.Var (Var) import qualified Unison.Var as Var import qualified Unison.WatchKind as WK -import Prelude hiding (readFile, writeFile) import qualified Data.List.NonEmpty as NEList import qualified U.Util.Monoid as Monoid import qualified Data.Foldable as Foldable @@ -1651,9 +1649,9 @@ displayRendered outputLoc pp = existingContents <- do exists <- doesFileExist path if exists - then readFile path + then readUtf8 path else pure "" - writeFile path . Text.pack . P.toPlain 80 $ + writeUtf8 path . Text.pack . P.toPlain 80 $ P.lines [pp, "", P.text existingContents] message pp path = P.callout "☝️" $ @@ -1687,9 +1685,9 @@ displayDefinitions outputLoc ppe types terms = existingContents <- do exists <- doesFileExist path if exists - then readFile path + then readUtf8 path else pure "" - writeFile path . Text.pack . P.toPlain 80 $ + writeUtf8 path . Text.pack . P.toPlain 80 $ P.lines [ code, "", From 88f3c3a2f065c2b5ba853473aee18ff99f9ed319 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20H=C3=B8jberg?= Date: Wed, 23 Feb 2022 10:29:56 -0500 Subject: [PATCH 269/297] Doc Embed: Add Video and FrontMatter Expand the Doc Embed variant to support embedding videos and frontmatter. The latter will be rendered at the top of the output of the html file rendered via the `docs.to-html` command. --- parser-typechecker/package.yaml | 1 + .../src/Unison/Runtime/IOSource.hs | 35 ++++++++++++ .../src/Unison/Server/Backend.hs | 37 ++++++++++-- parser-typechecker/src/Unison/Server/Doc.hs | 31 ++++++++-- .../src/Unison/Server/Doc/AsHtml.hs | 57 ++++++++++++++++--- parser-typechecker/src/Unison/Server/Types.hs | 2 + .../unison-parser-typechecker.cabal | 1 + unison-core/src/Unison/Util/List.hs | 6 +- .../transcripts/emptyCodebase.output.md | 2 +- 9 files changed, 151 insertions(+), 21 deletions(-) diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index f107d0aae3..e8dc6b26ea 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -100,6 +100,7 @@ library: - mwc-random - NanoID - lucid + - yaml - semialign - servant - servant-docs diff --git a/parser-typechecker/src/Unison/Runtime/IOSource.hs b/parser-typechecker/src/Unison/Runtime/IOSource.hs index 3f4e5c803b..e02f6d2baf 100644 --- a/parser-typechecker/src/Unison/Runtime/IOSource.hs +++ b/parser-typechecker/src/Unison/Runtime/IOSource.hs @@ -145,6 +145,18 @@ doc2UntitledSectionId = constructorNamed doc2Ref "Doc2.UntitledSection" doc2ColumnId = constructorNamed doc2Ref "Doc2.Column" doc2GroupId = constructorNamed doc2Ref "Doc2.Group" +doc2MediaSourceRef :: R.Reference +doc2MediaSourceRef = typeNamed "Doc2.MediaSource" +pattern Doc2MediaSourceRef <- ((== doc2MediaSourceRef) -> True) + +doc2VideoRef :: R.Reference +doc2VideoRef = typeNamed "Doc2.Video" +pattern Doc2VideoRef <- ((== doc2VideoRef) -> True) + +doc2FrontMatterRef :: R.Reference +doc2FrontMatterRef = typeNamed "Doc2.FrontMatter" +pattern Doc2FrontMatterRef <- ((== doc2FrontMatterRef) -> True) + pattern Doc2Word txt <- Term.App' (Term.Constructor' (ConstructorReference Doc2Ref ((==) doc2WordId -> True))) (Term.Text' txt) pattern Doc2Code d <- Term.App' (Term.Constructor' (ConstructorReference Doc2Ref ((==) doc2CodeId -> True))) d pattern Doc2CodeBlock lang d <- Term.Apps' (Term.Constructor' (ConstructorReference Doc2Ref ((==) doc2CodeBlockId -> True))) [Term.Text' lang, d] @@ -198,6 +210,9 @@ pattern Doc2SpecialFormEval tm <- Term.App' (Term.Constructor' (ConstructorRefer pattern Doc2SpecialFormEvalInline tm <- Term.App' (Term.Constructor' (ConstructorReference Doc2SpecialFormRef ((==) doc2SpecialFormEvalInlineId -> True))) tm pattern Doc2SpecialFormEmbed any <- Term.App' (Term.Constructor' (ConstructorReference Doc2SpecialFormRef ((==) doc2SpecialFormEmbedId -> True))) any pattern Doc2SpecialFormEmbedInline any <- Term.App' (Term.Constructor' (ConstructorReference Doc2SpecialFormRef ((==) doc2SpecialFormEmbedInlineId -> True))) any +pattern Doc2MediaSource src mimeType <- Term.Apps' (Term.Constructor' (ConstructorReference Doc2MediaSourceRef _)) [src, mimeType] +pattern Doc2SpecialFormEmbedVideo sources config <- Doc2SpecialFormEmbed (Term.App' _ (Term.Apps' (Term.Constructor' (ConstructorReference Doc2VideoRef _)) [Term.List' (toList -> sources), Term.List' (toList -> config)])) +pattern Doc2SpecialFormEmbedFrontMatter frontMatter <- Doc2SpecialFormEmbed (Term.App' _ (Term.App' (Term.Constructor' (ConstructorReference Doc2FrontMatterRef _)) (Term.List' (toList -> frontMatter)))) -- pulls out `vs body` in `Doc2.Term (Any '(vs -> body))`, where -- vs can be any number of parameters @@ -358,6 +373,26 @@ metadata.isPropagated = IsPropagated.IsPropagated -- A newtype used when embedding term references in a Doc2 unique[fb488e55e66e2492c2946388e4e846450701db04] type Doc2.Term = Term Any +-- Media types for Doc2.Embed. +-- Somewhat modelled after: +-- https://developer.mozilla.org/en-US/docs/Web/HTML/Element/source and +-- https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video + +unique[ab9344724264495159ec7122d276a6358630403b6a5529e1e5d76bcf] type Doc2.MediaSource + = { sourceUrl: Text, mimeType: Optional Text } + +-- Used with MediaSource to embed videos in a Doc. The `config` field is +-- intended to be used to add attributes etc, like `poster` or `autoplay` for +-- the HTML