{-# LANGUAGE RecursiveDo #-} module GHC.Tc.Solver( InferMode(..), simplifyInfer, findInferredDiff, growThetaTyVars, simplifyAmbiguityCheck, simplifyDefault, simplifyTop, simplifyTopImplic, simplifyInteractive, solveEqualities, pushLevelAndSolveEqualities, pushLevelAndSolveEqualitiesX, reportUnsolvedEqualities, simplifyWantedsTcM, tcCheckGivens, tcCheckWanteds, tcNormalise, captureTopConstraints, simplifyTopWanteds, promoteTyVarSet, simplifyAndEmitFlatConstraints, -- For Rules we need these solveWanteds, approximateWC ) where import GHC.Prelude import GHC.Data.Bag import GHC.Core.Class import GHC.Driver.Session import GHC.Tc.Utils.Instantiate import GHC.Data.List.SetOps import GHC.Types.Name import GHC.Types.Id( idType ) import GHC.Utils.Outputable import GHC.Builtin.Utils import GHC.Builtin.Names import GHC.Tc.Errors import GHC.Tc.Errors.Types import GHC.Tc.Types.Evidence import GHC.Tc.Solver.Interact import GHC.Tc.Solver.Canonical ( makeSuperClasses, solveCallStack ) import GHC.Tc.Solver.Rewrite ( rewriteType ) import GHC.Tc.Utils.Unify ( buildTvImplication ) import GHC.Tc.Utils.TcMType as TcM import GHC.Tc.Utils.Monad as TcM import GHC.Tc.Solver.InertSet import GHC.Tc.Solver.Monad as TcS import GHC.Tc.Types.Constraint import GHC.Tc.Instance.FunDeps import GHC.Core.Predicate import GHC.Tc.Types.Origin import GHC.Tc.Utils.TcType import GHC.Core.Type import GHC.Core.Ppr import GHC.Core.TyCon ( TyConBinder, isTypeFamilyTyCon ) import GHC.Builtin.Types ( liftedRepTy, liftedDataConTy ) import GHC.Core.Unify ( tcMatchTyKi ) import GHC.Utils.Misc import GHC.Utils.Panic import GHC.Types.Var import GHC.Types.Var.Set import GHC.Types.Basic ( IntWithInf, intGtLimit , DefaultingStrategy(..), NonStandardDefaultingStrategy(..) ) import GHC.Types.Error import qualified GHC.LanguageExtensions as LangExt import Control.Monad import Data.Foldable ( toList ) import Data.List ( partition ) import Data.List.NonEmpty ( NonEmpty(..) 
  )
import GHC.Data.Maybe     ( mapMaybe )

{- *********************************************************************
*                                                                      *
*                          External interface                          *
*                                                                      *
********************************************************************* -}

captureTopConstraints :: TcM a -> TcM (a, WantedConstraints)
-- (captureTopConstraints m) runs m, and returns the type constraints it
-- generates plus the constraints produced by static forms inside.
-- If it fails with an exception, it reports any insolubles
-- (out of scope variables) before doing so
--
-- captureTopConstraints is used exclusively by GHC.Tc.Module at the top
-- level of a module.
--
-- Importantly, if captureTopConstraints propagates an exception, it
-- reports any insoluble constraints first, lest they be lost
-- altogether.  This is important, because solveEqualities (maybe
-- other things too) throws an exception without adding any error
-- messages; it just puts the unsolved constraints back into the
-- monad. See GHC.Tc.Utils.Monad Note [Constraints and errors]
-- #16376 is an example of what goes wrong if you don't do this.
--
-- NB: the caller should bring any environments into scope before
-- calling this, so that the reportUnsolved has access to the most
-- complete GlobalRdrEnv
captureTopConstraints thing_inside
  = do { static_wc_var <- TcM.newTcRef emptyWC ;
       ; (mb_res, lie) <- TcM.updGblEnv (\env -> env { tcg_static_wc = static_wc_var } ) $
                          TcM.tryCaptureConstraints thing_inside
       ; stWC <- TcM.readTcRef static_wc_var

       -- See GHC.Tc.Utils.Monad Note [Constraints and errors]
       -- If the thing_inside threw an exception, but generated some insoluble
       -- constraints, report the latter before propagating the exception
       -- Otherwise they will be lost altogether
       ; case mb_res of
           Just res -> return (res, lie `andWC` stWC)
           Nothing  -> do { _ <- simplifyTop lie; failM } }
                -- This call to simplifyTop is the reason
                -- this function is here instead of GHC.Tc.Utils.Monad
                -- We call simplifyTop so that it does defaulting
                -- (esp of runtime-reps) before reporting errors

simplifyTopImplic :: Bag Implication -> TcM ()
-- Solve a bag of top-level implications; used only for their side effects
-- (error reporting), since implication-only input yields no bindings.
simplifyTopImplic implics
  = do { empty_binds <- simplifyTop (mkImplicWC implics)

       -- Since all the inputs are implications the returned bindings will be empty
       ; massertPpr (isEmptyBag empty_binds) (ppr empty_binds)

       ; return () }

simplifyTop :: WantedConstraints -> TcM (Bag EvBind)
-- Simplify top-level constraints
-- Usually these will be implications,
-- but when there is nothing to quantify we don't wrap
-- in a degenerate implication, so we do that here instead
simplifyTop wanteds
  = do { traceTc "simplifyTop {" $ text "wanted = " <+> ppr wanteds
       ; ((final_wc, unsafe_ol), binds1) <- runTcS $
            do { final_wc <- simplifyTopWanteds wanteds
               ; unsafe_ol <- getSafeOverlapFailures
               ; return (final_wc, unsafe_ol) }
       ; traceTc "End simplifyTop }" empty

       ; binds2 <- reportUnsolved final_wc

       ; traceTc "reportUnsolved (unsafe overlapping) {" empty
       ; unless (isEmptyCts unsafe_ol) $ do {
           -- grab current error messages and clear, warnAllUnsolved will
           -- update error messages which we'll grab and then restore saved
           -- messages.
           ; errs_var  <- getErrsVar
           ; saved_msg <- TcM.readTcRef errs_var
           ; TcM.writeTcRef errs_var emptyMessages

           ; warnAllUnsolved $ emptyWC { wc_simple = unsafe_ol }

           ; whyUnsafe <- getWarningMessages <$> TcM.readTcRef errs_var
           ; TcM.writeTcRef errs_var saved_msg

           -- Mark the module as unsafe for Safe Haskell inference, carrying
           -- the warnings we just collected as the reason.
           ; recordUnsafeInfer (mkMessages whyUnsafe)
           }
       ; traceTc "reportUnsolved (unsafe overlapping) }" empty

       ; return (evBindMapBinds binds1 `unionBags` binds2) }

pushLevelAndSolveEqualities :: SkolemInfoAnon -> [TyConBinder] -> TcM a -> TcM a
-- Push level, and solve all resulting equalities
-- If there are any unsolved equalities, report them
-- and fail (in the monad)
--
-- Panics if we solve any non-equality constraints.  (In runTCSEqualities
-- we use an error thunk for the evidence bindings.)
pushLevelAndSolveEqualities skol_info_anon tcbs thing_inside
  = do { (tclvl, wanted, res) <- pushLevelAndSolveEqualitiesX
                                      "pushLevelAndSolveEqualities" thing_inside
       ; report_unsolved_equalities skol_info_anon (binderVars tcbs) tclvl wanted
       ; return res }

pushLevelAndSolveEqualitiesX :: String -> TcM a
                             -> TcM (TcLevel, WantedConstraints, a)
-- Push the level, gather equality constraints, and then solve them.
-- Returns any remaining unsolved equalities.
-- Does not report errors.
--
-- Panics if we solve any non-equality constraints.  (In runTCSEqualities
-- we use an error thunk for the evidence bindings.)
pushLevelAndSolveEqualitiesX callsite thing_inside
  = do { traceTc "pushLevelAndSolveEqualitiesX {" (text "Called from" <+> text callsite)
       ; (tclvl, (wanted, res))
            <- pushTcLevelM $
               do { (res, wanted) <- captureConstraints thing_inside
                  ; wanted <- runTcSEqualities (simplifyTopWanteds wanted)
                  ; return (wanted,res) }
       ; traceTc "pushLevelAndSolveEqualities }"
                 (vcat [ text "Residual:" <+> ppr wanted
                       , text "Level:" <+> ppr tclvl ])
       ; return (tclvl, wanted, res) }

-- | Type-check a thing that emits only equality constraints, solving any
-- constraints we can and re-emitting constraints that we can't.
-- Use this variant only when we'll get another crack at it later
-- See Note [Failure in local type signatures]
--
-- Panics if we solve any non-equality constraints.  (In runTCSEqualities
-- we use an error thunk for the evidence bindings.)
solveEqualities :: String -> TcM a -> TcM a
solveEqualities callsite thing_inside
  = do { traceTc "solveEqualities {" (text "Called from" <+> text callsite)
       ; (res, wanted) <- captureConstraints thing_inside
       ; simplifyAndEmitFlatConstraints wanted
            -- simplifyAndEmitFlatConstraints fails outright unless
            --  the only unsolved constraints are soluble-looking
            --  equalities that can float out
       ; traceTc "solveEqualities }" empty
       ; return res }

simplifyAndEmitFlatConstraints :: WantedConstraints -> TcM ()
-- See Note [Failure in local type signatures]
simplifyAndEmitFlatConstraints wanted
  = do { -- Solve and zonk to establish the
         -- preconditions for floatKindEqualities
         wanted <- runTcSEqualities (solveWanteds wanted)
       ; wanted <- TcM.zonkWC wanted

       ; traceTc "emitFlatConstraints {" (ppr wanted)
       ; case floatKindEqualities wanted of
           Nothing -> do { traceTc "emitFlatConstraints } failing" (ppr wanted)
                         -- Emit the bad constraints, wrapped in an implication
                         -- See Note [Wrapping failing kind equalities]
                         ; tclvl  <- TcM.getTcLevel
                         ; implic <- buildTvImplication unkSkolAnon [] (pushTcLevel tclvl) wanted
                              --                       ^^^^^^^^^    ^^^^^^^^^^^^^^^^^
                              -- it's OK to use unkSkol because    we must increase the TcLevel,
                              -- we don't bind any skolem          as explained in Note
                              -- variables here                    [Wrapping failing kind equalities]
                         ; emitImplication implic
                         ; failM }
           Just (simples, errs)
              -> do { _ <- promoteTyVarSet (tyCoVarsOfCts simples)
                    ; traceTc "emitFlatConstraints }" $
                      vcat [ text "simples:" <+> ppr simples
                           , text "errs: " <+> ppr errs ]
                      -- Holes and other delayed errors don't need promotion
                    ; emitDelayedErrors errs
                    ; emitSimples simples } }

floatKindEqualities :: WantedConstraints -> Maybe (Bag Ct, Bag DelayedError)
-- Float out all the constraints from the WantedConstraints,
-- Return Nothing if any constraints can't be floated (captured
-- by skolems), or if there is an insoluble constraint, or
-- IC_Telescope telescope error
-- Precondition 1: we have tried to solve the 'wanteds', both so that
--    the ic_status field is set, and because solving can make constraints
--    more floatable.
-- Precondition 2: the 'wanteds' are zonked, since floatKindEqualities
--    is not monadic
-- See Note [floatKindEqualities vs approximateWC]
floatKindEqualities wc = float_wc emptyVarSet wc
  where
    -- Walk a WantedConstraints, accumulating the set of skolems
    -- ('trapping_tvs') that a floated constraint must not mention.
    float_wc :: TcTyCoVarSet -> WantedConstraints
             -> Maybe (Bag Ct, Bag DelayedError)
    float_wc trapping_tvs (WC { wc_simple = simples
                              , wc_impl = implics
                              , wc_errors = errs })
      | all is_floatable simples
      = do { (inner_simples, inner_errs)
                 <- flatMapBagPairM (float_implic trapping_tvs) implics
           ; return ( simples `unionBags` inner_simples
                    , errs `unionBags` inner_errs) }
      | otherwise
      = Nothing
      where
        is_floatable ct
          | insolubleEqCt ct = False
          | otherwise        = tyCoVarsOfCt ct `disjointVarSet` trapping_tvs

    float_implic :: TcTyCoVarSet -> Implication
                 -> Maybe (Bag Ct, Bag DelayedError)
    float_implic trapping_tvs (Implic { ic_wanted = wanted, ic_given_eqs = given_eqs
                                      , ic_skols = skols, ic_status = status })
      | isInsolubleStatus status
      = Nothing   -- A short cut /plus/ we must keep track of IC_BadTelescope
      | otherwise
      = do { (simples, holes) <- float_wc new_trapping_tvs wanted
           ; when (not (isEmptyBag simples) && given_eqs == MaybeGivenEqs) $
             Nothing
             -- If there are some constraints to float out, but we can't
             -- because we don't float out past local equalities
             -- (c.f GHC.Tc.Solver.approximateWC), then fail
           ; return (simples, holes) }
      where
        new_trapping_tvs = trapping_tvs `extendVarSetList` skols

{- Note [Failure in local type signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When kind checking a type signature, we like to fail fast if we can't
solve all the kind equality constraints, for two reasons:

  * A kind-bogus type signature may cause a cascade of
knock-on errors if we let it pass * More seriously, we don't have a convenient term-level place to add deferred bindings for unsolved kind-equality constraints. In earlier GHCs this led to un-filled-in coercion holes, which caused GHC to crash with "fvProv falls into a hole" See #11563, #11520, #11516, #11399 But what about /local/ type signatures, mentioning in-scope type variables for which there might be 'given' equalities? For these we might not be able to solve all the equalities locally. Here's an example (T15076b): class (a ~ b) => C a b data SameKind :: k -> k -> Type where { SK :: SameKind a b } bar :: forall (a :: Type) (b :: Type). C a b => Proxy a -> Proxy b -> () bar _ _ = const () (undefined :: forall (x :: a) (y :: b). SameKind x y) Consider the type signature on 'undefined'. It's ill-kinded unless a~b. But the superclass of (C a b) means that indeed (a~b). So all should be well. BUT it's hard to see that when kind-checking the signature for undefined. We want to emit a residual (a~b) constraint, to solve later. Another possibility is that we might have something like F alpha ~ [Int] where alpha is bound further out, which might become soluble "later" when we learn more about alpha. So we want to emit those residual constraints. BUT it's no good simply wrapping all unsolved constraints from a type signature in an implication constraint to solve later. The problem is that we are going to /use/ that signature, including instantiate it. Say we have f :: forall a. (forall b. blah) -> blah2 f x =
To typecheck the definition of f, we have to instantiate those foralls. Moreover, any unsolved kind equalities will be coercion holes in the type. If we naively wrap them in an implication like forall a. (co1:k1~k2, forall b. co2:k3~k4) hoping to solve it later, we might end up filling in the holes co1 and co2 with coercions involving 'a' and 'b' -- but by now we've instantiated the type. Chaos! Moreover, the unsolved constraints might be skolem-escape things, and if we proceed with f bound to a nonsensical type, we get a cascade of follow-up errors. For example polykinds/T12593, T15577, and many others. So here's the plan (see tcHsSigType): * pushLevelAndSolveEqualitiesX: try to solve the constraints * kindGeneraliseSome: do kind generalisation * buildTvImplication: build an implication for the residual, unsolved constraint * simplifyAndEmitFlatConstraints: try to float out every unsolved equality inside that implication, in the hope that it constrains only global type variables, not the locally-quantified ones. * If we fail, or find an insoluble constraint, emit the implication, so that the errors will be reported, and fail. * If we succeed in floating all the equalities, promote them and re-emit them as flat constraint, not wrapped at all (since they don't mention any of the quantified variables. * Note that this float-and-promote step means that anonymous wildcards get floated to top level, as we want; see Note [Checking partial type signatures] in GHC.Tc.Gen.HsType. All this is done: * In GHC.Tc.Gen.HsType.tcHsSigType, as above * solveEqualities. Use this when there no kind-generalisation step to complicate matters; then we don't need to push levels, and can solve the equalities immediately without needing to wrap it in an implication constraint. (You'll generally see a kindGeneraliseNone nearby.) * In GHC.Tc.TyCl and GHC.Tc.TyCl.Instance; see calls to pushLevelAndSolveEqualitiesX, followed by quantification, and then reportUnsolvedEqualities. 
NB: we call reportUnsolvedEqualities before zonkTcTypeToType because the latter does not expect to see any un-filled-in coercions, which will happen if we have unsolved equalities. By calling reportUnsolvedEqualities first, which fails after reporting errors, we avoid that happening. See also #18062, #11506 Note [Wrapping failing kind equalities] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In simplifyAndEmitFlatConstraints, if we fail to get down to simple flat constraints we will * re-emit the constraints so that they are reported * fail in the monad But there is a Terrible Danger that, if -fdefer-type-errors is on, and we just re-emit an insoluble constraint like (* ~ (*->*)), that we'll report only a warning and proceed with compilation. But if we ever fail in the monad it should be fatal; we should report an error and stop after the type checker. If not, chaos results: #19142. Our solution is this: * Even with -fdefer-type-errors, inside an implication with no place for value bindings (ic_binds = CoEvBindsVar), report failing equalities as errors. We have to do this anyway; see GHC.Tc.Errors Note [Failing equalities with no evidence bindings]. * Right here in simplifyAndEmitFlatConstraints, use buildTvImplication to wrap the failing constraint in a degenerate implication (no skolems, no theta), with ic_binds = CoEvBindsVar. This setting of `ic_binds` means that any failing equalities will lead to an error not a warning, irrespective of -fdefer-type-errors: see Note [Failing equalities with no evidence bindings] in GHC.Tc.Errors, and `maybeSwitchOffDefer` in that module. We still take care to bump the TcLevel of the implication. Partly, that ensures that nested implications have increasing level numbers which seems nice. But more specifically, suppose the outer level has a Given `(C ty)`, which has pending (not-yet-expanded) superclasses. 
Consider what happens when we process this implication constraint (which we
have re-emitted) in that context:
  - in the inner implication we'll call `getPendingGivenScs`,
  - we /do not/ want to get the `(C ty)` from the outer level,
    lest we try to add an evidence term for the superclass,
    which we can't do because we have specifically set
    `ic_binds` = `CoEvBindsVar`.
  - as `getPendingGivenScs` is careful to only get Givens from
    the /current/ level, and we bumped the `TcLevel` of the
    implication, we're OK.

TL;DR: bump the `TcLevel` when creating the nested implication.
If we don't we get a panic in `GHC.Tc.Utils.Monad.addTcEvBind` (#20043).

We re-emit the implication rather than reporting the errors right now,
so that the error messages are improved by other solving and defaulting.
e.g. we prefer
    Cannot match 'Type->Type' with 'Type'
to
    Cannot match 'Type->Type' with 'TYPE r0'

Note [floatKindEqualities vs approximateWC]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
floatKindEqualities and approximateWC are strikingly similar to each
other, but

* floatKindEqualities tries to float /all/ equalities, and fails if
  it can't, or if any implication is insoluble.
* approximateWC just floats out any constraints
  (not just equalities) that can float; it never fails.
-}

reportUnsolvedEqualities :: SkolemInfo -> [TcTyVar] -> TcLevel
                         -> WantedConstraints -> TcM ()
-- Reports all unsolved wanteds provided; fails in the monad if there are any.
--
-- The provided SkolemInfo and [TcTyVar] arguments are used in an implication to
-- provide skolem info for any errors.
reportUnsolvedEqualities skol_info skol_tvs tclvl wanted
  = report_unsolved_equalities (getSkolemInfo skol_info) skol_tvs tclvl wanted

report_unsolved_equalities :: SkolemInfoAnon -> [TcTyVar] -> TcLevel
                           -> WantedConstraints -> TcM ()
report_unsolved_equalities skol_info_anon skol_tvs tclvl wanted
  | isEmptyWC wanted
  = return ()

  | otherwise  -- NB: we build an implication /even if skol_tvs is empty/,
               -- just to ensure that our level invariants hold, specifically
               -- (WantedInv).  See Note [TcLevel invariants].
  = checkNoErrs $   -- Fail
    do { implic <- buildTvImplication skol_info_anon skol_tvs tclvl wanted
       ; reportAllUnsolved (mkImplicWC (unitBag implic)) }


-- | Simplify top-level constraints, but without reporting any unsolved
-- constraints nor unsafe overlapping.
simplifyTopWanteds :: WantedConstraints -> TcS WantedConstraints
    -- See Note [Top-level Defaulting Plan]
simplifyTopWanteds wanteds
  = do { wc_first_go <- nestTcS (solveWanteds wanteds)
                            -- This is where the main work happens
       ; dflags <- getDynFlags
       ; try_tyvar_defaulting dflags wc_first_go }
  where
    -- Kind/runtime-rep defaulting of metavariables, tried first.
    try_tyvar_defaulting :: DynFlags -> WantedConstraints -> TcS WantedConstraints
    try_tyvar_defaulting dflags wc
      | isEmptyWC wc
      = return wc
      | insolubleWC wc
      , gopt Opt_PrintExplicitRuntimeReps dflags -- See Note [Defaulting insolubles]
      = try_class_defaulting wc
      | otherwise
      = do { -- Need to zonk first, as the WantedConstraints are not yet zonked.
           ; free_tvs <- TcS.zonkTyCoVarsAndFVList (tyCoVarsOfWCList wc)
           ; let defaultable_tvs = filter can_default free_tvs
                 can_default tv
                   =   isTyVar tv
                       -- Weed out coercion variables.

                    && isMetaTyVar tv
                       -- Weed out runtime-skolems in GHCi, which we definitely
                       -- shouldn't try to default.

                    && not (tv `elemVarSet` nonDefaultableTyVarsOfWC wc)
                       -- Weed out variables for which defaulting would be unhelpful,
                       -- e.g. alpha appearing in [W] alpha[conc] ~# rr[sk].

           ; defaulted <- mapM defaultTyVarTcS defaultable_tvs -- Has unification side effects
           ; if or defaulted
             then do { wc_residual <- nestTcS (solveWanteds wc)
                            -- See Note [Must simplify after defaulting]
                     ; try_class_defaulting wc_residual }
             else try_class_defaulting wc }     -- No defaulting took place

    -- Class defaulting (Num, etc.), looped until nothing changes.
    try_class_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_class_defaulting wc
      | isEmptyWC wc || insolubleWC wc -- See Note [Defaulting insolubles]
      = try_callstack_defaulting wc
      | otherwise  -- See Note [When to do type-class defaulting]
      = do { something_happened <- applyDefaultingRules wc
                                   -- See Note [Top-level Defaulting Plan]
           ; if something_happened
             then do { wc_residual <- nestTcS (solveWanteds wc)
                     ; try_class_defaulting wc_residual }
                  -- See Note [Overview of implicit CallStacks] in GHC.Tc.Types.Evidence
             else try_callstack_defaulting wc }

    -- Last resort: default leftover CallStack constraints to the empty stack.
    try_callstack_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_callstack_defaulting wc
      | isEmptyWC wc
      = return wc
      | otherwise
      = defaultCallStacks wc

-- | Default any remaining @CallStack@ constraints to empty @CallStack@s.
defaultCallStacks :: WantedConstraints -> TcS WantedConstraints
-- See Note [Overview of implicit CallStacks] in GHC.Tc.Types.Evidence
defaultCallStacks wanteds
  = do simples <- handle_simples (wc_simple wanteds)
       mb_implics <- mapBagM handle_implic (wc_impl wanteds)
       return (wanteds { wc_simple = simples
                       , wc_impl = catBagMaybes mb_implics })

  where

  handle_simples simples
    = catBagMaybes <$> mapBagM defaultCallStack simples

  handle_implic :: Implication -> TcS (Maybe Implication)
     -- The Maybe is because solving the CallStack constraint
     -- may well allow us to discard the implication entirely
  handle_implic implic
    | isSolvedStatus (ic_status implic)
    = return (Just implic)
    | otherwise
    = do { wanteds <- setEvBindsTcS (ic_binds implic) $
                      -- defaultCallStack sets a binding, so
                      -- we must set the correct binding group
                      defaultCallStacks (ic_wanted implic)
         ; setImplicationStatus (implic { ic_wanted = wanteds }) }

  defaultCallStack ct
    | ClassPred cls tys <- classifyPredType (ctPred ct)
    , Just {} <- isCallStackPred cls tys
    = do { solveCallStack (ctEvidence ct) EvCsEmpty
         ; return Nothing }

  defaultCallStack ct
    = return (Just ct)

{- Note [When to do type-class defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In GHC 7.6 and 7.8.2, we did type-class defaulting only if insolubleWC
was false, on the grounds that defaulting can't help solve insoluble
constraints.  But if we *don't* do defaulting we may report a whole
lot of errors that would be solved by defaulting; these errors are
quite spurious because fixing the single insoluble error means that
defaulting happens again, which makes all the other errors go away.
This is jolly confusing: #9033.

So it seems better to always do type-class defaulting.

However, always doing defaulting does mean that we'll do it in
situations like this (#5934):
   run :: (forall s. GenST s) -> Int
   run = fromInteger 0
We don't unify the return type of fromInteger with the given function
type, because the latter involves foralls.
So we're left with (Num alpha, alpha ~ (forall s. GenST s) -> Int) Now we do defaulting, get alpha := Integer, and report that we can't match Integer with (forall s. GenST s) -> Int. That's not totally stupid, but perhaps a little strange. Another potential alternative would be to suppress *all* non-insoluble errors if there are *any* insoluble errors, anywhere, but that seems too drastic. Note [Don't default in syntactic equalities] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When there are unsolved syntactic equalities such as rr[sk] ~S# alpha[conc] we should not default alpha, lest we obtain a poor error message such as Couldn't match kind `rr' with `LiftedRep' We would rather preserve the original syntactic equality to be reported to the user, especially as the concrete metavariable alpha might store an informative origin for the user. Note [Must simplify after defaulting] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We may have a deeply buried constraint (t:*) ~ (a:Open) which we couldn't solve because of the kind incompatibility, and 'a' is free. Then when we default 'a' we can solve the constraint. And we want to do that before starting in on type classes. We MUST do it before reporting errors, because it isn't an error! #7967 was due to this. Note [Top-level Defaulting Plan] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We have considered two design choices for where/when to apply defaulting. (i) Do it in SimplCheck mode only /whenever/ you try to solve some simple constraints, maybe deep inside the context of implications. This used to be the case in GHC 7.4.1. (ii) Do it in a tight loop at simplifyTop, once all other constraints have finished. This is the current story. Option (i) had many disadvantages: a) Firstly, it was deep inside the actual solver. 
b) Secondly, it was dependent on the context (Infer a type signature, or Check a type signature, or Interactive) since we did not want to always start defaulting when inferring (though there is an exception to this, see Note [Default while Inferring]). c) It plainly did not work. Consider typecheck/should_compile/DfltProb2.hs: f :: Int -> Bool f x = const True (\y -> let w :: a -> a w a = const a (y+1) in w y) We will get an implication constraint (for beta the type of y): [untch=beta] forall a. 0 => Num beta which we really cannot default /while solving/ the implication, since beta is untouchable. Instead our new defaulting story is to pull defaulting out of the solver loop and go with option (ii), implemented at SimplifyTop. Namely: - First, have a go at solving the residual constraint of the whole program - Try to approximate it with a simple constraint - Figure out derived defaulting equations for that simple constraint - Go round the loop again if you did manage to get some equations Now, that has to do with class defaulting. However there exists type variable /kind/ defaulting. Again this is done at the top-level and the plan is: - At the top-level, once you had a go at solving the constraint, do figure out /all/ the touchable unification variables of the wanted constraints. - Apply defaulting to their kinds More details in Note [DefaultTyVar]. Note [Safe Haskell Overlapping Instances] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In Safe Haskell, we apply an extra restriction to overlapping instances. The motive is to prevent untrusted code provided by a third-party, changing the behavior of trusted code through type-classes. This is due to the global and implicit nature of type-classes that can hide the source of the dictionary. Another way to state this is: if a module M compiles without importing another module N, changing M to import N shouldn't change the behavior of M. Overlapping instances with type-classes can violate this principle. 
However, overlapping instances aren't always unsafe. They are just unsafe when the most selected dictionary comes from untrusted code (code compiled with -XSafe) and overlaps instances provided by other modules. In particular, in Safe Haskell at a call site with overlapping instances, we apply the following rule to determine if it is a 'unsafe' overlap: 1) Most specific instance, I1, defined in an `-XSafe` compiled module. 2) I1 is an orphan instance or a MPTC. 3) At least one overlapped instance, Ix, is both: A) from a different module than I1 B) Ix is not marked `OVERLAPPABLE` This is a slightly involved heuristic, but captures the situation of an imported module N changing the behavior of existing code. For example, if condition (2) isn't violated, then the module author M must depend either on a type-class or type defined in N. Secondly, when should these heuristics be enforced? We enforced them when the type-class method call site is in a module marked `-XSafe` or `-XTrustworthy`. This allows `-XUnsafe` modules to operate without restriction, and for Safe Haskell inference to infer modules with unsafe overlaps as unsafe. One alternative design would be to also consider if an instance was imported as a `safe` import or not and only apply the restriction to instances imported safely. However, since instances are global and can be imported through more than one path, this alternative doesn't work. Note [Safe Haskell Overlapping Instances Implementation] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ How is this implemented? It's complicated! So we'll step through it all: 1) `InstEnv.lookupInstEnv` -- Performs instance resolution, so this is where we check if a particular type-class method call is safe or unsafe. We do this through the return type, `ClsInstLookupResult`, where the last parameter is a list of instances that are unsafe to overlap. When the method call is safe, the list is null. 
2) `GHC.Tc.Solver.Interact.matchClassInst` -- This module drives the instance resolution / dictionary generation. The return type is `ClsInstResult`, which either says no instance matched, or one found, and if it was a safe or unsafe overlap. 3) `GHC.Tc.Solver.Interact.doTopReactDict` -- Takes a dictionary / class constraint and tries to resolve it by calling (in part) `matchClassInst`. The resolving mechanism has a work list (of constraints) that it process one at a time. If the constraint can't be resolved, it's added to an inert set. When compiling an `-XSafe` or `-XTrustworthy` module, we follow this approach as we know compilation should fail. These are handled as normal constraint resolution failures from here-on (see step 6). Otherwise, we may be inferring safety (or using `-Wunsafe`), and compilation should succeed, but print warnings and/or mark the compiled module as `-XUnsafe`. In this case, we call `insertSafeOverlapFailureTcS` which adds the unsafe (but resolved!) constraint to the `inert_safehask` field of `InertCans`. 4) `GHC.Tc.Solver.simplifyTop`: * Call simplifyTopWanteds, the top-level function for driving the simplifier for constraint resolution. * Once finished, call `getSafeOverlapFailures` to retrieve the list of overlapping instances that were successfully resolved, but unsafe. Remember, this is only applicable for generating warnings (`-Wunsafe`) or inferring a module unsafe. `-XSafe` and `-XTrustworthy` cause compilation failure by not resolving the unsafe constraint at all. * For unresolved constraints (all types), call `GHC.Tc.Errors.reportUnsolved`, while for resolved but unsafe overlapping dictionary constraints, call `GHC.Tc.Errors.warnAllUnsolved`. Both functions convert constraints into a warning message for the user. 
* In the case of `warnAllUnsolved` for resolved, but unsafe dictionary constraints, we collect the generated warning message (pop it) and call `GHC.Tc.Utils.Monad.recordUnsafeInfer` to mark the module we are compiling as unsafe, passing the warning message along as the reason. 5) `GHC.Tc.Errors.*Unsolved` -- Generates error messages for constraints by actually calling `InstEnv.lookupInstEnv` again! Yes, confusing, but all we know is the constraint that is unresolved or unsafe. For dictionary, all we know is that we need a dictionary of type C, but not what instances are available and how they overlap. So we once again call `lookupInstEnv` to figure that out so we can generate a helpful error message. 6) `GHC.Tc.Utils.Monad.recordUnsafeInfer` -- Save the unsafe result and reason in IORefs called `tcg_safe_infer` and `tcg_safe_infer_reason`. 7) `GHC.Driver.Main.tcRnModule'` -- Reads `tcg_safe_infer` after type-checking, calling `GHC.Driver.Main.markUnsafeInfer` (passing the reason along) when safe-inference failed. Note [No defaulting in the ambiguity check] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When simplifying constraints for the ambiguity check, we use solveWanteds, not simplifyTopWanteds, so that we do no defaulting. #11947 was an example: f :: Num a => Int -> Int This is ambiguous of course, but we don't want to default the (Num alpha) constraint to (Num Int)! Doing so gives a defaulting warning, but no error. Note [Defaulting insolubles] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If a set of wanteds is insoluble, we have no hope of accepting the program. Yet we do not stop constraint solving, etc., because we may simplify the wanteds to produce better error messages. So, once we have an insoluble constraint, everything we do is just about producing helpful error messages. Should we default in this case or not? Let's look at an example (tcfail004): (f,g) = (1,2,3) With defaulting, we get a conflict between (a0,b0) and (Integer,Integer,Integer). 
Without defaulting, we get a conflict between (a0,b0) and (a1,b1,c1). I (Richard)
find the latter more helpful. Several other test cases (e.g. tcfail005) suggest
similarly. So: we should not do class defaulting with insolubles.

On the other hand, RuntimeRep-defaulting is different. Witness tcfail078:
  f :: Integer i => i
  f = 0

Without RuntimeRep-defaulting, GHC suggests that Integer should have kind
TYPE r0 -> Constraint and then complains that r0 is actually untouchable
(presumably, because it can't be sure if `Integer i` entails an equality).
If we default, we are told of a clash between (* -> Constraint) and Constraint.
The latter seems far better, suggesting we *should* do RuntimeRep-defaulting
even on insolubles.

But, evidently, not always. Witness UnliftedNewtypesInfinite:
  newtype Foo = FooC (# Int#, Foo #)

This should fail with an occurs-check error on the kind of Foo (with
-XUnliftedNewtypes). If we default RuntimeRep-vars, we get
  Expecting a lifted type, but ‘(# Int#, Foo #)’ is unlifted
which is just plain wrong.

Another situation in which we don't want to default involves concrete
metavariables. In equalities such as

  alpha[conc] ~# rr[sk]
  alpha[conc] ~# RR beta[tau]
    for a type family RR (all at kind RuntimeRep)

we would prefer to report a representation-polymorphism error rather than
default alpha and get error:

  Could not unify `rr` with `Lifted` / Could not unify `RR b0` with `Lifted`

which is very confusing. For this reason, we weed out the concrete
metavariables participating in such equalities in nonDefaultableTyVarsOfWC.
Just looking at insolubility is not enough, as `alpha[conc] ~# RR beta[tau]`
could become soluble after defaulting beta (see also #21430).

Conclusion: we should do RuntimeRep-defaulting on insolubles only when the
user does not want to hear about RuntimeRep stuff -- that is, when
-fprint-explicit-runtime-reps is not set.
However, we must still take care not to default concrete type variables
participating in an equality with a non-concrete type, as seen in the
last example above.
-}

------------------
simplifyAmbiguityCheck :: Type -> WantedConstraints -> TcM ()
-- Solve the constraints arising from the ambiguity check on a type,
-- reporting errors for anything left unsolved (modulo -XAllowAmbiguousTypes).
-- Uses solveWanteds directly, so no defaulting happens here.
simplifyAmbiguityCheck ty wanteds
  = do { traceTc "simplifyAmbiguityCheck {" (text "type = " <+> ppr ty $$ text "wanted = " <+> ppr wanteds)
       ; (final_wc, _) <- runTcS $ solveWanteds wanteds
             -- NB: no defaulting!  See Note [No defaulting in the ambiguity check]

       ; traceTc "End simplifyAmbiguityCheck }" empty

       -- Normally report all errors; but with -XAllowAmbiguousTypes
       -- report only insoluble ones, since they represent genuinely
       -- inaccessible code
       ; allow_ambiguous <- xoptM LangExt.AllowAmbiguousTypes
       ; traceTc "reportUnsolved(ambig) {" empty
       ; unless (allow_ambiguous && not (insolubleWC final_wc))
                (discardResult (reportUnsolved final_wc))
       ; traceTc "reportUnsolved(ambig) }" empty

       ; return () }

------------------
simplifyInteractive :: WantedConstraints -> TcM (Bag EvBind)
-- Entry point used for interactive (GHCi) statements; apart from the
-- trace message it simply defers to simplifyTop.
simplifyInteractive wanteds
  = traceTc "simplifyInteractive" empty >>
    simplifyTop wanteds

------------------
simplifyDefault :: ThetaType  -- Wanted; has no type variables in it
                -> TcM Bool   -- Return if the constraint is soluble
-- Used to check whether a `default` declaration's candidate type satisfies
-- the class constraints: make fresh Wanteds, solve, and test for emptiness.
simplifyDefault theta
  = do { traceTc "simplifyDefault" empty
       ; wanteds <- newWanteds DefaultOrigin theta
       ; (unsolved, _) <- runTcS (solveWanteds (mkSimpleWC wanteds))
       ; return (isEmptyWC unsolved) }

------------------
{- Note [Pattern match warnings with insoluble Givens]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A pattern match on a GADT can introduce new type-level information, which needs
to be analysed in order to get the expected pattern match warnings.
For example:

  > type IsBool :: Type -> Constraint
  > type family IsBool a where
  >   IsBool Bool = ()
  >   IsBool b    = b ~ Bool
  >
  > data T a where
  >   MkTInt  :: Int -> T Int
  >   MkTBool :: IsBool b => b -> T b
  >
  > f :: T Int -> Int
  > f (MkTInt i) = i

The pattern matching performed by `f` is complete: we can't ever call
`f (MkTBool b)`, as type-checking that application would require producing
evidence for `Int ~ Bool`, which can't be done.

The pattern match checker uses `tcCheckGivens` to accumulate all the Given
constraints, and relies on `tcCheckGivens` to return Nothing if the
Givens become insoluble.  `tcCheckGivens` in turn relies on `insolubleCt`
to identify these insoluble constraints.  So the precise definition of
`insolubleCt` has a big effect on pattern match overlap warnings.

To detect this situation, we check whether there are any insoluble Given
constraints. In the example above, the insoluble constraint was an
equality constraint, but it is also important to detect custom type errors:

  > type NotInt :: Type -> Constraint
  > type family NotInt a where
  >   NotInt Int = TypeError (Text "That's Int, silly.")
  >   NotInt _   = ()
  >
  > data R a where
  >   MkT1 :: a -> R a
  >   MkT2 :: NotInt a => R a
  >
  > foo :: R Int -> Int
  > foo (MkT1 x) = x

To see that we can't call `foo (MkT2)`, we must detect that `NotInt Int` is
insoluble because it is a custom type error.
Failing to do so proved quite inconvenient for users, as evidenced by the
tickets #11503 #14141 #16377 #20180.
Test cases: T11503, T14141.

Examples of constraints that tcCheckGivens considers insoluble:
  - Int ~ Bool,
  - Coercible Float Word,
  - TypeError msg.

Non-examples:
  - constraints which we know aren't satisfied,
    e.g. Show (Int -> Int) when no such instance is in scope,
  - Eq (TypeError msg),
  - C (Int ~ Bool), with @class C (c :: Constraint)@.
-}

tcCheckGivens :: InertSet -> Bag EvVar -> TcM (Maybe InertSet)
-- ^ Return (Just new_inerts) if the Givens are satisfiable, Nothing if definitely
-- contradictory.
--
-- See Note [Pattern match warnings with insoluble Givens] above.
tcCheckGivens inerts given_ids = do
  (sat, new_inerts) <- runTcSInerts inerts $ do
    traceTcS "checkGivens {" (ppr inerts <+> ppr given_ids)
    lcl_env <- TcS.getLclEnv
    let given_loc = mkGivenLoc topTcLevel (getSkolemInfo unkSkol) lcl_env
    let given_cts = mkGivens given_loc (bagToList given_ids)
    -- See Note [Superclasses and satisfiability]
    solveSimpleGivens given_cts
    insols <- getInertInsols
    insols <- try_harder insols   -- one extra superclass expansion; see try_harder
    traceTcS "checkGivens }" (ppr insols)
    return (isEmptyBag insols)
  return $ if sat then Just new_inerts else Nothing
  where
    try_harder :: Cts -> TcS Cts
    -- Maybe we have to search up the superclass chain to find
    -- an unsatisfiable constraint.  Example: pmcheck/T3927b.
    -- At the moment we try just once
    try_harder insols
      | not (isEmptyBag insols)   -- We've found that it's definitely unsatisfiable
      = return insols             -- Hurrah -- stop now.
      | otherwise
      = do { pending_given <- getPendingGivenScs
           ; new_given <- makeSuperClasses pending_given
           ; solveSimpleGivens new_given
           ; getInertInsols }

tcCheckWanteds :: InertSet -> ThetaType -> TcM Bool
-- ^ Return True if the Wanteds are soluble, False if not
tcCheckWanteds inerts wanteds = do
  cts <- newWanteds PatCheckOrigin wanteds
  (sat, _new_inerts) <- runTcSInerts inerts $ do
    traceTcS "checkWanteds {" (ppr inerts <+> ppr wanteds)
    -- See Note [Superclasses and satisfiability]
    wcs <- solveWanteds (mkSimpleWC cts)
    traceTcS "checkWanteds }" (ppr wcs)
    return (isSolvedWC wcs)
  return sat

-- | Normalise a type as much as possible using the given constraints.
-- See @Note [tcNormalise]@.
tcNormalise :: InertSet -> Type -> TcM Type
-- Rewrite 'ty' with respect to the Givens in 'inerts', via rewriteType.
-- Used by the pattern-match coverage checker; see Note [tcNormalise] below.
tcNormalise inerts ty
  = do { norm_loc <- getCtLocM PatCheckOrigin Nothing
       ; (res, _new_inerts) <- runTcSInerts inerts $
             do { traceTcS "tcNormalise {" (ppr inerts)
                ; ty' <- rewriteType norm_loc ty
                ; traceTcS "tcNormalise }" (ppr ty')
                ; pure ty' }
       ; return res }

{- Note [Superclasses and satisfiability]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Expand superclasses before starting, because (Int ~ Bool), has
(Int ~~ Bool) as a superclass, which in turn has (Int ~N# Bool)
as a superclass, and it's the latter that is insoluble.  See
Note [The equality types story] in GHC.Builtin.Types.Prim.

If we fail to prove unsatisfiability we (arbitrarily) try just once to
find superclasses, using try_harder.  Reason: we might have a type
signature
   f :: F op (Implements push) => ..
where F is a type function.  This happened in #3972.

We could do more than once but we'd have to have /some/ limit: in the
recursive case, we would go on forever in the common case where
the constraints /are/ satisfiable (#10592 comment:12!).

For straightforward situations without type functions the try_harder
step does nothing.

Note [tcNormalise]
~~~~~~~~~~~~~~~~~~
tcNormalise is a rather atypical entrypoint to the constraint solver. Whereas
most invocations of the constraint solver are intended to simplify a set of
constraints or to decide if a particular set of constraints is satisfiable,
the purpose of tcNormalise is to take a type, plus some locally solved
constraints in the form of an InertSet, and normalise the type as much
as possible with respect to those constraints.

It does *not* reduce type or data family applications or look through newtypes.

Why is this useful? As one example, when coverage-checking an EmptyCase
expression, it's possible that the type of the scrutinee will only reduce
if some local equalities are solved for. See "Wrinkle: Local equalities"
in Note [Type normalisation] in "GHC.HsToCore.Pmc".
To accomplish its stated goal, tcNormalise first initialises the solver monad with the given InertCans, then uses rewriteType to simplify the desired type with respect to the Givens in the InertCans. *********************************************************************************** * * * Inference * * *********************************************************************************** Note [Inferring the type of a let-bound variable] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider f x = rhs To infer f's type we do the following: * Gather the constraints for the RHS with ambient level *one more than* the current one. This is done by the call pushLevelAndCaptureConstraints (tcMonoBinds...) in GHC.Tc.Gen.Bind.tcPolyInfer * Call simplifyInfer to simplify the constraints and decide what to quantify over. We pass in the level used for the RHS constraints, here called rhs_tclvl. This ensures that the implication constraint we generate, if any, has a strictly-increased level compared to the ambient level outside the let binding. Note [Inferring principal types] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We don't always infer principal types. For instance, the inferred type for > f x = show [x] is > f :: Show a => a -> String This is not the most general type if we allow flexible contexts. Indeed, if we try to write the following > g :: Show [a] => a -> String > g x = f x we get the error: * Could not deduce (Show a) arising from a use of `f' from the context: Show [a] Though replacing f x in the right-hand side of g with the definition of f x works, the call to f x does not. This is the hallmark of unprincip{led,al} types. Another example: > class C a > class D a where > d :: a > instance C a => D a where > d = undefined > h _ = d -- argument is to avoid the monomorphism restriction The inferred type for h is > h :: C a => t -> a even though > h :: D a => t -> a is more general. The fix is easy: don't simplify constraints before inferring a type. 
That is, have the inferred type quantify over all constraints that arise
in a definition's right-hand side, even if they are simplifiable.
Unfortunately, this would yield all manner of unwieldy types,
and so we won't do so.
-}

-- | How should we choose which constraints to quantify over?
data InferMode = ApplyMR          -- ^ Apply the monomorphism restriction,
                                  -- never quantifying over any constraints
               | EagerDefaulting  -- ^ See Note [TcRnExprMode] in "GHC.Tc.Module",
                                  -- the :type +d case; this mode refuses
                                  -- to quantify over any defaultable constraint
               | NoRestrictions   -- ^ Quantify over any constraint that
                                  -- satisfies pickQuantifiablePreds

instance Outputable InferMode where
  ppr ApplyMR         = text "ApplyMR"
  ppr EagerDefaulting = text "EagerDefaulting"
  ppr NoRestrictions  = text "NoRestrictions"

simplifyInfer :: TcLevel               -- Used when generating the constraints
              -> InferMode
              -> [TcIdSigInst]         -- Any signatures (possibly partial)
              -> [(Name, TcTauType)]   -- Variables to be generalised,
                                       -- and their tau-types
              -> WantedConstraints
              -> TcM ([TcTyVar],    -- Quantify over these type variables
                      [EvVar],      -- ... and these constraints (fully zonked)
                      TcEvBinds,    -- ... binding these evidence variables
                      Bool)         -- True <=> the residual constraints are insoluble
-- Simplify the constraints from the RHS of a let-binding and decide what to
-- quantify over.  See Note [Inferring the type of a let-bound variable].
simplifyInfer rhs_tclvl infer_mode sigs name_taus wanteds
  | isEmptyWC wanteds   -- Fast path: nothing to solve, just quantify
  = do { -- When quantifying, we want to preserve any order of variables as they
         -- appear in partial signatures. cf. decideQuantifiedTyVars
         let psig_tv_tys = [ mkTyVarTy tv | sig <- partial_sigs
                                          , (_,Bndr tv _) <- sig_inst_skols sig ]
             psig_theta  = [ pred | sig <- partial_sigs
                                  , pred <- sig_inst_theta sig ]

       ; dep_vars <- candidateQTyVarsOfTypes (psig_tv_tys ++ psig_theta ++ map snd name_taus)
       ; skol_info <- mkSkolemInfo (InferSkol name_taus)
       ; qtkvs <- quantifyTyVars skol_info DefaultNonStandardTyVars dep_vars
       ; traceTc "simplifyInfer: empty WC" (ppr name_taus $$ ppr qtkvs)
       ; return (qtkvs, [], emptyTcEvBinds, False) }

  | otherwise
  = do { traceTc "simplifyInfer {" $ vcat
              [ text "sigs =" <+> ppr sigs
              , text "binds =" <+> ppr name_taus
              , text "rhs_tclvl =" <+> ppr rhs_tclvl
              , text "infer_mode =" <+> ppr infer_mode
              , text "(unzonked) wanted =" <+> ppr wanteds ]

       ; let psig_theta = concatMap sig_inst_theta partial_sigs

       -- First do full-blown solving
       -- NB: we must gather up all the bindings from doing
       -- this solving; hence (runTcSWithEvBinds ev_binds_var).
       -- And note that since there are nested implications,
       -- calling solveWanteds will side-effect their evidence
       -- bindings, so we can't just revert to the input
       -- constraint.
       ; ev_binds_var <- TcM.newTcEvBinds
       ; psig_evs     <- newWanteds AnnOrigin psig_theta
       ; wanted_transformed
            <- setTcLevel rhs_tclvl $
               runTcSWithEvBinds ev_binds_var $
               solveWanteds (mkSimpleWC psig_evs `andWC` wanteds)
               -- psig_evs : see Note [Add signature contexts as wanteds]
               -- See Note [Inferring principal types]

       -- Find quant_pred_candidates, the predicates that
       -- we'll consider quantifying over
       -- NB1: wanted_transformed does not include anything provable from
       --      the psig_theta; it's just the extra bit
       -- NB2: We do not do any defaulting when inferring a type, this can lead
       --      to less polymorphic types, see Note [Default while Inferring]
       ; wanted_transformed <- TcM.zonkWC wanted_transformed
       ; let definite_error = insolubleWC wanted_transformed
                              -- See Note [Quantification with errors]
             quant_pred_candidates
               | definite_error = []
               | otherwise      = ctsPreds (approximateWC False wanted_transformed)

       -- Decide what type variables and constraints to quantify
       -- NB: quant_pred_candidates is already fully zonked
       -- NB: bound_theta are constraints we want to quantify over,
       --     including the psig_theta, which we always quantify over
       -- NB: bound_theta are fully zonked
       -- The `rec` (RecursiveDo) is needed because skol_info, built from the
       -- quantified theta, is consumed by decideQuantification earlier in
       -- the same group.
       ; rec { (qtvs, bound_theta, co_vars) <- decideQuantification skol_info infer_mode rhs_tclvl
                                                     name_taus partial_sigs
                                                     quant_pred_candidates
             ; bound_theta_vars <- mapM TcM.newEvVar bound_theta

             ; let full_theta = map idType bound_theta_vars
             ; skol_info <- mkSkolemInfo (InferSkol [ (name, mkSigmaTy [] full_theta ty)
                                                    | (name, ty) <- name_taus ]) }

       -- Now emit the residual constraint
       ; emitResidualConstraints rhs_tclvl ev_binds_var
                                 name_taus co_vars qtvs bound_theta_vars
                                 wanted_transformed

         -- All done!
       ; traceTc "} simplifyInfer/produced residual implication for quantification" $
         vcat [ text "quant_pred_candidates =" <+> ppr quant_pred_candidates
              , text "psig_theta =" <+> ppr psig_theta
              , text "bound_theta =" <+> pprCoreBinders bound_theta_vars
              , text "qtvs =" <+> ppr qtvs
              , text "definite_error =" <+> ppr definite_error ]

       ; return ( qtvs, bound_theta_vars, TcEvBinds ev_binds_var, definite_error ) }
         -- NB: bound_theta_vars must be fully zonked
  where
    partial_sigs = filter isPartialSig sigs

--------------------
emitResidualConstraints :: TcLevel -> EvBindsVar
                        -> [(Name, TcTauType)]
                        -> CoVarSet -> [TcTyVar] -> [EvVar]
                        -> WantedConstraints -> TcM ()
-- Emit the remaining constraints from the RHS.
-- Simple constraints mentioning the free coercion variables of the inferred
-- type stay outside the implication; the rest go inside.
-- See Note [Emitting the residual implication in simplifyInfer].
emitResidualConstraints rhs_tclvl ev_binds_var
                        name_taus co_vars qtvs full_theta_vars wanteds
  | isEmptyWC wanteds
  = return ()

  | otherwise
  = do { wanted_simple <- TcM.zonkSimples (wc_simple wanteds)
       ; let (outer_simple, inner_simple) = partitionBag is_mono wanted_simple
             is_mono ct
               | Just ct_ev_id <- wantedEvId_maybe ct
               = ct_ev_id `elemVarSet` co_vars
               | otherwise
               = False
             -- Reason for the partition:
             -- see Note [Emitting the residual implication in simplifyInfer]

       -- Already done by defaultTyVarsAndSimplify
       -- ; _ <- TcM.promoteTyVarSet (tyCoVarsOfCts outer_simple)

       ; let inner_wanted = wanteds { wc_simple = inner_simple }
       ; implics <- if isEmptyWC inner_wanted
                    then return emptyBag
                    else do implic1 <- newImplication
                            return $ unitBag $
                                     implic1  { ic_tclvl     = rhs_tclvl
                                              , ic_skols     = qtvs
                                              , ic_given     = full_theta_vars
                                              , ic_wanted    = inner_wanted
                                              , ic_binds     = ev_binds_var
                                              , ic_given_eqs = MaybeGivenEqs
                                              , ic_info      = skol_info }

       ; emitConstraints (emptyWC { wc_simple = outer_simple
                                  , wc_impl   = implics }) }
  where
    full_theta = map idType full_theta_vars
    skol_info  = InferSkol [ (name, mkSigmaTy [] full_theta ty)
                           | (name, ty) <- name_taus ]
    -- We don't add the quantified variables here, because they are
    -- also bound in ic_skols and we want them to be tidied
    -- uniformly.
--------------------
ctsPreds :: Cts -> [PredType]
-- Extract the predicate of each constraint in the bag.
ctsPreds cts = [ ctEvPred ev | ct <- bagToList cts
                             , let ev = ctEvidence ct ]

findInferredDiff :: TcThetaType -> TcThetaType -> TcM TcThetaType
-- Given a partial type signature f :: (C a, D a, _) => blah
-- and the inferred constraints (X a, D a, Y a, C a)
-- compute the difference, which is what will fill in the "_" underscore,
-- In this case the diff is (X a, Y a).
findInferredDiff annotated_theta inferred_theta
  | null annotated_theta   -- Short cut the common case when the user didn't
  = return inferred_theta  -- write any constraints in the partial signature
  | otherwise
  = pushTcLevelM_ $
    do { lcl_env   <- TcM.getLclEnv
       ; given_ids <- mapM TcM.newEvVar annotated_theta
       ; wanteds   <- newWanteds AnnOrigin inferred_theta
       ; let given_loc = mkGivenLoc topTcLevel (getSkolemInfo unkSkol) lcl_env
             given_cts = mkGivens given_loc given_ids

       -- Solve the inferred constraints (as Wanteds) against the annotated
       -- ones (as Givens); whatever is left over is the difference.
       ; (residual, _) <- runTcS $
                          do { _ <- solveSimpleGivens given_cts
                             ; solveSimpleWanteds (listToBag (map mkNonCanonical wanteds)) }
         -- NB: There are no meta tyvars from this level annotated_theta
         -- because we have either promoted them or unified them
         -- See `Note [Quantification and partial signatures]` Wrinkle 2

       ; return (map (box_pred . ctPred) $
                 bagToList               $
                 wc_simple residual) }
  where
    box_pred :: PredType -> PredType
    -- Lift unboxed equalities (~#, ~R#) to their boxed class forms,
    -- since only predicate types can appear in a signature context.
    box_pred pred
      = case classifyPredType pred of
          EqPred rel ty1 ty2
            | Just (cls,tys) <- boxEqPred rel ty1 ty2
            -> mkClassPred cls tys
            | otherwise
            -> pprPanic "findInferredDiff" (ppr pred)
          _other -> pred

{- Note [Emitting the residual implication in simplifyInfer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
   f = e
where f's type is inferred to be something like (a, Proxy k (Int |> co))
and we have an as-yet-unsolved, or perhaps insoluble, constraint
   [W] co :: Type ~ k
We can't form types like (forall co. blah), so we can't generalise over
the coercion variable, and hence we can't generalise over things free in
its kind, in the case 'k'.
But we can still generalise over 'a'. So we'll generalise to f :: forall a. (a, Proxy k (Int |> co)) Now we do NOT want to form the residual implication constraint forall a. [W] co :: Type ~ k because then co's eventual binding (which will be a value binding if we use -fdefer-type-errors) won't scope over the entire binding for 'f' (whose type mentions 'co'). Instead, just as we don't generalise over 'co', we should not bury its constraint inside the implication. Instead, we must put it outside. That is the reason for the partitionBag in emitResidualConstraints, which takes the CoVars free in the inferred type, and pulls their constraints out. (NB: this set of CoVars should be closed-over-kinds.) All rather subtle; see #14584. Note [Add signature contexts as wanteds] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this (#11016): f2 :: (?x :: Int) => _ f2 = ?x or this class C a b | a -> b g :: C p q => p -> q f3 :: C Int b => _ f3 = g (3::Int) We'll use plan InferGen because there are holes in the type. But: * For f2 we want to have the (?x :: Int) constraint floating around so that the functional dependencies kick in. Otherwise the occurrence of ?x on the RHS produces constraint (?x :: alpha), and we won't unify alpha:=Int. * For f3 want the (C Int b) constraint from the partial signature to meet the (C Int beta) constraint we get from the call to g; again, fundeps Solution: in simplifyInfer, we add the constraints from the signature as extra Wanteds. Why Wanteds? Wouldn't it be neater to treat them as Givens? Alas that would mess up (GivenInv) in Note [TcLevel invariants]. Consider f :: (Eq a, _) => blah1 f = ....g... g :: (Eq b, _) => blah2 g = ...f... Then we have two psig_theta constraints (Eq a[tv], Eq b[tv]), both with TyVarTvs inside. Ultimately a[tv] := b[tv], but only when we've solved all those constraints. And both have level 1, so we can't put them as Givens when solving at level 1. Best to treat them as Wanteds. 
But see also #20076, which would be solved if they were Givens. ************************************************************************ * * Quantification * * ************************************************************************ Note [Deciding quantification] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the monomorphism restriction does not apply, then we quantify as follows: * Step 1: decideMonoTyVars. Take the global tyvars, and "grow" them using functional dependencies E.g. if x:alpha is in the environment, and alpha ~ [beta] (which can happen because alpha is untouchable here) then do not quantify over beta, because alpha fixes beta, and beta is effectively free in the environment too; this logic extends to general fundeps, not just equalities We also account for the monomorphism restriction; if it applies, add the free vars of all the constraints. Result is mono_tvs; we will not quantify over these. * Step 2: defaultTyVarsAndSimplify. Default any non-mono tyvars (i.e ones that are definitely not going to become further constrained), and re-simplify the candidate constraints. Motivation for re-simplification (#7857): imagine we have a constraint (C (a->b)), where 'a :: TYPE l1' and 'b :: TYPE l2' are not free in the envt, and instance forall (a::*) (b::*). (C a) => C (a -> b) The instance doesn't match while l1,l2 are polymorphic, but it will match when we default them to LiftedRep. This is all very tiresome. This step also promotes the mono_tvs from Step 1. See Note [Promote monomorphic tyvars]. In fact, the *only* use of the mono_tvs from Step 1 is to promote them here. This promotion effectively stops us from quantifying over them later, in Step 3. Because the actual variables to quantify over are determined in Step 3 (not in Step 1), it is OK for the mono_tvs to be missing some variables free in the environment. This is why removing the psig_qtvs is OK in decideMonoTyVars. Test case for this scenario: T14479. * Step 3: decideQuantifiedTyVars. 
Decide which variables to quantify over, as follows: - Take the free vars of the partial-type-signature types and constraints, and the tau-type (zonked_tau_tvs), and then "grow" them using all the constraints. These are grown_tcvs. See Note [growThetaTyVars vs closeWrtFunDeps]. - Use quantifyTyVars to quantify over the free variables of all the types involved, but only those in the grown_tcvs. Result is qtvs. * Step 4: Filter the constraints using pickQuantifiablePreds and the qtvs. We have to zonk the constraints first, so they "see" the freshly created skolems. Note [Lift equality constraints when quantifying] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We can't quantify over a constraint (t1 ~# t2) because that isn't a predicate type; see Note [Types for coercions, predicates, and evidence] in GHC.Core.TyCo.Rep. So we have to 'lift' it to (t1 ~ t2). Similarly (~R#) must be lifted to Coercible. This tiresome lifting is the reason that pick_me (in pickQuantifiablePreds) returns a Maybe rather than a Bool. Note [Inheriting implicit parameters] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this: f x = (x::Int) + ?y where f is *not* a top-level binding. From the RHS of f we'll get the constraint (?y::Int). There are two types we might infer for f: f :: Int -> Int (so we get ?y from the context of f's definition), or f :: (?y::Int) => Int -> Int At first you might think the first was better, because then ?y behaves like a free variable of the definition, rather than having to be passed at each call site. But of course, the WHOLE IDEA is that ?y should be passed at each call site (that's what dynamic binding means) so we'd better infer the second. BOTTOM LINE: when *inferring types* you must quantify over implicit parameters, *even if* they don't mention the bound type variables. Reason: because implicit parameters, uniquely, have local instance declarations. See pickQuantifiablePreds. 
Note [Quantifying over equality constraints] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Should we quantify over an equality constraint (s ~ t)? In general, we don't. Doing so may simply postpone a type error from the function definition site to its call site. (At worst, imagine (Int ~ Bool)). However, consider this forall a. (F [a] ~ Int) => blah Should we quantify over the (F [a] ~ Int). Perhaps yes, because at the call site we will know 'a', and perhaps we have instance F [Bool] = Int. So we *do* quantify over a type-family equality where the arguments mention the quantified variables. Note [Unconditionally resimplify constraints when quantifying] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ During quantification (in defaultTyVarsAndSimplify, specifically), we re-invoke the solver to simplify the constraints before quantifying them. We do this for two reasons, enumerated below. We could, in theory, detect when either of these cases apply and simplify only then, but collecting this information is bothersome, and simplifying redundantly causes no real harm. Note that this code path happens only for definitions * without a type signature * when -XMonoLocalBinds does not apply * with unsolved constraints and so the performance cost will be small. 1. Defaulting Defaulting the variables handled by defaultTyVar may unlock instance simplifications. Example (typecheck/should_compile/T20584b): with (t :: Double) (u :: String) = printf "..." t u We know the types of t and u, but we do not know the return type of `with`. So, we assume `with :: alpha`, where `alpha :: TYPE rho`. The type of printf is printf :: PrintfType r => String -> r The occurrence of printf is instantiated with a fresh var beta. We then get beta := Double -> String -> alpha and [W] PrintfType (Double -> String -> alpha) Module Text.Printf exports instance (PrintfArg a, PrintfType r) => PrintfType (a -> r) and it looks like that instance should apply. 
But I have elided some key details: (->) is polymorphic over multiplicity and runtime representation. Here it is in full glory: [W] PrintfType ((Double :: Type) %m1 -> (String :: Type) %m2 -> (alpha :: TYPE rho)) instance (PrintfArg a, PrintfType r) => PrintfType ((a :: Type) %Many -> (r :: Type)) Because we do not know that m1 is Many, we cannot use the instance. (Perhaps a better instance would have an explicit equality constraint to the left of =>, but that's not what we have.) Then, in defaultTyVarsAndSimplify, we get m1 := Many, m2 := Many, and rho := LiftedRep. Yet it's too late to simplify the quantified constraint, and thus GHC infers wait :: PrintfType (Double -> String -> t) => Double -> String -> t which is silly. Simplifying again after defaulting solves this problem. 2. Interacting functional dependencies Suppose we have class C a b | a -> b and we are running simplifyInfer over forall[2] x. () => [W] C a beta1[1] forall[2] y. () => [W] C a beta2[1] These are two implication constraints, both of which contain a wanted for the class C. Neither constraint mentions the bound skolem. We might imagine that these constraint could thus float out of their implications and then interact, causing beta1 to unify with beta2, but constraints do not currently float out of implications. Unifying the beta1 and beta2 is important. Without doing so, then we might infer a type like (C a b1, C a b2) => a -> a, which will fail to pass the ambiguity check, which will say (rightly) that it cannot unify b1 with b2, as required by the fundep interactions. This happens in the parsec library, and in test case typecheck/should_compile/FloatFDs. If we re-simplify, however, the two fundep constraints will interact, causing a unification between beta1 and beta2, and all will be well. The key step is that this simplification happens *after* the call to approximateWC in simplifyInfer. 
Note [Do not quantify over constraints that determine a variable] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider (typecheck/should_compile/tc231), where we're trying to infer the type of a top-level declaration. We have class Zork s a b | a -> b and the candidate constraint at the end of simplifyInfer is [W] Zork alpha (Z [Char]) beta We definitely do want to quantify over alpha (which is mentioned in the tau-type). But we do *not* want to quantify over beta: it is determined by the functional dependency on Zork: note that the second argument to Zork in the Wanted is a variable-free Z [Char]. The question here: do we want to quantify over the constraint? Definitely not. Since we're not quantifying over beta, GHC has no choice but to zap beta to Any, and then we infer a type involving (Zork a (Z [Char]) Any => ...). No no no. The no_fixed_dependencies check in pickQuantifiablePreds eliminates this candidate from the pool. Because there are no Zork instances in scope, this program is rejected. 
-}

decideQuantification
  :: SkolemInfo
  -> InferMode
  -> TcLevel
  -> [(Name, TcTauType)]   -- Variables to be generalised
  -> [TcIdSigInst]         -- Partial type signatures (if any)
  -> [PredType]            -- Candidate theta; already zonked
  -> TcM ( [TcTyVar]       -- Quantify over these (skolems)
         , [PredType]      -- and this context (fully zonked)
         , CoVarSet)
-- See Note [Deciding quantification]
decideQuantification skol_info infer_mode rhs_tclvl name_taus psigs candidates
  = do { -- Step 1: find the mono_tvs
       ; (mono_tvs, candidates, co_vars) <- decideMonoTyVars infer_mode
                                               name_taus psigs candidates

       -- Step 2: default any non-mono tyvars, and re-simplify
       -- This step may do some unification, but result candidates is zonked
       ; candidates <- defaultTyVarsAndSimplify rhs_tclvl mono_tvs candidates

       -- Step 3: decide which kind/type variables to quantify over
       ; qtvs <- decideQuantifiedTyVars skol_info name_taus psigs candidates

       -- Step 4: choose which of the remaining candidate
       --         predicates to actually quantify over
       -- NB: decideQuantifiedTyVars turned some meta tyvars
       -- into quantified skolems, so we have to zonk again
       ; candidates <- TcM.zonkTcTypes candidates
       ; psig_theta <- TcM.zonkTcTypes (concatMap sig_inst_theta psigs)
       ; let min_theta = mkMinimalBySCs id $  -- See Note [Minimize by Superclasses]
                         pickQuantifiablePreds (mkVarSet qtvs) candidates
             min_psig_theta = mkMinimalBySCs id psig_theta

       -- Add psig_theta back in here, even though it's already
       -- part of candidates, because we always want to quantify over
       -- psig_theta, and pickQuantifiableCandidates might have
       -- dropped some e.g. CallStack constraints.  c.f #14658
       --                   equalities (a ~ Bool)
       -- It's helpful to use the same "find difference" algorithm here as
       -- we use in GHC.Tc.Gen.Bind.chooseInferredQuantifiers (#20921)
       -- See Note [Constraints in partial type signatures]
       ; theta <- if null psig_theta
                  then return min_theta  -- Fast path for the non-partial-sig case
                  else do { diff <- findInferredDiff min_psig_theta min_theta
                          ; return (min_psig_theta ++ diff) }

       ; traceTc "decideQuantification"
           (vcat [ text "infer_mode:" <+> ppr infer_mode
                 , text "candidates:" <+> ppr candidates
                 , text "psig_theta:" <+> ppr psig_theta
                 , text "mono_tvs:"   <+> ppr mono_tvs
                 , text "co_vars:"    <+> ppr co_vars
                 , text "qtvs:"       <+> ppr qtvs
                 , text "theta:"      <+> ppr theta ])
       ; return (qtvs, theta, co_vars) }

{- Note [Constraints in partial type signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have a partial type signature
  f :: (Eq a, C a, _) => blah

We will ultimately quantify f over (Eq a, C a,