Safe Haskell | None |
---|---|
Language | Haskell2010 |
Everything needed to define data sources and to invoke the engine.
Synopsis
- newtype GenHaxl u w a = GenHaxl {}
- runHaxl :: forall u w a. Env u w -> GenHaxl u w a -> IO a
- runHaxlWithWrites :: forall u w a. Env u w -> GenHaxl u w a -> IO (a, [w])
- data Env u w = Env {
- dataCache :: !(DataCache (DataCacheItem u w))
- memoCache :: !(DataCache (DataCacheItem u w))
- memoKey :: !CallId
- flags :: !Flags
- userEnv :: u
- statsRef :: !(IORef Stats)
- statsBatchIdRef :: !(IORef Int)
- callIdRef :: !(IORef CallId)
- profCurrent :: ProfileCurrent
- profRef :: !(IORef Profile)
- states :: StateStore
- reqStoreRef :: !(IORef (RequestStore u))
- runQueueRef :: !(IORef (JobList u w))
- submittedReqsRef :: !(IORef ReqCountMap)
- completions :: !(TVar [CompleteReq u w])
- writeLogsRef :: !(IORef (WriteTree w))
- writeLogsRefNoMemo :: !(IORef (WriteTree w))
- type Caches u w = (DataCache (DataCacheItem u w), DataCache (DataCacheItem u w))
- caches :: Env u w -> Caches u w
- env :: (Env u w -> a) -> GenHaxl u w a
- withEnv :: Env u w -> GenHaxl u w a -> GenHaxl u w a
- withLabel :: ProfileLabel -> GenHaxl u w a -> GenHaxl u w a
- initEnvWithData :: StateStore -> u -> Caches u w -> IO (Env u w)
- initEnv :: StateStore -> u -> IO (Env u w)
- emptyEnv :: u -> IO (Env u w)
- sanitizeEnv :: Env u w -> IO (Env u w)
- data StateStore
- stateGet :: forall r. StateKey r => StateStore -> Maybe (State r)
- stateSet :: forall f. StateKey f => State f -> StateStore -> StateStore
- stateEmpty :: StateStore
- tellWrite :: w -> GenHaxl u w ()
- tellWriteNoMemo :: w -> GenHaxl u w ()
- throw :: Exception e => e -> GenHaxl u w a
- catch :: Exception e => GenHaxl u w a -> (e -> GenHaxl u w a) -> GenHaxl u w a
- catchIf :: Exception e => (e -> Bool) -> GenHaxl u w a -> (e -> GenHaxl u w a) -> GenHaxl u w a
- try :: Exception e => GenHaxl u w a -> GenHaxl u w (Either e a)
- tryToHaxlException :: GenHaxl u w a -> GenHaxl u w (Either HaxlException a)
- dataFetch :: (DataSource u r, Request r a) => r a -> GenHaxl u w a
- uncachedRequest :: forall a u w (r :: * -> *). (DataSource u r, Request r a) => r a -> GenHaxl u w a
- cacheRequest :: Request req a => req a -> Either SomeException a -> GenHaxl u w ()
- dupableCacheRequest :: Request req a => req a -> Either SomeException a -> GenHaxl u w ()
- cacheResult :: Request r a => r a -> IO a -> GenHaxl u w a
- cacheResultWithShow :: (Eq (r a), Hashable (r a), Typeable (r a)) => ShowReq r a -> r a -> IO a -> GenHaxl u w a
- cachedComputation :: forall req u w a. (Eq (req a), Hashable (req a), Typeable (req a)) => req a -> GenHaxl u w a -> GenHaxl u w a
- preCacheComputation :: forall req u w a. (Eq (req a), Hashable (req a), Typeable (req a)) => req a -> GenHaxl u w a -> GenHaxl u w a
- dumpCacheAsHaskell :: GenHaxl u w String
- newMemo :: GenHaxl u w (MemoVar u w a)
- newMemoWith :: GenHaxl u w a -> GenHaxl u w (MemoVar u w a)
- prepareMemo :: MemoVar u w a -> GenHaxl u w a -> GenHaxl u w ()
- runMemo :: MemoVar u w a -> GenHaxl u w a
- memo :: (Typeable a, Typeable k, Hashable k, Eq k) => k -> GenHaxl u w a -> GenHaxl u w a
- memoUnique :: (Typeable a, Typeable k, Hashable k, Eq k) => MemoFingerprintKey a -> Text -> k -> GenHaxl u w a -> GenHaxl u w a
- memoize :: GenHaxl u w a -> GenHaxl u w (GenHaxl u w a)
- memoize1 :: (Eq a, Hashable a) => (a -> GenHaxl u w b) -> GenHaxl u w (a -> GenHaxl u w b)
- memoize2 :: (Eq a, Hashable a, Eq b, Hashable b) => (a -> b -> GenHaxl u w c) -> GenHaxl u w (a -> b -> GenHaxl u w c)
- memoFingerprint :: Typeable a => MemoFingerprintKey a -> GenHaxl u w a -> GenHaxl u w a
- data MemoFingerprintKey a where
- MemoFingerprintKey :: !Word64 -> !Word64 -> Addr# -> Addr# -> MemoFingerprintKey a
- pAnd :: GenHaxl u w Bool -> GenHaxl u w Bool -> GenHaxl u w Bool
- pOr :: GenHaxl u w Bool -> GenHaxl u w Bool -> GenHaxl u w Bool
- unsafeChooseFirst :: GenHaxl u w a -> GenHaxl u w b -> GenHaxl u w (Either a b)
- newtype Stats = Stats [FetchStats]
- data FetchStats
- = FetchStats {
- fetchDataSource :: Text
- fetchBatchSize :: !Int
- fetchStart :: !Timestamp
- fetchDuration :: !Microseconds
- fetchSpace :: !Int64
- fetchFailures :: !Int
- fetchIgnoredFailures :: !Int
- fetchBatchId :: !Int
- fetchIds :: [CallId]
- | FetchCall {
- fetchReq :: String
- fetchStack :: [String]
- fetchStatId :: !CallId
- | MemoCall {
- memoStatId :: !CallId
- memoSpace :: !Int64
- | FetchWait { }
- type CallId = Int
- type Microseconds = Int64
- type Timestamp = Microseconds
- emptyStats :: Stats
- numFetches :: Stats -> Int
- ppStats :: Stats -> String
- ppFetchStats :: FetchStats -> String
- aggregateFetchBatches :: ([FetchStats] -> a) -> Stats -> [a]
- data Profile = Profile {}
- data ProfileMemo = ProfileMemo {}
- data ProfileFetch = ProfileFetch {}
- emptyProfile :: Profile
- type ProfileLabel = Text
- type ProfileKey = Int64
- data ProfileData = ProfileData {}
- emptyProfileData :: ProfileData
- type AllocCount = Int64
- type LabelHitCount = Int64
- data Flags = Flags {}
- defaultFlags :: Flags
- ifTrace :: Monad m => Flags -> Int -> m a -> m ()
- ifReport :: Monad m => Flags -> Int -> m a -> m ()
- ifProfiling :: Monad m => Flags -> m a -> m ()
- class (DataSourceName req, StateKey req, ShowP req) => DataSource u req where
- fetch :: State req -> Flags -> u -> PerformFetch req
- schedulerHint :: u -> SchedulerHint req
- classifyFailure :: u -> req a -> SomeException -> FailureClassification
- class ShowP f where
- class DataSourceName (req :: * -> *) where
- dataSourceName :: Proxy req -> Text
- type Request req a = (Eq (req a), Hashable (req a), Typeable (req a), Show (req a), Show a)
- data BlockedFetch r = forall a. BlockedFetch (r a) (ResultVar a)
- data PerformFetch req
- = SyncFetch ([BlockedFetch req] -> IO ())
- | AsyncFetch ([BlockedFetch req] -> IO () -> IO ())
- | BackgroundFetch ([BlockedFetch req] -> IO ())
- class Typeable f => StateKey (f :: * -> *) where
- data State f
- getStateType :: Proxy f -> TypeRep
- data SchedulerHint (req :: * -> *)
- data FailureClassification
- newtype ResultVar a = ResultVar (Either SomeException a -> Bool -> IO ())
- mkResultVar :: (Either SomeException a -> Bool -> IO ()) -> ResultVar a
- putFailure :: Exception e => ResultVar a -> e -> IO ()
- putResult :: ResultVar a -> Either SomeException a -> IO ()
- putSuccess :: ResultVar a -> a -> IO ()
- putResultFromChildThread :: ResultVar a -> Either SomeException a -> IO ()
- asyncFetch :: ((service -> IO ()) -> IO ()) -> (service -> IO ()) -> (forall a. service -> request a -> IO (IO (Either SomeException a))) -> State request -> Flags -> u -> PerformFetch request
- asyncFetchWithDispatch :: ((service -> IO ()) -> IO ()) -> (service -> IO ()) -> (service -> IO ()) -> (forall a. service -> request a -> IO (IO (Either SomeException a))) -> State request -> Flags -> u -> PerformFetch request
- asyncFetchAcquireRelease :: IO service -> (service -> IO ()) -> (service -> IO ()) -> (service -> IO ()) -> (forall a. service -> request a -> IO (IO (Either SomeException a))) -> State request -> Flags -> u -> PerformFetch request
- backgroundFetchSeq :: (forall a. request a -> IO (Either SomeException a)) -> State request -> Flags -> u -> PerformFetch request
- backgroundFetchPar :: (forall a. request a -> IO (Either SomeException a)) -> State request -> Flags -> u -> PerformFetch request
- backgroundFetchAcquireRelease :: IO service -> (service -> IO ()) -> (service -> Int -> StablePtr PrimMVar -> IO ()) -> (service -> IO ()) -> (forall a. service -> request a -> IO (IO (Either SomeException a))) -> State request -> Flags -> u -> PerformFetch request
- backgroundFetchAcquireReleaseMVar :: IO service -> (service -> IO ()) -> (service -> Int -> MVar () -> IO ()) -> (service -> IO ()) -> (forall a. service -> request a -> IO (IO (Either SomeException a))) -> State request -> Flags -> u -> PerformFetch request
- stubFetch :: Exception e => (forall a. r a -> e) -> State r -> Flags -> u -> PerformFetch r
- syncFetch :: ((service -> IO ()) -> IO ()) -> (service -> IO ()) -> (forall a. service -> request a -> IO (IO (Either SomeException a))) -> State request -> Flags -> u -> PerformFetch request
- except :: Exception e => e -> Either SomeException a
- setError :: Exception e => (forall a. r a -> e) -> BlockedFetch r -> IO ()
- getMapFromRCMap :: ReqCountMap -> Map Text (Map TypeRep Int)
- module Haxl.Core.Exception
- module Haxl.Core.CallGraph
The monad and operations
newtype GenHaxl u w a Source #
The Haxl monad, which does several things:
- It is a reader monad for Env, which contains the current state of the scheduler, including unfetched requests and the run queue of computations.
- It is a writer monad for WriteTree. These can be used to do arbitrary "logs" from any Haxl computation. These are better than doing arbitrary IO from a Haxl computation, because the writes also get memoized if the Haxl computation associated with them is memoized: if the memoized computation is run again, its writes are replayed as well.
Instances
Monad (GenHaxl u w) Source # | |
Functor (GenHaxl u w) Source # | |
Applicative (GenHaxl u w) Source # | |
Defined in Haxl.Core.Monad | |
MonadThrow (GenHaxl u w) Source # | Since: 0.3.1.0 |
Defined in Haxl.Core.Monad | |
MonadCatch (GenHaxl u w) Source # | Since: 0.3.1.0 |
Fractional a => Fractional (GenHaxl u w a) Source # | |
Num a => Num (GenHaxl u w a) Source # | |
Defined in Haxl.Prelude (+) :: GenHaxl u w a -> GenHaxl u w a -> GenHaxl u w a # (-) :: GenHaxl u w a -> GenHaxl u w a -> GenHaxl u w a # (*) :: GenHaxl u w a -> GenHaxl u w a -> GenHaxl u w a # negate :: GenHaxl u w a -> GenHaxl u w a # abs :: GenHaxl u w a -> GenHaxl u w a # signum :: GenHaxl u w a -> GenHaxl u w a # fromInteger :: Integer -> GenHaxl u w a # | |
IsString a => IsString (GenHaxl u w a) Source # | |
Defined in Haxl.Core.Monad fromString :: String -> GenHaxl u w a # | |
Semigroup a => Semigroup (GenHaxl u w a) Source # | |
Monoid a => Monoid (GenHaxl u w a) Source # | |
u1 ~ u2 => IfThenElse (GenHaxl u1 w Bool) (GenHaxl u2 w a) Source # | |
Defined in Haxl.Prelude |
runHaxl :: forall u w a. Env u w -> GenHaxl u w a -> IO a Source #
Runs a Haxl computation in the given Env.

Note: to make multiple concurrent calls to runHaxl, each one must have a separate Env. A single Env must not be shared between multiple concurrent calls to runHaxl, otherwise deadlocks or worse will likely ensue.

However, multiple Envs may share a single StateStore, and thereby use the same set of datasources.
data Env u w Source #

Env | |
type Caches u w = (DataCache (DataCacheItem u w), DataCache (DataCacheItem u w)) Source #
Operations in the monad
withEnv :: Env u w -> GenHaxl u w a -> GenHaxl u w a Source #
Returns a version of the Haxl computation which always uses the provided Env, ignoring the one specified by runHaxl.
withLabel :: ProfileLabel -> GenHaxl u w a -> GenHaxl u w a Source #
Label a computation so profiling data is attributed to the label.
Building the Env
initEnvWithData :: StateStore -> u -> Caches u w -> IO (Env u w) Source #
Initializes an environment with a StateStore, an input map, and preexisting caches (see Caches).
initEnv :: StateStore -> u -> IO (Env u w) Source #
Initializes an environment with a StateStore and an input map.
sanitizeEnv :: Env u w -> IO (Env u w) Source #
If you're using the Env from a failed Haxl computation in a second Haxl computation, it is recommended to sanitize the Env to remove all empty IVars - especially if it's possible the first Haxl computation could have been interrupted via an async exception. This is because if the Haxl computation was interrupted by an exception, it's possible that there are entries in the cache which are still blocked, while the results of fetches that had already been dispatched have been discarded.
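A small sketch of the pattern the docstring describes (the function name is illustrative, not part of the library):

    -- Reuse an Env (and its cache) after a run that may have been interrupted,
    -- dropping any still-blocked cache entries first.
    rerunWithSameCache :: Env u w -> GenHaxl u w a -> IO a
    rerunWithSameCache oldEnv h = do
      env' <- sanitizeEnv oldEnv
      runHaxl env' h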
Building the StateStore
data StateStore Source #
The StateStore maps a StateKey to the State for that type.
Instances
Semigroup StateStore Source # | |
Defined in Haxl.Core.StateStore (<>) :: StateStore -> StateStore -> StateStore # sconcat :: NonEmpty StateStore -> StateStore # stimes :: Integral b => b -> StateStore -> StateStore # | |
Monoid StateStore Source # | |
Defined in Haxl.Core.StateStore mempty :: StateStore # mappend :: StateStore -> StateStore -> StateStore # mconcat :: [StateStore] -> StateStore # |
stateGet :: forall r. StateKey r => StateStore -> Maybe (State r) Source #
Retrieves a State
from the StateStore
container.
stateSet :: forall f. StateKey f => State f -> StateStore -> StateStore Source #
Inserts a State
in the StateStore
container.
stateEmpty :: StateStore Source #
A StateStore
with no entries.
Writes inside the monad
tellWrite :: w -> GenHaxl u w () Source #

tellWriteNoMemo :: w -> GenHaxl u w () Source #
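A brief sketch of how writes are emitted and collected (the String write type and the names here are just for illustration; runHaxlWithWrites returns the flattened write log alongside the result):

    logged :: GenHaxl u String Int
    logged = do
      tellWrite "starting"
      let x = 21 * 2
      tellWrite "finished"
      return x

    -- (result, logs) <- runHaxlWithWrites env logged
    -- result == 42; logs contains "starting" and "finished"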
Exceptions
catch :: Exception e => GenHaxl u w a -> (e -> GenHaxl u w a) -> GenHaxl u w a Source #
Catch an exception in the Haxl monad
catchIf :: Exception e => (e -> Bool) -> GenHaxl u w a -> (e -> GenHaxl u w a) -> GenHaxl u w a Source #
Catch exceptions that satisfy a predicate
tryToHaxlException :: GenHaxl u w a -> GenHaxl u w (Either HaxlException a) Source #
Like try, but lifts all exceptions into the HaxlException hierarchy. Uses unsafeToHaxlException internally. Typically this is used at the top level of a Haxl computation, to ensure that all exceptions are caught.
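A small sketch of throw, catch and try in the Haxl monad (the LookupError type is hypothetical, defined only for this example):

    import Control.Exception (Exception)
    import Haxl.Core

    -- A hypothetical exception type for illustration.
    data LookupError = LookupError String deriving Show
    instance Exception LookupError

    -- Recover from a thrown exception with a default value.
    withDefault :: GenHaxl u w Int
    withDefault = throw (LookupError "missing") `catch` \(LookupError _) -> return 0

    -- Or keep the exception as a value.
    attempted :: GenHaxl u w (Either LookupError Int)
    attempted = try (throw (LookupError "missing"))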
Data fetching and caching
dataFetch :: (DataSource u r, Request r a) => r a -> GenHaxl u w a Source #
Performs actual fetching of data for a Request from a DataSource.
uncachedRequest :: forall a u w (r :: * -> *). (DataSource u r, Request r a) => r a -> GenHaxl u w a Source #
A data request that is not cached. This is not what you want for normal read requests, because then multiple identical requests may return different results, and this invalidates some of the properties that we expect Haxl computations to respect: that data fetches can be arbitrarily reordered, and identical requests can be commoned up, for example.
uncachedRequest
is useful for performing writes, provided those
are done in a safe way - that is, not mixed with reads that might
conflict in the same Haxl computation.
If we are recording or running a test, we fall back to using dataFetch. This allows us to store the request in the cache when recording, which allows a transparent run afterwards. Without this, the test would try to call the data source during testing, and that would be an exception.
cacheRequest :: Request req a => req a -> Either SomeException a -> GenHaxl u w () Source #
Inserts a request/result pair into the cache. Throws an exception if the request has already been issued, either via dataFetch or cacheRequest.
This can be used to pre-populate the cache when running tests, to avoid going to the actual data source and ensure that results are deterministic.
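A sketch of pre-populating the cache for a test, reusing the hypothetical GetName constructor from the data-source sketch below (the function name is illustrative):

    import Control.Exception (ErrorCall (..), toException)

    populateTestCache :: GenHaxl u w ()
    populateTestCache = do
      cacheRequest (GetName 1) (Right "alice")
      cacheRequest (GetName 2) (Left (toException (ErrorCall "no such user")))

Run this in the Env before the code under test, and subsequent dataFetch calls for those requests never reach the real data source.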
dupableCacheRequest :: Request req a => req a -> Either SomeException a -> GenHaxl u w () Source #
Similar to cacheRequest
but doesn't throw an exception if the key
already exists in the cache.
If this function is called twice to cache the same Haxl request, the first
value will be discarded and overwritten with the second value.
Useful e.g. for unit tests
cacheResultWithShow :: (Eq (r a), Hashable (r a), Typeable (r a)) => ShowReq r a -> r a -> IO a -> GenHaxl u w a Source #
Transparently provides caching in the same way as cacheResult
, but uses
the given functions to show requests and their results.
cachedComputation :: forall req u w a. (Eq (req a), Hashable (req a), Typeable (req a)) => req a -> GenHaxl u w a -> GenHaxl u w a Source #
cachedComputation memoizes a Haxl computation. The key is a request.

Note: These cached computations will not be included in the output of dumpCacheAsHaskell.
preCacheComputation :: forall req u w a. (Eq (req a), Hashable (req a), Typeable (req a)) => req a -> GenHaxl u w a -> GenHaxl u w a Source #
Like cachedComputation, but fails if the cache is already populated.

Memoization can be (ab)used to "mock" a cached computation, by pre-populating the cache with an alternative implementation. In that case we don't want the operation that populates the cache to silently succeed if the cache is already populated.
dumpCacheAsHaskell :: GenHaxl u w String Source #
Dump the contents of the cache as Haskell code that, when compiled and run, will recreate the same cache contents. For example, the generated code looks something like this:
    loadCache :: GenHaxl u w ()
    loadCache = do
      cacheRequest (ListWombats 3) (Right ([1,2,3]))
      cacheRequest (CountAardvarks "abcabc") (Right (2))
Memoization
newMemo :: GenHaxl u w (MemoVar u w a) Source #
Create a new MemoVar for storing a memoized computation. The created MemoVar is initially empty, not tied to any specific computation. Running this memo (with runMemo) without preparing it first (with prepareMemo) will result in an exception.
newMemoWith :: GenHaxl u w a -> GenHaxl u w (MemoVar u w a) Source #
Convenience function, combines newMemo and prepareMemo.
prepareMemo :: MemoVar u w a -> GenHaxl u w a -> GenHaxl u w () Source #
Store a computation within a supplied MemoVar
. Any memo stored within the
MemoVar
already (regardless of completion) will be discarded, in favor of
the supplied computation. A MemoVar
must be prepared before it is run.
runMemo :: MemoVar u w a -> GenHaxl u w a Source #
Continue the memoized computation within a given MemoVar.
Notes:
- If the memo contains a complete result, return that result.
- If the memo contains an in-progress computation, continue it as far as possible for this round.
- If the memo is empty (it was not prepared), throw an error.
For example, to memoize the computation one given by:

    one :: Haxl Int
    one = return 1

use:

    do
      oneMemo <- newMemoWith one
      let memoizedOne = runMemo oneMemo
      oneResult <- memoizedOne
To memoize mutually dependent computations such as in:
    h :: Haxl Int
    h = do
      a <- f
      b <- g
      return (a + b)
      where
        f = return 42
        g = succ <$> f
without needing to reorder them, use:
    h :: Haxl Int
    h = do
      fMemoRef <- newMemo
      gMemoRef <- newMemo
      let f = runMemo fMemoRef
          g = runMemo gMemoRef
      prepareMemo fMemoRef $ return 42
      prepareMemo gMemoRef $ succ <$> f
      a <- f
      b <- g
      return (a + b)
memo :: (Typeable a, Typeable k, Hashable k, Eq k) => k -> GenHaxl u w a -> GenHaxl u w a Source #
Memoize a computation using an arbitrary key. The result will be calculated once; the second and subsequent times it will be returned immediately. It is the caller's responsibility to ensure that for every two calls memo key haxl, if they have the same key then they compute the same result.
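A small sketch of memoizing an expensive sub-computation under an arbitrary key (names are illustrative; the key only needs Eq, Hashable and Typeable, so a (String, Int) pair works):

    expensiveFor :: Int -> GenHaxl u w Integer
    expensiveFor n = memo ("expensiveFor" :: String, n) $
      -- any Haxl computation; evaluated once per distinct key within a request
      return (product [1 .. fromIntegral n])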
memoUnique :: (Typeable a, Typeable k, Hashable k, Eq k) => MemoFingerprintKey a -> Text -> k -> GenHaxl u w a -> GenHaxl u w a Source #
Memoize a computation using its location and a Fingerprint. This ensures uniqueness across computations.
memoize :: GenHaxl u w a -> GenHaxl u w (GenHaxl u w a) Source #
Transform a Haxl computation into a memoized version of itself.
Given a Haxl computation, memoize
creates a version which stores its result
in a MemoVar
(which memoize
creates), and returns the stored result on
subsequent invocations. This permits the creation of local memos, whose
lifetimes are scoped to the current function, rather than the entire request.
memoize1 :: (Eq a, Hashable a) => (a -> GenHaxl u w b) -> GenHaxl u w (a -> GenHaxl u w b) Source #
Transform a 1-argument function returning a Haxl computation into a memoized version of itself.
Given a function f
of type a -> GenHaxl u w b
, memoize1
creates a version
which memoizes the results of f
in a table keyed by its argument, and
returns stored results on subsequent invocations with the same argument.
e.g.:
    allFriends :: [Int] -> GenHaxl u w [Int]
    allFriends ids = do
      memoizedFriendsOf <- memoize1 friendsOf
      concat <$> mapM memoizedFriendsOf ids
The above implementation will not invoke the underlying friendsOf repeatedly for duplicate values in ids.
memoize2 :: (Eq a, Hashable a, Eq b, Hashable b) => (a -> b -> GenHaxl u w c) -> GenHaxl u w (a -> b -> GenHaxl u w c) Source #
Transform a 2-argument function returning a Haxl computation, into a memoized version of itself.
The 2-ary version of memoize1
, see its documentation for details.
memoFingerprint :: Typeable a => MemoFingerprintKey a -> GenHaxl u w a -> GenHaxl u w a Source #
data MemoFingerprintKey a where Source #
A memo key derived from a 128-bit MD5 hash. Do not use this directly, it is for use by automatically-generated memoization.
MemoFingerprintKey :: !Word64 -> !Word64 -> Addr# -> Addr# -> MemoFingerprintKey a |
Instances
Eq (MemoFingerprintKey a) Source # | |
Defined in Haxl.Core.Memo (==) :: MemoFingerprintKey a -> MemoFingerprintKey a -> Bool # (/=) :: MemoFingerprintKey a -> MemoFingerprintKey a -> Bool # | |
Hashable (MemoFingerprintKey a) Source # | |
Defined in Haxl.Core.Memo hashWithSalt :: Int -> MemoFingerprintKey a -> Int # hash :: MemoFingerprintKey a -> Int # |
Conditionals
pAnd :: GenHaxl u w Bool -> GenHaxl u w Bool -> GenHaxl u w Bool infixr 5 Source #
Parallel version of '(.&&)'. Both arguments are evaluated in
parallel, and if either returns False
then the other is
not evaluated any further.
WARNING: exceptions may be unpredictable when using pAnd
. If one
argument returns False
before the other completes, then pAnd
returns False
immediately, ignoring a possible exception that
the other argument may have produced if it had been allowed to
complete.
pOr :: GenHaxl u w Bool -> GenHaxl u w Bool -> GenHaxl u w Bool infixr 4 Source #
Parallel version of '(.||)'. Both arguments are evaluated in
parallel, and if either returns True
then the other is
not evaluated any further.
WARNING: exceptions may be unpredictable when using pOr
. If one
argument returns True
before the other completes, then pOr
returns True
immediately, ignoring a possible exception that
the other argument may have produced if it had been allowed to
complete.
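A sketch of how these combinators compose; the two predicates are placeholder Haxl computations standing in for real data fetches:

    isPublic, isBlocked :: Int -> GenHaxl u w Bool
    isPublic  x = return (even x)  -- placeholder logic
    isBlocked x = return (x < 0)   -- placeholder logic

    -- Both checks run in parallel; the result short-circuits as soon as either
    -- side determines the answer.
    canSee :: Int -> GenHaxl u w Bool
    canSee x = isPublic x `pAnd` fmap not (isBlocked x)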
unsafeChooseFirst :: GenHaxl u w a -> GenHaxl u w b -> GenHaxl u w (Either a b) Source #
This function takes two Haxl computations as input, and returns the output of whichever computation finished first. This is clearly non-deterministic in its output and exception behavior; be careful when using it.
Statistics
newtype Stats Source #

Stats that we collect along the way.
data FetchStats Source #
Maps data source name to the number of requests made in that round. The map only contains entries for sources that made requests in that round.
FetchStats | Timing stats for a (batched) data fetch |
FetchCall | The stack trace of a call to dataFetch |
MemoCall | |
FetchWait | |
Instances
Eq FetchStats Source # | |
Defined in Haxl.Core.Stats (==) :: FetchStats -> FetchStats -> Bool # (/=) :: FetchStats -> FetchStats -> Bool # | |
Show FetchStats Source # | |
Defined in Haxl.Core.Stats showsPrec :: Int -> FetchStats -> ShowS # show :: FetchStats -> String # showList :: [FetchStats] -> ShowS # | |
ToJSON FetchStats Source # | |
Defined in Haxl.Core.Stats toJSON :: FetchStats -> Value # toEncoding :: FetchStats -> Encoding # toJSONList :: [FetchStats] -> Value # toEncodingList :: [FetchStats] -> Encoding # |
type Microseconds = Int64 Source #
type Timestamp = Microseconds Source #
emptyStats :: Stats Source #
numFetches :: Stats -> Int Source #
ppFetchStats :: FetchStats -> String Source #
Pretty-print a FetchStats.
aggregateFetchBatches :: ([FetchStats] -> a) -> Stats -> [a] Source #
Aggregate stats merging FetchStats from the same dispatched batch into one.
data Profile Source #

Profile | |
data ProfileMemo Source #
Instances
Eq ProfileMemo Source # | |
Defined in Haxl.Core.Stats (==) :: ProfileMemo -> ProfileMemo -> Bool # (/=) :: ProfileMemo -> ProfileMemo -> Bool # | |
Show ProfileMemo Source # | |
Defined in Haxl.Core.Stats showsPrec :: Int -> ProfileMemo -> ShowS # show :: ProfileMemo -> String # showList :: [ProfileMemo] -> ShowS # |
data ProfileFetch Source #
Instances
Eq ProfileFetch Source # | |
Defined in Haxl.Core.Stats (==) :: ProfileFetch -> ProfileFetch -> Bool # (/=) :: ProfileFetch -> ProfileFetch -> Bool # | |
Show ProfileFetch Source # | |
Defined in Haxl.Core.Stats showsPrec :: Int -> ProfileFetch -> ShowS # show :: ProfileFetch -> String # showList :: [ProfileFetch] -> ShowS # |
type ProfileLabel = Text Source #
type ProfileKey = Int64 Source #
data ProfileData Source #
ProfileData | |
Instances
Show ProfileData Source # | |
Defined in Haxl.Core.Stats showsPrec :: Int -> ProfileData -> ShowS # show :: ProfileData -> String # showList :: [ProfileData] -> ShowS # |
type AllocCount = Int64 Source #
type LabelHitCount = Int64 Source #
Tracing flags
data Flags Source #

Flags that control the operation of the engine.

Flags | |
defaultFlags :: Flags Source #
ifTrace :: Monad m => Flags -> Int -> m a -> m () Source #
Runs an action if the tracing level is above the given threshold.
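For instance (a minimal sketch; the message is illustrative):

    -- Run the action only when the tracing level in Flags passes the threshold of 2.
    traceDispatch :: Flags -> IO ()
    traceDispatch flags = ifTrace flags 2 (putStrLn "dispatching batch")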
ifReport :: Monad m => Flags -> Int -> m a -> m () Source #
Runs an action if the report level is above the given threshold.
ifProfiling :: Monad m => Flags -> m a -> m () Source #
Building data sources
class (DataSourceName req, StateKey req, ShowP req) => DataSource u req where Source #
The class of data sources, parameterised over the request type for that data source. Every data source must implement this class.
A data source keeps track of its state by creating an instance of
StateKey
to map the request type to its state. In this case, the
type of the state should probably be a reference type of some kind,
such as IORef
.
For a complete example data source, see Examples.
fetch Source #

:: State req | Current state. |
-> Flags | Tracing flags. |
-> u | User environment. |
-> PerformFetch req | Fetch the data; see PerformFetch. |
Issues a list of fetches to this DataSource
. The BlockedFetch
objects contain both the request and the ResultVar
s into which to put
the results.
schedulerHint :: u -> SchedulerHint req Source #
classifyFailure :: u -> req a -> SomeException -> FailureClassification Source #
Instances
(Typeable tag, ShowP (ConcurrentIOReq tag), ConcurrentIO tag) => DataSource u (ConcurrentIOReq tag) Source # | |
Defined in Haxl.DataSource.ConcurrentIO fetch :: State (ConcurrentIOReq tag) -> Flags -> u -> PerformFetch (ConcurrentIOReq tag) Source # schedulerHint :: u -> SchedulerHint (ConcurrentIOReq tag) Source # classifyFailure :: u -> ConcurrentIOReq tag a -> SomeException -> FailureClassification Source # |
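A complete, if hypothetical, data source looks roughly like this. All names here (UserReq, GetName, UserState, UserDataSource) are illustrative and not part of the library:

    {-# LANGUAGE GADTs, TypeFamilies, StandaloneDeriving,
                 MultiParamTypeClasses, FlexibleInstances, OverloadedStrings #-}
    import Data.Hashable (Hashable (..))
    import Haxl.Core

    -- The request GADT: one constructor per kind of request, indexed by result type.
    data UserReq a where
      GetName :: Int -> UserReq String

    deriving instance Show (UserReq a)
    deriving instance Eq (UserReq a)

    instance ShowP UserReq where showp = show

    instance Hashable (UserReq a) where
      hashWithSalt s (GetName uid) = hashWithSalt s (0 :: Int, uid)

    instance DataSourceName UserReq where
      dataSourceName _ = "UserDataSource"

    instance StateKey UserReq where
      data State UserReq = UserState

    instance DataSource u UserReq where
      fetch UserState _flags _userEnv = SyncFetch (mapM_ fill)
        where
          fill :: BlockedFetch UserReq -> IO ()
          fill (BlockedFetch (GetName uid) var) =
            putSuccess var ("user-" ++ show uid)  -- stand-in for a real lookup

With these instances in scope, initEnv (stateSet UserState stateEmpty) () produces an Env that can serve dataFetch (GetName 1).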
class ShowP f where Source #

A class of type constructors for which we can show all parameterizations.
class DataSourceName (req :: * -> *) where Source #
dataSourceName :: Proxy req -> Text Source #
The name of this DataSource, used in tracing and stats. Takes a Proxy identifying the request type rather than an actual request.
Instances
Typeable tag => DataSourceName (ConcurrentIOReq tag) Source # | |
Defined in Haxl.DataSource.ConcurrentIO dataSourceName :: Proxy (ConcurrentIOReq tag) -> Text Source # |
type Request req a = (Eq (req a), Hashable (req a), Typeable (req a), Show (req a), Show a) Source #
data BlockedFetch r Source #
A BlockedFetch is a pair of:

- The request to fetch (with result type a)
- A ResultVar to store either the result or an error
We often want to collect together multiple requests, but they return
different types, and the type system wouldn't let us put them
together in a list because all the elements of the list must have the
same type. So we wrap up these types inside the BlockedFetch
type,
so that they all look the same and we can put them in a list.
When we unpack the BlockedFetch
and get the request and the ResultVar
out, the type system knows that the result type of the request
matches the type parameter of the ResultVar
, so it will let us take the
result of the request and store it in the ResultVar
.
BlockedFetch (r a) (ResultVar a) |
data PerformFetch req Source #
A data source can fetch data in one of three ways.
SyncFetch ([BlockedFetch req] -> IO ()) | Fully synchronous, returns only when all the data is fetched. See syncFetch. |
AsyncFetch ([BlockedFetch req] -> IO () -> IO ()) | Asynchronous; performs an arbitrary IO action while the data is being fetched, but only returns when all the data is fetched. See asyncFetch. |
BackgroundFetch ([BlockedFetch req] -> IO ()) | Fetches the data in the background, calling putResult at any time in the future. |
class Typeable f => StateKey (f :: * -> *) where Source #
StateKey maps one type to another type. A type that is an instance of StateKey can store and retrieve information from a StateStore.
Minimal complete definition: Nothing
getStateType :: Proxy f -> TypeRep Source #
We default this to typeOf1, but if f is itself a complex type that is already applied to some parameters, we want to be able to use the same state by using typeOf2, etc.
Instances
Typeable tag => StateKey (ConcurrentIOReq tag) Source # | |
Defined in Haxl.DataSource.ConcurrentIO data State (ConcurrentIOReq tag) :: Type Source # getStateType :: Proxy (ConcurrentIOReq tag) -> TypeRep Source # |
data SchedulerHint (req :: * -> *) Source #
Hints to the scheduler about this data source
TryToBatch | Hold data-source requests while we execute as much as we can, so that we can hopefully collect more requests to batch. |
SubmitImmediately | Submit a request via fetch as soon as we have one, don't try to batch multiple requests. This is really only useful if the data source returns BackgroundFetch, otherwise requests to this data source will be performed synchronously, one at a time. |
data FailureClassification Source #
Hints to the stats module about how to deal with these failures
Result variables
newtype ResultVar a Source #

A sink for the result of a data fetch in BlockedFetch
ResultVar (Either SomeException a -> Bool -> IO ()) |
mkResultVar :: (Either SomeException a -> Bool -> IO ()) -> ResultVar a Source #
putSuccess :: ResultVar a -> a -> IO () Source #
putResultFromChildThread :: ResultVar a -> Either SomeException a -> IO () Source #
Like putResult, but used to get correct accounting when work is being done in child threads. This is particularly important for data sources that are using BackgroundFetch. The allocation performed in the child thread up to this point will be propagated back to the thread that called runHaxl.

Note: if you're doing multiple putResult calls in the same thread, ensure that only the last one is putResultFromChildThread. If you make multiple putResultFromChildThread calls, the allocation will be counted multiple times.

If you are reusing a thread for multiple fetches, you should call System.Mem.setAllocationCounter 0 after putResultFromChildThread, so that allocation is not counted multiple times.
Default fetch implementations
asyncFetch Source #

:: ((service -> IO ()) -> IO ()) | Wrapper to perform an action in the context of a service. |
-> (service -> IO ()) | Dispatch all the pending requests and wait for the results |
-> (forall a. service -> request a -> IO (IO (Either SomeException a))) | Submits an individual request to the service. |
-> State request | Currently unused. |
-> Flags | Currently unused. |
-> u | Currently unused. |
-> PerformFetch request |
asyncFetchWithDispatch Source #
:: ((service -> IO ()) -> IO ()) | Wrapper to perform an action in the context of a service. |
-> (service -> IO ()) | Dispatch all the pending requests |
-> (service -> IO ()) | Wait for the results |
-> (forall a. service -> request a -> IO (IO (Either SomeException a))) | Enqueue an individual request to the service. |
-> State request | Currently unused. |
-> Flags | Currently unused. |
-> u | Currently unused. |
-> PerformFetch request |
Common implementation templates for fetch of DataSource.
Example usage:
    fetch = syncFetch MyDS.withService MyDS.retrieve $ \service request ->
      case request of
        This x -> MyDS.fetchThis service x
        That y -> MyDS.fetchThat service y
asyncFetchAcquireRelease Source #
:: IO service | Resource acquisition for this datasource |
-> (service -> IO ()) | Resource release |
-> (service -> IO ()) | Dispatch all the pending requests and wait for the results |
-> (service -> IO ()) | Wait for the results |
-> (forall a. service -> request a -> IO (IO (Either SomeException a))) | Submits an individual request to the service. |
-> State request | Currently unused. |
-> Flags | Currently unused. |
-> u | Currently unused. |
-> PerformFetch request |
A version of asyncFetch (actually asyncFetchWithDispatch) that handles exceptions correctly. You should use this instead of asyncFetch or asyncFetchWithDispatch. The danger with asyncFetch is that if an exception is thrown by withService, the inner action won't be executed, and we'll drop some data-fetches in the same round.

asyncFetchAcquireRelease behaves like the following:
    asyncFetchAcquireRelease acquire release dispatch wait enqueue =
      AsyncFetch $ \requests inner ->
        bracket acquire release $ \service -> do
          getResults <- mapM (submitFetch service enqueue) requests
          dispatch service
          inner
          wait service
          sequence_ getResults
except that inner is run even if acquire, enqueue, or dispatch throws, unless an async exception is received.
backgroundFetchSeq Source #

:: (forall a. request a -> IO (Either SomeException a)) | Run one request, will be run in a background thread |
-> State request | Currently unused. |
-> Flags | Currently unused. |
-> u | Currently unused. |
-> PerformFetch request |
backgroundFetchPar Source #

:: (forall a. request a -> IO (Either SomeException a)) | Run one request, will be run in a background thread |
-> State request | Currently unused. |
-> Flags | Currently unused. |
-> u | Currently unused. |
-> PerformFetch request |
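As a quick sketch of how backgroundFetchSeq slots into a data source, the fetch for the hypothetical UserReq source sketched under "Building data sources" above could be written as (names are illustrative):

    {-# LANGUAGE GADTs #-}
    import Control.Exception (SomeException)
    import Haxl.Core

    -- Each request is run sequentially in a background thread.
    userFetch :: State UserReq -> Flags -> u -> PerformFetch UserReq
    userFetch = backgroundFetchSeq run
      where
        run :: UserReq a -> IO (Either SomeException a)
        run (GetName uid) = return (Right ("user-" ++ show uid))  -- stand-in lookup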
backgroundFetchAcquireRelease Source #
:: IO service | Resource acquisition for this datasource |
-> (service -> IO ()) | Resource release |
-> (service -> Int -> StablePtr PrimMVar -> IO ()) | Dispatch all the pending requests and when ready trigger the given mvar |
-> (service -> IO ()) | Process all requests |
-> (forall a. service -> request a -> IO (IO (Either SomeException a))) | Submits an individual request to the service. |
-> State request | Currently unused. |
-> Flags | Currently unused. |
-> u | Currently unused. |
-> PerformFetch request |
A version of backgroundFetchAcquireReleaseMVar where the dispatch function is given a 'StablePtr PrimMVar', which is more useful for C-based APIs.
backgroundFetchAcquireReleaseMVar Source #
:: IO service | Resource acquisition for this datasource |
-> (service -> IO ()) | Resource release |
-> (service -> Int -> MVar () -> IO ()) | Dispatch all the pending requests and when ready trigger the given mvar |
-> (service -> IO ()) | Process all requests |
-> (forall a. service -> request a -> IO (IO (Either SomeException a))) | Submits an individual request to the service. |
-> State request | Currently unused. |
-> Flags | Currently unused. |
-> u | Currently unused. |
-> PerformFetch request |
stubFetch :: Exception e => (forall a. r a -> e) -> State r -> Flags -> u -> PerformFetch r Source #
syncFetch Source #

:: ((service -> IO ()) -> IO ()) | Wrapper to perform an action in the context of a service. |
-> (service -> IO ()) | Dispatch all the pending requests and wait for the results |
-> (forall a. service -> request a -> IO (IO (Either SomeException a))) | Submits an individual request to the service. |
-> State request | Currently unused. |
-> Flags | Currently unused. |
-> u | Currently unused. |
-> PerformFetch request |
Utilities
setError :: Exception e => (forall a. r a -> e) -> BlockedFetch r -> IO () Source #
Function for easily setting a fetch to a particular exception
getMapFromRCMap :: ReqCountMap -> Map Text (Map TypeRep Int) Source #
Exceptions
module Haxl.Core.Exception
Recording the function callgraph
module Haxl.Core.CallGraph