Safe Haskell | None |
---|---|
Language | Haskell98 |
Synopsis
- tag :: Model -> DAG a X -> DAG a CbIx
- tag' :: Model -> DAG a X -> DAG a Cb
- tagK :: Int -> Model -> DAG a X -> DAG a [(CbIx, LogFloat)]
- fastTag :: Model -> DAG a X -> DAG a (Maybe CbIx)
- fastTag' :: Model -> DAG a X -> DAG a (Maybe Cb)
- marginals :: Model -> DAG a X -> DAG a [(CbIx, LogFloat)]
- marginals' :: Model -> DAG a X -> DAG a [(Cb, LogFloat)]
- data ProbType
- probs :: ProbType -> Model -> DAG a X -> DAG a [(CbIx, LogFloat)]
- probs' :: ProbType -> Model -> DAG a X -> DAG a [(Cb, LogFloat)]
- accuracy :: Model -> [DAG a (X, Y)] -> Double
- expectedFeaturesIn :: Model -> DAG a X -> [(Feat, LogFloat)]
- zx :: Model -> DAG a X -> LogFloat
- zx' :: Model -> DAG a X -> LogFloat
- type AccF = [LogFloat] -> LogFloat
- type ProbArray = PosArray LogFloat
- data Pos
- simplify :: Pos -> Maybe EdgeIx
- complicate :: Pos -> Maybe EdgeIx -> Pos
- memoProbArray :: DAG a b -> PosArray c -> PosArray c
- memoEdgeIx :: DAG a b -> Memo EdgeIx
Documentation
tag :: Model -> DAG a X -> DAG a CbIx Source #
Find the most probable label sequence (with probabilities of individual labels determined with respect to marginal distributions) satisfying the constraints imposed over label values.
tagK :: Int -> Model -> DAG a X -> DAG a [(CbIx, LogFloat)] Source #
Get (at most) k best tags for each word and return them in descending order. TODO: Tagging with respect to marginal distributions might not be the best idea. Think of some more elegant method.
fastTag :: Model -> DAG a X -> DAG a (Maybe CbIx) Source #
A version of tag which should be, roughly, twice as efficient, since it performs only the forward computation and no backward computation. The downside is that probabilities cannot be retrieved.
marginals :: Model -> DAG a X -> DAG a [(CbIx, LogFloat)] Source #
Tag potential labels with marginal probabilities.
marginals' :: Model -> DAG a X -> DAG a [(Cb, LogFloat)] Source #
Tag potential labels with marginal probabilities.
probs :: ProbType -> Model -> DAG a X -> DAG a [(CbIx, LogFloat)] Source #
Tag potential labels with alternative probabilities. TODO: explain what exactly this means.
probs' :: ProbType -> Model -> DAG a X -> DAG a [(Cb, LogFloat)] Source #
Tag potential labels with alternative probabilities. TODO: explain what exactly this means.
accuracy :: Model -> [DAG a (X, Y)] -> Double Source #
Compute the accuracy of the model with respect to the labeled dataset.
expectedFeaturesIn :: Model -> DAG a X -> [(Feat, LogFloat)] Source #
A list of features defined within the context of the sentence accompanied by expected probabilities determined on the basis of the model.
One feature can occur multiple times in the output list.
zx :: Model -> DAG a X -> LogFloat Source #
Normalization factor computed for the sentence using the forward computation.
zx' :: Model -> DAG a X -> LogFloat Source #
Normalization factor computed for the sentence using the backward computation.
Internals (used by Probs) (TODO: move elsewhere)
complicate :: Pos -> Maybe EdgeIx -> Pos Source #
Inverse operation of simplify, with the default position value.
Memoization
memoProbArray :: DAG a b -> PosArray c -> PosArray c Source #