antlr-haskell-0.1.0.1: A Haskell implementation of the ANTLR top-down parser generator

Copyright: (c) Karl Cronburg 2018
License: BSD3
Maintainer: karl@cs.tufts.edu
Stability: experimental
Portability: POSIX
Safe Haskell: None
Language: Haskell2010

Text.ANTLR.Lex.Tokenizer

Description

 
Synopsis

Documentation

data Token n v Source #

Token with names n, values v, and number of input symbols consumed to match it.

Constructors

Token n v Int

A successfully tokenized token

EOF

The end-of-file token

Error Text

Error encountered while tokenizing

Instances
Eq n => Eq (Token n v) Source # 
Instance details

Defined in Text.ANTLR.Lex.Tokenizer

Methods

(==) :: Token n v -> Token n v -> Bool #

(/=) :: Token n v -> Token n v -> Bool #

(Ord n, Ord v) => Ord (Token n v) Source # 
Instance details

Defined in Text.ANTLR.Lex.Tokenizer

Methods

compare :: Token n v -> Token n v -> Ordering #

(<) :: Token n v -> Token n v -> Bool #

(<=) :: Token n v -> Token n v -> Bool #

(>) :: Token n v -> Token n v -> Bool #

(>=) :: Token n v -> Token n v -> Bool #

max :: Token n v -> Token n v -> Token n v #

min :: Token n v -> Token n v -> Token n v #

(Show n, Show v) => Show (Token n v) Source # 
Instance details

Defined in Text.ANTLR.Lex.Tokenizer

Methods

showsPrec :: Int -> Token n v -> ShowS #

show :: Token n v -> String #

showList :: [Token n v] -> ShowS #

Generic (Token n v) Source # 
Instance details

Defined in Text.ANTLR.Lex.Tokenizer

Associated Types

type Rep (Token n v) :: Type -> Type #

Methods

from :: Token n v -> Rep (Token n v) x #

to :: Rep (Token n v) x -> Token n v #

(Hashable n, Hashable v) => Hashable (Token n v) Source # 
Instance details

Defined in Text.ANTLR.Lex.Tokenizer

Methods

hashWithSalt :: Int -> Token n v -> Int #

hash :: Token n v -> Int #

(Prettify n, Prettify v) => Prettify (Token n v) Source # 
Instance details

Defined in Text.ANTLR.Lex.Tokenizer

Ref (Token n v) Source #

Tokens are symbolized by an icon containing their name.

Instance details

Defined in Text.ANTLR.Parser

Associated Types

type Sym (Token n v) :: Type Source #

Methods

getSymbol :: Token n v -> Sym (Token n v) Source #

type Rep (Token n v) Source # 
Instance details

Defined in Text.ANTLR.Lex.Tokenizer

type Sym (Token n v) Source # 
Instance details

Defined in Text.ANTLR.Parser

type Sym (Token n v) = TokenSymbol n

tokenName :: Token n v -> n Source #

Token Names are Input Symbols to the parser.

tokenValue :: Token n v -> v Source #

Get the value of a token, ignoring its name.

tokenSize :: Token n v -> Int Source #

Get the number of characters from the input that this token matched on.

type Lexeme s = [s] Source #

A Lexeme is a sequence of zero or more (matched) input symbols

type NDFA s i n = (n, DFA s i) Source #

A named DFA over symbols s, indices i, and names n.

tokenize Source #

Arguments

:: (Eq i, Ord s, Eq s, Show s, Show i, Show n, Show v, Hashable i, Hashable s) 
=> [(n, DFA s i)]

Association list of named DFAs.

-> (Lexeme s -> n -> v)

Constructs the value of a token from lexeme matched.

-> [s]

The input string.

-> [Token n v]

The tokenized tokens.

Entrypoint for tokenizing an input stream given a list of named DFAs that we can match on.

@dfaTuples@: converts from DFAs to the names associated with them in the specification of the lexer.

@fncn@: function for constructing the value of a token from the lexeme matched (e.g. varName) and the associated token name (e.g. id).

tokenizeInc Source #

Arguments

:: (Eq i, Ord s, Eq n, Eq s, Show s, Show i, Show n, Show v, Hashable i, Hashable s, Hashable n) 
=> (n -> Bool)

Function that returns True on DFA names we wish to filter out of the results.

-> [(n, DFA s i)]

Closure over association list of named DFAs.

-> (Lexeme s -> n -> v)

Token value constructor from lexemes.

-> Set n -> [s] -> (Token n v, [s])

The incremental tokenizer closure.

Incremental tokenizer takes in the same list of DFAs and AST value constructor function, but instead returns an incremental tokenizer function that expects a set of names that we currently expect to tokenize on, the current input stream, and returns a single tokenized token along with the modified input stream to iteratively call tokenizeInc on.

tokenizeIncAll Source #

Arguments

:: (Eq i, Ord s, Eq n, Eq s, Show s, Show i, Show n, Show v, Hashable i, Hashable s, Hashable n) 
=> (n -> Bool)

Function that returns True on DFA names we wish to filter out of the results.

-> [(n, DFA s i)] 
-> (Lexeme s -> n -> v) 
-> Set n -> [s] -> [(Token n v, [s])]