| Safe Haskell | Safe-Inferred |
|--------------|---------------|
| Language     | Haskell98     |
Network.HTTP.Robots
Synopsis
- type Robot = ([([UserAgent], [Directive])], [Unparsable])
- type Unparsable = ByteString
- data UserAgent
- type Path = ByteString
- type TimeInterval = (DiffTime, DiffTime)
- data Directive
- subParser :: Parser a -> ByteString -> Parser a
- safeParseRational :: Parser Rational
- dropUTF8BOM :: ByteString -> ByteString
- parseHourMinute :: Parser (Integer, Integer)
- parseTimeInterval :: Parser TimeInterval
- allDay :: TimeInterval
- parseRequestRate :: Parser Directive
- parseVisitTime :: Parser Directive
- parseCrawlDelay :: Parser Directive
- strip :: ByteString -> ByteString
- parseRobots :: ByteString -> Either String Robot
- robotP :: Parser Robot
- unparsableP :: Parser ByteString
- agentDirectiveP :: Parser ([UserAgent], [Directive])
- skipSpace :: Parser ()
- directiveP :: Parser Directive
- agentP :: Parser UserAgent
- commentsP :: Parser ()
- tokenP :: Parser ByteString
- tokenWithSpacesP :: Parser ByteString
- canAccess :: ByteString -> Robot -> Path -> Bool
Documentation
type Unparsable = ByteString Source #
type Path = ByteString Source #
type TimeInterval = (DiffTime, DiffTime) Source #
Constructors
- Allow Path
- Disallow Path
- CrawlDelay (record constructor; see Fields below)
  - Fields
- NoArchive Path
- NoSnippet Path
- NoTranslate Path
- NoIndex Path
dropUTF8BOM :: ByteString -> ByteString Source #
strip :: ByteString -> ByteString Source #
parseRobots :: ByteString -> Either String Robot Source #
`parseRobots` is the main entry point for parsing the contents of a robots.txt file.