trifecta-2/0000755000000000000000000000000013316775526011223 5ustar0000000000000000trifecta-2/CHANGELOG.markdown0000644000000000000000000000464413316775526014266 0ustar00000000000000002 [2018.07.03] -------------- * `stepParser` no longer takes a `ByteString`. * Add a `Text.Trifecta.Tutorial` module, as well as lots of documentation. * Add a `foldResult` function to `Text.Trifecta.Result`. * Allow building with `containers-0.6`. 1.7.1.1 ------- * Support `ansi-wl-pprint-0.6.8` 1.7.1 ----- * Support `doctest-0.12` 1.7 --- * Make `trifecta` forward `-Wcompat`ible: * Adding `Semigroup` instances to correspond to every existing `Monoid` instance. This requires adding a `Semigroup` constraint to the `Monoid` instance for `Parser` to emulate the `Semigroup`-`Monoid` superclass relation that will be present in future versions of GHC. * Adding a `MonadFail` instance for `Parser` * Revamp `Setup.hs` to use `cabal-doctest`. This makes it build with `Cabal-2.0`, and makes the `doctest`s work with `cabal new-build` and sandboxes. 1.6.2.1 ------- * Add this changelog to the `extra-souce-files` in `trifecta.cabal` so that the changelog will appear on Hackage 1.6.2 ----- * Enable support for `blaze-html-0.9` and `blaze-markup-0.8` 1.6.1 ----- * Remove redundant constraints from `DeltaParsing`'s class methods. This is required for `trifecta` to build on GHC 8.0.2. 1.6 ----- * Version bumps to support GHC 8 * Add line/col numbers to parse results by giving a list of all deltas when errors happen. 1.5.2 ----- * `lens` 4.13 support * `It` is a `Profunctor` * Builds clean on GHC 7.10. 1.5.1.3 ------- * Support newer `utf8-string` versions and GHC 7.10 1.5.1.2 ------- * Work around lack of the old `preEscapedString` export in near-current `blaze-markup`. 1.5.1.1 ------- * Work around new exports in `blaze`. 1.5.1 ----- * Parsers 0.12.1 support. This removes many `Show` constraints introduced after 1.4 1.5 ----- * Properly PVP compliant point release for the `parsers` changes to properly handle `notFollowedBy` 1.4.3 ----- * Accidentally non-PVP compliant point release. 1.4.1 ----- * GHC 7.8.1 compatibility 1.4 --- * Simplified AsResult * `lens` 4.0 compatibility 1.2.1.1 ------- * Updated `array` dependency for compatibility with GHC 7.8 1.2.1 ----- * Bug fix for the `Monoid` instance in response to [issue #15](https://github.com/ekmett/trifecta/issues/14) * Made the `Semigroup` instance match the `Monoid` as well. 1.2 --- * Changed the `Monoid` instance for `Parser` in response to [issue #14](https://github.com/ekmett/trifecta/issues/14) * Exported `MonadErr` class for raising `Err`s trifecta-2/trifecta.cabal0000644000000000000000000000706113316775526014014 0ustar0000000000000000name: trifecta category: Text, Parsing, Diagnostics, Pretty Printer, Logging version: 2 license: BSD3 cabal-version: >= 1.10 license-file: LICENSE author: Edward A. Kmett maintainer: Edward A. Kmett stability: experimental homepage: http://github.com/ekmett/trifecta/ bug-reports: http://github.com/ekmett/trifecta/issues copyright: Copyright (C) 2010-2017 Edward A. 
Kmett synopsis: A modern parser combinator library with convenient diagnostics description: A modern parser combinator library with slicing and Clang-style colored diagnostics build-type: Custom tested-with: GHC == 7.4.2 , GHC == 7.6.3 , GHC == 7.8.4 , GHC == 7.10.3 , GHC == 8.0.2 , GHC == 8.2.2 , GHC == 8.4.3 , GHC == 8.6.1 extra-source-files: examples/*.hs examples/LICENSE examples/rfc2616/*.hs examples/RFC2616.txt examples/trifecta-examples.cabal .travis.yml CHANGELOG.markdown README.markdown Warning.hs source-repository head type: git location: https://github.com/ekmett/trifecta custom-setup setup-depends: base >= 4 && < 5, Cabal, cabal-doctest >= 1 && < 1.1 library exposed-modules: Text.Trifecta Text.Trifecta.Combinators Text.Trifecta.Delta Text.Trifecta.Highlight Text.Trifecta.Parser Text.Trifecta.Rendering Text.Trifecta.Result Text.Trifecta.Rope Text.Trifecta.Tutorial Text.Trifecta.Util.Array Text.Trifecta.Util.IntervalMap Text.Trifecta.Util.It other-modules: Text.Trifecta.Instances Text.Trifecta.Util.Combinators build-depends: ansi-wl-pprint >= 0.6.6 && < 0.7, ansi-terminal >= 0.6 && < 0.9, array >= 0.3.0.2 && < 0.6, base >= 4.4 && < 5, blaze-builder >= 0.3.0.1 && < 0.5, blaze-html >= 0.5 && < 0.10, blaze-markup >= 0.5 && < 0.9, bytestring >= 0.9.1 && < 0.11, charset >= 0.3.5.1 && < 1, comonad >= 4 && < 6, containers >= 0.3 && < 0.7, deepseq >= 1.2.0.1 && < 1.5, fingertree >= 0.1 && < 0.2, ghc-prim, hashable >= 1.2.1 && < 1.3, lens >= 4.0 && < 5, mtl >= 2.0.1 && < 2.3, parsers >= 0.12.1 && < 1, profunctors >= 4.0 && < 6, reducers >= 3.10 && < 4, semigroups >= 0.8.3.1 && < 1, transformers >= 0.2 && < 0.6, unordered-containers >= 0.2.1 && < 0.3, utf8-string >= 0.3.6 && < 1.1 default-language: Haskell2010 hs-source-dirs: src ghc-options: -O2 -Wall -fobject-code -- See https://ghc.haskell.org/trac/ghc/wiki/Migration/8.0#base-4.9.0.0 if impl(ghc >= 8.0) ghc-options: -Wcompat -Wnoncanonical-monad-instances -Wnoncanonical-monadfail-instances else build-depends: fail == 4.9.* test-suite doctests type: exitcode-stdio-1.0 main-is: doctests.hs ghc-options: -Wall -threaded x-doctest-options: -fobject-code hs-source-dirs: tests default-language: Haskell2010 build-depends: base, doctest >= 0.11.1 && < 0.17, trifecta test-suite quickcheck type: exitcode-stdio-1.0 main-is: QuickCheck.hs default-language: Haskell2010 build-depends: base == 4.*, parsers, QuickCheck, trifecta ghc-options: -Wall -threaded hs-source-dirs: tests trifecta-2/README.markdown0000644000000000000000000000077613316775526013736 0ustar0000000000000000trifecta ======== [![Hackage](https://img.shields.io/hackage/v/trifecta.svg)](https://hackage.haskell.org/package/trifecta) [![Build Status](https://secure.travis-ci.org/ekmett/trifecta.png?branch=master)](http://travis-ci.org/ekmett/trifecta) This package provides a parser that focuses on nice diagnostics. Contact Information ------------------- Contributions and bug reports are welcome! Please feel free to contact me through github or on the #haskell IRC channel on irc.freenode.net. 
-Edward Kmett trifecta-2/Setup.lhs0000644000000000000000000000124113316775526013031 0ustar0000000000000000\begin{code} {-# LANGUAGE CPP #-} {-# OPTIONS_GHC -Wall #-} module Main (main) where #ifndef MIN_VERSION_cabal_doctest #define MIN_VERSION_cabal_doctest(x,y,z) 0 #endif #if MIN_VERSION_cabal_doctest(1,0,0) import Distribution.Extra.Doctest ( defaultMainWithDoctests ) main :: IO () main = defaultMainWithDoctests "doctests" #else #ifdef MIN_VERSION_Cabal -- If the macro is defined, we have new cabal-install, -- but for some reason we don't have cabal-doctest in package-db -- -- Probably we are running cabal sdist, when otherwise using new-build -- workflow import Warning () #endif import Distribution.Simple main :: IO () main = defaultMain #endif \end{code} trifecta-2/Warning.hs0000644000000000000000000000040013316775526013156 0ustar0000000000000000module Warning {-# WARNING ["You are configuring this package without cabal-doctest installed.", "The doctests test-suite will not work as a result.", "To fix this, install cabal-doctest before configuring."] #-} () where trifecta-2/.travis.yml0000644000000000000000000001607413316775526013344 0ustar0000000000000000# This Travis job script has been generated by a script via # # runghc make_travis_yml_2.hs '-o' '.travis.yml' '--ghc-head' '--irc-channel=irc.freenode.org#haskell-lens' '--no-no-tests-no-bench' '--no-unconstrained' 'cabal.project' # # For more information, see https://github.com/hvr/multi-ghc-travis # language: c sudo: false git: submodules: false # whether to recursively clone submodules notifications: irc: channels: - "irc.freenode.org#haskell-lens" skip_join: true template: - "\x0313trifecta\x03/\x0306%{branch}\x03 \x0314%{commit}\x03 %{build_url} %{message}" cache: directories: - $HOME/.cabal/packages - $HOME/.cabal/store before_cache: - rm -fv $HOME/.cabal/packages/hackage.haskell.org/build-reports.log # remove files that are regenerated by 'cabal update' - rm -fv $HOME/.cabal/packages/hackage.haskell.org/00-index.* - rm -fv $HOME/.cabal/packages/hackage.haskell.org/*.json - rm -fv $HOME/.cabal/packages/hackage.haskell.org/01-index.cache - rm -fv $HOME/.cabal/packages/hackage.haskell.org/01-index.tar - rm -fv $HOME/.cabal/packages/hackage.haskell.org/01-index.tar.idx - rm -rfv $HOME/.cabal/packages/head.hackage matrix: include: - compiler: "ghc-8.6.1" env: GHCHEAD=true addons: {apt: {packages: [ghc-ppa-tools,cabal-install-head,ghc-8.6.1], sources: [hvr-ghc]}} - compiler: "ghc-8.4.3" # env: TEST=--disable-tests BENCH=--disable-benchmarks addons: {apt: {packages: [ghc-ppa-tools,cabal-install-2.2,ghc-8.4.3], sources: [hvr-ghc]}} - compiler: "ghc-8.2.2" # env: TEST=--disable-tests BENCH=--disable-benchmarks addons: {apt: {packages: [ghc-ppa-tools,cabal-install-2.2,ghc-8.2.2], sources: [hvr-ghc]}} - compiler: "ghc-8.0.2" # env: TEST=--disable-tests BENCH=--disable-benchmarks addons: {apt: {packages: [ghc-ppa-tools,cabal-install-2.2,ghc-8.0.2], sources: [hvr-ghc]}} - compiler: "ghc-7.10.3" # env: TEST=--disable-tests BENCH=--disable-benchmarks addons: {apt: {packages: [ghc-ppa-tools,cabal-install-2.2,ghc-7.10.3], sources: [hvr-ghc]}} - compiler: "ghc-7.8.4" # env: TEST=--disable-tests BENCH=--disable-benchmarks addons: {apt: {packages: [ghc-ppa-tools,cabal-install-2.2,ghc-7.8.4], sources: [hvr-ghc]}} - compiler: "ghc-7.6.3" # env: TEST=--disable-tests BENCH=--disable-benchmarks addons: {apt: {packages: [ghc-ppa-tools,cabal-install-2.2,ghc-7.6.3], sources: [hvr-ghc]}} - compiler: "ghc-7.4.2" # env: TEST=--disable-tests 
BENCH=--disable-benchmarks addons: {apt: {packages: [ghc-ppa-tools,cabal-install-2.2,ghc-7.4.2], sources: [hvr-ghc]}} - compiler: "ghc-head" env: GHCHEAD=true addons: {apt: {packages: [ghc-ppa-tools,cabal-install-head,ghc-head], sources: [hvr-ghc]}} allow_failures: - compiler: "ghc-head" - compiler: "ghc-8.6.1" before_install: - HC=${CC} - HCPKG=${HC/ghc/ghc-pkg} - unset CC - ROOTDIR=$(pwd) - mkdir -p $HOME/.local/bin - "PATH=/opt/ghc/bin:/opt/ghc-ppa-tools/bin:$HOME/local/bin:$PATH" - HCNUMVER=$(( $(${HC} --numeric-version|sed -E 's/([0-9]+)\.([0-9]+)\.([0-9]+).*/\1 * 10000 + \2 * 100 + \3/') )) - echo $HCNUMVER install: - cabal --version - echo "$(${HC} --version) [$(${HC} --print-project-git-commit-id 2> /dev/null || echo '?')]" - BENCH=${BENCH---enable-benchmarks} - TEST=${TEST---enable-tests} - HADDOCK=${HADDOCK-true} - UNCONSTRAINED=${UNCONSTRAINED-true} - NOINSTALLEDCONSTRAINTS=${NOINSTALLEDCONSTRAINTS-false} - GHCHEAD=${GHCHEAD-false} - travis_retry cabal update -v - "sed -i.bak 's/^jobs:/-- jobs:/' ${HOME}/.cabal/config" - rm -fv cabal.project cabal.project.local # Overlay Hackage Package Index for GHC HEAD: https://github.com/hvr/head.hackage - | if $GHCHEAD; then sed -i 's/-- allow-newer: .*/allow-newer: *:base/' ${HOME}/.cabal/config for pkg in $($HCPKG list --simple-output); do pkg=$(echo $pkg | sed 's/-[^-]*$//'); sed -i "s/allow-newer: /allow-newer: *:$pkg, /" ${HOME}/.cabal/config; done echo 'repository head.hackage' >> ${HOME}/.cabal/config echo ' url: http://head.hackage.haskell.org/' >> ${HOME}/.cabal/config echo ' secure: True' >> ${HOME}/.cabal/config echo ' root-keys: 07c59cb65787dedfaef5bd5f987ceb5f7e5ebf88b904bbd4c5cbdeb2ff71b740' >> ${HOME}/.cabal/config echo ' 2e8555dde16ebd8df076f1a8ef13b8f14c66bad8eafefd7d9e37d0ed711821fb' >> ${HOME}/.cabal/config echo ' 8f79fd2389ab2967354407ec852cbe73f2e8635793ac446d09461ffb99527f6e' >> ${HOME}/.cabal/config echo ' key-threshold: 3' >> ${HOME}/.cabal.config grep -Ev -- '^\s*--' ${HOME}/.cabal/config | grep -Ev '^\s*$' cabal new-update head.hackage -v fi - grep -Ev -- '^\s*--' ${HOME}/.cabal/config | grep -Ev '^\s*$' - "printf 'packages: \".\" \"./examples\"\\n' > cabal.project" - touch cabal.project.local - "if ! $NOINSTALLEDCONSTRAINTS; then for pkg in $($HCPKG list --simple-output); do echo $pkg | sed 's/^/constraints: /' | sed 's/-[^-]*$/ installed/' >> cabal.project.local; done; fi" - cat cabal.project || true - cat cabal.project.local || true - if [ -f "./configure.ac" ]; then (cd "." && autoreconf -i); fi - if [ -f "./examples/configure.ac" ]; then (cd "./examples" && autoreconf -i); fi - rm -f cabal.project.freeze - cabal new-build -w ${HC} ${TEST} ${BENCH} --project-file="cabal.project" --dep -j2 all - rm -rf .ghc.environment.* "."/dist "./examples"/dist - DISTDIR=$(mktemp -d /tmp/dist-test.XXXX) # Here starts the actual work to be performed for the package under test; # any command which exits with a non-zero exit code causes the build to fail. script: # test that source-distributions can be generated - (cd "." && cabal sdist) - (cd "./examples" && cabal sdist) - mv "."/dist/trifecta-*.tar.gz "./examples"/dist/trifecta-examples-*.tar.gz ${DISTDIR}/ - cd ${DISTDIR} || false - find . -maxdepth 1 -name '*.tar.gz' -exec tar -xvf '{}' \; - "printf 'packages: trifecta-*/*.cabal trifecta-examples-*/*.cabal\\n' > cabal.project" - touch cabal.project.local - "if ! 
$NOINSTALLEDCONSTRAINTS; then for pkg in $($HCPKG list --simple-output); do echo $pkg | sed 's/^/constraints: /' | sed 's/-[^-]*$/ installed/' >> cabal.project.local; done; fi" - cat cabal.project || true - cat cabal.project.local || true # build & run tests, build benchmarks - cabal new-build -w ${HC} ${TEST} ${BENCH} all - if [ "x$TEST" = "x--enable-tests" ]; then cabal new-test -w ${HC} ${TEST} ${BENCH} all; fi # cabal check - (cd trifecta-* && cabal check) - (cd trifecta-examples-* && cabal check) # haddock - rm -rf ./dist-newstyle - if $HADDOCK; then cabal new-haddock -w ${HC} ${TEST} ${BENCH} all; else echo "Skipping haddock generation";fi # REGENDATA ["-o",".travis.yml","--ghc-head","--irc-channel=irc.freenode.org#haskell-lens","--no-no-tests-no-bench","--no-unconstrained","cabal.project"] # EOF trifecta-2/LICENSE0000644000000000000000000000301313316775526012225 0ustar0000000000000000Copyright 2010-2017 Edward Kmett Copyright 2008 Ross Patterson Copyright 2007 Paolo Martini Copyright 1999-2000 Daan Leijen All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the author nor the names of his contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. trifecta-2/src/0000755000000000000000000000000013316775526012012 5ustar0000000000000000trifecta-2/src/Text/0000755000000000000000000000000013316775526012736 5ustar0000000000000000trifecta-2/src/Text/Trifecta.hs0000644000000000000000000000200213316775526015025 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Copyright : (C) 2011-2015 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : Edward Kmett -- Stability : experimental -- Portability : non-portable -- -- For a short introduction, see the "Text.Trifecta.Tutorial" module. 
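--
-- As a quick taste, a parser built from the re-exported token combinators
-- can be run with 'parseString'. A minimal sketch (result rendering
-- abbreviated):
--
-- @
-- parseString (symbol "hello") mempty "hello"   -- Success "hello"
-- @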
---------------------------------------------------------------------------- module Text.Trifecta ( module Text.Trifecta.Rendering , module Text.Trifecta.Highlight , module Text.Trifecta.Parser , module Text.Trifecta.Combinators , module Text.Trifecta.Result , module Text.Trifecta.Rope , module Text.Parser.Combinators , module Text.Parser.Char , module Text.Parser.Token ) where import Text.Parser.Char import Text.Parser.Combinators import Text.Parser.Token import Text.Trifecta.Combinators import Text.Trifecta.Highlight import Text.Trifecta.Parser import Text.Trifecta.Rendering import Text.Trifecta.Result import Text.Trifecta.Rope trifecta-2/src/Text/Trifecta/0000755000000000000000000000000013316775526014477 5ustar0000000000000000trifecta-2/src/Text/Trifecta/Combinators.hs0000644000000000000000000002100613316775526017312 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE UndecidableInstances #-} ----------------------------------------------------------------------------- -- | -- Module : Text.Trifecta.Combinators -- Copyright : (c) Edward Kmett 2011-2015 -- License : BSD3 -- -- Maintainer : ekmett@gmail.com -- Stability : experimental -- Portability : non-portable -- ----------------------------------------------------------------------------- module Text.Trifecta.Combinators ( DeltaParsing(..) , sliced , careting, careted , spanning, spanned , fixiting , MarkParsing(..) ) where import Control.Applicative import Control.Monad (MonadPlus) import Control.Monad.Trans.Class import Control.Monad.Trans.Identity import Control.Monad.Trans.Reader import Control.Monad.Trans.RWS.Lazy as Lazy import Control.Monad.Trans.RWS.Strict as Strict import Control.Monad.Trans.State.Lazy as Lazy import Control.Monad.Trans.State.Strict as Strict import Control.Monad.Trans.Writer.Lazy as Lazy import Control.Monad.Trans.Writer.Strict as Strict import Data.ByteString as Strict hiding (span) #if !(MIN_VERSION_base(4,11,0)) import Data.Semigroup #endif import Prelude hiding (span) import Text.Parser.Token import Text.Trifecta.Delta import Text.Trifecta.Rendering -- | This class provides parsers with easy access to: -- -- 1) the current line contents. -- 2) the current position as a 'Delta'. -- 3) the ability to use 'sliced' on any parser. class (MonadPlus m, TokenParsing m) => DeltaParsing m where -- | Retrieve the contents of the current line (from the beginning of the line) line :: m ByteString -- | Retrieve the current position as a 'Delta'. position :: m Delta -- | Run a parser, grabbing all of the text between its start and end points slicedWith :: (a -> Strict.ByteString -> r) -> m a -> m r -- | Retrieve a 'Rendering' of the current line noting this position, but not -- placing a 'Caret' there. rend :: m Rendering rend = rendered <$> position <*> line {-# INLINE rend #-} -- | Grab the remainder of the current line restOfLine :: m ByteString restOfLine = Strict.drop . fromIntegral . 
columnByte <$> position <*> line {-# INLINE restOfLine #-} instance (MonadPlus m, DeltaParsing m) => DeltaParsing (Lazy.StateT s m) where line = lift line {-# INLINE line #-} position = lift position {-# INLINE position #-} slicedWith f (Lazy.StateT m) = Lazy.StateT $ \s -> slicedWith (\(a,s') b -> (f a b, s')) $ m s {-# INLINE slicedWith #-} rend = lift rend {-# INLINE rend #-} restOfLine = lift restOfLine {-# INLINE restOfLine #-} instance (MonadPlus m, DeltaParsing m) => DeltaParsing (Strict.StateT s m) where line = lift line {-# INLINE line #-} position = lift position {-# INLINE position #-} slicedWith f (Strict.StateT m) = Strict.StateT $ \s -> slicedWith (\(a,s') b -> (f a b, s')) $ m s {-# INLINE slicedWith #-} rend = lift rend {-# INLINE rend #-} restOfLine = lift restOfLine {-# INLINE restOfLine #-} instance (MonadPlus m, DeltaParsing m) => DeltaParsing (ReaderT e m) where line = lift line {-# INLINE line #-} position = lift position {-# INLINE position #-} slicedWith f (ReaderT m) = ReaderT $ slicedWith f . m {-# INLINE slicedWith #-} rend = lift rend {-# INLINE rend #-} restOfLine = lift restOfLine {-# INLINE restOfLine #-} instance (MonadPlus m, DeltaParsing m, Monoid w) => DeltaParsing (Strict.WriterT w m) where line = lift line {-# INLINE line #-} position = lift position {-# INLINE position #-} slicedWith f (Strict.WriterT m) = Strict.WriterT $ slicedWith (\(a,s') b -> (f a b, s')) m {-# INLINE slicedWith #-} rend = lift rend {-# INLINE rend #-} restOfLine = lift restOfLine {-# INLINE restOfLine #-} instance (MonadPlus m, DeltaParsing m, Monoid w) => DeltaParsing (Lazy.WriterT w m) where line = lift line {-# INLINE line #-} position = lift position {-# INLINE position #-} slicedWith f (Lazy.WriterT m) = Lazy.WriterT $ slicedWith (\(a,s') b -> (f a b, s')) m {-# INLINE slicedWith #-} rend = lift rend {-# INLINE rend #-} restOfLine = lift restOfLine {-# INLINE restOfLine #-} instance (MonadPlus m, DeltaParsing m, Monoid w) => DeltaParsing (Lazy.RWST r w s m) where line = lift line {-# INLINE line #-} position = lift position {-# INLINE position #-} slicedWith f (Lazy.RWST m) = Lazy.RWST $ \r s -> slicedWith (\(a,s',w) b -> (f a b, s',w)) $ m r s {-# INLINE slicedWith #-} rend = lift rend {-# INLINE rend #-} restOfLine = lift restOfLine {-# INLINE restOfLine #-} instance (MonadPlus m, DeltaParsing m, Monoid w) => DeltaParsing (Strict.RWST r w s m) where line = lift line {-# INLINE line #-} position = lift position {-# INLINE position #-} slicedWith f (Strict.RWST m) = Strict.RWST $ \r s -> slicedWith (\(a,s',w) b -> (f a b, s',w)) $ m r s {-# INLINE slicedWith #-} rend = lift rend {-# INLINE rend #-} restOfLine = lift restOfLine {-# INLINE restOfLine #-} instance (MonadPlus m, DeltaParsing m) => DeltaParsing (IdentityT m) where line = lift line {-# INLINE line #-} position = lift position {-# INLINE position #-} slicedWith f (IdentityT m) = IdentityT $ slicedWith f m {-# INLINE slicedWith #-} rend = lift rend {-# INLINE rend #-} restOfLine = lift restOfLine {-# INLINE restOfLine #-} -- | Run a parser, grabbing all of the text between its start and end points and -- discarding the original result sliced :: DeltaParsing m => m a -> m ByteString sliced = slicedWith (\_ bs -> bs) {-# INLINE sliced #-} -- | Grab a 'Caret' pointing to the current location. careting :: DeltaParsing m => m Caret careting = Caret <$> position <*> line {-# INLINE careting #-} -- | Parse a 'Careted' result. Pointing the 'Caret' to where you start. 
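--
-- A rough usage sketch (the @identifier@ parser here is made up for
-- illustration and is not part of the library):
--
-- @
-- identifier :: DeltaParsing m => m (Careted String)
-- identifier = careted (some letter)
-- @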
careted :: DeltaParsing m => m a -> m (Careted a)
careted p = (\m l a -> a :^ Caret m l) <$> position <*> line <*> p
{-# INLINE careted #-}

-- | Discard the result of a parse, returning a 'Span' from where we start to
-- where it ended parsing.
spanning :: DeltaParsing m => m a -> m Span
spanning p = (\s l e -> Span s e l) <$> position <*> line <*> (p *> position)
{-# INLINE spanning #-}

-- | Parse a 'Spanned' result. The 'Span' starts here and runs to the last
-- position parsed.
spanned :: DeltaParsing m => m a -> m (Spanned a)
spanned p = (\s l a e -> a :~ Span s e l) <$> position <*> line <*> p <*> position
{-# INLINE spanned #-}

-- | Grab a fixit.
fixiting :: DeltaParsing m => m Strict.ByteString -> m Fixit
fixiting p = (\(r :~ s) -> Fixit s r) <$> spanned p
{-# INLINE fixiting #-}

-- | This class is a refinement of 'DeltaParsing' that adds the ability to mark
-- your position in the input and return there for further parsing later.
class (DeltaParsing m, HasDelta d) => MarkParsing d m | m -> d where
  -- | Mark the current location so it can be used in constructing a span, or
  -- for later seeking.
  mark :: m d
  -- | Seek to a previously marked location.
  release :: d -> m ()

instance (MonadPlus m, MarkParsing d m) => MarkParsing d (Lazy.StateT s m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}

instance (MonadPlus m, MarkParsing d m) => MarkParsing d (Strict.StateT s m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}

instance (MonadPlus m, MarkParsing d m) => MarkParsing d (ReaderT e m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}

instance (MonadPlus m, MarkParsing d m, Monoid w) => MarkParsing d (Strict.WriterT w m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}

instance (MonadPlus m, MarkParsing d m, Monoid w) => MarkParsing d (Lazy.WriterT w m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}

instance (MonadPlus m, MarkParsing d m, Monoid w) => MarkParsing d (Lazy.RWST r w s m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}

instance (MonadPlus m, MarkParsing d m, Monoid w) => MarkParsing d (Strict.RWST r w s m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}

instance (MonadPlus m, MarkParsing d m) => MarkParsing d (IdentityT m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
trifecta-2/src/Text/Trifecta/Tutorial.hs0000644000000000000000000000540613316775526016643 0ustar0000000000000000
-- | This module provides a short introduction to get users started with
-- Trifecta. The key takeaway message is that it's not harder than, or even
-- much different from, using other parser libraries, so users familiar with
-- one of the many Parsecs should feel right at home.
--
-- __The source of this file is written in a literate style__, and can be read
-- top-to-bottom.
module Text.Trifecta.Tutorial where

import Control.Applicative
import Text.Trifecta

-- | First, we import Trifecta itself. It contains only the core parser
-- definitions and instances. Since Trifecta on its own is just the parser and
-- a handful of instances, the bulk of the utility functions actually comes
-- from a separate package, /parsers/, which provides the usual parsing
-- functions like 'manyTill', 'between', and so on.
The idea behind the /parsers/ package is -- that most parser libraries define the same generic functions, so they were -- put into their own package to be shared. Trifecta reexports these -- definitions, but it’s useful to keep in mind that the documentation of -- certain functions might not be directly in the /trifecta/ package. importDocumentation :: docDummy importDocumentation = error "Auxiliary definition to write Haddock documetation for :-)" -- | In order to keep things minimal, we define a very simple language for -- arithmetic expressions. data Expr = Add Expr Expr -- ^ expr + expr | Lit Integer -- ^ 1, 2, -345, … deriving (Show) -- | The parser is straightforward: there are literal integers, and -- parenthesized additions. We require parentheses in order to keep the example -- super simple as to not worry about operator precedence. -- -- It is useful to use /tokenizing/ functions to write parsers. Roughly -- speaking, these automatically skip trailing whitespace on their own, so that -- the parser isn’t cluttered with 'skipWhitespace' calls. 'symbolic' for -- example parses a 'Char' and then skips trailing whitespace; there is also the -- more primitive 'char' function that just parses its argument and nothing -- else. parseExpr :: Parser Expr parseExpr = parseAdd <|> parseLit where parseAdd = parens $ do x <- parseExpr _ <- symbolic '+' y <- parseExpr pure (Add x y) parseLit = Lit <$> integer -- | We can now use our parser to convert a 'String' to an 'Expr', -- -- @ -- parseString parseExpr mempty "(1 + (2 + 3))" -- @ -- -- > Success (Add (Lit 1) (Add (Lit 2) (Lit 3))) -- -- When we provide ill-formed input, we get a nice error message with an arrow -- to the location where the error occurred: -- -- @ -- parseString parseExpr mempty "(1 + 2 + 3))" -- @ -- -- > (interactive):1:8: error: expected: ")" -- > (1 + 2 + 3)) -- > ^ examples :: docDummy examples = error "Haddock dummy for documentation" trifecta-2/src/Text/Trifecta/Instances.hs0000644000000000000000000000123713316775526016765 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-orphans #-} ----------------------------------------------------------------------------- -- | -- Copyright : (c) Edward Kmett 2013-2015 -- License : BSD3 -- -- Maintainer : ekmett@gmail.com -- Stability : experimental -- Portability : non-portable -- -- Orphan instances we need to remain sane. ----------------------------------------------------------------------------- module Text.Trifecta.Instances () where #if !MIN_VERSION_ansi_wl_pprint(0,6,8) import qualified Data.Semigroup as Data import Text.PrettyPrint.ANSI.Leijen instance Data.Semigroup Doc where (<>) = (<>) #endif trifecta-2/src/Text/Trifecta/Rendering.hs0000644000000000000000000003647113316775526016763 0ustar0000000000000000{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeSynonymInstances #-} ----------------------------------------------------------------------------- -- | -- Copyright : (C) 2011-2015 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : Edward Kmett -- Stability : experimental -- Portability : non-portable -- -- The type for Lines will very likely change over time, to enable drawing -- lit up multi-character versions of control characters for @^Z@, @^[@, -- @<0xff>@, etc. This will make for much nicer diagnostics when -- working with protocols. 
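--
-- As a rough sketch of the intended workflow (mirroring the doctests further
-- down in this module), a source line can be turned into a 'Rendering' and
-- decorated before pretty-printing:
--
-- @
-- let r = rendered mempty ("int main(int argc, char ** argv) { int; }" :: ByteString)
-- pretty (addCaret (Columns 35 35) r)
-- @
--
-- which draws a caret under the second @int@.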
-- ---------------------------------------------------------------------------- module Text.Trifecta.Rendering ( Rendering(Rendering) , HasRendering(..) , nullRendering , emptyRendering , Source(..) , rendered , Renderable(..) , Rendered(..) -- * Carets , Caret(..) , HasCaret(..) , Careted(..) , drawCaret , addCaret , caretEffects , renderingCaret -- * Spans , Span(..) , HasSpan(..) , Spanned(..) , spanEffects , drawSpan , addSpan -- * Fixits , Fixit(..) , HasFixit(..) , drawFixit , addFixit -- * Drawing primitives , Lines , draw , ifNear , (.#) ) where import Control.Applicative import Control.Comonad import Control.Lens import Data.Array import Data.ByteString as B hiding (any, empty, groupBy) import qualified Data.ByteString.UTF8 as UTF8 import Data.Data import Data.Foldable import Data.Function (on) import Data.Hashable import Data.Int (Int64) import Data.List (groupBy) import Data.Maybe import Data.Semigroup import Data.Semigroup.Reducer import GHC.Generics import Prelude as P hiding (span) import System.Console.ANSI import Text.PrettyPrint.ANSI.Leijen hiding (column, (<$>), (<>)) import Text.Trifecta.Delta import Text.Trifecta.Instances () import Text.Trifecta.Util.Combinators -- $setup -- -- >>> :set -XOverloadedStrings -- >>> import Text.PrettyPrint.ANSI.Leijen (pretty, plain) -- >>> import Data.ByteString (ByteString) -- >>> import Data.Monoid (mempty) -- >>> import Text.Trifecta.Delta -- >>> let exampleRendering = rendered mempty ("int main(int argc, char ** argv) { int; }" :: ByteString) outOfRangeEffects :: [SGR] -> [SGR] outOfRangeEffects xs = SetConsoleIntensity BoldIntensity : xs sgr :: [SGR] -> Doc -> Doc sgr xs0 = go (P.reverse xs0) where go [] = id go (SetConsoleIntensity NormalIntensity : xs) = debold . go xs go (SetConsoleIntensity BoldIntensity : xs) = bold . go xs go (SetUnderlining NoUnderline : xs) = deunderline . go xs go (SetUnderlining SingleUnderline : xs) = underline . go xs go (SetColor f i c : xs) = case f of Foreground -> case i of Dull -> case c of Black -> dullblack . go xs Red -> dullred . go xs Green -> dullgreen . go xs Yellow -> dullyellow . go xs Blue -> dullblue . go xs Magenta -> dullmagenta . go xs Cyan -> dullcyan . go xs White -> dullwhite . go xs Vivid -> case c of Black -> black . go xs Red -> red . go xs Green -> green . go xs Yellow -> yellow . go xs Blue -> blue . go xs Magenta -> magenta . go xs Cyan -> cyan . go xs White -> white . go xs Background -> case i of Dull -> case c of Black -> ondullblack . go xs Red -> ondullred . go xs Green -> ondullgreen . go xs Yellow -> ondullyellow . go xs Blue -> ondullblue . go xs Magenta -> ondullmagenta . go xs Cyan -> ondullcyan . go xs White -> ondullwhite . go xs Vivid -> case c of Black -> onblack . go xs Red -> onred . go xs Green -> ongreen . go xs Yellow -> onyellow . go xs Blue -> onblue . go xs Magenta -> onmagenta . go xs Cyan -> oncyan . go xs White -> onwhite . go xs go (_ : xs) = go xs -- | A raw canvas to paint ANSI-styled characters on. type Lines = Array (Int,Int64) ([SGR], Char) -- | Remove a number of @(index, element)@ values from an @'Array'@. (///) :: Ix i => Array i e -> [(i, e)] -> Array i e a /// xs = a // P.filter (inRange (bounds a) . fst) xs grow :: Int -> Lines -> Lines grow y a | inRange (t,b) y = a | otherwise = array new [ (i, if inRange old i then a ! 
i else ([],' ')) | i <- range new ] where old@((t,lo),(b,hi)) = bounds a new = ((min t y,lo),(max b y,hi)) draw :: [SGR] -- ^ ANSI style to use -> Int -- ^ Line; 0 is at the top -> Int64 -- ^ Column; 0 is on the left -> String -- ^ Data to be written -> Lines -- ^ Canvas to draw on -> Lines draw _ _ _ "" a0 = a0 draw e y n xs a0 = gt $ lt (a /// out) where a = grow y a0 ((_,lo),(_,hi)) = bounds a out = P.zipWith (\i c -> ((y,i),(e,c))) [n..] xs lt | P.any (\el -> snd (fst el) < lo) out = (// [((y,lo),(outOfRangeEffects e,'<'))]) | otherwise = id gt | P.any (\el -> snd (fst el) > hi) out = (// [((y,hi),(outOfRangeEffects e,'>'))]) | otherwise = id -- | A 'Rendering' is a canvas of text that output can be written to. data Rendering = Rendering { _renderingDelta :: !Delta -- ^ focus, the render will keep this visible , _renderingLineLen :: {-# UNPACK #-} !Int64 -- ^ actual line length , _renderingLineBytes :: {-# UNPACK #-} !Int64 -- ^ line length in bytes , _renderingLine :: Lines -> Lines , _renderingOverlays :: Delta -> Lines -> Lines } makeClassy ''Rendering instance Show Rendering where showsPrec d (Rendering p ll lb _ _) = showParen (d > 10) $ showString "Rendering " . showsPrec 11 p . showChar ' ' . showsPrec 11 ll . showChar ' ' . showsPrec 11 lb . showString " ... ..." -- | Is the 'Rendering' empty? -- -- >>> nullRendering emptyRendering -- True -- -- >>> nullRendering exampleRendering -- False nullRendering :: Rendering -> Bool nullRendering (Rendering (Columns 0 0) 0 0 _ _) = True nullRendering _ = False -- | The empty 'Rendering', which contains nothing at all. -- -- >>> show (pretty emptyRendering) -- "" emptyRendering :: Rendering emptyRendering = Rendering (Columns 0 0) 0 0 id (const id) instance Semigroup Rendering where -- an unprincipled hack Rendering (Columns 0 0) 0 0 _ f <> Rendering del len lb dc g = Rendering del len lb dc $ \d l -> f d (g d l) Rendering del len lb dc f <> Rendering _ _ _ _ g = Rendering del len lb dc $ \d l -> f d (g d l) instance Monoid Rendering where mappend = (<>) mempty = emptyRendering ifNear :: Delta -- ^ Position 1 -> (Lines -> Lines) -- ^ Modify the fallback result if the positions are 'near' each other -> Delta -- ^ Position 2 -> Lines -- ^ Fallback result if the positions are not 'near' each other -> Lines ifNear d f d' l | near d d' = f l | otherwise = l instance HasDelta Rendering where delta = _renderingDelta class Renderable t where render :: t -> Rendering instance Renderable Rendering where render = id class Source t where source :: t -> (Int64, Int64, Lines -> Lines) -- ^ @ -- ( Number of (padded) columns -- , number of bytes -- , line ) -- @ instance Source String where source s | P.elem '\n' s = (ls, bs, draw [] 0 0 s') | otherwise = ( ls + fromIntegral (P.length end), bs, draw [SetColor Foreground Vivid Blue, SetConsoleIntensity BoldIntensity] 0 ls end . draw [] 0 0 s') where end = "" s' = go 0 s bs = fromIntegral $ B.length $ UTF8.fromString $ P.takeWhile (/='\n') s ls = fromIntegral $ P.length s' go n ('\t':xs) = let t = 8 - mod n 8 in P.replicate t ' ' ++ go (n + t) xs go _ ('\n':_) = [] go n (x:xs) = x : go (n + 1) xs go _ [] = [] instance Source ByteString where source = source . 
UTF8.toString -- | create a drawing surface rendered :: Source s => Delta -> s -> Rendering rendered del s = case source s of (len, lb, dc) -> Rendering del len lb dc (\_ l -> l) (.#) :: (Delta -> Lines -> Lines) -> Rendering -> Rendering f .# Rendering d ll lb s g = Rendering d ll lb s $ \e l -> f e $ g e l instance Pretty Rendering where pretty (Rendering d ll _ l f) = nesting $ \k -> columns $ \mn -> go (fromIntegral (fromMaybe 80 mn - k)) where go cols = align (vsep (P.map ln [t..b])) where (lo, hi) = window (column d) ll (min (max (cols - 2) 30) 200) a = f d $ l $ array ((0,lo),(-1,hi)) [] ((t,_),(b,_)) = bounds a ln y = hcat $ P.map (\g -> sgr (fst (P.head g)) (pretty (P.map snd g))) $ groupBy ((==) `on` fst) [ a ! (y,i) | i <- [lo..hi] ] window :: Int64 -> Int64 -> Int64 -> (Int64, Int64) window c l w | c <= w2 = (0, min w l) | c + w2 >= l = if l > w then (l-w, l) else (0 , w) | otherwise = (c-w2, c+w2) where w2 = div w 2 data Rendered a = a :@ Rendering deriving Show instance Functor Rendered where fmap f (a :@ s) = f a :@ s instance HasDelta (Rendered a) where delta = delta . render instance HasBytes (Rendered a) where bytes = bytes . delta instance Comonad Rendered where extend f as@(_ :@ s) = f as :@ s extract (a :@ _) = a instance ComonadApply Rendered where (f :@ s) <@> (a :@ t) = f a :@ (s <> t) instance Foldable Rendered where foldMap f (a :@ _) = f a instance Traversable Rendered where traverse f (a :@ s) = (:@ s) <$> f a instance Renderable (Rendered a) where render (_ :@ s) = s -- | A 'Caret' marks a point in the input with a simple @^@ character. -- -- >>> plain (pretty (addCaret (Columns 35 35) exampleRendering)) -- int main(int argc, char ** argv) { int; } -- ^ data Caret = Caret !Delta {-# UNPACK #-} !ByteString deriving (Eq,Ord,Show,Data,Typeable,Generic) class HasCaret t where caret :: Lens' t Caret instance HasCaret Caret where caret = id instance Hashable Caret -- | ANSI terminal style for rendering the caret. caretEffects :: [SGR] caretEffects = [SetColor Foreground Vivid Green] drawCaret :: Delta -> Delta -> Lines -> Lines drawCaret p = ifNear p $ draw caretEffects 1 (fromIntegral (column p)) "^" -- | Render a caret at a certain position in a 'Rendering'. addCaret :: Delta -> Rendering -> Rendering addCaret p r = drawCaret p .# r instance HasBytes Caret where bytes = bytes . delta instance HasDelta Caret where delta (Caret d _) = d instance Renderable Caret where render (Caret d bs) = addCaret d $ rendered d bs instance Reducer Caret Rendering where unit = render instance Semigroup Caret where a <> _ = a renderingCaret :: Delta -> ByteString -> Rendering renderingCaret d bs = addCaret d $ rendered d bs data Careted a = a :^ Caret deriving (Eq,Ord,Show,Data,Typeable,Generic) instance HasCaret (Careted a) where caret f (a :^ c) = (a :^) <$> f c instance Functor Careted where fmap f (a :^ s) = f a :^ s instance HasDelta (Careted a) where delta (_ :^ c) = delta c instance HasBytes (Careted a) where bytes (_ :^ c) = bytes c instance Comonad Careted where extend f as@(_ :^ s) = f as :^ s extract (a :^ _) = a instance ComonadApply Careted where (a :^ c) <@> (b :^ d) = a b :^ (c <> d) instance Foldable Careted where foldMap f (a :^ _) = f a instance Traversable Careted where traverse f (a :^ s) = (:^ s) <$> f a instance Renderable (Careted a) where render (_ :^ a) = render a instance Reducer (Careted a) Rendering where unit = render instance Hashable a => Hashable (Careted a) -- | ANSI terminal style to render spans with. 
spanEffects :: [SGR] spanEffects = [SetColor Foreground Dull Green] drawSpan :: Delta -- ^ Start of the region of interest -> Delta -- ^ End of the region of interest -> Delta -- ^ Currrent location -> Lines -- ^ 'Lines' to add the rendering to -> Lines drawSpan start end d a | nearLo && nearHi = go (column lo) (rep (max (column hi - column lo) 0) '~') a | nearLo = go (column lo) (rep (max (snd (snd (bounds a)) - column lo + 1) 0) '~') a | nearHi = go (-1) (rep (max (column hi + 1) 0) '~') a | otherwise = a where go = draw spanEffects 1 . fromIntegral lo = argmin bytes start end hi = argmax bytes start end nearLo = near lo d nearHi = near hi d rep = P.replicate . fromIntegral addSpan :: Delta -> Delta -> Rendering -> Rendering addSpan s e r = drawSpan s e .# r -- | A 'Span' marks a range of input characters. If 'Caret' is a point, then -- 'Span' is a line. -- -- >>> plain (pretty (addSpan (Columns 35 35) (Columns 38 38) exampleRendering)) -- int main(int argc, char ** argv) { int; } -- ~~~ data Span = Span !Delta !Delta {-# UNPACK #-} !ByteString deriving (Eq,Ord,Show,Data,Typeable,Generic) class HasSpan t where span :: Lens' t Span instance HasSpan Span where span = id instance Renderable Span where render (Span s e bs) = addSpan s e $ rendered s bs instance Semigroup Span where Span s _ b <> Span _ e _ = Span s e b instance Reducer Span Rendering where unit = render instance Hashable Span -- | Annotate an arbitrary piece of data with a 'Span', typically its -- corresponding input location. data Spanned a = a :~ Span deriving (Eq,Ord,Show,Data,Typeable,Generic) instance HasSpan (Spanned a) where span f (a :~ c) = (a :~) <$> f c instance Functor Spanned where fmap f (a :~ s) = f a :~ s instance Comonad Spanned where extend f as@(_ :~ s) = f as :~ s extract (a :~ _) = a instance ComonadApply Spanned where (a :~ c) <@> (b :~ d) = a b :~ (c <> d) instance Foldable Spanned where foldMap f (a :~ _) = f a instance Traversable Spanned where traverse f (a :~ s) = (:~ s) <$> f a instance Reducer (Spanned a) Rendering where unit = render instance Renderable (Spanned a) where render (_ :~ s) = render s instance Hashable a => Hashable (Spanned a) drawFixit :: Delta -> Delta -> String -> Delta -> Lines -> Lines drawFixit s e rpl d a = ifNear l (draw [SetColor Foreground Dull Blue] 2 (fromIntegral (column l)) rpl) d $ drawSpan s e d a where l = argmin bytes s e addFixit :: Delta -> Delta -> String -> Rendering -> Rendering addFixit s e rpl r = drawFixit s e rpl .# r -- | A 'Fixit' is a 'Span' with a suggestion. -- -- >>> plain (pretty (addFixit (Columns 35 35) (Columns 38 38) "Fix this!" exampleRendering)) -- int main(int argc, char ** argv) { int; } -- ~~~ -- Fix this! 
data Fixit = Fixit { _fixitSpan :: {-# UNPACK #-} !Span -- ^ 'Span' where the error occurred , _fixitReplacement :: !ByteString -- ^ Replacement suggestion } deriving (Eq,Ord,Show,Data,Typeable,Generic) makeClassy ''Fixit instance HasSpan Fixit where span = fixitSpan instance Hashable Fixit instance Reducer Fixit Rendering where unit = render instance Renderable Fixit where render (Fixit (Span s e bs) r) = addFixit s e (UTF8.toString r) $ rendered s bs trifecta-2/src/Text/Trifecta/Rope.hs0000644000000000000000000001402113316775526015736 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE TypeFamilies #-} ----------------------------------------------------------------------------- -- | -- Copyright : (C) 2011-2015 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : Edward Kmett -- Stability : experimental -- Portability : non-portable -- -- A rope is a data strucure to efficiently store and manipulate long strings. -- Wikipedia provides a nice overview: -- ---------------------------------------------------------------------------- module Text.Trifecta.Rope ( Rope(..) , rope , ropeBS , Strand(..) , strand , strands , grabRest , grabLine ) where import Data.ByteString (ByteString) import qualified Data.ByteString as Strict import qualified Data.ByteString.Lazy as Lazy import qualified Data.ByteString.UTF8 as UTF8 import Data.Data import Data.FingerTree as FingerTree import Data.Foldable (toList) import Data.Hashable #if !(MIN_VERSION_base(4,11,0)) import Data.Semigroup #endif import Data.Semigroup.Reducer import GHC.Generics import Text.Trifecta.Delta import Text.Trifecta.Util.Combinators as Util -- $setup -- -- >>> :set -XOverloadedStrings -- >>> import Data.Monoid ((<>)) -- >>> import qualified Data.ByteString.UTF8 as Strict -- >>> import qualified Data.ByteString.Lazy.UTF8 as Lazy -- A 'Strand' is a chunk of data; many 'Strand's together make a 'Rope'. data Strand = Strand {-# UNPACK #-} !ByteString !Delta -- ^ Data of a certain length | Skipping !Delta -- ^ Absence of data of a certain length deriving (Show, Data, Typeable, Generic) -- | Construct a single 'Strand' out of a 'ByteString'. strand :: ByteString -> Strand strand bs = Strand bs (delta bs) instance Measured Delta Strand where measure (Strand _ s) = delta s measure (Skipping d) = d instance Hashable Strand instance HasDelta Strand where delta = measure instance HasBytes Strand where bytes (Strand _ d) = bytes d bytes _ = 0 data Rope = Rope !Delta !(FingerTree Delta Strand) deriving Show rope :: FingerTree Delta Strand -> Rope rope r = Rope (measure r) r -- | Construct a 'Rope' out of a single 'ByteString' strand. ropeBS :: ByteString -> Rope ropeBS = rope . singleton . strand strands :: Rope -> FingerTree Delta Strand strands (Rope _ r) = r -- | Grab the entire rest of the input 'Rope', starting at an initial offset, or -- return a default if we’re already at or beyond the end. Also see 'grabLine'. 
-- -- Extract a suffix of a certain length from the input: -- -- >>> grabRest (delta ("Hello " :: ByteString)) (ropeBS "Hello World\nLorem") Nothing (\x y -> Just (x, Lazy.toString y)) -- Just (Columns 6 6,"World\nLorem") -- -- Same deal, but over multiple strands: -- -- >>> grabRest (delta ("Hel" :: ByteString)) (ropeBS "Hello" <> ropeBS "World") Nothing (\x y -> Just (x, Lazy.toString y)) -- Just (Columns 3 3,"loWorld") -- -- When the offset is too long, fall back to a default: -- -- >>> grabRest (delta ("OffetTooLong" :: ByteString)) (ropeBS "Hello") Nothing (\x y -> Just (x, Lazy.toString y)) -- Nothing grabRest :: Delta -- ^ Initial offset -> Rope -- ^ Input -> r -- ^ Default value if there is no input left -> (Delta -> Lazy.ByteString -> r) -- ^ If there is some input left, create an @r@ out of the data from the -- initial offset until the end -> r grabRest offset input failure success = trim (delta l) (bytes offset - bytes l) (toList r) where trim offset' 0 (Strand str _ : xs) = go offset' str xs trim _ k (Strand str _ : xs) = go offset (Strict.drop (fromIntegral k) str) xs trim offset' k (Skipping p : xs) = trim (offset' <> p) k xs trim _ _ [] = failure go offset' str strands' = success offset' (Lazy.fromChunks (str : [ a | Strand a _ <- strands' ])) (l, r) = splitRopeAt offset input -- | Split the rope in two halves, given a 'Delta' offset from the beginning. splitRopeAt :: Delta -> Rope -> (FingerTree Delta Strand, FingerTree Delta Strand) splitRopeAt splitPos = FingerTree.split (\pos -> bytes pos > bytes splitPos) . strands -- | Grab the rest of the line at a certain offset in the input 'Rope', or -- return a default if there is no newline left in the input. Also see -- 'grabRest'. -- -- >>> grabLine (delta ("Hello " :: ByteString)) (ropeBS "Hello" <> ropeBS " World\nLorem") Nothing (\x y -> Just (x, Strict.toString y)) -- Just (Columns 6 6,"World\n") grabLine :: Delta -- ^ Initial offset -> Rope -- ^ Input -> r -- ^ Default value if there is no input left -> (Delta -> Strict.ByteString -> r) -- ^ If there is some input left, create an @r@ out of the data from the -- initial offset until the end of the line -> r grabLine offset input failure success = grabRest offset input failure (\d -> success d . Util.fromLazy . Util.takeLine) instance HasBytes Rope where bytes = bytes . measure instance HasDelta Rope where delta = measure instance Measured Delta Rope where measure (Rope s _) = s instance Monoid Rope where mempty = Rope mempty mempty mappend = (<>) instance Semigroup Rope where Rope mx x <> Rope my y = Rope (mx <> my) (x `mappend` y) instance Reducer Rope Rope where unit = id instance Reducer Strand Rope where unit s = rope (FingerTree.singleton s) cons s (Rope mt t) = Rope (delta s `mappend` mt) (s <| t) snoc (Rope mt t) !s = Rope (mt `mappend` delta s) (t |> s) instance Reducer Strict.ByteString Rope where unit = unit . strand cons = cons . strand snoc r = snoc r . strand instance Reducer [Char] Rope where unit = unit . strand . UTF8.fromString cons = cons . strand . UTF8.fromString snoc r = snoc r . strand . 
UTF8.fromString trifecta-2/src/Text/Trifecta/Delta.hs0000644000000000000000000001650613316775526016074 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} ----------------------------------------------------------------------------- -- | -- Copyright : (C) 2011-2015 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : Edward Kmett -- Stability : experimental -- Portability : non-portable -- -- A 'Delta' keeps track of the cursor position of the parser, so it can be -- referred to later, for example in error messages. ---------------------------------------------------------------------------- module Text.Trifecta.Delta ( Delta(..) , HasDelta(..) , HasBytes(..) , nextTab , rewind , near , column , columnByte ) where #if !(MIN_VERSION_base(4,11,0)) import Data.Semigroup #endif import Data.Hashable import Data.Int import Data.Data import Data.Word #if __GLASGOW_HASKELL__ < 710 import Data.Foldable #endif import Data.Function (on) import Data.FingerTree hiding (empty) import Data.ByteString as Strict hiding (empty) import qualified Data.ByteString.UTF8 as UTF8 import GHC.Generics import Text.PrettyPrint.ANSI.Leijen hiding (column, (<>)) import Text.Trifecta.Instances () class HasBytes t where bytes :: t -> Int64 instance HasBytes ByteString where bytes = fromIntegral . Strict.length instance (Measured v a, HasBytes v) => HasBytes (FingerTree v a) where bytes = bytes . measure -- | Since there are multiple ways to be at a certain location, 'Delta' captures -- all these alternatives as a single type. data Delta = Columns {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 -- ^ @ -- ( number of characters -- , number of bytes ) -- @ | Tab {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 -- ^ @ -- ( number of characters before the tab -- , number of characters after the tab -- , number of bytes ) -- @ | Lines {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 -- ^ @ -- ( number of newlines contained -- , number of characters since the last newline -- , number of bytes -- , number of bytes since the last newline ) -- @ | Directed !ByteString {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 {-# UNPACK #-} !Int64 -- ^ @ -- ( current file name -- , number of lines since the last line directive -- , number of characters since the last newline -- , number of bytes -- , number of bytes since the last newline ) -- @ deriving (Show, Data, Typeable, Generic) instance Eq Delta where (==) = (==) `on` bytes instance Ord Delta where compare = compare `on` bytes instance (HasDelta l, HasDelta r) => HasDelta (Either l r) where delta = either delta delta -- | Example: @file.txt:12:34@ instance Pretty Delta where pretty d = case d of Columns c _ -> prettyDelta interactive 0 c Tab x y _ -> prettyDelta interactive 0 (nextTab x + y) Lines l c _ _ -> prettyDelta interactive l c Directed fn l c _ _ -> prettyDelta (UTF8.toString fn) l c where prettyDelta :: String -- Source description -> Int64 -- Line -> Int64 -- Column -> Doc prettyDelta source line' column' = bold (pretty source) <> char ':' <> bold (int64 (line'+1)) <> char ':' <> bold (int64 (column'+1)) interactive = "(interactive)" int64 :: Int64 -> Doc int64 = pretty . show -- | Retrieve the character offset within the current line from this 'Delta'. 
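--
-- A couple of illustrative values (these follow directly from the
-- definitions in this module; a sketch rather than doctests):
--
-- @
-- column (Columns 5 5)  -- 5
-- column (Tab 4 2 7)    -- nextTab 4 + 2 = 10
-- @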
column :: HasDelta t => t -> Int64 column t = case delta t of Columns c _ -> c Tab b a _ -> nextTab b + a Lines _ c _ _ -> c Directed _ _ c _ _ -> c {-# INLINE column #-} -- | Retrieve the byte offset within the current line from this 'Delta'. columnByte :: Delta -> Int64 columnByte (Columns _ b) = b columnByte (Tab _ _ b) = b columnByte (Lines _ _ _ b) = b columnByte (Directed _ _ _ _ b) = b {-# INLINE columnByte #-} instance HasBytes Delta where bytes (Columns _ b) = b bytes (Tab _ _ b) = b bytes (Lines _ _ b _) = b bytes (Directed _ _ _ b _) = b instance Hashable Delta instance Monoid Delta where mempty = Columns 0 0 mappend = (<>) instance Semigroup Delta where Columns c a <> Columns d b = Columns (c + d) (a + b) Columns c a <> Tab x y b = Tab (c + x) y (a + b) Columns _ a <> Lines l c t a' = Lines l c (t + a) a' Columns _ a <> Directed p l c t a' = Directed p l c (t + a) a' Lines l c t a <> Columns d b = Lines l (c + d) (t + b) (a + b) Lines l c t a <> Tab x y b = Lines l (nextTab (c + x) + y) (t + b) (a + b) Lines l _ t _ <> Lines m d t' b = Lines (l + m) d (t + t') b Lines _ _ t _ <> Directed p l c t' a = Directed p l c (t + t') a Tab x y a <> Columns d b = Tab x (y + d) (a + b) Tab x y a <> Tab x' y' b = Tab x (nextTab (y + x') + y') (a + b) Tab _ _ a <> Lines l c t a' = Lines l c (t + a ) a' Tab _ _ a <> Directed p l c t a' = Directed p l c (t + a ) a' Directed p l c t a <> Columns d b = Directed p l (c + d) (t + b ) (a + b) Directed p l c t a <> Tab x y b = Directed p l (nextTab (c + x) + y) (t + b ) (a + b) Directed p l _ t _ <> Lines m d t' b = Directed p (l + m) d (t + t') b Directed _ _ _ t _ <> Directed p l c t' b = Directed p l c (t + t') b -- | Increment a column number to the next tabstop. nextTab :: Int64 -> Int64 nextTab x = x + (8 - mod x 8) {-# INLINE nextTab #-} -- | Rewind a 'Delta' to the beginning of the line. rewind :: Delta -> Delta rewind (Lines n _ b d) = Lines n 0 (b - d) 0 rewind (Directed p n _ b d) = Directed p n 0 (b - d) 0 rewind _ = Columns 0 0 {-# INLINE rewind #-} -- | Should we show two things with a 'Delta' on the same line? -- -- >>> near (Columns 0 0) (Columns 5 5) -- True -- -- >>> near (Lines 1 0 1 0) (Lines 2 4 4 2) -- False near :: (HasDelta s, HasDelta t) => s -> t -> Bool near s t = rewind (delta s) == rewind (delta t) {-# INLINE near #-} class HasDelta t where delta :: t -> Delta instance HasDelta Delta where delta = id instance HasDelta Char where delta '\t' = Tab 0 0 1 delta '\n' = Lines 1 0 1 0 delta c | o <= 0x7f = Columns 1 1 | o <= 0x7ff = Columns 1 2 | o <= 0xffff = Columns 1 3 | otherwise = Columns 1 4 where o = fromEnum c instance HasDelta Word8 where delta 9 = Tab 0 0 1 delta 10 = Lines 1 0 1 0 delta n | n <= 0x7f = Columns 1 1 | n >= 0xc0 && n <= 0xf4 = Columns 1 1 | otherwise = Columns 0 1 instance HasDelta ByteString where delta = foldMap delta . unpack instance (Measured v a, HasDelta v) => HasDelta (FingerTree v a) where delta = delta . 
measure trifecta-2/src/Text/Trifecta/Parser.hs0000644000000000000000000003632013316775526016273 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE TemplateHaskell #-} ----------------------------------------------------------------------------- -- | -- Copyright : (c) Edward Kmett 2011-2015 -- License : BSD3 -- -- Maintainer : ekmett@gmail.com -- Stability : experimental -- Portability : non-portable -- ----------------------------------------------------------------------------- module Text.Trifecta.Parser ( Parser(..) , manyAccum -- * Feeding a parser more more input , Step(..) , feed , starve , stepParser , stepResult , stepIt -- * Parsing , runParser , parseFromFile , parseFromFileEx , parseString , parseByteString , parseTest ) where import Control.Applicative as Alternative import Control.Monad (MonadPlus(..), ap, join) import Control.Monad.IO.Class import qualified Control.Monad.Fail as Fail import Data.ByteString as Strict hiding (empty, snoc) import Data.ByteString.UTF8 as UTF8 import Data.Maybe (fromMaybe, isJust) #if !(MIN_VERSION_base(4,11,0)) import Data.Semigroup #endif import Data.Semigroup.Reducer -- import Data.Sequence as Seq hiding (empty) import Data.Set as Set hiding (empty, toList) import System.IO import Text.Parser.Combinators import Text.Parser.Char import Text.Parser.LookAhead import Text.Parser.Token import Text.PrettyPrint.ANSI.Leijen as Pretty hiding (line, (<>), (<$>), empty) import Text.Trifecta.Combinators import Text.Trifecta.Delta as Delta import Text.Trifecta.Instances () import Text.Trifecta.Rendering import Text.Trifecta.Result import Text.Trifecta.Rope import Text.Trifecta.Util.It -- | The type of a trifecta parser -- -- The first four arguments are behavior continuations: -- -- * epsilon success: the parser has consumed no input and has a result -- as well as a possible Err; the position and chunk are unchanged -- (see `pure`) -- -- * epsilon failure: the parser has consumed no input and is failing -- with the given Err; the position and chunk are unchanged (see -- `empty`) -- -- * committed success: the parser has consumed input and is yielding -- the result, set of expected strings that would have permitted this -- parse to continue, new position, and residual chunk to the -- continuation. -- -- * committed failure: the parser has consumed input and is failing with -- a given ErrInfo (user-facing error message) -- -- The remaining two arguments are -- -- * the current position -- -- * the chunk of input currently under analysis -- -- `Parser` is an `Alternative`; trifecta's backtracking behavior encoded as -- `<|>` is to behave as the leftmost parser which yields a value -- (regardless of any input being consumed) or which consumes input and -- fails. That is, a choice of parsers will only yield an epsilon failure -- if *all* parsers in the choice do. If that is not the desired behavior, -- see `try`, which turns a committed parser failure into an epsilon failure -- (at the cost of error information). newtype Parser a = Parser { unparser :: forall r. 
(a -> Err -> It Rope r) -> (Err -> It Rope r) -> (a -> Set String -> Delta -> ByteString -> It Rope r) -- committed success -> (ErrInfo -> It Rope r) -- committed err -> Delta -> ByteString -> It Rope r } instance Functor Parser where fmap f (Parser m) = Parser $ \ eo ee co -> m (eo . f) ee (co . f) {-# INLINE fmap #-} a <$ Parser m = Parser $ \ eo ee co -> m (\_ -> eo a) ee (\_ -> co a) {-# INLINE (<$) #-} instance Applicative Parser where pure a = Parser $ \ eo _ _ _ _ _ -> eo a mempty {-# INLINE pure #-} (<*>) = ap {-# INLINE (<*>) #-} instance Alternative Parser where empty = Parser $ \_ ee _ _ _ _ -> ee mempty {-# INLINE empty #-} Parser m <|> Parser n = Parser $ \ eo ee co ce d bs -> m eo (\e -> n (\a e' -> eo a (e <> e')) (\e' -> ee (e <> e')) co ce d bs) co ce d bs {-# INLINE (<|>) #-} many p = Prelude.reverse <$> manyAccum (:) p {-# INLINE many #-} some p = (:) <$> p <*> Alternative.many p instance Semigroup a => Semigroup (Parser a) where (<>) = liftA2 (<>) {-# INLINE (<>) #-} instance (Semigroup a, Monoid a) => Monoid (Parser a) where mappend = (<>) {-# INLINE mappend #-} mempty = pure mempty {-# INLINE mempty #-} instance Monad Parser where return = pure {-# INLINE return #-} Parser m >>= k = Parser $ \ eo ee co ce d bs -> m -- epsilon result: feed result to monadic continutaion; committed -- continuations as they were given to us; epsilon callbacks merge -- error information with `<>` (\a e -> unparser (k a) (\b e' -> eo b (e <> e')) (\e' -> ee (e <> e')) co ce d bs) -- epsilon error: as given ee -- committed result: feed result to monadic continuation and... (\a es d' bs' -> unparser (k a) -- epsilon results are now committed results due to m consuming. -- -- epsilon success is now committed success at the new position -- (after m), yielding the result from (k a) and merging the -- expected sets (i.e. 
things that could have resulted in a longer -- parse) (\b e' -> co b (es <> _expected e') d' bs') -- epsilon failure is now a committed failure at the new position -- (after m); compute the error to display to the user (\e -> let errDoc = explain (renderingCaret d' bs') e { _expected = _expected e <> es } errDelta = _finalDeltas e in ce $ ErrInfo errDoc (d' : errDelta) ) -- committed behaviors as given; nothing exciting here co ce -- new position and remaining chunk after m d' bs') -- committed error, delta, and bytestring: as given ce d bs {-# INLINE (>>=) #-} (>>) = (*>) {-# INLINE (>>) #-} fail = Fail.fail {-# INLINE fail #-} instance Fail.MonadFail Parser where fail s = Parser $ \ _ ee _ _ _ _ -> ee (failed s) {-# INLINE fail #-} instance MonadPlus Parser where mzero = empty {-# INLINE mzero #-} mplus = (<|>) {-# INLINE mplus #-} manyAccum :: (a -> [a] -> [a]) -> Parser a -> Parser [a] manyAccum f (Parser p) = Parser $ \eo _ co ce d bs -> let walk xs x es d' bs' = p (manyErr d' bs') (\e -> co (f x xs) (_expected e <> es) d' bs') (walk (f x xs)) ce d' bs' manyErr d' bs' _ e = ce (ErrInfo errDoc [d']) where errDoc = explain (renderingCaret d' bs') (e <> failed "'many' applied to a parser that accepted an empty string") in p (manyErr d bs) (eo []) (walk []) ce d bs liftIt :: It Rope a -> Parser a liftIt m = Parser $ \ eo _ _ _ _ _ -> do a <- m eo a mempty {-# INLINE liftIt #-} instance Parsing Parser where try (Parser m) = Parser $ \ eo ee co _ -> m eo ee co (\_ -> ee mempty) {-# INLINE try #-} Parser m nm = Parser $ \ eo ee -> m (\a e -> eo a (if isJust (_reason e) then e { _expected = Set.singleton nm } else e)) (\e -> ee e { _expected = Set.singleton nm }) {-# INLINE () #-} skipMany p = () <$ manyAccum (\_ _ -> []) p {-# INLINE skipMany #-} unexpected s = Parser $ \ _ ee _ _ _ _ -> ee $ failed $ "unexpected " ++ s {-# INLINE unexpected #-} eof = notFollowedBy anyChar "end of input" {-# INLINE eof #-} notFollowedBy p = try (optional p >>= maybe (pure ()) (unexpected . 
show)) {-# INLINE notFollowedBy #-} instance Errable Parser where raiseErr e = Parser $ \ _ ee _ _ _ _ -> ee e {-# INLINE raiseErr #-} instance LookAheadParsing Parser where lookAhead (Parser m) = Parser $ \eo ee _ -> m eo ee (\a _ _ _ -> eo a mempty) {-# INLINE lookAhead #-} instance CharParsing Parser where satisfy f = Parser $ \ _ ee co _ d bs -> case UTF8.uncons $ Strict.drop (fromIntegral (columnByte d)) bs of Nothing -> ee (failed "unexpected EOF") Just (c, xs) | not (f c) -> ee mempty | Strict.null xs -> let !ddc = d <> delta c in join $ fillIt (co c mempty ddc (if c == '\n' then mempty else bs)) (co c mempty) ddc | otherwise -> co c mempty (d <> delta c) bs {-# INLINE satisfy #-} instance TokenParsing Parser instance DeltaParsing Parser where line = Parser $ \eo _ _ _ _ bs -> eo bs mempty {-# INLINE line #-} position = Parser $ \eo _ _ _ d _ -> eo d mempty {-# INLINE position #-} rend = Parser $ \eo _ _ _ d bs -> eo (rendered d bs) mempty {-# INLINE rend #-} slicedWith f p = do m <- position a <- p r <- position f a <$> liftIt (sliceIt m r) {-# INLINE slicedWith #-} instance MarkParsing Delta Parser where mark = position {-# INLINE mark #-} release d' = Parser $ \_ ee co _ d bs -> do mbs <- rewindIt d' case mbs of Just bs' -> co () mempty d' bs' Nothing | bytes d' == bytes (rewind d) + fromIntegral (Strict.length bs) -> if near d d' then co () mempty d' bs else co () mempty d' mempty | otherwise -> ee mempty -- | A 'Step' allows for incremental parsing, since the parser -- -- - can be done with a final result -- - have errored -- - can have yielded a partial result with possibly more to come data Step a = StepDone !Rope a -- ^ Parsing is done and has converted the 'Rope' to a final result | StepFail !Rope ErrInfo -- ^ Parsing the 'Rope' has failed with an error | StepCont !Rope (Result a) (Rope -> Step a) -- ^ The 'Rope' has been partially consumed and already yielded a 'Result', -- and if more input is provided, more results can be produced. -- -- One common scenario for this is to parse log files: after parsing a -- single line, that data can already be worked with, but there may be more -- lines to come. instance Show a => Show (Step a) where showsPrec d (StepDone r a) = showParen (d > 10) $ showString "StepDone " . showsPrec 11 r . showChar ' ' . showsPrec 11 a showsPrec d (StepFail r xs) = showParen (d > 10) $ showString "StepFail " . showsPrec 11 r . showChar ' ' . showsPrec 11 xs showsPrec d (StepCont r fin _) = showParen (d > 10) $ showString "StepCont " . showsPrec 11 r . showChar ' ' . showsPrec 11 fin . showString " ..." instance Functor Step where fmap f (StepDone r a) = StepDone r (f a) fmap _ (StepFail r xs) = StepFail r xs fmap f (StepCont r z k) = StepCont r (fmap f z) (fmap f . k) -- | Feed some additional input to a 'Step' to continue parsing a bit further. feed :: Reducer t Rope => t -> Step r -> Step r feed t (StepDone r a) = StepDone (snoc r t) a feed t (StepFail r xs) = StepFail (snoc r t) xs feed t (StepCont r _ k) = k (snoc r t) {-# INLINE feed #-} -- | Assume all possible input has been given to the parser, execute it to yield -- a final result. 
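--
-- A minimal sketch of the intended feed/starve cycle (the parser and the
-- input chunks below are illustrative, not part of the library):
--
-- > -- run a parser incrementally over two chunks, then declare end of input
-- > incrementally :: Result String
-- > incrementally =
-- >   starve                          -- no more input will arrive
-- >     (feed ("456\n" :: String)     -- second chunk
-- >       (feed ("123" :: String)     -- first chunk
-- >         (stepParser (some digit) mempty)))
-- >
-- > -- this should evaluate to Success "123456": the digits span both
-- > -- chunks, and the final newline ends the 'some digit' run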
starve :: Step a -> Result a starve (StepDone _ a) = Success a starve (StepFail _ xs) = Failure xs starve (StepCont _ z _) = z {-# INLINE starve #-} stepResult :: Rope -> Result a -> Step a stepResult r (Success a) = StepDone r a stepResult r (Failure xs) = StepFail r xs {-# INLINE stepResult #-} stepIt :: It Rope a -> Step a stepIt = go mempty where go r m = case simplifyIt m r of Pure a -> StepDone r a It a k -> StepCont r (pure a) $ \r' -> go r' (k r') {-# INLINE stepIt #-} data Stepping a = EO a Err | EE Err | CO a (Set String) Delta ByteString | CE ErrInfo -- | Incremental parsing. A 'Step' can be supplied with new input using 'feed', -- the final 'Result' is obtained using 'starve'. stepParser :: Parser a -> Delta -- ^ Starting cursor position. Usually 'mempty' for the beginning of the file. -> Step a stepParser (Parser p) d0 = joinStep $ stepIt $ do bs0 <- fromMaybe mempty <$> rewindIt d0 go bs0 <$> p eo ee co ce d0 bs0 where eo a e = Pure (EO a e) ee e = Pure (EE e) co a es d' bs = Pure (CO a es d' bs) ce errInf = Pure (CE errInf) go :: ByteString -> Stepping a -> Result a go _ (EO a _) = Success a go bs0 (EE e) = Failure $ let errDoc = explain (renderingCaret d0 bs0) e in ErrInfo errDoc (d0 : _finalDeltas e) go _ (CO a _ _ _) = Success a go _ (CE e) = Failure e joinStep :: Step (Result a) -> Step a joinStep (StepDone r (Success a)) = StepDone r a joinStep (StepDone r (Failure e)) = StepFail r e joinStep (StepFail r e) = StepFail r e joinStep (StepCont r a k) = StepCont r (join a) (joinStep <$> k) {-# INLINE joinStep #-} -- | Run a 'Parser' on input that can be reduced to a 'Rope', e.g. 'String', or -- 'ByteString'. See also the monomorphic versions 'parseString' and -- 'parseByteString'. runParser :: Reducer t Rope => Parser a -> Delta -- ^ Starting cursor position. Usually 'mempty' for the beginning of the file. -> t -> Result a runParser p d bs = starve $ feed bs $ stepParser p d {-# INLINE runParser #-} -- | @('parseFromFile' p filePath)@ runs a parser @p@ on the input read from -- @filePath@ using 'ByteString.readFile'. All diagnostic messages emitted over -- the course of the parse attempt are shown to the user on the console. -- -- > main = do -- > result <- parseFromFile numbers "digits.txt" -- > case result of -- > Nothing -> return () -- > Just a -> print $ sum a parseFromFile :: MonadIO m => Parser a -> String -> m (Maybe a) parseFromFile p fn = do result <- parseFromFileEx p fn case result of Success a -> return (Just a) Failure xs -> do liftIO $ displayIO stdout $ renderPretty 0.8 80 $ (_errDoc xs) <> linebreak return Nothing -- | @('parseFromFileEx' p filePath)@ runs a parser @p@ on the input read from -- @filePath@ using 'ByteString.readFile'. Returns all diagnostic messages -- emitted over the course of the parse and the answer if the parse was -- successful. -- -- > main = do -- > result <- parseFromFileEx (many number) "digits.txt" -- > case result of -- > Failure xs -> displayLn xs -- > Success a -> print (sum a) parseFromFileEx :: MonadIO m => Parser a -> String -> m (Result a) parseFromFileEx p fn = do s <- liftIO $ Strict.readFile fn return $ parseByteString p (Directed (UTF8.fromString fn) 0 0 0 0) s -- | Fully parse a 'UTF8.ByteString' to a 'Result'. -- -- @parseByteString p delta i@ runs a parser @p@ on @i@. parseByteString :: Parser a -> Delta -- ^ Starting cursor position. Usually 'mempty' for the beginning of the file. -> UTF8.ByteString -> Result a parseByteString = runParser -- | Fully parse a 'String' to a 'Result'. 
-- -- @parseByteString p delta i@ runs a parser @p@ on @i@. parseString :: Parser a -> Delta -- ^ Starting cursor position. Usually 'mempty' for the beginning of the file. -> String -> Result a parseString = runParser parseTest :: (MonadIO m, Show a) => Parser a -> String -> m () parseTest p s = case parseByteString p mempty (UTF8.fromString s) of Failure xs -> liftIO $ displayIO stdout $ renderPretty 0.8 80 $ (_errDoc xs) <> linebreak -- TODO: retrieve columns Success a -> liftIO (print a) trifecta-2/src/Text/Trifecta/Highlight.hs0000644000000000000000000001247013316775526016746 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TemplateHaskell #-} #ifndef MIN_VERSION_lens #define MIN_VERSION_lens(x,y,z) 1 #endif ----------------------------------------------------------------------------- -- | -- Copyright : (C) 2011-2015 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : Edward Kmett -- Stability : experimental -- Portability : non-portable -- ---------------------------------------------------------------------------- module Text.Trifecta.Highlight ( Highlight , HighlightedRope(HighlightedRope) , HasHighlightedRope(..) , withHighlight , HighlightDoc(HighlightDoc) , HasHighlightDoc(..) , doc ) where import Control.Lens #if MIN_VERSION_lens(4,13,0) && __GLASGOW_HASKELL__ >= 710 hiding (Empty) #endif import Data.Foldable as F import Data.Int (Int64) import Data.List (sort) import Data.Semigroup import Data.Semigroup.Union import Prelude hiding (head) import Text.Blaze import Text.Blaze.Html5 hiding (a,b,i) import qualified Text.Blaze.Html5 as Html5 import Text.Blaze.Html5.Attributes hiding (title,id) import Text.Blaze.Internal (MarkupM(Empty, Leaf)) import Text.Parser.Token.Highlight import Text.PrettyPrint.ANSI.Leijen hiding ((<>)) import qualified Data.ByteString.Lazy.Char8 as L import qualified Data.ByteString.Lazy.UTF8 as LazyUTF8 import Text.Trifecta.Util.IntervalMap as IM import Text.Trifecta.Delta import Text.Trifecta.Rope -- | Convert a 'Highlight' into a coloration on a 'Doc'. withHighlight :: Highlight -> Doc -> Doc withHighlight Comment = blue withHighlight ReservedIdentifier = magenta withHighlight ReservedConstructor = magenta withHighlight EscapeCode = magenta withHighlight Operator = yellow withHighlight CharLiteral = cyan withHighlight StringLiteral = cyan withHighlight Constructor = bold withHighlight ReservedOperator = yellow withHighlight ConstructorOperator = yellow withHighlight ReservedConstructorOperator = yellow withHighlight _ = id -- | A 'HighlightedRope' is a 'Rope' with an associated 'IntervalMap' full of highlighted regions. data HighlightedRope = HighlightedRope { _ropeHighlights :: !(IM.IntervalMap Delta Highlight) , _ropeContent :: {-# UNPACK #-} !Rope } makeClassy ''HighlightedRope instance HasDelta HighlightedRope where delta = delta . _ropeContent instance HasBytes HighlightedRope where bytes = bytes . 
_ropeContent instance Semigroup HighlightedRope where HighlightedRope h bs <> HighlightedRope h' bs' = HighlightedRope (h `union` IM.offset (delta bs) h') (bs <> bs') instance Monoid HighlightedRope where mappend = (<>) mempty = HighlightedRope mempty mempty data Located a = a :@ {-# UNPACK #-} !Int64 infix 5 :@ instance Eq (Located a) where _ :@ m == _ :@ n = m == n instance Ord (Located a) where compare (_ :@ m) (_ :@ n) = compare m n instance ToMarkup HighlightedRope where toMarkup (HighlightedRope intervals r) = Html5.pre $ go 0 lbs effects where lbs = L.fromChunks [bs | Strand bs _ <- F.toList (strands r)] ln no = Html5.a ! name (toValue $ "line-" ++ show no) $ emptyMarkup effects = sort $ [ i | (Interval lo hi, tok) <- intersections mempty (delta r) intervals , i <- [ (leafMarkup "span" "" ! class_ (toValue $ show tok)) :@ bytes lo , preEscapedToHtml ("" :: String) :@ bytes hi ] ] ++ imap (\k i -> ln k :@ i) (L.elemIndices '\n' lbs) go _ cs [] = unsafeLazyByteString cs go b cs ((eff :@ eb) : es) | eb <= b = eff >> go b cs es | otherwise = unsafeLazyByteString om >> go eb nom es where (om,nom) = L.splitAt (fromIntegral (eb - b)) cs #if MIN_VERSION_blaze_markup(0,8,0) emptyMarkup = Empty () leafMarkup a b c = Leaf a b c () #else emptyMarkup = Empty leafMarkup a b c = Leaf a b c #endif instance Pretty HighlightedRope where pretty (HighlightedRope intervals r) = go mempty lbs boundaries where lbs = L.fromChunks [bs | Strand bs _ <- F.toList (strands r)] ints = intersections mempty (delta r) intervals boundaries = sort [ i | (Interval lo hi, _) <- ints, i <- [ lo, hi ] ] dominated l h = Prelude.foldr (fmap . withHighlight . snd) id (dominators l h intervals) go l cs [] = dominated l (delta r) $ pretty (LazyUTF8.toString cs) go l cs (h:es) = dominated l h (pretty (LazyUTF8.toString om)) <> go h nom es where (om,nom) = L.splitAt (fromIntegral (bytes h - bytes l)) cs -- | Represents a source file like an HsColour rendered document data HighlightDoc = HighlightDoc { _docTitle :: String , _docCss :: String -- href for the css file , _docContent :: HighlightedRope } makeClassy ''HighlightDoc -- | Generate an HTML document from a title and a 'HighlightedRope'. doc :: String -> HighlightedRope -> HighlightDoc doc t r = HighlightDoc t "trifecta.css" r instance ToMarkup HighlightDoc where toMarkup (HighlightDoc t css cs) = docTypeHtml $ do head $ do preEscapedToHtml ("\n" :: String) title $ toHtml t link ! rel "stylesheet" ! type_ "text/css" ! href (toValue css) body $ toHtml cs trifecta-2/src/Text/Trifecta/Result.hs0000644000000000000000000001325413316775526016316 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE UndecidableInstances #-} ----------------------------------------------------------------------------- -- | -- Copyright : (c) Edward Kmett 2011-2015 -- License : BSD3 -- -- Maintainer : ekmett@gmail.com -- Stability : experimental -- Portability : non-portable -- -- Results and Parse Errors ----------------------------------------------------------------------------- module Text.Trifecta.Result ( -- * Parse Results Result(..) , AsResult(..) , foldResult , _Success , _Failure -- * Parsing Errors , Err(..), HasErr(..), Errable(..) , ErrInfo(..) 
, explain , failed ) where import Control.Applicative as Alternative import Control.Lens hiding (cons, snoc) import Control.Monad (guard) import Data.Foldable import qualified Data.List as List import Data.Maybe (fromMaybe, isJust) #if !(MIN_VERSION_base(4,11,0)) import Data.Semigroup #endif import Data.Set as Set hiding (empty, toList) import Text.PrettyPrint.ANSI.Leijen as Pretty hiding (empty, line, (<$>), (<>)) import Text.Trifecta.Delta as Delta import Text.Trifecta.Instances () import Text.Trifecta.Rendering data ErrInfo = ErrInfo { _errDoc :: Doc , _errDeltas :: [Delta] } deriving(Show) -- | This is used to report an error. What went wrong, some supplemental docs -- and a set of things expected at the current location. This does not, however, -- include the actual location. data Err = Err { _reason :: Maybe Doc , _footnotes :: [Doc] , _expected :: Set String , _finalDeltas :: [Delta] } makeClassy ''Err instance Semigroup Err where Err md mds mes delta1 <> Err nd nds nes delta2 = Err (nd <|> md) (if isJust nd then nds else if isJust md then mds else nds ++ mds) (mes <> nes) (delta1 <> delta2) {-# INLINE (<>) #-} instance Monoid Err where mempty = Err Nothing [] mempty mempty {-# INLINE mempty #-} mappend = (<>) {-# INLINE mappend #-} -- | Generate a simple 'Err' word-wrapping the supplied message. failed :: String -> Err failed m = Err (Just (fillSep (pretty <$> words m))) [] mempty mempty {-# INLINE failed #-} -- | Convert a 'Rendering' of auxiliary information and an 'Err' into a 'Doc', -- ready to be prettyprinted to the user. explain :: Rendering -> Err -> Doc explain r (Err mm as es _) | Set.null es = report (withEx mempty) | isJust mm = report $ withEx $ Pretty.char ',' <+> expecting | otherwise = report expecting where now = spaceHack $ toList es spaceHack [""] = ["space"] spaceHack xs = List.filter (/= "") xs withEx x = fromMaybe (fillSep $ text <$> words "unspecified error") mm <> x expecting = text "expected:" <+> fillSep (punctuate (Pretty.char ',') (text <$> now)) report txt = vsep $ [pretty (delta r) <> Pretty.char ':' <+> red (text "error") <> Pretty.char ':' <+> nest 4 txt] <|> pretty r <$ guard (not (nullRendering r)) <|> as class Errable m where raiseErr :: Err -> m a instance Monoid ErrInfo where mempty = ErrInfo mempty mempty mappend = (<>) instance Semigroup ErrInfo where ErrInfo xs d1 <> ErrInfo ys d2 = ErrInfo (vsep [xs, ys]) (max d1 d2) -- | The result of parsing. Either we succeeded or something went wrong. data Result a = Success a | Failure ErrInfo deriving (Show,Functor,Foldable,Traversable) -- | Fold over a 'Result' foldResult :: (ErrInfo -> b) -> (a -> b) -> Result a -> b foldResult f g r = case r of Failure e -> f e Success a -> g a -- | A 'Prism' that lets you embed or retrieve a 'Result' in a potentially larger type. class AsResult s t a b | s -> a, t -> b, s b -> t, t a -> s where _Result :: Prism s t (Result a) (Result b) instance AsResult (Result a) (Result b) a b where _Result = id {-# INLINE _Result #-} -- | The 'Prism' for the 'Success' constructor of 'Result' _Success :: AsResult s t a b => Prism s t a b _Success = _Result . dimap seta (either id id) . right' . rmap (fmap Success) where seta (Success a) = Right a seta (Failure e) = Left (pure (Failure e)) {-# INLINE _Success #-} -- | The 'Prism' for the 'Failure' constructor of 'Result' _Failure :: AsResult s s a a => Prism' s ErrInfo _Failure = _Result . dimap seta (either id id) . right' . 
rmap (fmap Failure) where seta (Failure e) = Right e seta (Success a) = Left (pure (Success a)) {-# INLINE _Failure #-} instance Show a => Pretty (Result a) where pretty (Success a) = pretty (show a) pretty (Failure xs) = pretty (_errDoc xs) instance Applicative Result where pure = Success {-# INLINE pure #-} Success f <*> Success a = Success (f a) Success _ <*> Failure y = Failure y Failure x <*> Success _ = Failure x Failure x <*> Failure y = Failure $ ErrInfo (vsep [_errDoc x, _errDoc y]) (_errDeltas x <> _errDeltas y) {-# INLINE (<*>) #-} instance Alternative Result where Failure x <|> Failure y = Failure $ ErrInfo (vsep [_errDoc x, _errDoc y]) (_errDeltas x <> _errDeltas y) Success a <|> Success _ = Success a Success a <|> Failure _ = Success a Failure _ <|> Success a = Success a {-# INLINE (<|>) #-} empty = Failure mempty {-# INLINE empty #-} instance Monad Result where return = pure Success a >>= m = m a Failure e >>= _ = Failure e trifecta-2/src/Text/Trifecta/Util/0000755000000000000000000000000013316775526015414 5ustar0000000000000000trifecta-2/src/Text/Trifecta/Util/IntervalMap.hs0000644000000000000000000002144713316775526020202 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE TypeFamilies #-} #ifndef MIN_VERSION_lens #define MIN_VERSION_lens(x,y,z) 1 #endif ----------------------------------------------------------------------------- -- | -- Module : Text.Trifecta.Util.IntervalMap -- Copyright : (c) Edward Kmett 2011-2015 -- (c) Ross Paterson 2008 -- License : BSD-style -- Maintainer : ekmett@gmail.com -- Stability : experimental -- Portability : non-portable (MPTCs, type families, functional dependencies) -- -- Interval maps implemented using the 'FingerTree' type, following -- section 4.8 of -- -- * Ralf Hinze and Ross Paterson, -- \"Finger trees: a simple general-purpose data structure\", -- /Journal of Functional Programming/ 16:2 (2006) pp 197-217. -- -- -- An amortized running time is given for each operation, with /n/ -- referring to the size of the priority queue. These bounds hold even -- in a persistent (shared) setting. -- -- /Note/: Many of these operations have the same names as similar -- operations on lists in the "Prelude". The ambiguity may be resolved -- using either qualification or the @hiding@ clause. -- -- Unlike "Data.IntervalMap.FingerTree", this version sorts things so -- that the largest interval from a given point comes first. This way -- if you have nested intervals, you get the outermost interval before -- the contained intervals. ----------------------------------------------------------------------------- module Text.Trifecta.Util.IntervalMap ( -- * Intervals Interval(..) -- * Interval maps , IntervalMap(..), singleton, insert -- * Searching , search, intersections, dominators -- * Prepending an offset onto every interval in the map , offset -- * The result monoid , IntInterval(..) 
, fromList ) where #if __GLASGOW_HASKELL__ < 710 import Control.Applicative hiding (empty) import Data.Foldable (Foldable (foldMap)) #endif #if MIN_VERSION_lens(4,13,0) && __GLASGOW_HASKELL__ >= 710 import Control.Lens hiding ((:<), (<|), (|>)) #else import Control.Lens hiding ((<|), (|>)) #endif import Data.FingerTree (FingerTree, Measured (..), ViewL (..), (<|), (><)) import qualified Data.FingerTree as FT #if !(MIN_VERSION_base(4,11,0)) import Data.Semigroup #endif import Data.Semigroup.Reducer import Data.Semigroup.Union ---------------------------------- -- 4.8 Application: interval trees ---------------------------------- -- | A closed interval. The lower bound should be less than or equal -- to the higher bound. data Interval v = Interval { low :: v, high :: v } deriving Show instance Ord v => Semigroup (Interval v) where Interval a b <> Interval c d = Interval (min a c) (max b d) -- assumes the monoid and ordering are compatible. instance (Ord v, Monoid v) => Reducer v (Interval v) where unit v = Interval v v cons v (Interval a b) = Interval (v `mappend` a) (v `mappend` b) snoc (Interval a b) v = Interval (a `mappend` v) (b `mappend` v) instance Eq v => Eq (Interval v) where Interval a b == Interval c d = a == c && d == b instance Ord v => Ord (Interval v) where compare (Interval a b) (Interval c d) = case compare a c of LT -> LT EQ -> compare d b -- reversed to put larger intervals first GT -> GT instance Functor Interval where fmap f (Interval a b) = Interval (f a) (f b) instance Foldable Interval where foldMap f (Interval a b) = f a `mappend` f b instance Traversable Interval where traverse f (Interval a b) = Interval <$> f a <*> f b data Node v a = Node (Interval v) a instance Functor (Node v) where fmap f (Node i x) = Node i (f x) instance FunctorWithIndex (Interval v) (Node v) where imap f (Node i x) = Node i (f i x) instance Foldable (Node v) where foldMap f (Node _ x) = f x instance FoldableWithIndex (Interval v) (Node v) where ifoldMap f (Node k v) = f k v instance Traversable (Node v) where traverse f (Node i x) = Node i <$> f x instance TraversableWithIndex (Interval v) (Node v) where itraverse f (Node i x) = Node i <$> f i x -- rightmost interval (including largest lower bound) and largest upper bound. data IntInterval v = NoInterval | IntInterval (Interval v) v instance Ord v => Monoid (IntInterval v) where mempty = NoInterval mappend = (<>) instance Ord v => Semigroup (IntInterval v) where NoInterval <> i = i i <> NoInterval = i IntInterval _ hi1 <> IntInterval int2 hi2 = IntInterval int2 (max hi1 hi2) instance Ord v => Measured (IntInterval v) (Node v a) where measure (Node i _) = IntInterval i (high i) -- | Map of closed intervals, possibly with duplicates. -- The 'Foldable' and 'Traversable' instances process the intervals in -- lexicographical order. 
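--
-- A small usage sketch (hypothetical values, relying only on 'fromList' and
-- 'search' from this module):
--
-- > -- two overlapping intervals over Int keys
-- > example :: [(Interval Int, String)]
-- > example = search 4 (fromList [(1, 5, "outer"), (3, 7, "inner")])
--
-- Both intervals contain the point 4, so both entries are returned, with the
-- interval that starts earlier listed first.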
newtype IntervalMap v a = IntervalMap { runIntervalMap :: FingerTree (IntInterval v) (Node v a) } -- ordered lexicographically by interval instance Functor (IntervalMap v) where fmap f (IntervalMap t) = IntervalMap (FT.unsafeFmap (fmap f) t) instance FunctorWithIndex (Interval v) (IntervalMap v) where imap f (IntervalMap t) = IntervalMap (FT.unsafeFmap (imap f) t) instance Foldable (IntervalMap v) where foldMap f (IntervalMap t) = foldMap (foldMap f) t instance FoldableWithIndex (Interval v) (IntervalMap v) where ifoldMap f (IntervalMap t) = foldMap (ifoldMap f) t instance Traversable (IntervalMap v) where traverse f (IntervalMap t) = IntervalMap <$> FT.unsafeTraverse (traverse f) t instance TraversableWithIndex (Interval v) (IntervalMap v) where itraverse f (IntervalMap t) = IntervalMap <$> FT.unsafeTraverse (itraverse f) t instance Ord v => Measured (IntInterval v) (IntervalMap v a) where measure (IntervalMap m) = measure m largerError :: a largerError = error "Text.Trifecta.IntervalMap.larger: the impossible happened" -- | /O(m log (n/\//m))/. Merge two interval maps. -- The map may contain duplicate intervals; entries with equal intervals -- are kept in the original order. instance Ord v => HasUnion (IntervalMap v a) where union (IntervalMap xs) (IntervalMap ys) = IntervalMap (merge1 xs ys) where merge1 as bs = case FT.viewl as of EmptyL -> bs a@(Node i _) :< as' -> l >< a <| merge2 as' r where (l, r) = FT.split larger bs larger (IntInterval k _) = k >= i larger _ = largerError merge2 as bs = case FT.viewl bs of EmptyL -> as b@(Node i _) :< bs' -> l >< b <| merge1 r bs' where (l, r) = FT.split larger as larger (IntInterval k _) = k >= i larger _ = largerError instance Ord v => HasUnion0 (IntervalMap v a) where empty = IntervalMap FT.empty instance Ord v => Monoid (IntervalMap v a) where mempty = empty mappend = (<>) instance Ord v => Semigroup (IntervalMap v a) where (<>) = union -- | /O(n)/. Add a delta to each interval in the map offset :: (Ord v, Monoid v) => v -> IntervalMap v a -> IntervalMap v a offset v (IntervalMap m) = IntervalMap $ FT.fmap' (\(Node (Interval lo hi) a) -> Node (Interval (mappend v lo) (mappend v hi)) a) m -- | /O(1)/. Interval map with a single entry. singleton :: Ord v => Interval v -> a -> IntervalMap v a singleton i x = IntervalMap (FT.singleton (Node i x)) -- | /O(log n)/. Insert an interval into a map. -- The map may contain duplicate intervals; the new entry will be inserted -- before any existing entries for the same interval. insert :: Ord v => v -> v -> a -> IntervalMap v a -> IntervalMap v a insert lo hi _ m | lo > hi = m insert lo hi x (IntervalMap t) = IntervalMap (l >< Node i x <| r) where i = Interval lo hi (l, r) = FT.split larger t larger (IntInterval k _) = k >= i larger _ = largerError -- | /O(k log (n/\//k))/. All intervals that contain the given interval, -- in lexicographical order. dominators :: Ord v => v -> v -> IntervalMap v a -> [(Interval v, a)] dominators i j = intersections j i -- | /O(k log (n/\//k))/. All intervals that contain the given point, -- in lexicographical order. search :: Ord v => v -> IntervalMap v a -> [(Interval v, a)] search p = intersections p p -- | /O(k log (n/\//k))/. All intervals that intersect with the given -- interval, in lexicographical order. 
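--
-- Continuing the hypothetical map sketched above, querying with the range
-- @[6, 10]@ should return only the @(3, 7)@ entry, because the @(1, 5)@
-- interval ends before the query range begins:
--
-- > intersections 6 10 (fromList [(1, 5, "outer"), (3, 7, "inner")])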
intersections :: Ord v => v -> v -> IntervalMap v a -> [(Interval v, a)] intersections lo hi (IntervalMap t) = matches (FT.takeUntil (greater hi) t) where matches xs = case FT.viewl (FT.dropUntil (atleast lo) xs) of EmptyL -> [] Node i x :< xs' -> (i, x) : matches xs' atleast :: Ord v => v -> IntInterval v -> Bool atleast k (IntInterval _ hi) = k <= hi atleast _ _ = False greater :: Ord v => v -> IntInterval v -> Bool greater k (IntInterval i _) = low i > k greater _ _ = False fromList :: Ord v => [(v, v, a)] -> IntervalMap v a fromList = foldr ins empty where ins (lo, hi, n) = insert lo hi n trifecta-2/src/Text/Trifecta/Util/Combinators.hs0000644000000000000000000000232313316775526020230 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Copyright : (C) 2011-2015 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : Edward Kmett -- Stability : experimental -- Portability : non-portable -- ---------------------------------------------------------------------------- module Text.Trifecta.Util.Combinators ( argmin , argmax -- * ByteString conversions , fromLazy , toLazy , takeLine , (<$!>) ) where import Data.ByteString as Strict import Data.ByteString.Lazy as Lazy argmin :: Ord b => (a -> b) -> a -> a -> a argmin f a b | f a <= f b = a | otherwise = b {-# INLINE argmin #-} argmax :: Ord b => (a -> b) -> a -> a -> a argmax f a b | f a > f b = a | otherwise = b {-# INLINE argmax #-} fromLazy :: Lazy.ByteString -> Strict.ByteString fromLazy = Strict.concat . Lazy.toChunks toLazy :: Strict.ByteString -> Lazy.ByteString toLazy = Lazy.fromChunks . return takeLine :: Lazy.ByteString -> Lazy.ByteString takeLine s = case Lazy.elemIndex 10 s of Just i -> Lazy.take (i + 1) s Nothing -> s infixl 4 <$!> (<$!>) :: Monad m => (a -> b) -> m a -> m b f <$!> m = do a <- m return $! f a trifecta-2/src/Text/Trifecta/Util/It.hs0000644000000000000000000001702313316775526016327 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE MagicHash #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UnboxedTuples #-} ----------------------------------------------------------------------------- -- | -- Module : Text.Trifecta.Util.It -- Copyright : (C) 2011-2015 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : Edward Kmett -- Stability : experimental -- Portability : non-portable -- -- harder, better, faster, stronger... ---------------------------------------------------------------------------- module Text.Trifecta.Util.It ( It(Pure, It) , needIt , wantIt , simplifyIt , foldIt , runIt , fillIt , rewindIt , sliceIt ) where #if __GLASGOW_HASKELL__ < 710 import Control.Applicative #endif import Control.Comonad import Control.Monad #if __GLASGOW_HASKELL__ < 710 import Data.Monoid #endif import Data.ByteString as Strict import Data.ByteString.Lazy as Lazy import Data.Profunctor import Text.Trifecta.Delta import Text.Trifecta.Rope import Text.Trifecta.Util.Combinators as Util -- $setup -- >>> import Control.Comonad (extract) -- >>> import Data.ByteString as Strict -- >>> import Text.Trifecta.Delta -- >>> import Text.Trifecta.Util.It -- | @'It'@ is an -- that can produce partial results. -- -- @'It' r a@ consumes a feed of @r@s and produces @a@s on the way. New values -- can be fed using @'simplifyIt'@, the current (partial or final) result is -- extracted using @'extract'@. 
-- -- >>> let keepIt a = Pure a -- >>> let replaceIt a = It a replaceIt -- -- >>> extract (keepIt 0) -- 0 -- -- >>> extract (replaceIt 0) -- 0 -- -- >>> extract (simplifyIt (keepIt 0) 5) -- 0 -- -- >>> extract (simplifyIt (replaceIt 0) 5) -- 5 data It r a = Pure a -- ^ Final result, rest of the feed is discarded | It a (r -> It r a) -- ^ Intermediate result, consumed values produce new results instance Show a => Show (It r a) where showsPrec d (Pure a) = showParen (d > 10) $ showString "Pure " . showsPrec 11 a showsPrec d (It a _) = showParen (d > 10) $ showString "It " . showsPrec 11 a . showString " ..." instance Functor (It r) where fmap f (Pure a) = Pure $ f a fmap f (It a k) = It (f a) $ fmap f . k instance Profunctor It where rmap = fmap lmap _ (Pure a) = Pure a lmap f (It a g) = It a (lmap f . g . f) instance Applicative (It r) where pure = Pure Pure f <*> Pure a = Pure $ f a Pure f <*> It a ka = It (f a) $ fmap f . ka It f kf <*> Pure a = It (f a) $ fmap ($a) . kf It f kf <*> It a ka = It (f a) $ \r -> kf r <*> ka r indexIt :: It r a -> r -> a indexIt (Pure a) _ = a indexIt (It _ k) r = extract (k r) -- | Feed a value to 'It', obtaining a new (partial or final) result. simplifyIt :: It r a -> r -> It r a simplifyIt (It _ k) r = k r simplifyIt pa _ = pa instance Monad (It r) where return = pure Pure a >>= f = f a It a k >>= f = It (extract (f a)) $ \r -> case k r of It a' k' -> It (indexIt (f a') r) $ k' >=> f Pure a' -> simplifyIt (f a') r instance ComonadApply (It r) where (<@>) = (<*>) -- | 'It' is a cofree comonad instance Comonad (It r) where duplicate p@Pure{} = Pure p duplicate p@(It _ k) = It p (duplicate . k) extend f p@Pure{} = Pure (f p) extend f p@(It _ k) = It (f p) (extend f . k) extract (Pure a) = a extract (It a _) = a -- | Consumes input until a value can be produced. -- -- >>> :{ -- let needTen = needIt 0 (\n -> if n < 10 then Nothing else Just n) :: It Int Int -- :} -- -- >>> extract needTen -- 0 -- -- >>> extract (simplifyIt needTen 5) -- 0 -- -- >>> extract (simplifyIt needTen 11) -- 11 -- -- >>> extract (simplifyIt (simplifyIt (simplifyIt needTen 5) 11) 15) -- 11 needIt :: a -- ^ Initial result -> (r -> Maybe a) -- ^ Produce a result if possible -> It r a needIt z f = k where k = It z $ \r -> case f r of Just a -> Pure a Nothing -> k -- | Consumes input and produces partial results until a condition is met. -- Unlike 'needIt', partial results are already returned when the condition is -- not fulfilled yet. -- -- > >>> :{ -- > let wantTen :: It Int Int -- > wantTen = wantIt 0 (\n -> (# n >= 10, n #)) -- > :} -- -- > >>> extract wantTen -- > 0 -- -- > >>> extract (simplifyIt wantTen 5) -- > 5 -- -- > >>> extract (simplifyIt wantTen 11) -- > 11 -- -- > >>> extract (simplifyIt (simplifyIt (simplifyIt wantTen 5) 11) 15) -- > 11 wantIt :: a -- ^ Initial result -> (r -> (# Bool, a #)) -- ^ Produce a partial or final result -> It r a wantIt z f = It z k where k r = case f r of (# False, a #) -> It a k (# True, a #) -> Pure a -- | The generalized fold (Böhm-Berarducci decoding) over 'It r a'. -- -- 'foldIt' satisfies the property: -- -- @foldIt Pure It = id@ foldIt :: (a -> o) -> (a -> (r -> o) -> o) -> It r a -> o foldIt p _ (Pure a) = p a foldIt p i (It a k) = i a (\r -> foldIt p i (k r)) -- | Scott decoding of 'It r a'. -- -- The scott decoding is similar to the generalized fold over a data type, but -- leaves the recursion step to the calling function. 
-- -- 'runIt' satiesfies the property: -- -- @runIt Pure It = id@ -- -- See also the Scott decoding of lists: -- -- @runList :: (a -> [a] -> b) -> b -> [a] -> b@ -- -- and compare it with 'foldr' (the Böhm-Berarducci decoding for lists): -- -- @foldr :: (a -> b -> b) -> b -> [a] -> b@ runIt :: (a -> o) -> (a -> (r -> It r a) -> o) -> It r a -> o runIt p _ (Pure a) = p a runIt _ i (It a k) = i a k -- * Rope specifics -- | Given a position, go there, and grab the rest of the line forward from that -- point. -- -- >>> :set -XOverloadedStrings -- >>> let secondLine = fillIt Nothing (const Just) (delta ("foo\nb" :: Strict.ByteString)) -- -- >>> extract secondLine -- Nothing -- -- >>> extract (simplifyIt secondLine (ropeBS "foo")) -- Nothing -- -- >>> extract (simplifyIt secondLine (ropeBS "foo\nbar")) -- Just "ar" -- -- >>> extract (simplifyIt secondLine (ropeBS "foo\nbar\nbaz")) -- Just "ar\n" fillIt :: r -> (Delta -> Strict.ByteString -> r) -> Delta -> It Rope r fillIt kf ks n = wantIt kf $ \r -> (# bytes n < bytes (rewind (delta r)) , grabLine n r kf ks #) -- | Return the text of the line that contains a given position -- -- >>> :set -XOverloadedStrings -- >>> let secondLine = rewindIt (delta ("foo\nb" :: Strict.ByteString)) -- -- >>> extract secondLine -- Nothing -- -- >>> extract (simplifyIt secondLine (ropeBS "foo")) -- Nothing -- -- >>> extract (simplifyIt secondLine (ropeBS "foo\nbar")) -- Just "bar" -- -- >>> extract (simplifyIt secondLine (ropeBS "foo\nbar\nbaz")) -- Just "bar\n" rewindIt :: Delta -> It Rope (Maybe Strict.ByteString) rewindIt n = wantIt Nothing $ \r -> (# bytes n < bytes (rewind (delta r)) , grabLine (rewind n) r Nothing $ const Just #) -- | Return the text between two offsets. -- -- >>> :set -XOverloadedStrings -- >>> let secondLine = sliceIt (delta ("foo\n" :: Strict.ByteString)) (delta ("foo\nbar\n" :: Strict.ByteString)) -- -- >>> extract secondLine -- "" -- -- >>> extract (simplifyIt secondLine (ropeBS "foo")) -- "" -- -- >>> extract (simplifyIt secondLine (ropeBS "foo\nbar")) -- "bar" -- -- >>> extract (simplifyIt secondLine (ropeBS "foo\nbar\nbaz")) -- "bar\n" sliceIt :: Delta -> Delta -> It Rope Strict.ByteString sliceIt !i !j = wantIt mempty $ \r -> (# bj < bytes (rewind (delta r)) , grabRest i r mempty $ const $ Util.fromLazy . 
Lazy.take (fromIntegral (bj - bi)) #) where bi = bytes i bj = bytes j trifecta-2/src/Text/Trifecta/Util/Array.hs0000644000000000000000000002462113316775526017033 0ustar0000000000000000{-# LANGUAGE BangPatterns, CPP, MagicHash, Rank2Types, UnboxedTuples #-} ----------------------------------------------------------------------------- -- | -- Copyright : Edward Kmett 2011-2015 -- Johan Tibell 2011 -- License : BSD3 -- -- Maintainer : ekmett@gmail.com -- Stability : experimental -- Portability : unknown -- -- Fast zero based arrays, based on the implementation in the HAMT-branch of -- unordered-containers ----------------------------------------------------------------------------- module Text.Trifecta.Util.Array ( Array , MArray -- * Creation , new , new_ , empty , singleton -- * Basic interface , length , lengthM , read , write , index , index_ , indexM_ , update , insert , delete , unsafeFreeze , run , run2 , copy , copyM -- * Folds , foldl' , foldr , thaw , map , map' , traverse , filter ) where import qualified Data.Traversable as Traversable #if __GLASGOW_HASKELL__ < 710 import Control.Applicative (Applicative) #endif import Control.DeepSeq import Control.Monad.ST import GHC.Exts ( Array#, copyArray#, copyMutableArray#, indexArray#, Int(I#), MutableArray#, newArray#, readArray#, sizeofArray#, sizeofMutableArray#, thawArray#, unsafeFreezeArray#, writeArray#) import GHC.ST (ST(..)) import Prelude hiding (filter, foldr, length, map, read #if __GLASGOW_HASKELL__ >= 710 , traverse #endif ) ------------------------------------------------------------------------ #if defined(ASSERTS) -- This fugly hack is brought by GHC's apparent reluctance to deal -- with MagicHash and UnboxedTuples when inferring types. Eek! # define CHECK_BOUNDS(_func_,_len_,_k_) \ if (_k_) < 0 || (_k_) >= (_len_) then error ("Data.HashMap.Array." ++ (_func_) ++ ": bounds error, offset " ++ show (_k_) ++ ", length " ++ show (_len_)) else # define CHECK_OP(_func_,_op_,_lhs_,_rhs_) \ if not ((_lhs_) _op_ (_rhs_)) then error ("Data.HashMap.Array." ++ (_func_) ++ ": Check failed: _lhs_ _op_ _rhs_ (" ++ show (_lhs_) ++ " vs. 
" ++ show (_rhs_) ++ ")") else # define CHECK_GT(_func_,_lhs_,_rhs_) CHECK_OP(_func_,>,_lhs_,_rhs_) # define CHECK_LE(_func_,_lhs_,_rhs_) CHECK_OP(_func_,<=,_lhs_,_rhs_) #else # define CHECK_BOUNDS(_func_,_len_,_k_) # define CHECK_OP(_func_,_op_,_lhs_,_rhs_) # define CHECK_GT(_func_,_lhs_,_rhs_) # define CHECK_LE(_func_,_lhs_,_rhs_) #endif data Array a = Array { unArray :: !(Array# a) #if __GLASGOW_HASKELL__ < 702 , length :: {-# UNPACK #-} !Int #endif } #if __GLASGOW_HASKELL__ >= 702 length :: Array a -> Int length ary = I# (sizeofArray# (unArray ary)) {-# INLINE length #-} #endif -- | Smart constructor array :: Array# a -> Int -> Array a #if __GLASGOW_HASKELL__ >= 702 array ary _n = Array ary #else array = Array #endif {-# INLINE array #-} data MArray s a = MArray { unMArray :: !(MutableArray# s a) #if __GLASGOW_HASKELL__ < 702 , lengthM :: {-# UNPACK #-} !Int #endif } #if __GLASGOW_HASKELL__ >= 702 lengthM :: MArray s a -> Int lengthM mary = I# (sizeofMutableArray# (unMArray mary)) {-# INLINE lengthM #-} #endif -- | Smart constructor marray :: MutableArray# s a -> Int -> MArray s a #if __GLASGOW_HASKELL__ >= 702 marray mary _n = MArray mary #else marray = MArray #endif {-# INLINE marray #-} ------------------------------------------------------------------------ instance NFData a => NFData (Array a) where rnf = rnfArray rnfArray :: NFData a => Array a -> () rnfArray ary0 = go ary0 n0 0 where n0 = length ary0 go !ary !n !i | i >= n = () | otherwise = rnf (index ary i) `seq` go ary n (i+1) {-# INLINE rnfArray #-} -- | Create a new mutable array of specified size, in the specified -- state thread, with each element containing the specified initial -- value. new :: Int -> a -> ST s (MArray s a) new n@(I# n#) b = CHECK_GT("new",n,(0 :: Int)) ST $ \s -> case newArray# n# b s of (# s', ary #) -> (# s', marray ary n #) {-# INLINE new #-} new_ :: Int -> ST s (MArray s a) new_ n = new n undefinedElem empty :: Array a empty = run (new_ 0) singleton :: a -> Array a singleton x = run (new 1 x) {-# INLINE singleton #-} read :: MArray s a -> Int -> ST s a read ary _i@(I# i#) = ST $ \ s -> CHECK_BOUNDS("read", lengthM ary, _i) readArray# (unMArray ary) i# s {-# INLINE read #-} write :: MArray s a -> Int -> a -> ST s () write ary _i@(I# i#) b = ST $ \ s -> CHECK_BOUNDS("write", lengthM ary, _i) case writeArray# (unMArray ary) i# b s of s' -> (# s' , () #) {-# INLINE write #-} index :: Array a -> Int -> a index ary _i@(I# i#) = CHECK_BOUNDS("index", length ary, _i) case indexArray# (unArray ary) i# of (# b #) -> b {-# INLINE index #-} index_ :: Array a -> Int -> ST s a index_ ary _i@(I# i#) = CHECK_BOUNDS("index_", length ary, _i) case indexArray# (unArray ary) i# of (# b #) -> return b {-# INLINE index_ #-} indexM_ :: MArray s a -> Int -> ST s a indexM_ ary _i@(I# i#) = CHECK_BOUNDS("index_", lengthM ary, _i) ST $ \ s# -> readArray# (unMArray ary) i# s# {-# INLINE indexM_ #-} unsafeFreeze :: MArray s a -> ST s (Array a) unsafeFreeze mary = ST $ \s -> case unsafeFreezeArray# (unMArray mary) s of (# s', ary #) -> (# s', array ary (lengthM mary) #) {-# INLINE unsafeFreeze #-} run :: (forall s . ST s (MArray s e)) -> Array e run act = runST $ act >>= unsafeFreeze {-# INLINE run #-} run2 :: (forall s. ST s (MArray s e, a)) -> (Array e, a) run2 k = runST $ do (marr,b) <- k arr <- unsafeFreeze marr return (arr,b) -- | Unsafely copy the elements of an array. Array bounds are not checked. 
copy :: Array e -> Int -> MArray s e -> Int -> Int -> ST s () #if __GLASGOW_HASKELL__ >= 702 copy !src !_sidx@(I# sidx#) !dst !_didx@(I# didx#) _n@(I# n#) = CHECK_LE("copy", _sidx + _n, length src) CHECK_LE("copy", _didx + _n, lengthM dst) ST $ \ s# -> case copyArray# (unArray src) sidx# (unMArray dst) didx# n# s# of s2 -> (# s2, () #) #else copy !src !sidx !dst !didx n = CHECK_LE("copy", sidx + n, length src) CHECK_LE("copy", didx + n, lengthM dst) copy_loop sidx didx 0 where copy_loop !i !j !c | c >= n = return () | otherwise = do b <- index_ src i write dst j b copy_loop (i+1) (j+1) (c+1) #endif -- | Unsafely copy the elements of an array. Array bounds are not checked. copyM :: MArray s e -> Int -> MArray s e -> Int -> Int -> ST s () #if __GLASGOW_HASKELL__ >= 702 copyM !src !_sidx@(I# sidx#) !dst !_didx@(I# didx#) _n@(I# n#) = CHECK_BOUNDS("copyM: src", lengthM src, _sidx + _n - 1) CHECK_BOUNDS("copyM: dst", lengthM dst, _didx + _n - 1) ST $ \ s# -> case copyMutableArray# (unMArray src) sidx# (unMArray dst) didx# n# s# of s2 -> (# s2, () #) #else copyM !src !sidx !dst !didx n = CHECK_BOUNDS("copyM: src", lengthM src, sidx + n - 1) CHECK_BOUNDS("copyM: dst", lengthM dst, didx + n - 1) copy_loop sidx didx 0 where copy_loop !i !j !c | c >= n = return () | otherwise = do b <- indexM_ src i write dst j b copy_loop (i+1) (j+1) (c+1) #endif -- | /O(n)/ Insert an element at the given position in this array, -- increasing its size by one. insert :: Array e -> Int -> e -> Array e insert ary idx b = CHECK_BOUNDS("insert", count + 1, idx) run $ do mary <- new_ (count+1) copy ary 0 mary 0 idx write mary idx b copy ary idx mary (idx+1) (count-idx) return mary where !count = length ary {-# INLINE insert #-} -- | /O(n)/ Update the element at the given position in this array. update :: Array e -> Int -> e -> Array e update ary idx b = CHECK_BOUNDS("update", count, idx) run $ do mary <- thaw ary 0 count write mary idx b return mary where !count = length ary {-# INLINE update #-} foldl' :: (b -> a -> b) -> b -> Array a -> b foldl' f = \ z0 ary0 -> go ary0 (length ary0) 0 z0 where go ary n i !z | i >= n = z | otherwise = go ary n (i+1) (f z (index ary i)) {-# INLINE foldl' #-} foldr :: (a -> b -> b) -> b -> Array a -> b foldr f = \ z0 ary0 -> go ary0 (length ary0) 0 z0 where go ary n i z | i >= n = z | otherwise = f (index ary i) (go ary n (i+1) z) {-# INLINE foldr #-} undefinedElem :: a undefinedElem = error "Undefined element" thaw :: Array e -> Int -> Int -> ST s (MArray s e) #if __GLASGOW_HASKELL__ >= 702 thaw !ary !_o@(I# o#) !n@(I# n#) = CHECK_LE("thaw", _o + n, length ary) ST $ \ s -> case thawArray# (unArray ary) o# n# s of (# s2, mary# #) -> (# s2, marray mary# n #) #else thaw !ary !o !n = CHECK_LE("thaw", o + n, length ary) do mary <- new_ n copy ary o mary 0 n return mary #endif {-# INLINE thaw #-} -- | /O(n)/ Delete an element at the given position in this array, -- decreasing its size by one. delete :: Array e -> Int -> Array e delete ary idx = run $ do mary <- new_ (count-1) copy ary 0 mary 0 idx copy ary (idx+1) mary idx (count-(idx+1)) return mary where !count = length ary {-# INLINE delete #-} map :: (a -> b) -> Array a -> Array b map f = \ ary -> let !n = length ary in run $ do mary <- new_ n go ary mary 0 n where go ary mary i n | i >= n = return mary | otherwise = do write mary i $ f (index ary i) go ary mary (i+1) n {-# INLINE map #-} -- | Strict version of 'map'. 
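--
-- A usage note (illustrative; @someArray@ is a hypothetical value): prefer
-- 'map'' when the produced elements are cheap to force and would otherwise
-- pile up as thunks.
--
-- > map' (+ 1) someArray   -- forces each element as it is written
-- > map  (+ 1) someArray   -- stores each element as an unevaluated thunk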
map' :: (a -> b) -> Array a -> Array b map' f = \ ary -> let !n = length ary in run $ do mary <- new_ n go ary mary 0 n where go ary mary i n | i >= n = return mary | otherwise = do write mary i $! f (index ary i) go ary mary (i+1) n {-# INLINE map' #-} fromList :: Int -> [a] -> Array a fromList n xs0 = run $ do mary <- new_ n go xs0 mary 0 where go [] !mary !_ = return mary go (x:xs) mary i = do write mary i x go xs mary (i+1) toList :: Array a -> [a] toList = foldr (:) [] traverse :: Applicative f => (a -> f b) -> Array a -> f (Array b) traverse f = \ ary -> fromList (length ary) `fmap` Traversable.traverse f (toList ary) {-# INLINE traverse #-} filter :: (a -> Bool) -> Array a -> Array a filter p = \ ary -> let !n = length ary in run $ do mary <- new_ n go ary mary 0 0 n where go ary mary i j n | i >= n = if i == j then return mary else do mary2 <- new_ j copyM mary 0 mary2 0 j return mary2 | p el = write mary j el >> go ary mary (i+1) (j+1) n | otherwise = go ary mary (i+1) j n where el = index ary i {-# INLINE filter #-} trifecta-2/tests/0000755000000000000000000000000013316775526012365 5ustar0000000000000000trifecta-2/tests/QuickCheck.hs0000644000000000000000000000435113316775526014736 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} module Main ( main ) where import Control.Applicative #if MIN_VERSION_base(4,7,0) import Data.Either #endif #if __GLASGOW_HASKELL__ < 710 import Data.Monoid #endif import qualified Test.QuickCheck as Q import Text.Parser.Char import Text.Parser.Combinators import Text.Trifecta.Parser import Text.Trifecta.Result import System.Exit -- -------------------------------------------------------------------------- -- -- Main main :: IO () main = mapM Q.quickCheckResult tests >>= \x -> case filter (not . passed) x of [] -> exitSuccess _ -> exitFailure where passed Q.Success{} = True passed _ = False -- -------------------------------------------------------------------------- -- -- Tests tests :: [Q.Property] tests = [ Q.property prop_fail , Q.property prop_succeed , Q.property prop_notFollowedBy0 , Q.property prop_notFollowedBy1 , Q.property prop_notFollowedBy2 , Q.property prop_notFollowedBy3 ] -- -------------------------------------------------------------------------- -- -- Properties prop_fail :: String -> Bool prop_fail = isLeft . parse (fail "fail" :: Parser ()) prop_succeed :: String -> Bool prop_succeed = isRight . 
parse (mempty :: Parser ()) prop_notFollowedBy0 :: Char -> Char -> Bool prop_notFollowedBy0 x y = either (\_ -> x == y) (/= y) $ parse (notFollowedBy (char y) *> anyChar) [x] prop_notFollowedBy1 :: Char -> Bool prop_notFollowedBy1 x = either (\_ -> x == x) (/= x) $ parse (notFollowedBy (char x) *> anyChar) [x] prop_notFollowedBy2 :: String -> Char -> Bool prop_notFollowedBy2 x y = isLeft $ parse (anyChar *> notFollowedBy (char y) *> char y) x prop_notFollowedBy3 :: Char -> Bool prop_notFollowedBy3 x = isRight $ parse (notFollowedBy (char x) <|> char x *> pure ()) [x] -- -------------------------------------------------------------------------- -- -- Utils parse :: Parser a -> String -> Either String a parse p s = case parseString p mempty s of Failure e -> Left (show e) Success a -> Right a #if !MIN_VERSION_base(4,7,0) isLeft :: Either a b -> Bool isLeft = either (const True) (const False) isRight :: Either a b -> Bool isRight = either (const False) (const True) #endif trifecta-2/tests/doctests.hs0000644000000000000000000000147213316775526014555 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Main (doctests) -- Copyright : (C) 2012-14 Edward Kmett -- License : BSD-style (see the file LICENSE) -- Maintainer : Edward Kmett -- Stability : provisional -- Portability : portable -- -- This module provides doctests for a project based on the actual versions -- of the packages it was built with. It requires a corresponding Setup.lhs -- to be added to the project ----------------------------------------------------------------------------- module Main where import Build_doctests (flags, pkgs, module_sources) import Data.Foldable (traverse_) import Test.DocTest main :: IO () main = do traverse_ putStrLn args doctest args where args = flags ++ pkgs ++ module_sources trifecta-2/examples/0000755000000000000000000000000013316775526013041 5ustar0000000000000000trifecta-2/examples/Main.hs0000644000000000000000000000020313316775526014254 0ustar0000000000000000module Main (main) where import RFC2616 (lumpy) import System.Environment (getArgs) main :: IO () main = mapM_ lumpy =<< getArgs trifecta-2/examples/trifecta-examples.cabal0000644000000000000000000000366513316775526017454 0ustar0000000000000000name: trifecta-examples category: Text, Parsing, Diagnostics, Pretty Printer, Logging version: 2 license: BSD3 license-file: LICENSE author: Edward A. Kmett maintainer: Edward A. Kmett stability: experimental homepage: http://github.com/ekmett/trifecta/ bug-reports: http://github.com/ekmett/trifecta/issues copyright: Copyright (C) 2010-2017 Edward A. Kmett synopsis: A modern parser combinator library with convenient diagnostics description: A modern parser combinator library with slicing and Clang-style colored diagnostics cabal-version: >= 1.10 build-type: Simple tested-with: GHC == 7.4.2 , GHC == 7.6.3 , GHC == 7.8.4 , GHC == 7.10.3 , GHC == 8.0.2 , GHC == 8.2.2 , GHC == 8.4.3 , GHC == 8.6.1 extra-source-files: RFC2616.txt source-repository head type: git location: https://github.com/ekmett/trifecta.git library ghc-options: -Wall exposed-modules: RFC2616 hs-source-dirs: rfc2616 build-depends: base >= 4.3 && <5, bytestring, charset, parsers, trifecta default-language: Haskell2010 executable trifecta-examples-rfc2616 main-is: Main.hs ghc-options: -Wall -threaded hs-source-dirs: . 
default-language: Haskell2010 build-depends: base, trifecta-examples test-suite trifecta-examples-tests type: exitcode-stdio-1.0 main-is: Spec.hs ghc-options: -Wall -threaded hs-source-dirs: . default-language: Haskell2010 build-depends: base, hspec, parsers, transformers, trifecta, trifecta-examples trifecta-2/examples/Spec.hs0000644000000000000000000000160113316775526014265 0ustar0000000000000000module Main where import qualified RFC2616 import Control.Monad.IO.Class (liftIO) import Test.Hspec import Text.Trifecta -- Just [(Request {requestMethod = "GET", requestUri = "http://slashdot.org/", requestProtocol = "1.1"},[Header {headerName = "foo", headerValue = ["this is a test"]}]),(Request {requestMethod = "GET", requestUri = "http://slashdot.org/", requestProtocol = "1.0"},[Header {headerName = "foo", headerValue = ["of the emergency broadcast system"]}])] main :: IO () main = hspec $ do describe "RFC2616.hs should be able to parse RFC2616" $ do it "parses the RFC2616.txt file successfully" $ do -- result :: Maybe [(RFC2616.Request, [RFC2616.Header])] -- Tests are intended to be run from the top level. result <- liftIO $ parseFromFile RFC2616.requests "RFC2616.txt" print result result `shouldNotBe` Nothing trifecta-2/examples/RFC2616.txt0000644000000000000000000000020113316775526014524 0ustar0000000000000000GET http://slashdot.org/ HTTP/1.1 foo: this is a test GET http://slashdot.org/ HTTP/1.0 foo: of the emergency broadcast system trifecta-2/examples/LICENSE0000644000000000000000000000301313316775526014043 0ustar0000000000000000Copyright 2010-2017 Edward Kmett Copyright 2008 Ross Patterson Copyright 2007 Paolo Martini Copyright 1999-2000 Daan Leijen All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the author nor the names of his contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
trifecta-2/examples/rfc2616/0000755000000000000000000000000013316775526014132 5ustar0000000000000000trifecta-2/examples/rfc2616/RFC2616.hs0000644000000000000000000000442513316775526015424 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} module RFC2616 where import Control.Applicative import System.Environment (getArgs) import Text.Trifecta hiding (token) import Text.Parser.Token.Highlight infixl 4 <$!> (<$!>) :: Monad m => (a -> b) -> m a -> m b f <$!> ma = do a <- ma return $! f a token :: CharParsing m => m Char token = noneOf $ ['\0'..'\31'] ++ "()<>@,;:\\\"/[]?={} \t" ++ ['\128'..'\255'] isHSpace :: Char -> Bool isHSpace c = c == ' ' || c == '\t' skipHSpaces :: CharParsing m => m () skipHSpaces = skipSome (satisfy isHSpace) data Request = Request { requestMethod :: String , requestUri :: String , requestProtocol :: String } deriving (Eq, Ord, Show) requestLine :: (Monad m, TokenParsing m) => m Request requestLine = Request <$!> (highlight ReservedIdentifier (some token) "request method") <* skipHSpaces <*> (highlight Identifier (some (satisfy (not . isHSpace))) "url") <* skipHSpaces <*> (try (highlight ReservedIdentifier (string "HTTP/" *> many httpVersion <* endOfLine)) "protocol") where httpVersion :: (Monad m, CharParsing m) => m Char httpVersion = satisfy $ \c -> c == '1' || c == '0' || c == '.' || c == '9' endOfLine :: CharParsing m => m () endOfLine = (string "\r\n" *> pure ()) <|> (char '\n' *> pure ()) data Header = Header { headerName :: String , headerValue :: [String] } deriving (Eq, Ord, Show) messageHeader :: (Monad m, TokenParsing m) => m Header messageHeader = (\h b c -> Header h (b : c)) <$!> (highlight ReservedIdentifier (some token) "header name") <* highlight Operator (char ':') <* skipHSpaces <*> (highlight Identifier (manyTill anyChar endOfLine) "header value") <*> (many (skipHSpaces *> manyTill anyChar endOfLine) "blank line") request :: (Monad m, TokenParsing m) => m (Request, [Header]) request = (,) <$> requestLine <*> many messageHeader <* endOfLine requests :: (Monad m, TokenParsing m) => m [(Request, [Header])] requests = many request lumpy :: String -> IO () lumpy arg = do r <- parseFromFile requests arg case r of Nothing -> return () Just rs -> print (length rs) main :: IO () main = mapM_ lumpy =<< getArgs
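-- A minimal usage sketch (not part of the original example; the request text
-- below is made up): the same 'requests' parser can also be run on an
-- in-memory 'String' via 'parseTest', without going through a file.
demo :: IO ()
demo = parseTest requests
  "GET / HTTP/1.1\r\nfoo: bar\r\n\r\nGET /index HTTP/1.0\r\n\r\n"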