From 494f84829d44e06d13c6dadb253ae6518f1e1419 Mon Sep 17 00:00:00 2001 From: Harendra Kumar Date: Thu, 15 May 2025 03:37:34 +0530 Subject: [PATCH] Move array stream parsing funcs to array modules --- benchmark/Streamly/Benchmark/Data/ParserK.hs | 31 +- benchmark/streamly-benchmarks.cabal | 1 - core/src/Streamly/Data/Parser.hs | 15 +- core/src/Streamly/Data/ParserK.hs | 40 +-- core/src/Streamly/Data/StreamK.hs | 6 +- core/src/Streamly/Internal/Data/Array.hs | 291 +++++++++++++++- .../Streamly/Internal/Data/Array/Generic.hs | 295 ++++++++++++++++- core/src/Streamly/Internal/Data/ParserK.hs | 23 ++ .../Streamly/Internal/Data/ParserK/Type.hs | 313 +----------------- core/src/Streamly/Internal/Data/StreamK.hs | 289 +--------------- .../Streamly/Internal/Data/StreamK/Type.hs | 9 +- hie.yaml | 2 + test/Streamly/Test/Data/Parser.hs | 6 +- test/Streamly/Test/Data/ParserK.hs | 20 +- 14 files changed, 705 insertions(+), 636 deletions(-) diff --git a/benchmark/Streamly/Benchmark/Data/ParserK.hs b/benchmark/Streamly/Benchmark/Data/ParserK.hs index 8ad6d5f291..68b99caab1 100644 --- a/benchmark/Streamly/Benchmark/Data/ParserK.hs +++ b/benchmark/Streamly/Benchmark/Data/ParserK.hs @@ -5,9 +5,9 @@ -- License : BSD-3-Clause -- Maintainer : streamly@composewell.com --- BENCH_CHUNKED -> adaptC --- BENCH_CHUNKED_GENERIC -> adaptCG --- BENCH_SINGULAR -> adapt +-- BENCH_CHUNKED -> parse from Array stream +-- BENCH_CHUNKED_GENERIC -> parse from Generic Array stream +-- BENCH_SINGULAR -> parse from single element stream {-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleContexts #-} @@ -24,11 +24,9 @@ import Control.Monad.IO.Class (MonadIO) import Data.Foldable (asum) #ifdef BENCH_CHUNKED import Streamly.Data.Array (Array, Unbox) -import qualified Streamly.Internal.Data.Array as Array (chunksOf) #endif #ifdef BENCH_CHUNKED_GENERIC import Streamly.Data.Array.Generic (Array) -import qualified Streamly.Internal.Data.Array.Generic as GenArr (chunksOf) #endif import Streamly.Internal.Data.Fold (Fold(..)) 
import Streamly.Data.StreamK (StreamK) @@ -48,6 +46,11 @@ import qualified Streamly.Internal.Data.Fold as Fold import qualified Streamly.Data.Parser as PRD import qualified Streamly.Internal.Data.ParserK as PR import qualified Streamly.Internal.Data.StreamK as StreamK +#ifdef BENCH_CHUNKED +import qualified Streamly.Internal.Data.Array as Array +#elif defined(BENCH_CHUNKED_GENERIC) +import qualified Streamly.Internal.Data.Array.Generic as GenArr +#endif import Test.Tasty.Bench import Streamly.Benchmark.Common @@ -58,8 +61,8 @@ import Streamly.Benchmark.Common #ifdef BENCH_CHUNKED -#define PARSE_OP StreamK.parseChunks -#define FROM_PARSER adaptC +#define PARSE_OP Array.parse +#define FROM_PARSER Array.parserK #define INPUT (Array a) #define PARSE_ELEM (Array Int) #define CONSTRAINT_IO (MonadIO m, Unbox a) @@ -70,8 +73,8 @@ import Streamly.Benchmark.Common #ifdef BENCH_CHUNKED_GENERIC -#define PARSE_OP StreamK.parseChunksGeneric -#define FROM_PARSER adaptCG +#define PARSE_OP GenArr.parse +#define FROM_PARSER GenArr.parserK #define INPUT (Array a) #define PARSE_ELEM (Array Int) #define CONSTRAINT_IO (MonadIO m) @@ -83,7 +86,7 @@ import Streamly.Benchmark.Common #ifdef BENCH_SINGULAR #define PARSE_OP StreamK.parse -#define FROM_PARSER adapt +#define FROM_PARSER PR.parserK #define INPUT a #define PARSE_ELEM Int #define CONSTRAINT_IO (MonadIO m) @@ -136,18 +139,18 @@ one value = PARSE_OP p where p = do - m <- PR.FROM_PARSER (PRD.fromFold FL.one) + m <- FROM_PARSER (PRD.fromFold FL.one) case m of Just i -> if i >= value then pure m else p Nothing -> pure Nothing {-# INLINE satisfy #-} satisfy :: CONSTRAINT_IO => (a -> Bool) -> PR.ParserK INPUT m a -satisfy = PR.FROM_PARSER . PRD.satisfy +satisfy = FROM_PARSER . 
PRD.satisfy {-# INLINE takeWhile #-} takeWhile :: CONSTRAINT_IO => (a -> Bool) -> PR.ParserK INPUT m () -takeWhile p = PR.FROM_PARSER $ PRD.takeWhile p FL.drain +takeWhile p = FROM_PARSER $ PRD.takeWhile p FL.drain {-# INLINE takeWhileK #-} takeWhileK :: MonadIO m => @@ -240,7 +243,7 @@ takeWhileFailD predicate (Fold fstep finitial _ ffinal) = {-# INLINE takeWhileFail #-} takeWhileFail :: CONSTRAINT => (a -> Bool) -> Fold m a b -> PR.ParserK INPUT m b -takeWhileFail p f = PR.FROM_PARSER (takeWhileFailD p f) +takeWhileFail p f = FROM_PARSER (takeWhileFailD p f) {-# INLINE alt2 #-} alt2 :: MonadIO m diff --git a/benchmark/streamly-benchmarks.cabal b/benchmark/streamly-benchmarks.cabal index c2e962e9c2..12d6306247 100644 --- a/benchmark/streamly-benchmarks.cabal +++ b/benchmark/streamly-benchmarks.cabal @@ -349,7 +349,6 @@ benchmark Data.Parser else ghc-options: +RTS -M2500M -RTS --- XXX This is chunked? benchmark Data.ParserK import: bench-options type: exitcode-stdio-1.0 diff --git a/core/src/Streamly/Data/Parser.hs b/core/src/Streamly/Data/Parser.hs index 5b16c0952f..466e7dae8a 100644 --- a/core/src/Streamly/Data/Parser.hs +++ b/core/src/Streamly/Data/Parser.hs @@ -68,20 +68,23 @@ -- >>> import Control.Applicative ((<|>)) -- -- >>> :{ --- >>> p :: Monad m => Parser Char m String --- >>> p = Parser.satisfy (== '(') *> p <|> Parser.fromFold Fold.toList +-- >>> p, p1, p2 :: Monad m => Parser Char m String +-- >>> p1 = Parser.satisfy (== '(') *> p +-- >>> p2 = Parser.fromFold Fold.toList +-- >>> p = p1 <|> p2 -- >>> :} -- -- Use ParserK when recursive use is required: -- --- >>> import Streamly.Data.ParserK (ParserK) +-- >>> import Streamly.Data.ParserK (ParserK, parserK) -- >>> import qualified Streamly.Data.StreamK as StreamK -- >>> import qualified Streamly.Internal.Data.StreamK as StreamK (parse) --- >>> import qualified Streamly.Internal.Data.ParserK as ParserK (adapt) -- -- >>> :{ --- >>> p :: Monad m => ParserK Char m String --- >>> p = ParserK.adapt 
(Parser.satisfy (== '(')) *> p <|> ParserK.adapt (Parser.fromFold Fold.toList) +-- >>> p, p1, p2 :: Monad m => ParserK Char m String +-- >>> p1 = parserK (Parser.satisfy (== '(')) *> p +-- >>> p2 = parserK (Parser.fromFold Fold.toList) +-- >>> p = p1 <|> p2 -- >>> :} -- -- >>> StreamK.parse p $ StreamK.fromStream $ Stream.fromList "hello" diff --git a/core/src/Streamly/Data/ParserK.hs b/core/src/Streamly/Data/ParserK.hs index 6ca13b4fe5..6b646e1e0c 100644 --- a/core/src/Streamly/Data/ParserK.hs +++ b/core/src/Streamly/Data/ParserK.hs @@ -32,17 +32,17 @@ -- -- == Using ParserK -- --- All the parsers from the "Streamly.Data.Parser" module can be adapted to --- ParserK using the 'Streamly.Data.ParserK.adaptC', --- 'Streamly.Internal.Data.ParserK.adapt', and --- 'Streamly.Internal.Data.ParserK.adaptCG' combinators. +-- All the parsers from the "Streamly.Data.Parser" module can be converted to +-- ParserK using the 'Streamly.Data.Array.parserK', +-- 'Streamly.Internal.Data.ParserK.parserK', and +-- 'Streamly.Internal.Data.Array.Generic.parserK' combinators. -- --- 'Streamly.Data.StreamK.parseChunks' runs a parser on a stream of unboxed +-- 'Streamly.Data.Array.parse' runs a parser on a stream of unboxed -- arrays, this is the preferred and most efficient way to parse chunked input. --- The more general 'Streamly.Data.StreamK.parseBreakChunks' function returns +-- The more general 'Streamly.Data.Array.parseBreak' function returns -- the remaining stream as well along with the parse result. There are --- 'Streamly.Internal.Data.StreamK.parseChunksGeneric', --- 'Streamly.Internal.Data.StreamK.parseBreakChunksGeneric' as well to run +-- 'Streamly.Internal.Data.Array.Generic.parse', +-- 'Streamly.Internal.Data.Array.Generic.parseBreak' as well to run -- parsers on boxed arrays. 'Streamly.Internal.Data.StreamK.parse', -- 'Streamly.Internal.Data.StreamK.parseBreak' run parsers on a stream of -- individual elements instead of stream of arrays. 
@@ -58,7 +58,7 @@ -- >>> digits p1 p2 = ((:) <$> p1 <*> ((:) <$> p2 <*> pure [])) -- >>> :{ -- backtracking :: Monad m => ParserK Char m String --- backtracking = ParserK.adapt $ +-- backtracking = ParserK.parserK $ -- digits (Parser.satisfy isDigit) (Parser.satisfy isAlpha) -- <|> -- digits (Parser.satisfy isAlpha) (Parser.satisfy isDigit) @@ -75,11 +75,11 @@ -- >>> :{ -- lookbehind :: Monad m => ParserK Char m String -- lookbehind = do --- x1 <- ParserK.adapt $ +-- x1 <- ParserK.parserK $ -- Digit <$> Parser.satisfy isDigit -- <|> Alpha <$> Parser.satisfy isAlpha -- -- Note: the parse depends on what we parsed already --- x2 <- ParserK.adapt $ +-- x2 <- ParserK.parserK $ -- case x1 of -- Digit _ -> Parser.satisfy isAlpha -- Alpha _ -> Parser.satisfy isDigit @@ -106,9 +106,7 @@ module Streamly.Data.ParserK -- * Parsers -- ** Conversions - , adapt - , adaptC - , adaptCG + , parserK -- , toParser -- ** Without Input @@ -119,6 +117,9 @@ module Streamly.Data.ParserK -- * Deprecated , fromFold , fromParser + , adapt + , adaptC + , adaptCG ) where @@ -128,18 +129,19 @@ import Streamly.Internal.Data.Fold (Fold) import Streamly.Internal.Data.Unbox (Unbox) import Streamly.Internal.Data.Array (Array) import qualified Streamly.Internal.Data.Parser as ParserD +import qualified Streamly.Internal.Data.Array as Array -import Streamly.Internal.Data.ParserK.Type +import Streamly.Internal.Data.ParserK #include "DocTestDataParserK.hs" -{-# DEPRECATED fromFold "Please use \"ParserK.adaptC . Parser.fromFold\" instead." #-} +{-# DEPRECATED fromFold "Please use \"Array.parserK . Parser.fromFold\" instead." #-} {-# INLINE fromFold #-} fromFold :: (MonadIO m, Unbox a) => Fold m a b -> ParserK (Array a) m b -fromFold = adaptC . ParserD.fromFold +fromFold = Array.parserK . ParserD.fromFold -{-# DEPRECATED fromParser "Please use \"adaptC\" instead." #-} +{-# DEPRECATED fromParser "Please use \"Array.parserK\" instead." 
#-} {-# INLINE fromParser #-} fromParser :: (MonadIO m, Unbox a) => ParserD.Parser a m b -> ParserK (Array a) m b -fromParser = adaptC +fromParser = Array.parserK diff --git a/core/src/Streamly/Data/StreamK.hs b/core/src/Streamly/Data/StreamK.hs index de3e330b93..0009fa812b 100644 --- a/core/src/Streamly/Data/StreamK.hs +++ b/core/src/Streamly/Data/StreamK.hs @@ -124,8 +124,6 @@ module Streamly.Data.StreamK -- ** Parsing , parse , parseBreak - , parseBreakChunks - , parseChunks -- * Transformation , mapM @@ -186,6 +184,10 @@ module Streamly.Data.StreamK -- | Please note that 'Stream' type does not observe any exceptions from -- the consumer of the stream whereas 'StreamK' does. , bracketIO + + -- * Deprecated + , parseBreakChunks + , parseChunks ) where diff --git a/core/src/Streamly/Internal/Data/Array.hs b/core/src/Streamly/Internal/Data/Array.hs index edcf6295ad..6402a079df 100644 --- a/core/src/Streamly/Internal/Data/Array.hs +++ b/core/src/Streamly/Internal/Data/Array.hs @@ -70,10 +70,14 @@ module Streamly.Internal.Data.Array , compactEndByByte_ , compactEndByLn_ - , foldBreakChunks + -- * Parsing Stream of Arrays + , foldBreakChunks -- Uses Stream, bad perf on break , foldChunks - , foldBreakChunksK - , parseBreakChunksK + , foldBreakChunksK -- XXX rename to foldBreak + , parseBreakChunksK -- XXX uses Parser. parseBreak is better? 
+ , parserK + , parseBreak + , parse -- * Serialization , encodeAs @@ -124,8 +128,10 @@ import Streamly.Internal.Data.MutByteArray.Type (PinnedState(..), MutByteArray) import Streamly.Internal.Data.Serialize.Type (Serialize) import Streamly.Internal.Data.Fold.Type (Fold(..)) import Streamly.Internal.Data.Parser (Parser(..), Initial(..), ParseError(..)) +import Streamly.Internal.Data.ParserK.Type + (ParserK, ParseResult(..), Input(..), Step(..)) import Streamly.Internal.Data.Stream (Stream(..)) -import Streamly.Internal.Data.StreamK (StreamK) +import Streamly.Internal.Data.StreamK.Type (StreamK) import Streamly.Internal.Data.SVar.Type (adaptState, defState) import Streamly.Internal.Data.Tuple.Strict (Tuple'(..)) import Streamly.Internal.Data.Unfold.Type (Unfold(..)) @@ -137,10 +143,11 @@ import qualified Streamly.Internal.Data.MutByteArray.Type as MBA import qualified Streamly.Internal.Data.MutArray as MA import qualified Streamly.Internal.Data.RingArray as RB import qualified Streamly.Internal.Data.Parser as Parser --- import qualified Streamly.Internal.Data.ParserK as ParserK +import qualified Streamly.Internal.Data.Parser.Type as ParserD +import qualified Streamly.Internal.Data.ParserK.Type as ParserK import qualified Streamly.Internal.Data.Stream as D import qualified Streamly.Internal.Data.Stream as Stream -import qualified Streamly.Internal.Data.StreamK as StreamK +import qualified Streamly.Internal.Data.StreamK.Type as StreamK import qualified Streamly.Internal.Data.Unfold as Unfold import qualified Prelude @@ -839,10 +846,8 @@ parseBreakD -- | Parse an array stream using the supplied 'Parser'. Returns the parse -- result and the unconsumed stream. Throws 'ParseError' if the parse fails. 
-- --- The following alternative to this function allows composing the parser using --- the parser Monad: --- --- >>> parseBreakStreamK p = StreamK.parseBreakChunks (ParserK.adaptC p) +-- 'parseBreak' is an alternative to this function which allows composing the +-- parser using the parser Monad. -- -- We can compare perf and remove this one or define it in terms of that. -- @@ -991,3 +996,269 @@ parseBreakChunksK (Parser pstep initial extract) stream = do let n = Prelude.length backBuf arr0 = fromListN n (Prelude.reverse backBuf) return (Left (ParseError err), StreamK.fromPure arr0) + +-- The backracking buffer consists of arrays in the most-recent-first order. We +-- want to take a total of n array elements from this buffer. Note: when we +-- have to take an array partially, we must take the last part of the array. +{-# INLINE backTrack #-} +backTrack :: forall m a. Unbox a => + Int + -> [Array a] + -> StreamK m (Array a) + -> (StreamK m (Array a), [Array a]) +backTrack = go + + where + + go _ [] stream = (stream, []) + go n xs stream | n <= 0 = (stream, xs) + go n (x:xs) stream = + let len = length x + in if n > len + then go (n - len) xs (StreamK.cons x stream) + else if n == len + then (StreamK.cons x stream, xs) + else let !(Array contents start end) = x + !start1 = end - (n * SIZE_OF(a)) + arr1 = Array contents start1 end + arr2 = Array contents start start1 + in (StreamK.cons arr1 stream, arr2:xs) + +-- | Run a 'ParserK' over a 'StreamK' of Arrays and return the parse result and +-- the remaining Stream. 
+{-# INLINE_NORMAL parseBreak #-} +parseBreak + :: (Monad m, Unbox a) + => ParserK (Array a) m b + -> StreamK m (Array a) + -> m (Either ParseError b, StreamK m (Array a)) +parseBreak parser input = do + let parserk = ParserK.runParser parser ParserK.parserDone 0 0 + in go [] parserk input + + where + + {-# INLINE goStop #-} + goStop backBuf parserk = do + pRes <- parserk ParserK.None + case pRes of + -- If we stop in an alternative, it will try calling the next + -- parser, the next parser may call initial returning Partial and + -- then immediately we have to call extract on it. + ParserK.Partial 0 cont1 -> + go [] cont1 StreamK.nil + ParserK.Partial n cont1 -> do + let n1 = negate n + assertM(n1 >= 0 && n1 <= sum (Prelude.map length backBuf)) + let (s1, backBuf1) = backTrack n1 backBuf StreamK.nil + in go backBuf1 cont1 s1 + ParserK.Continue 0 cont1 -> + go backBuf cont1 StreamK.nil + ParserK.Continue n cont1 -> do + let n1 = negate n + assertM(n1 >= 0 && n1 <= sum (Prelude.map length backBuf)) + let (s1, backBuf1) = backTrack n1 backBuf StreamK.nil + in go backBuf1 cont1 s1 + ParserK.Done 0 b -> + return (Right b, StreamK.nil) + ParserK.Done n b -> do + let n1 = negate n + assertM(n1 >= 0 && n1 <= sum (Prelude.map length backBuf)) + let (s1, _) = backTrack n1 backBuf StreamK.nil + in return (Right b, s1) + ParserK.Error _ err -> do + let (s1, _) = backTrack maxBound backBuf StreamK.nil + return (Left (ParseError err), s1) + + seekErr n len = + error $ "parseBreak: Partial: forward seek not implemented n = " + ++ show n ++ " len = " ++ show len + + yieldk backBuf parserk arr stream = do + pRes <- parserk (ParserK.Chunk arr) + let len = length arr + case pRes of + ParserK.Partial n cont1 -> + case compare n len of + EQ -> go [] cont1 stream + LT -> do + if n >= 0 + then yieldk [] cont1 arr stream + else do + let n1 = negate n + bufLen = sum (Prelude.map length backBuf) + s = StreamK.cons arr stream + assertM(n1 >= 0 && n1 <= bufLen) + let (s1, _) = backTrack n1 
backBuf s + go [] cont1 s1 + GT -> seekErr n len + ParserK.Continue n cont1 -> + case compare n len of + EQ -> go (arr:backBuf) cont1 stream + LT -> do + if n >= 0 + then yieldk backBuf cont1 arr stream + else do + let n1 = negate n + bufLen = sum (Prelude.map length backBuf) + s = StreamK.cons arr stream + assertM(n1 >= 0 && n1 <= bufLen) + let (s1, backBuf1) = backTrack n1 backBuf s + go backBuf1 cont1 s1 + GT -> seekErr n len + ParserK.Done n b -> do + let n1 = len - n + assertM(n1 <= sum (Prelude.map length (arr:backBuf))) + let (s1, _) = backTrack n1 (arr:backBuf) stream + in return (Right b, s1) + ParserK.Error _ err -> do + let (s1, _) = backTrack maxBound (arr:backBuf) stream + return (Left (ParseError err), s1) + + go backBuf parserk stream = do + let stop = goStop backBuf parserk + single a = yieldk backBuf parserk a StreamK.nil + in StreamK.foldStream + defState (yieldk backBuf parserk) single stop stream + +{-# INLINE parse #-} +parse :: (Monad m, Unbox a) => + ParserK (Array a) m b -> StreamK m (Array a) -> m (Either ParseError b) +parse f = fmap fst . parseBreak f + +------------------------------------------------------------------------------- +-- Convert ParserD to ParserK +------------------------------------------------------------------------------- + +{-# INLINE adaptCWith #-} +adaptCWith + :: forall m a s b r. 
(Monad m, Unbox a) + => (s -> a -> m (ParserD.Step s b)) + -> m (ParserD.Initial s b) + -> (s -> m (ParserD.Step s b)) + -> (ParseResult b -> Int -> Input (Array a) -> m (Step (Array a) m r)) + -> Int + -> Int + -> Input (Array a) + -> m (Step (Array a) m r) +adaptCWith pstep initial extract cont !offset0 !usedCount !input = do + res <- initial + case res of + ParserD.IPartial pst -> do + case input of + Chunk arr -> parseContChunk usedCount offset0 pst arr + None -> parseContNothing usedCount pst + ParserD.IDone b -> cont (Success offset0 b) usedCount input + ParserD.IError err -> cont (Failure offset0 err) usedCount input + + where + + -- XXX We can maintain an absolute position instead of relative that will + -- help in reporting of error location in the stream. + {-# NOINLINE parseContChunk #-} + parseContChunk !count !offset !state arr@(Array contents start end) = do + if offset >= 0 + then go SPEC (start + offset * SIZE_OF(a)) state + else return $ Continue offset (parseCont count state) + + where + + {-# INLINE onDone #-} + onDone n b = + assert (n <= length arr) + (cont (Success n b) (count + n - offset) (Chunk arr)) + + {-# INLINE callParseCont #-} + callParseCont constr n pst1 = + assert (n < 0 || n >= length arr) + (return $ constr n (parseCont (count + n - offset) pst1)) + + {-# INLINE onPartial #-} + onPartial = callParseCont Partial + + {-# INLINE onContinue #-} + onContinue = callParseCont Continue + + {-# INLINE onError #-} + onError n err = + cont (Failure n err) (count + n - offset) (Chunk arr) + + {-# INLINE onBack #-} + onBack offset1 elemSize constr pst = do + let pos = offset1 - start + in if pos >= 0 + then go SPEC offset1 pst + else constr (pos `div` elemSize) pst + + -- Note: div may be expensive but the alternative is to maintain an element + -- offset in addition to a byte offset or just the element offset and use + -- multiplication to get the byte offset every time, both these options + -- turned out to be more expensive than using div. 
+ go !_ !cur !pst | cur >= end = + onContinue ((end - start) `div` SIZE_OF(a)) pst + go !_ !cur !pst = do + let !x = unsafeInlineIO $ peekAt cur contents + pRes <- pstep pst x + let elemSize = SIZE_OF(a) + next = INDEX_NEXT(cur,a) + back n = next - n * elemSize + curOff = (cur - start) `div` elemSize + nextOff = (next - start) `div` elemSize + -- The "n" here is stream position index wrt the array start, and + -- not the backtrack count as returned by byte stream parsers. + case pRes of + ParserD.Done 0 b -> + onDone nextOff b + ParserD.Done 1 b -> + onDone curOff b + ParserD.Done n b -> + onDone ((back n - start) `div` elemSize) b + ParserD.Partial 0 pst1 -> + go SPEC next pst1 + ParserD.Partial 1 pst1 -> + go SPEC cur pst1 + ParserD.Partial n pst1 -> + onBack (back n) elemSize onPartial pst1 + ParserD.Continue 0 pst1 -> + go SPEC next pst1 + ParserD.Continue 1 pst1 -> + go SPEC cur pst1 + ParserD.Continue n pst1 -> + onBack (back n) elemSize onContinue pst1 + ParserD.Error err -> + onError curOff err + + {-# NOINLINE parseContNothing #-} + parseContNothing !count !pst = do + r <- extract pst + case r of + -- IMPORTANT: the n here is from the byte stream parser, that means + -- it is the backtrack element count and not the stream position + -- index into the current input array. + ParserD.Done n b -> + assert (n >= 0) + (cont (Success (- n) b) (count - n) None) + ParserD.Continue n pst1 -> + assert (n >= 0) + (return $ Continue (- n) (parseCont (count - n) pst1)) + ParserD.Error err -> + -- XXX It is called only when there is no input arr. So using 0 + -- as the position is correct? + cont (Failure 0 err) count None + ParserD.Partial _ _ -> error "Bug: adaptCWith Partial unreachable" + + -- XXX Maybe we can use two separate continuations instead of using + -- Just/Nothing cases here. That may help in avoiding the parseContJust + -- function call. 
+ {-# INLINE parseCont #-} + parseCont !cnt !pst (Chunk arr) = parseContChunk cnt 0 pst arr + parseCont !cnt !pst None = parseContNothing cnt pst + +-- | Convert a 'Parser' to 'ParserK' working on an Array stream. +-- +-- /Pre-release/ +-- +{-# INLINE_LATE parserK #-} +parserK :: (Monad m, Unbox a) => ParserD.Parser a m b -> ParserK (Array a) m b +parserK (ParserD.Parser step initial extract) = + ParserK.MkParser $ adaptCWith step initial extract diff --git a/core/src/Streamly/Internal/Data/Array/Generic.hs b/core/src/Streamly/Internal/Data/Array/Generic.hs index a3d01ae749..c0c4c5a31f 100644 --- a/core/src/Streamly/Internal/Data/Array/Generic.hs +++ b/core/src/Streamly/Internal/Data/Array/Generic.hs @@ -45,6 +45,11 @@ module Streamly.Internal.Data.Array.Generic , unsafeSliceOffLen , dropAround + -- * Parsing Stream of Arrays + , parserK + , parse + , parseBreak + -- * Deprecated , strip , getIndexUnsafe @@ -57,29 +62,40 @@ module Streamly.Internal.Data.Array.Generic where #include "inline.hs" +#include "assert.hs" #include "deprecation.h" import Control.Monad (replicateM) import Control.Monad.IO.Class (MonadIO) +import Data.Foldable (sum) import Data.Functor.Identity (Identity(..)) import Data.Word (Word8) import GHC.Base (MutableArray#, RealWorld) import GHC.Exts (Addr#) +import GHC.Types (SPEC(..)) import GHC.IO (unsafePerformIO) import Text.Read (readPrec) import Streamly.Internal.Data.Fold.Type (Fold(..)) +import Streamly.Internal.Data.Parser (ParseError(..)) +import Streamly.Internal.Data.ParserK.Type + (ParserK, ParseResult(..), Input(..), Step(..)) import Streamly.Internal.Data.Stream.Type (Stream) +import Streamly.Internal.Data.StreamK.Type (StreamK) +import Streamly.Internal.Data.SVar.Type (defState) import Streamly.Internal.Data.Unfold.Type (Unfold(..)) import Streamly.Internal.System.IO (unsafeInlineIO) -import qualified Streamly.Internal.Data.MutArray.Generic as MArray import qualified Streamly.Internal.Data.Fold.Type as FL +import qualified 
Streamly.Internal.Data.MutArray.Generic as MArray +import qualified Streamly.Internal.Data.Parser.Type as ParserD +import qualified Streamly.Internal.Data.ParserK.Type as ParserK import qualified Streamly.Internal.Data.Producer.Type as Producer import qualified Streamly.Internal.Data.Producer as Producer import qualified Streamly.Internal.Data.RingArray.Generic as RB import qualified Streamly.Internal.Data.Stream.Type as D import qualified Streamly.Internal.Data.Stream.Generate as D +import qualified Streamly.Internal.Data.StreamK.Type as StreamK import qualified Text.ParserCombinators.ReadPrec as ReadPrec import Prelude hiding (Foldable(..), read) @@ -362,3 +378,280 @@ RENAME(strip,dropAround) RENAME(getSliceUnsafe,unsafeSliceOffLen) RENAME(unsafeGetSlice,unsafeSliceOffLen) RENAME(getIndexUnsafe,unsafeGetIndex) + +------------------------------------------------------------------------------- +-- ParserK Chunked Generic +------------------------------------------------------------------------------- + +{-# INLINE backTrackGenericChunks #-} +backTrackGenericChunks :: + Int + -> [Array a] + -> StreamK m (Array a) + -> (StreamK m (Array a), [Array a]) +backTrackGenericChunks = go + + where + + go _ [] stream = (stream, []) + go n xs stream | n <= 0 = (stream, xs) + go n (x:xs) stream = + let len = length x + in if n > len + then go (n - len) xs (StreamK.cons x stream) + else if n == len + then (StreamK.cons x stream, xs) + else let arr1 = unsafeSliceOffLen (len - n) n x + arr2 = unsafeSliceOffLen 0 (len - n) x + in (StreamK.cons arr1 stream, arr2:xs) + +{-# INLINE_NORMAL parseBreak #-} +parseBreak + :: forall m a b. 
Monad m + => ParserK.ParserK (Array a) m b + -> StreamK m (Array a) + -> m (Either ParseError b, StreamK m (Array a)) +parseBreak parser input = do + let parserk = ParserK.runParser parser ParserK.parserDone 0 0 + in go [] parserk input + + where + + {-# INLINE goStop #-} + goStop + :: [Array a] + -> (ParserK.Input (Array a) + -> m (ParserK.Step (Array a) m b)) + -> m (Either ParseError b, StreamK m (Array a)) + goStop backBuf parserk = do + pRes <- parserk ParserK.None + case pRes of + -- If we stop in an alternative, it will try calling the next + -- parser, the next parser may call initial returning Partial and + -- then immediately we have to call extract on it. + ParserK.Partial 0 cont1 -> + go [] cont1 StreamK.nil + ParserK.Partial n cont1 -> do + let n1 = negate n + assertM(n1 >= 0 && n1 <= sum (Prelude.map length backBuf)) + let (s1, backBuf1) = backTrackGenericChunks n1 backBuf StreamK.nil + in go backBuf1 cont1 s1 + ParserK.Continue 0 cont1 -> + go backBuf cont1 StreamK.nil + ParserK.Continue n cont1 -> do + let n1 = negate n + assertM(n1 >= 0 && n1 <= sum (Prelude.map length backBuf)) + let (s1, backBuf1) = backTrackGenericChunks n1 backBuf StreamK.nil + in go backBuf1 cont1 s1 + ParserK.Done 0 b -> + return (Right b, StreamK.nil) + ParserK.Done n b -> do + let n1 = negate n + assertM(n1 >= 0 && n1 <= sum (Prelude.map length backBuf)) + let (s1, _) = backTrackGenericChunks n1 backBuf StreamK.nil + in return (Right b, s1) + ParserK.Error _ err -> + let strm = StreamK.fromList (Prelude.reverse backBuf) + in return (Left (ParseError err), strm) + + seekErr n len = + error $ "parseBreak: Partial: forward seek not implemented n = " + ++ show n ++ " len = " ++ show len + + yieldk + :: [Array a] + -> (ParserK.Input (Array a) + -> m (ParserK.Step (Array a) m b)) + -> Array a + -> StreamK m (Array a) + -> m (Either ParseError b, StreamK m (Array a)) + yieldk backBuf parserk arr stream = do + pRes <- parserk (ParserK.Chunk arr) + let len = length arr + case pRes 
of + ParserK.Partial n cont1 -> + case compare n len of + EQ -> go [] cont1 stream + LT -> do + if n >= 0 + then yieldk [] cont1 arr stream + else do + let n1 = negate n + bufLen = sum (Prelude.map length backBuf) + s = StreamK.cons arr stream + assertM(n1 >= 0 && n1 <= bufLen) + let (s1, _) = backTrackGenericChunks n1 backBuf s + go [] cont1 s1 + GT -> seekErr n len + ParserK.Continue n cont1 -> + case compare n len of + EQ -> go (arr:backBuf) cont1 stream + LT -> do + if n >= 0 + then yieldk backBuf cont1 arr stream + else do + let n1 = negate n + bufLen = sum (Prelude.map length backBuf) + s = StreamK.cons arr stream + assertM(n1 >= 0 && n1 <= bufLen) + let (s1, backBuf1) = backTrackGenericChunks n1 backBuf s + go backBuf1 cont1 s1 + GT -> seekErr n len + ParserK.Done n b -> do + let n1 = len - n + assertM(n1 <= sum (Prelude.map length (arr:backBuf))) + let (s1, _) = backTrackGenericChunks n1 (arr:backBuf) stream + in return (Right b, s1) + ParserK.Error _ err -> + let strm = + StreamK.append + (StreamK.fromList (Prelude.reverse backBuf)) + (StreamK.cons arr stream) + in return (Left (ParseError err), strm) + + go + :: [Array a] + -> (ParserK.Input (Array a) + -> m (ParserK.Step (Array a) m b)) + -> StreamK m (Array a) + -> m (Either ParseError b, StreamK m (Array a)) + go backBuf parserk stream = do + let stop = goStop backBuf parserk + single a = yieldk backBuf parserk a StreamK.nil + in StreamK.foldStream + defState (yieldk backBuf parserk) single stop stream + +{-# INLINE parse #-} +parse :: + (Monad m) + => ParserK.ParserK (Array a) m b + -> StreamK m (Array a) + -> m (Either ParseError b) +parse f = fmap fst . parseBreak f + +-------------------------------------------------------------------------------- +-- Convert Parser to Parserk on Generic Arrays +-------------------------------------------------------------------------------- + +{-# INLINE adaptCGWith #-} +adaptCGWith + :: forall m a s b r. 
(Monad m) + => (s -> a -> m (ParserD.Step s b)) + -> m (ParserD.Initial s b) + -> (s -> m (ParserD.Step s b)) + -> (ParseResult b -> Int -> Input (Array a) -> m (Step (Array a) m r)) + -> Int + -> Int + -> Input (Array a) + -> m (Step (Array a) m r) +adaptCGWith pstep initial extract cont !offset0 !usedCount !input = do + res <- initial + case res of + ParserD.IPartial pst -> do + case input of + Chunk arr -> parseContChunk usedCount offset0 pst arr + None -> parseContNothing usedCount pst + ParserD.IDone b -> cont (Success offset0 b) usedCount input + ParserD.IError err -> cont (Failure offset0 err) usedCount input + + where + + {-# NOINLINE parseContChunk #-} + parseContChunk !count !offset !state arr@(Array contents start end) = do + if offset >= 0 + then go SPEC (start + offset) state + else return $ Continue offset (parseCont count state) + + where + + {-# INLINE onDone #-} + onDone n b = + assert (n <= length arr) + (cont (Success n b) (count + n - offset) (Chunk arr)) + + {-# INLINE callParseCont #-} + callParseCont constr n pst1 = + assert (n < 0 || n >= length arr) + (return $ constr n (parseCont (count + n - offset) pst1)) + + {-# INLINE onPartial #-} + onPartial = callParseCont Partial + + {-# INLINE onContinue #-} + onContinue = callParseCont Continue + + {-# INLINE onError #-} + onError n err = + cont (Failure n err) (count + n - offset) (Chunk arr) + + {-# INLINE onBack #-} + onBack offset1 constr pst = do + let pos = offset1 - start + in if pos >= 0 + then go SPEC offset1 pst + else constr pos pst + + go !_ !cur !pst | cur >= end = + onContinue (end - start) pst + go !_ !cur !pst = do + let !x = unsafeInlineIO $ MArray.unsafeGetIndexWith contents cur + pRes <- pstep pst x + let next = cur + 1 + back n = next - n + curOff = cur - start + nextOff = next - start + -- The "n" here is stream position index wrt the array start, and + -- not the backtrack count as returned by byte stream parsers. 
+ case pRes of + ParserD.Done 0 b -> + onDone nextOff b + ParserD.Done 1 b -> + onDone curOff b + ParserD.Done n b -> + onDone (back n - start) b + ParserD.Partial 0 pst1 -> + go SPEC next pst1 + ParserD.Partial 1 pst1 -> + go SPEC cur pst1 + ParserD.Partial n pst1 -> + onBack (back n) onPartial pst1 + ParserD.Continue 0 pst1 -> + go SPEC next pst1 + ParserD.Continue 1 pst1 -> + go SPEC cur pst1 + ParserD.Continue n pst1 -> + onBack (back n) onContinue pst1 + ParserD.Error err -> + onError curOff err + + {-# NOINLINE parseContNothing #-} + parseContNothing !count !pst = do + r <- extract pst + case r of + -- IMPORTANT: the n here is from the byte stream parser, that means + -- it is the backtrack element count and not the stream position + -- index into the current input array. + ParserD.Done n b -> + assert (n >= 0) + (cont (Success (- n) b) (count - n) None) + ParserD.Continue n pst1 -> + assert (n >= 0) + (return $ Continue (- n) (parseCont (count - n) pst1)) + ParserD.Error err -> + -- XXX It is called only when there is no input arr. So using 0 + -- as the position is correct? + cont (Failure 0 err) count None + ParserD.Partial _ _ -> error "Bug: adaptCGWith Partial unreachable" + + {-# INLINE parseCont #-} + parseCont !cnt !pst (Chunk arr) = parseContChunk cnt 0 pst arr + parseCont !cnt !pst None = parseContNothing cnt pst + +-- | Convert a 'Parser' to 'ParserK' working on generic Array stream. 
+-- +-- /Pre-release/ +-- +{-# INLINE_LATE parserK #-} +parserK :: Monad m => ParserD.Parser a m b -> ParserK (Array a) m b +parserK (ParserD.Parser step initial extract) = + ParserK.MkParser $ adaptCGWith step initial extract diff --git a/core/src/Streamly/Internal/Data/ParserK.hs b/core/src/Streamly/Internal/Data/ParserK.hs index 9fbd3a9b74..a5867f79bf 100644 --- a/core/src/Streamly/Internal/Data/ParserK.hs +++ b/core/src/Streamly/Internal/Data/ParserK.hs @@ -10,7 +10,30 @@ module Streamly.Internal.Data.ParserK ( module Streamly.Internal.Data.ParserK.Type + + -- * Deprecated + , adaptC + , adaptCG ) where +import Streamly.Internal.Data.Parser (Parser) +import Streamly.Internal.Data.Array (Array) +import Streamly.Internal.Data.Unbox (Unbox) import Streamly.Internal.Data.ParserK.Type + +import qualified Streamly.Internal.Data.Array as Array +import qualified Streamly.Internal.Data.Array.Generic as GenArray + +#include "inline.hs" + +{-# DEPRECATED adaptC "Use Streamly.Data.Array.parserK" #-} +{-# INLINE_LATE adaptC #-} +adaptC :: (Monad m, Unbox a) => Parser a m b -> ParserK (Array a) m b +adaptC = Array.parserK + +{-# DEPRECATED adaptCG "Use Streamly.Data.Array.Generic.parserK" #-} +{-# INLINE_LATE adaptCG #-} +adaptCG :: + Monad m => Parser a m b -> ParserK (GenArray.Array a) m b +adaptCG = GenArray.parserK diff --git a/core/src/Streamly/Internal/Data/ParserK/Type.hs b/core/src/Streamly/Internal/Data/ParserK/Type.hs index 82f0903d53..41da376ee3 100644 --- a/core/src/Streamly/Internal/Data/ParserK/Type.hs +++ b/core/src/Streamly/Internal/Data/ParserK/Type.hs @@ -27,18 +27,22 @@ module Streamly.Internal.Data.ParserK.Type , Input (..) , ParseResult (..) , ParserK (..) 
- , adaptC - , adapt - , adaptCG + , parserK , toParser -- XXX unParserK, unK, unPK , fromPure , fromEffect , die + + , parserDone + + -- * Deprecated + , adapt ) where #include "ArrayMacros.h" #include "assert.hs" +#include "deprecation.h" #include "inline.hs" #if !MIN_VERSION_base(4,18,0) @@ -48,18 +52,9 @@ import Control.Applicative (Alternative(..)) import Control.Monad (MonadPlus(..), ap) import Control.Monad.IO.Class (MonadIO, liftIO) -- import Control.Monad.Trans.Class (MonadTrans(lift)) -import Data.Proxy (Proxy(..)) import GHC.Types (SPEC(..)) -import Streamly.Internal.Data.Array.Type (Array(..)) -import Streamly.Internal.Data.Unbox (Unbox(..)) -import Streamly.Internal.System.IO (unsafeInlineIO) import qualified Control.Monad.Fail as Fail -import qualified Streamly.Internal.Data.Array.Type as Array -import qualified Streamly.Internal.Data.MutArray.Generic as GenArr - ( unsafeGetIndexWith - ) -import qualified Streamly.Internal.Data.Array.Generic as GenArr import qualified Streamly.Internal.Data.Parser.Type as ParserD ------------------------------------------------------------------------------- @@ -368,150 +363,8 @@ instance MonadTrans (ParserK a) where lift = fromEffect -} -------------------------------------------------------------------------------- --- Convert ParserD to ParserK -------------------------------------------------------------------------------- - -------------------------------------------------------------------------------- --- Chunked --------------------------------------------------------------------------------- - -{-# INLINE adaptCWith #-} -adaptCWith - :: forall m a s b r. 
(Monad m, Unbox a) - => (s -> a -> m (ParserD.Step s b)) - -> m (ParserD.Initial s b) - -> (s -> m (ParserD.Step s b)) - -> (ParseResult b -> Int -> Input (Array a) -> m (Step (Array a) m r)) - -> Int - -> Int - -> Input (Array a) - -> m (Step (Array a) m r) -adaptCWith pstep initial extract cont !offset0 !usedCount !input = do - res <- initial - case res of - ParserD.IPartial pst -> do - case input of - Chunk arr -> parseContChunk usedCount offset0 pst arr - None -> parseContNothing usedCount pst - ParserD.IDone b -> cont (Success offset0 b) usedCount input - ParserD.IError err -> cont (Failure offset0 err) usedCount input - - where - - -- XXX We can maintain an absolute position instead of relative that will - -- help in reporting of error location in the stream. - {-# NOINLINE parseContChunk #-} - parseContChunk !count !offset !state arr@(Array contents start end) = do - if offset >= 0 - then go SPEC (start + offset * SIZE_OF(a)) state - else return $ Continue offset (parseCont count state) - - where - - {-# INLINE onDone #-} - onDone n b = - assert (n <= Array.length arr) - (cont (Success n b) (count + n - offset) (Chunk arr)) - - {-# INLINE callParseCont #-} - callParseCont constr n pst1 = - assert (n < 0 || n >= Array.length arr) - (return $ constr n (parseCont (count + n - offset) pst1)) - - {-# INLINE onPartial #-} - onPartial = callParseCont Partial - - {-# INLINE onContinue #-} - onContinue = callParseCont Continue - - {-# INLINE onError #-} - onError n err = - cont (Failure n err) (count + n - offset) (Chunk arr) - - {-# INLINE onBack #-} - onBack offset1 elemSize constr pst = do - let pos = offset1 - start - in if pos >= 0 - then go SPEC offset1 pst - else constr (pos `div` elemSize) pst - - -- Note: div may be expensive but the alternative is to maintain an element - -- offset in addition to a byte offset or just the element offset and use - -- multiplication to get the byte offset every time, both these options - -- turned out to be more expensive 
than using div. - go !_ !cur !pst | cur >= end = - onContinue ((end - start) `div` SIZE_OF(a)) pst - go !_ !cur !pst = do - let !x = unsafeInlineIO $ peekAt cur contents - pRes <- pstep pst x - let elemSize = SIZE_OF(a) - next = INDEX_NEXT(cur,a) - back n = next - n * elemSize - curOff = (cur - start) `div` elemSize - nextOff = (next - start) `div` elemSize - -- The "n" here is stream position index wrt the array start, and - -- not the backtrack count as returned by byte stream parsers. - case pRes of - ParserD.Done 0 b -> - onDone nextOff b - ParserD.Done 1 b -> - onDone curOff b - ParserD.Done n b -> - onDone ((back n - start) `div` elemSize) b - ParserD.Partial 0 pst1 -> - go SPEC next pst1 - ParserD.Partial 1 pst1 -> - go SPEC cur pst1 - ParserD.Partial n pst1 -> - onBack (back n) elemSize onPartial pst1 - ParserD.Continue 0 pst1 -> - go SPEC next pst1 - ParserD.Continue 1 pst1 -> - go SPEC cur pst1 - ParserD.Continue n pst1 -> - onBack (back n) elemSize onContinue pst1 - ParserD.Error err -> - onError curOff err - - {-# NOINLINE parseContNothing #-} - parseContNothing !count !pst = do - r <- extract pst - case r of - -- IMPORTANT: the n here is from the byte stream parser, that means - -- it is the backtrack element count and not the stream position - -- index into the current input array. - ParserD.Done n b -> - assert (n >= 0) - (cont (Success (- n) b) (count - n) None) - ParserD.Continue n pst1 -> - assert (n >= 0) - (return $ Continue (- n) (parseCont (count - n) pst1)) - ParserD.Error err -> - -- XXX It is called only when there is no input arr. So using 0 - -- as the position is correct? - cont (Failure 0 err) count None - ParserD.Partial _ _ -> error "Bug: adaptCWith Partial unreachable" - - -- XXX Maybe we can use two separate continuations instead of using - -- Just/Nothing cases here. That may help in avoiding the parseContJust - -- function call. 
- {-# INLINE parseCont #-} - parseCont !cnt !pst (Chunk arr) = parseContChunk cnt 0 pst arr - parseCont !cnt !pst None = parseContNothing cnt pst - --- | Convert an element 'Parser' to a chunked 'ParserK'. A chunked parser is --- more efficient than an element parser. --- --- /Pre-release/ --- -{-# INLINE_LATE adaptC #-} -adaptC :: (Monad m, Unbox a) => ParserD.Parser a m b -> ParserK (Array a) m b -adaptC (ParserD.Parser step initial extract) = - MkParser $ adaptCWith step initial extract - --------------------------------------------------------------------------------- --- Singular +-- Make a ParserK from Parser -------------------------------------------------------------------------------- {-# INLINE adaptWith #-} @@ -614,139 +467,12 @@ adaptWith pstep initial extract cont !relPos !usedCount !input = do -- -- /Pre-release/ -- -{-# INLINE_LATE adapt #-} -adapt :: Monad m => ParserD.Parser a m b -> ParserK a m b -adapt (ParserD.Parser step initial extract) = +{-# INLINE_LATE parserK #-} +parserK, adapt :: Monad m => ParserD.Parser a m b -> ParserK a m b +parserK (ParserD.Parser step initial extract) = MkParser $ adaptWith step initial extract --------------------------------------------------------------------------------- --- Chunked Generic --------------------------------------------------------------------------------- - -{-# INLINE adaptCGWith #-} -adaptCGWith - :: forall m a s b r. 
(Monad m) - => (s -> a -> m (ParserD.Step s b)) - -> m (ParserD.Initial s b) - -> (s -> m (ParserD.Step s b)) - -> (ParseResult b -> Int -> Input (GenArr.Array a) -> m (Step (GenArr.Array a) m r)) - -> Int - -> Int - -> Input (GenArr.Array a) - -> m (Step (GenArr.Array a) m r) -adaptCGWith pstep initial extract cont !offset0 !usedCount !input = do - res <- initial - case res of - ParserD.IPartial pst -> do - case input of - Chunk arr -> parseContChunk usedCount offset0 pst arr - None -> parseContNothing usedCount pst - ParserD.IDone b -> cont (Success offset0 b) usedCount input - ParserD.IError err -> cont (Failure offset0 err) usedCount input - - where - - {-# NOINLINE parseContChunk #-} - parseContChunk !count !offset !state arr@(GenArr.Array contents start end) = do - if offset >= 0 - then go SPEC (start + offset) state - else return $ Continue offset (parseCont count state) - - where - - {-# INLINE onDone #-} - onDone n b = - assert (n <= GenArr.length arr) - (cont (Success n b) (count + n - offset) (Chunk arr)) - - {-# INLINE callParseCont #-} - callParseCont constr n pst1 = - assert (n < 0 || n >= GenArr.length arr) - (return $ constr n (parseCont (count + n - offset) pst1)) - - {-# INLINE onPartial #-} - onPartial = callParseCont Partial - - {-# INLINE onContinue #-} - onContinue = callParseCont Continue - - {-# INLINE onError #-} - onError n err = - cont (Failure n err) (count + n - offset) (Chunk arr) - - {-# INLINE onBack #-} - onBack offset1 constr pst = do - let pos = offset1 - start - in if pos >= 0 - then go SPEC offset1 pst - else constr pos pst - - go !_ !cur !pst | cur >= end = - onContinue (end - start) pst - go !_ !cur !pst = do - let !x = unsafeInlineIO $ GenArr.unsafeGetIndexWith contents cur - pRes <- pstep pst x - let next = cur + 1 - back n = next - n - curOff = cur - start - nextOff = next - start - -- The "n" here is stream position index wrt the array start, and - -- not the backtrack count as returned by byte stream parsers. 
- case pRes of - ParserD.Done 0 b -> - onDone nextOff b - ParserD.Done 1 b -> - onDone curOff b - ParserD.Done n b -> - onDone (back n - start) b - ParserD.Partial 0 pst1 -> - go SPEC next pst1 - ParserD.Partial 1 pst1 -> - go SPEC cur pst1 - ParserD.Partial n pst1 -> - onBack (back n) onPartial pst1 - ParserD.Continue 0 pst1 -> - go SPEC next pst1 - ParserD.Continue 1 pst1 -> - go SPEC cur pst1 - ParserD.Continue n pst1 -> - onBack (back n) onContinue pst1 - ParserD.Error err -> - onError curOff err - - {-# NOINLINE parseContNothing #-} - parseContNothing !count !pst = do - r <- extract pst - case r of - -- IMPORTANT: the n here is from the byte stream parser, that means - -- it is the backtrack element count and not the stream position - -- index into the current input array. - ParserD.Done n b -> - assert (n >= 0) - (cont (Success (- n) b) (count - n) None) - ParserD.Continue n pst1 -> - assert (n >= 0) - (return $ Continue (- n) (parseCont (count - n) pst1)) - ParserD.Error err -> - -- XXX It is called only when there is no input arr. So using 0 - -- as the position is correct? - cont (Failure 0 err) count None - ParserD.Partial _ _ -> error "Bug: adaptCGWith Partial unreachable" - - {-# INLINE parseCont #-} - parseCont !cnt !pst (Chunk arr) = parseContChunk cnt 0 pst arr - parseCont !cnt !pst None = parseContNothing cnt pst - --- | A generic 'adaptC'. Similar to 'adaptC' but is not constrained to 'Unbox' --- types. --- --- /Pre-release/ --- -{-# INLINE_LATE adaptCG #-} -adaptCG :: - Monad m => ParserD.Parser a m b -> ParserK (GenArr.Array a) m b -adaptCG (ParserD.Parser step initial extract) = - MkParser $ adaptCGWith step initial extract +RENAME(adapt,parserK) ------------------------------------------------------------------------------- -- Convert CPS style 'Parser' to direct style 'D.Parser' @@ -754,13 +480,10 @@ adaptCG (ParserD.Parser step initial extract) = -- | A continuation to extract the result when a CPS parser is done. 
{-# INLINE parserDone #-}
-parserDone :: Monad m => ParseResult b -> Int -> Input a -> m (Step a m b)
-parserDone (Success n b) _ _ = do
-    assertM(n <= 1)
-    return $ Done n b
-parserDone (Failure n e) _ _ = do
-    assertM(n <= 1)
-    return $ Error n e
+parserDone :: Applicative m =>
+    ParseResult b -> Int -> Input a -> m (Step a m b)
+parserDone (Success n b) _ _ = assert (n <= 1) (pure (Done n b))
+parserDone (Failure n e) _ _ = assert (n <= 1) (pure (Error n e))
 
 -- XXX Note that this works only for single element parsers and not for Array
 -- input parsers. The asserts will fail for array parsers.
@@ -797,6 +520,6 @@ toParser parser = ParserD.Parser step initial extract
         assert (n <= 0) (return $ ParserD.Continue (negate n) cont1)
 
 {-# RULES "fromParser/toParser fusion" [2]
-    forall s. toParser (adapt s) = s #-}
+    forall s. toParser (parserK s) = s #-}
 {-# RULES "toParser/fromParser fusion" [2]
-    forall s. adapt (toParser s) = s #-}
+    forall s. parserK (toParser s) = s #-}
diff --git a/core/src/Streamly/Internal/Data/StreamK.hs b/core/src/Streamly/Internal/Data/StreamK.hs
index 3978df9d6d..65dab750d6 100644
--- a/core/src/Streamly/Internal/Data/StreamK.hs
+++ b/core/src/Streamly/Internal/Data/StreamK.hs
@@ -22,7 +22,6 @@ module Streamly.Internal.Data.StreamK
     , module Streamly.Internal.Data.StreamK.Transformer
 
     -- * From containers
-    , fromList
     , fromStream
 
     -- * Specialized Generation
@@ -43,12 +42,8 @@ module Streamly.Internal.Data.StreamK
     , foldConcat
     , parseDBreak
     , parseD
-    , parseBreakChunks
-    , parseChunks
     , parseBreak
     , parse
-    , parseBreakChunksGeneric
-    , parseChunksGeneric
 
     -- ** Specialized Folds
     , head
@@ -127,6 +122,10 @@ module Streamly.Internal.Data.StreamK
 
     -- * Deprecated
     , hoist
+    , parseBreakChunks
+    , parseChunks
+    , parseBreakChunksGeneric
+    , parseChunksGeneric
     )
 where
 
@@ -140,7 +139,6 @@ import Control.Monad (void, join)
 import Control.Monad.Catch (MonadCatch)
 import Control.Monad.IO.Class (MonadIO(..))
 import Data.Ord (comparing)
-import 
Data.Proxy (Proxy(..)) import GHC.Types (SPEC(..)) import Streamly.Internal.Data.Array.Type (Array(..)) import Streamly.Internal.Data.Fold.Type (Fold(..)) @@ -148,10 +146,10 @@ import Streamly.Internal.Data.IOFinalizer (newIOFinalizer, runIOFinalizer) import Streamly.Internal.Data.ParserK.Type (ParserK) import Streamly.Internal.Data.Producer.Type (Producer(..)) import Streamly.Internal.Data.SVar.Type (adaptState, defState) -import Streamly.Internal.Data.Unbox (sizeOf, Unbox) +import Streamly.Internal.Data.Unbox (Unbox) import qualified Control.Monad.Catch as MC -import qualified Streamly.Internal.Data.Array.Type as Array +import qualified Streamly.Internal.Data.Array as Array import qualified Streamly.Internal.Data.Array.Generic as GenArr import qualified Streamly.Internal.Data.Fold.Type as FL import qualified Streamly.Internal.Data.Parser as Parser @@ -165,7 +163,7 @@ import Prelude take, filter, all, any, takeWhile, drop, dropWhile, notElem, head, tail, init, zipWith, lookup, (!!), replicate, reverse, concatMap, iterate, splitAt) -import Data.Foldable (sum, length) +import Data.Foldable (length) import Streamly.Internal.Data.StreamK.Type import Streamly.Internal.Data.StreamK.Transformer import Streamly.Internal.Data.Parser (ParseError(..)) @@ -287,14 +285,6 @@ iterate step = go iterateM :: Monad m => (a -> m a) -> m a -> StreamK m a iterateM = iterateMWith consM -------------------------------------------------------------------------------- --- Conversions -------------------------------------------------------------------------------- - -{-# INLINE fromList #-} -fromList :: [a] -> StreamK m a -fromList = fromFoldable - ------------------------------------------------------------------------------- -- Elimination by Folding ------------------------------------------------------------------------------- @@ -1283,145 +1273,26 @@ parseD f = fmap fst . 
parseDBreak f -- ParserK Chunked ------------------------------------------------------------------------------- --- The backracking buffer consists of arrays in the most-recent-first order. We --- want to take a total of n array elements from this buffer. Note: when we --- have to take an array partially, we must take the last part of the array. -{-# INLINE backTrack #-} -backTrack :: forall m a. Unbox a => - Int - -> [Array a] - -> StreamK m (Array a) - -> (StreamK m (Array a), [Array a]) -backTrack = go - - where - - go _ [] stream = (stream, []) - go n xs stream | n <= 0 = (stream, xs) - go n (x:xs) stream = - let len = Array.length x - in if n > len - then go (n - len) xs (cons x stream) - else if n == len - then (cons x stream, xs) - else let !(Array contents start end) = x - !start1 = end - (n * SIZE_OF(a)) - arr1 = Array contents start1 end - arr2 = Array contents start start1 - in (cons arr1 stream, arr2:xs) - --- | A continuation to extract the result when a CPS parser is done. -{-# INLINE parserDone #-} -parserDone :: Applicative m => - ParserK.ParseResult b -> Int -> ParserK.Input a -> m (ParserK.Step a m b) -parserDone (ParserK.Success n b) _ _ = pure $ ParserK.Done n b -parserDone (ParserK.Failure n e) _ _ = pure $ ParserK.Error n e - -- XXX parseDBreakChunks may be faster than converting parserD to parserK and -- using parseBreakChunks. We can also use parseBreak as an alternative to the -- monad instance of ParserD. -- | Run a 'ParserK' over a chunked 'StreamK' and return the parse result and -- the remaining Stream. 
+{-# DEPRECATED parseBreakChunks "Use Streamly.Data.Array.parseBreak instead" #-} {-# INLINE_NORMAL parseBreakChunks #-} parseBreakChunks :: (Monad m, Unbox a) => ParserK (Array a) m b -> StreamK m (Array a) -> m (Either ParseError b, StreamK m (Array a)) -parseBreakChunks parser input = do - let parserk = ParserK.runParser parser parserDone 0 0 - in go [] parserk input - - where - - {-# INLINE goStop #-} - goStop backBuf parserk = do - pRes <- parserk ParserK.None - case pRes of - -- If we stop in an alternative, it will try calling the next - -- parser, the next parser may call initial returning Partial and - -- then immediately we have to call extract on it. - ParserK.Partial 0 cont1 -> - go [] cont1 nil - ParserK.Partial n cont1 -> do - let n1 = negate n - assertM(n1 >= 0 && n1 <= sum (Prelude.map Array.length backBuf)) - let (s1, backBuf1) = backTrack n1 backBuf nil - in go backBuf1 cont1 s1 - ParserK.Continue 0 cont1 -> - go backBuf cont1 nil - ParserK.Continue n cont1 -> do - let n1 = negate n - assertM(n1 >= 0 && n1 <= sum (Prelude.map Array.length backBuf)) - let (s1, backBuf1) = backTrack n1 backBuf nil - in go backBuf1 cont1 s1 - ParserK.Done 0 b -> - return (Right b, nil) - ParserK.Done n b -> do - let n1 = negate n - assertM(n1 >= 0 && n1 <= sum (Prelude.map Array.length backBuf)) - let (s1, _) = backTrack n1 backBuf nil - in return (Right b, s1) - ParserK.Error _ err -> do - let (s1, _) = backTrack maxBound backBuf nil - return (Left (ParseError err), s1) - - seekErr n len = - error $ "parseBreak: Partial: forward seek not implemented n = " - ++ show n ++ " len = " ++ show len - - yieldk backBuf parserk arr stream = do - pRes <- parserk (ParserK.Chunk arr) - let len = Array.length arr - case pRes of - ParserK.Partial n cont1 -> - case compare n len of - EQ -> go [] cont1 stream - LT -> do - if n >= 0 - then yieldk [] cont1 arr stream - else do - let n1 = negate n - bufLen = sum (Prelude.map Array.length backBuf) - s = cons arr stream - assertM(n1 >= 0 
&& n1 <= bufLen) - let (s1, _) = backTrack n1 backBuf s - go [] cont1 s1 - GT -> seekErr n len - ParserK.Continue n cont1 -> - case compare n len of - EQ -> go (arr:backBuf) cont1 stream - LT -> do - if n >= 0 - then yieldk backBuf cont1 arr stream - else do - let n1 = negate n - bufLen = sum (Prelude.map Array.length backBuf) - s = cons arr stream - assertM(n1 >= 0 && n1 <= bufLen) - let (s1, backBuf1) = backTrack n1 backBuf s - go backBuf1 cont1 s1 - GT -> seekErr n len - ParserK.Done n b -> do - let n1 = len - n - assertM(n1 <= sum (Prelude.map Array.length (arr:backBuf))) - let (s1, _) = backTrack n1 (arr:backBuf) stream - in return (Right b, s1) - ParserK.Error _ err -> do - let (s1, _) = backTrack maxBound (arr:backBuf) stream - return (Left (ParseError err), s1) - - go backBuf parserk stream = do - let stop = goStop backBuf parserk - single a = yieldk backBuf parserk a nil - in foldStream - defState (yieldk backBuf parserk) single stop stream +parseBreakChunks = Array.parseBreak +{-# DEPRECATED parseChunks "Use Streamly.Data.Array.parse instead" #-} {-# INLINE parseChunks #-} parseChunks :: (Monad m, Unbox a) => ParserK (Array a) m b -> StreamK m (Array a) -> m (Either ParseError b) -parseChunks f = fmap fst . parseBreakChunks f +parseChunks = Array.parse ------------------------------------------------------------------------------- -- ParserK Singular @@ -1449,7 +1320,7 @@ parseBreak -> StreamK m a -> m (Either ParseError b, StreamK m a) parseBreak parser input = do - let parserk = ParserK.runParser parser parserDone 0 0 + let parserk = ParserK.runParser parser ParserK.parserDone 0 0 in go [] parserk input where @@ -1565,153 +1436,25 @@ parse f = fmap fst . 
parseBreak f -- ParserK Chunked Generic ------------------------------------------------------------------------------- -{-# INLINE backTrackGenericChunks #-} -backTrackGenericChunks :: - Int - -> [GenArr.Array a] - -> StreamK m (GenArr.Array a) - -> (StreamK m (GenArr.Array a), [GenArr.Array a]) -backTrackGenericChunks = go - - where - - go _ [] stream = (stream, []) - go n xs stream | n <= 0 = (stream, xs) - go n (x:xs) stream = - let len = GenArr.length x - in if n > len - then go (n - len) xs (cons x stream) - else if n == len - then (cons x stream, xs) - else let arr1 = GenArr.unsafeSliceOffLen (len - n) n x - arr2 = GenArr.unsafeSliceOffLen 0 (len - n) x - in (cons arr1 stream, arr2:xs) - -- | Similar to 'parseBreak' but works on generic arrays -- +{-# DEPRECATED parseBreakChunksGeneric "Use Streamly.Data.Array.Generic.parseBreak" #-} {-# INLINE_NORMAL parseBreakChunksGeneric #-} parseBreakChunksGeneric :: forall m a b. Monad m => ParserK.ParserK (GenArr.Array a) m b -> StreamK m (GenArr.Array a) -> m (Either ParseError b, StreamK m (GenArr.Array a)) -parseBreakChunksGeneric parser input = do - let parserk = ParserK.runParser parser parserDone 0 0 - in go [] parserk input - - where - - {-# INLINE goStop #-} - goStop - :: [GenArr.Array a] - -> (ParserK.Input (GenArr.Array a) - -> m (ParserK.Step (GenArr.Array a) m b)) - -> m (Either ParseError b, StreamK m (GenArr.Array a)) - goStop backBuf parserk = do - pRes <- parserk ParserK.None - case pRes of - -- If we stop in an alternative, it will try calling the next - -- parser, the next parser may call initial returning Partial and - -- then immediately we have to call extract on it. 
- ParserK.Partial 0 cont1 -> - go [] cont1 nil - ParserK.Partial n cont1 -> do - let n1 = negate n - assertM(n1 >= 0 && n1 <= sum (Prelude.map GenArr.length backBuf)) - let (s1, backBuf1) = backTrackGenericChunks n1 backBuf nil - in go backBuf1 cont1 s1 - ParserK.Continue 0 cont1 -> - go backBuf cont1 nil - ParserK.Continue n cont1 -> do - let n1 = negate n - assertM(n1 >= 0 && n1 <= sum (Prelude.map GenArr.length backBuf)) - let (s1, backBuf1) = backTrackGenericChunks n1 backBuf nil - in go backBuf1 cont1 s1 - ParserK.Done 0 b -> - return (Right b, nil) - ParserK.Done n b -> do - let n1 = negate n - assertM(n1 >= 0 && n1 <= sum (Prelude.map GenArr.length backBuf)) - let (s1, _) = backTrackGenericChunks n1 backBuf nil - in return (Right b, s1) - ParserK.Error _ err -> - let strm = fromList (Prelude.reverse backBuf) - in return (Left (ParseError err), strm) - - seekErr n len = - error $ "parseBreak: Partial: forward seek not implemented n = " - ++ show n ++ " len = " ++ show len - - yieldk - :: [GenArr.Array a] - -> (ParserK.Input (GenArr.Array a) - -> m (ParserK.Step (GenArr.Array a) m b)) - -> GenArr.Array a - -> StreamK m (GenArr.Array a) - -> m (Either ParseError b, StreamK m (GenArr.Array a)) - yieldk backBuf parserk arr stream = do - pRes <- parserk (ParserK.Chunk arr) - let len = GenArr.length arr - case pRes of - ParserK.Partial n cont1 -> - case compare n len of - EQ -> go [] cont1 stream - LT -> do - if n >= 0 - then yieldk [] cont1 arr stream - else do - let n1 = negate n - bufLen = sum (Prelude.map GenArr.length backBuf) - s = cons arr stream - assertM(n1 >= 0 && n1 <= bufLen) - let (s1, _) = backTrackGenericChunks n1 backBuf s - go [] cont1 s1 - GT -> seekErr n len - ParserK.Continue n cont1 -> - case compare n len of - EQ -> go (arr:backBuf) cont1 stream - LT -> do - if n >= 0 - then yieldk backBuf cont1 arr stream - else do - let n1 = negate n - bufLen = sum (Prelude.map GenArr.length backBuf) - s = cons arr stream - assertM(n1 >= 0 && n1 <= bufLen) - 
let (s1, backBuf1) = backTrackGenericChunks n1 backBuf s - go backBuf1 cont1 s1 - GT -> seekErr n len - ParserK.Done n b -> do - let n1 = len - n - assertM(n1 <= sum (Prelude.map GenArr.length (arr:backBuf))) - let (s1, _) = backTrackGenericChunks n1 (arr:backBuf) stream - in return (Right b, s1) - ParserK.Error _ err -> - let strm = - append - (fromList (Prelude.reverse backBuf)) - (cons arr stream) - in return (Left (ParseError err), strm) - - go - :: [GenArr.Array a] - -> (ParserK.Input (GenArr.Array a) - -> m (ParserK.Step (GenArr.Array a) m b)) - -> StreamK m (GenArr.Array a) - -> m (Either ParseError b, StreamK m (GenArr.Array a)) - go backBuf parserk stream = do - let stop = goStop backBuf parserk - single a = yieldk backBuf parserk a nil - in foldStream - defState (yieldk backBuf parserk) single stop stream +parseBreakChunksGeneric = GenArr.parseBreak +{-# DEPRECATED parseChunksGeneric "Use Streamly.Data.Array.Generic.parse" #-} {-# INLINE parseChunksGeneric #-} parseChunksGeneric :: (Monad m) => ParserK.ParserK (GenArr.Array a) m b -> StreamK m (GenArr.Array a) -> m (Either ParseError b) -parseChunksGeneric f = fmap fst . 
parseBreakChunksGeneric f +parseChunksGeneric = GenArr.parse ------------------------------------------------------------------------------- -- Sorting diff --git a/core/src/Streamly/Internal/Data/StreamK/Type.hs b/core/src/Streamly/Internal/Data/StreamK/Type.hs index 5e15d214c7..c284fa12b4 100644 --- a/core/src/Streamly/Internal/Data/StreamK/Type.hs +++ b/core/src/Streamly/Internal/Data/StreamK/Type.hs @@ -77,6 +77,7 @@ module Streamly.Internal.Data.StreamK.Type -- ** From Containers , fromFoldable , fromFoldableM + , Streamly.Internal.Data.StreamK.Type.fromList -- ** Cyclic , mfix @@ -1090,13 +1091,13 @@ instance Show a => Show (StreamK Identity a) where instance Read a => Read (StreamK Identity a) where readPrec = parens $ prec 10 $ do Ident "fromList" <- lexP - fromList <$> readPrec + GHC.Exts.fromList <$> readPrec readListPrec = readListPrecDefault instance (a ~ Char) => IsString (StreamK Identity a) where {-# INLINE fromString #-} - fromString = fromList + fromString = GHC.Exts.fromList ------------------------------------------------------------------------------- -- Foldable @@ -2024,6 +2025,10 @@ fromFoldable = Prelude.foldr cons nil fromFoldableM :: (Foldable f, Monad m) => f (m a) -> StreamK m a fromFoldableM = Prelude.foldr consM nil +{-# INLINE fromList #-} +fromList :: [a] -> StreamK m a +fromList = fromFoldable + ------------------------------------------------------------------------------- -- Deconstruction ------------------------------------------------------------------------------- diff --git a/hie.yaml b/hie.yaml index 330faa0aa9..fb8bd5aa4e 100644 --- a/hie.yaml +++ b/hie.yaml @@ -112,6 +112,8 @@ cradle: component: "test:Data.List" - path: "./test/Streamly/Test/Data/Parser.hs" component: "test:Data.Parser" + - path: "./test/Streamly/Test/Data/ParserK.hs" + component: "test:Data.ParserK" - path: "./test/Streamly/Test/Data/Stream/Concurrent.hs" component: "test:Data.Stream.Concurrent" - path: "./test/Streamly/Test/Data/Stream/Common.hs" diff 
--git a/test/Streamly/Test/Data/Parser.hs b/test/Streamly/Test/Data/Parser.hs index 0ab87c82b6..171f55fa0f 100644 --- a/test/Streamly/Test/Data/Parser.hs +++ b/test/Streamly/Test/Data/Parser.hs @@ -1375,13 +1375,13 @@ runParserTC :: (Unbox a, Monad m) => TestMode -> ParserTestCase a m b c -> c runParserTC tm runner = case tm of TMParserStream -> runner S.fromList S.parse - TMParserKStreamK -> runner K.fromList (K.parse . PK.adapt) + TMParserKStreamK -> runner K.fromList (K.parse . PK.parserK) TMParserKStreamKChunks -> - runner (producerChunks A.fromList) (K.parseChunks . PK.adaptC) + runner (producerChunks A.fromList) (A.parse . A.parserK) TMParserKStreamKChunksGeneric -> runner (producerChunks GA.fromList) - (K.parseChunksGeneric . PK.adaptCG) + (GA.parse . GA.parserK) where cSize = 50 diff --git a/test/Streamly/Test/Data/ParserK.hs b/test/Streamly/Test/Data/ParserK.hs index 79483ce4f2..d140ffc65b 100644 --- a/test/Streamly/Test/Data/ParserK.hs +++ b/test/Streamly/Test/Data/ParserK.hs @@ -27,9 +27,9 @@ maxTestCount = 100 toParser :: Spec toParser = do - let p = ParserK.toParser (ParserK.adapt Parser.one) + let p = ParserK.toParser (ParserK.parserK Parser.one) runP xs = Stream.parse p (Stream.fromList xs) - describe "toParser . adapt" $ do + describe "toParser . parserK" $ do it "empty stream" $ do r1 <- runP ([] :: [Int]) case r1 of @@ -43,9 +43,9 @@ toParser = do r3 <- runP [0,1::Int] fromRight undefined r3 `shouldBe` 0 - let p1 = ParserK.adapt $ ParserK.toParser (ParserK.adapt Parser.one) + let p1 = ParserK.parserK $ ParserK.toParser (ParserK.parserK Parser.one) runP1 xs = StreamK.parse p1 (StreamK.fromStream $ Stream.fromList xs) - describe "adapt . toParser . adapt" $ do + describe "parserK . toParser . 
parserK" $ do it "empty stream" $ do r1 <- runP1 ([] :: [Int]) case r1 of @@ -64,15 +64,15 @@ toParser = do let p2 = Parser.takeWhile (<= 3) FL.toList runP2 xs = Stream.parseBreak p2 (Stream.fromList xs) - p3 = ParserK.adapt (Parser.takeWhile (<= 3) FL.toList) + p3 = ParserK.parserK (Parser.takeWhile (<= 3) FL.toList) runP3 xs = StreamK.parseBreak p3 (StreamK.fromList xs) p4 = ParserK.toParser $ fusionBreaker - $ ParserK.adapt (Parser.takeWhile (<= 3) FL.toList) + $ ParserK.parserK (Parser.takeWhile (<= 3) FL.toList) runP4 xs = Stream.parseBreak p4 (Stream.fromList xs) - describe "toParser . adapt" $ do + describe "toParser . parserK" $ do it "(<= 3) for [1, 2, 3, 4, 5]" $ do (a, b) <- runP2 ([1, 2, 3, 4, 5] :: [Int]) fromRight undefined a `shouldBe` [1, 2, 3] @@ -105,7 +105,7 @@ fusionBreaker = id sanityParseBreak :: [Move] -> H.SpecWith () sanityParseBreak jumps = it (show jumps) $ do (val, rest) <- - StreamK.parseBreak (ParserK.adapt (jumpParser jumps)) + StreamK.parseBreak (ParserK.parserK (jumpParser jumps)) $ StreamK.fromList tape lst <- StreamK.toList rest (val, lst) `shouldBe` (expectedResult jumps tape) @@ -113,7 +113,7 @@ sanityParseBreak jumps = it (show jumps) $ do sanityParseBreakChunks :: [Move] -> H.SpecWith () sanityParseBreakChunks jumps = it (show jumps) $ do (val, rest) <- - StreamK.parseBreakChunks (ParserK.adaptC (jumpParser jumps)) + A.parseBreak (A.parserK (jumpParser jumps)) $ StreamK.fromList $ Prelude.map A.fromList chunkedTape lst <- Prelude.map A.toList <$> StreamK.toList rest (val, concat lst) `shouldBe` (expectedResult jumps tape) @@ -121,7 +121,7 @@ sanityParseBreakChunks jumps = it (show jumps) $ do sanityParseBreakChunksGeneric :: [Move] -> H.SpecWith () sanityParseBreakChunksGeneric jumps = it (show jumps) $ do (val, rest) <- - StreamK.parseBreakChunksGeneric (ParserK.adaptCG (jumpParser jumps)) + AG.parseBreak (AG.parserK (jumpParser jumps)) $ StreamK.fromList $ Prelude.map AG.fromList chunkedTape lst <- Prelude.map AG.toList <$> 
StreamK.toList rest (val, concat lst) `shouldBe` (expectedResult jumps tape)