X-Git-Url: https://git.immae.eu/?a=blobdiff_plain;f=Pipes%2FText.hs;h=f71f17f77528b2acbaa7b1785d5cf54e2c0b96d7;hb=89cf7024a1ea56acdb7c39581141ecc9de1dc031;hp=d5b93f1e4351f8a445fe9e7fbcccd0c86dc94a3d;hpb=e7ad36437caf83c4c25296542764bc4b1c819e24;p=github%2Ffretlink%2Ftext-pipes.git diff --git a/Pipes/Text.hs b/Pipes/Text.hs index d5b93f1..f71f17f 100644 --- a/Pipes/Text.hs +++ b/Pipes/Text.hs @@ -1,93 +1,8 @@ {-# LANGUAGE RankNTypes, TypeFamilies, BangPatterns, Trustworthy #-} -{-| This package provides @pipes@ utilities for \'text streams\', which are - streams of 'Text' chunks. The individual chunks are uniformly @strict@, and you - will generally want @Data.Text@ in scope. But the type @Producer Text m r@ is - in some ways the pipes equivalent of the lazy @Text@ type. - - This module provides many functions equivalent in one way or another to - the 'pure' functions in - . - They transform, divide, group and fold text streams. Though @Producer Text m r@ - is \'effectful\' Text, functions - in this module are \'pure\' in the sense that they are uniformly monad-independent. - Simple IO operations are defined in @Pipes.Text.IO@ -- as lazy IO @Text@ - operations are in @Data.Text.Lazy.IO@. Interoperation with @ByteString@ - is provided in @Pipes.Text.Encoding@, which parallels @Data.Text.Lazy.Encoding@. - - The Text type exported by @Data.Text.Lazy@ is basically '[Text]'. The implementation - is arranged so that the individual strict 'Text' chunks are kept to a reasonable size; - the user is not aware of the divisions between the connected 'Text' chunks. - So also here: the functions in this module are designed to operate on streams that - are insensitive to text boundaries. This means that they may freely split - text into smaller texts and /discard empty texts/. However, the objective is - that they should /never concatenate texts/ in order to provide strict upper - bounds on memory usage. - - For example, to stream only the first three lines of 'stdin' to 'stdout' you - might write: - -> import Pipes -> import qualified Pipes.Text as Text -> import qualified Pipes.Text.IO as Text -> import Pipes.Group -> import Lens.Family -> -> main = runEffect $ takeLines 3 Text.stdin >-> Text.stdout -> where -> takeLines n = Text.unlines . takes' n . view Text.lines -> -- or equivalently: -> -- takeLines n = over Text.lines (takes' n) - - The above program will never bring more than one chunk of text (~ 32 KB) into - memory, no matter how long the lines are. - - As this example shows, one superficial difference from @Data.Text.Lazy@ - is that many of the operations, like 'lines', - are \'lensified\'; this has a number of advantages where it is possible, in particular - it facilitates their use with 'Parser's of Text (in the general - - sense.) - Each such expression, e.g. 'lines', 'chunksOf' or 'splitAt', reduces to the - intuitively corresponding function when used with @view@ or @(^.)@. - - A more important difference the example reveals is in the types closely associated with - the central type, @Producer Text m r@. In @Data.Text@ and @Data.Text.Lazy@ - we find functions like - -> splitAt :: Int -> Text -> (Text, Text) -> lines :: Int -> Text -> [Text] -> chunksOf :: Int -> Text -> [Text] - - which relate a Text with a pair or list of Texts. The corresponding functions here (taking - account of \'lensification\') are - -> view . 
splitAt :: (Monad m, Integral n) -> => n -> Producer Text m r -> Producer Text.Text m (Producer Text.Text m r) -> view lines :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r -> view . chunksOf :: (Monad m, Integral n) => n -> Producer Text m r -> FreeT (Producer Text m) m r - - In the type @Producer Text m (Producer Text m r)@ the second - element of the \'pair\' of of \'effectful Texts\' cannot simply be retrieved - with 'snd'. This is an \'effectful\' pair, and one must work through the effects - of the first element to arrive at the second. Similarly in @FreeT (Producer Text m) m r@, - which corresponds with @[Text]@, on cannot simply drop 10 Producers and take the others; - we can only get to the ones we want to take by working through their predecessors. - - Some of the types may be more readable if you imagine that we have introduced - our own type synonyms - -> type Text m r = Producer T.Text m r -> type Texts m r = FreeT (Producer T.Text m) m r - - Then we would think of the types above as - -> view . splitAt :: (Monad m, Integral n) => n -> Text m r -> Text m (Text m r) -> view lines :: (Monad m) => Text m r -> Texts m r -> view . chunksOf :: (Monad m, Integral n) => n -> Text m r -> Texts m r - - which brings one closer to the types of the similar functions in @Data.Text.Lazy@ - +{-| The module @Pipes.Text@ closely follows @Pipes.ByteString@ from + the @pipes-bytestring@ package. A draft tutorial can be found in + @Pipes.Text.Tutorial@. -} module Pipes.Text ( @@ -98,17 +13,13 @@ module Pipes.Text ( , map , concatMap , take - , drop , takeWhile - , dropWhile , filter - , scan - , pack - , unpack , toCaseFold , toLower , toUpper , stripStart + , scan -- * Folds , toLazy @@ -124,7 +35,6 @@ module Pipes.Text ( , minimum , find , index - , count -- * Primitive Character Parsers , nextChar @@ -133,7 +43,7 @@ module Pipes.Text ( , peekChar , isEndOfChars - -- * Parsing Lenses + -- * Parsing Lenses , splitAt , span , break @@ -142,54 +52,53 @@ module Pipes.Text ( , word , line - -- * FreeT Splitters + -- * Transforming Text and Character Streams + , drop + , dropWhile + , pack + , unpack + , intersperse + + -- * FreeT Transformations , chunksOf , splitsWith , splits , groupsBy , groups , lines - , words - - -- * Transformations - , intersperse - , packChars - - -- * Joiners - , intercalate , unlines + , words , unwords + , intercalate -- * Re-exports -- $reexports , module Data.ByteString , module Data.Text - , module Data.Profunctor , module Pipes.Parse , module Pipes.Group ) where -import Control.Applicative ((<*)) +import Control.Applicative ((<*)) import Control.Monad (liftM, join) import Control.Monad.Trans.State.Strict (StateT(..), modify) import qualified Data.Text as T import Data.Text (Text) import qualified Data.Text.Lazy as TL -import Data.Text.Lazy.Internal (foldrChunks, defaultChunkSize) import Data.ByteString (ByteString) import Data.Functor.Constant (Constant(Constant, getConstant)) import Data.Functor.Identity (Identity) -import Data.Profunctor (Profunctor) -import qualified Data.Profunctor + import Pipes -import Pipes.Group (concats, intercalates, FreeT(..), FreeF(..)) +import Pipes.Group (folds, maps, concats, intercalates, FreeT(..), FreeF(..)) import qualified Pipes.Group as PG import qualified Pipes.Parse as PP import Pipes.Parse (Parser) import qualified Pipes.Prelude as P import Data.Char (isSpace) import Data.Word (Word8) - +import Foreign.Storable (sizeOf) +import Data.Bits (shiftL) import Prelude hiding ( all, any, @@ -219,105 +128,47 @@ import 
Prelude hiding ( words, writeFile ) --- | Convert a lazy 'TL.Text' into a 'Producer' of strict 'Text's +-- $setup +-- >>> :set -XOverloadedStrings +-- >>> import Data.Text (Text) +-- >>> import qualified Data.Text as T +-- >>> import qualified Data.Text.Lazy.IO as TL +-- >>> import Data.Char + +-- | Convert a lazy 'TL.Text' into a 'Producer' of strict 'Text's. Producers in +-- IO can be found in 'Pipes.Text.IO' or in pipes-bytestring, employed with the +-- decoding lenses in 'Pipes.Text.Encoding' fromLazy :: (Monad m) => TL.Text -> Producer' Text m () -fromLazy = foldrChunks (\e a -> yield e >> a) (return ()) +fromLazy = TL.foldrChunks (\e a -> yield e >> a) (return ()) {-# INLINE fromLazy #-} - -type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a) - -type Iso' a b = forall f p . (Functor f, Profunctor p) => p b (f b) -> p a (f a) - (^.) :: a -> ((b -> Constant b b) -> (a -> Constant b a)) -> b a ^. lens = getConstant (lens Constant a) - -- | Apply a transformation to each 'Char' in the stream + +-- >>> let margaret = ["Margaret, are you grieving\nOver Golde","ngrove unleaving?":: Text] +-- >>> TL.putStrLn . toLazy $ each margaret >-> map Data.Char.toUpper +-- MARGARET, ARE YOU GRIEVING +-- OVER GOLDENGROVE UNLEAVING? map :: (Monad m) => (Char -> Char) -> Pipe Text Text m r map f = P.map (T.map f) {-# INLINABLE map #-} -{-# RULES "p >-> map f" forall p f . - p >-> map f = for p (\txt -> yield (T.map f txt)) - #-} - -- | Map a function over the characters of a text stream and concatenate the results + concatMap :: (Monad m) => (Char -> Text) -> Pipe Text Text m r concatMap f = P.map (T.concatMap f) {-# INLINABLE concatMap #-} -{-# RULES "p >-> concatMap f" forall p f . - p >-> concatMap f = for p (\txt -> yield (T.concatMap f txt)) - #-} - - --- | Transform a Pipe of 'String's into one of 'Text' chunks -pack :: Monad m => Pipe String Text m r -pack = P.map T.pack -{-# INLINEABLE pack #-} - -{-# RULES "p >-> pack" forall p . - p >-> pack = for p (\txt -> yield (T.pack txt)) - #-} - --- | Transform a Pipes of 'Text' chunks into one of 'String's -unpack :: Monad m => Pipe Text String m r -unpack = for cat (\t -> yield (T.unpack t)) -{-# INLINEABLE unpack #-} - -{-# RULES "p >-> unpack" forall p . - p >-> unpack = for p (\txt -> yield (T.unpack txt)) - #-} - --- | @toCaseFold@, @toLower@, @toUpper@ and @stripStart@ are standard 'Text' utilities, --- here acting as 'Text' pipes, rather as they would on a lazy text -toCaseFold :: Monad m => Pipe Text Text m () -toCaseFold = P.map T.toCaseFold -{-# INLINEABLE toCaseFold #-} - -{-# RULES "p >-> toCaseFold" forall p . - p >-> toCaseFold = for p (\txt -> yield (T.toCaseFold txt)) - #-} - - --- | lowercase incoming 'Text' -toLower :: Monad m => Pipe Text Text m () -toLower = P.map T.toLower -{-# INLINEABLE toLower #-} - -{-# RULES "p >-> toLower" forall p . - p >-> toLower = for p (\txt -> yield (T.toLower txt)) - #-} - --- | uppercase incoming 'Text' -toUpper :: Monad m => Pipe Text Text m () -toUpper = P.map T.toUpper -{-# INLINEABLE toUpper #-} - -{-# RULES "p >-> toUpper" forall p . 
- p >-> toUpper = for p (\txt -> yield (T.toUpper txt)) - #-} - --- | Remove leading white space from an incoming succession of 'Text's -stripStart :: Monad m => Pipe Text Text m r -stripStart = do - chunk <- await - let text = T.stripStart chunk - if T.null text - then stripStart - else do yield text - cat -{-# INLINEABLE stripStart #-} - --- | @(take n)@ only allows @n@ individual characters to pass; +-- | @(take n)@ only allows @n@ individual characters to pass; -- contrast @Pipes.Prelude.take@ which would let @n@ chunks pass. take :: (Monad m, Integral a) => a -> Pipe Text Text m () take n0 = go n0 where go n | n <= 0 = return () - | otherwise = do + | otherwise = do txt <- await let len = fromIntegral (T.length txt) if (len > n) @@ -327,21 +178,6 @@ take n0 = go n0 where go (n - len) {-# INLINABLE take #-} --- | @(drop n)@ drops the first @n@ characters -drop :: (Monad m, Integral a) => a -> Pipe Text Text m r -drop n0 = go n0 where - go n - | n <= 0 = cat - | otherwise = do - txt <- await - let len = fromIntegral (T.length txt) - if (len >= n) - then do - yield (T.drop (fromIntegral n) txt) - cat - else go (n - len) -{-# INLINABLE drop #-} - -- | Take characters until they fail the predicate takeWhile :: (Monad m) => (Char -> Bool) -> Pipe Text Text m () takeWhile predicate = go @@ -356,28 +192,17 @@ takeWhile predicate = go else yield prefix {-# INLINABLE takeWhile #-} --- | Drop characters until they fail the predicate -dropWhile :: (Monad m) => (Char -> Bool) -> Pipe Text Text m r -dropWhile predicate = go where - go = do - txt <- await - case T.findIndex (not . predicate) txt of - Nothing -> go - Just i -> do - yield (T.drop i txt) - cat -{-# INLINABLE dropWhile #-} - -- | Only allows 'Char's to pass if they satisfy the predicate filter :: (Monad m) => (Char -> Bool) -> Pipe Text Text m r filter predicate = P.map (T.filter predicate) {-# INLINABLE filter #-} -{-# RULES "p >-> filter q" forall p q . - p >-> filter q = for p (\txt -> yield (T.filter q txt)) - #-} - -- | Strict left scan over the characters +-- >>> let margaret = ["Margaret, are you grieving\nOver Golde","ngrove unleaving?":: Text] +-- >>> let title_caser a x = case a of ' ' -> Data.Char.toUpper x; _ -> x +-- >>> toLazy $ each margaret >-> scan title_caser ' ' +-- " Margaret, Are You Grieving\nOver Goldengrove Unleaving?" 
+ scan :: (Monad m) => (Char -> Char -> Char) -> Char -> Pipe Text Text m r @@ -393,6 +218,33 @@ scan step begin = do go c' {-# INLINABLE scan #-} +-- | @toCaseFold@, @toLower@, @toUpper@ and @stripStart@ are standard 'Text' utilities, +-- here acting as 'Text' pipes, rather as they would on a lazy text +toCaseFold :: Monad m => Pipe Text Text m r +toCaseFold = P.map T.toCaseFold +{-# INLINEABLE toCaseFold #-} + +-- | lowercase incoming 'Text' +toLower :: Monad m => Pipe Text Text m r +toLower = P.map T.toLower +{-# INLINEABLE toLower #-} + +-- | uppercase incoming 'Text' +toUpper :: Monad m => Pipe Text Text m r +toUpper = P.map T.toUpper +{-# INLINEABLE toUpper #-} + +-- | Remove leading white space from an incoming succession of 'Text's +stripStart :: Monad m => Pipe Text Text m r +stripStart = do + chunk <- await + let text = T.stripStart chunk + if T.null text + then stripStart + else do yield text + cat +{-# INLINEABLE stripStart #-} + {-| Fold a pure 'Producer' of strict 'Text's into a lazy 'TL.Text' -} @@ -418,6 +270,7 @@ foldChars foldChars step begin done = P.fold (T.foldl' step) begin done {-# INLINABLE foldChars #-} + -- | Retrieve the first 'Char' head :: (Monad m) => Producer Text m () -> m (Maybe Char) head = go @@ -498,18 +351,13 @@ find predicate p = head (p >-> filter predicate) index :: (Monad m, Integral a) => a-> Producer Text m () -> m (Maybe Char) -index n p = head (p >-> drop n) +index n p = head (drop n p) {-# INLINABLE index #-} --- | Store a tally of how many segments match the given 'Text' -count :: (Monad m, Num n) => Text -> Producer Text m () -> m n -count c p = P.fold (+) 0 id (p >-> P.map (fromIntegral . T.count c)) -{-# INLINABLE count #-} - -- | Consume the first character from a stream of 'Text' --- +-- -- 'next' either fails with a 'Left' if the 'Producer' has no more characters or -- succeeds with a 'Right' providing the next character and the remainder of the -- 'Producer'. @@ -585,7 +433,6 @@ isEndOfChars = do Just _-> False ) {-# INLINABLE isEndOfChars #-} - -- | Splits a 'Producer' after the given number of characters splitAt :: (Monad m, Integral n) @@ -664,11 +511,11 @@ groupBy equals k p0 = fmap join (k ((go p0))) where Left r -> return (return r) Right (txt, p') -> case T.uncons txt of Nothing -> go p' - Just (c, _) -> (yield txt >> p') ^. span (equals c) + Just (c, _) -> (yield txt >> p') ^. span (equals c) {-# INLINABLE groupBy #-} -- | Improper lens that splits after the first succession of identical 'Char' s -group :: Monad m +group :: Monad m => Lens' (Producer Text m r) (Producer Text m (Producer Text m r)) group = groupBy (==) @@ -676,9 +523,9 @@ group = groupBy (==) {-| Improper lens that splits a 'Producer' after the first word - Unlike 'words', this does not drop leading whitespace + Unlike 'words', this does not drop leading whitespace -} -word :: (Monad m) +word :: (Monad m) => Lens' (Producer Text m r) (Producer Text m (Producer Text m r)) word k p0 = fmap join (k (to p0)) @@ -688,14 +535,27 @@ word k p0 = fmap join (k (to p0)) p'^.break isSpace {-# INLINABLE word #-} - -line :: (Monad m) +line :: (Monad m) => Lens' (Producer Text m r) (Producer Text m (Producer Text m r)) line = break (== '\n') - {-# INLINABLE line #-} +-- | @(drop n)@ drops the first @n@ characters +drop :: (Monad m, Integral n) + => n -> Producer Text m r -> Producer Text m r +drop n p = do + p' <- lift $ runEffect (for (p ^. 
splitAt n) discard) + p' +{-# INLINABLE drop #-} + +-- | Drop characters until they fail the predicate +dropWhile :: (Monad m) + => (Char -> Bool) -> Producer Text m r -> Producer Text m r +dropWhile predicate p = do + p' <- lift $ runEffect (for (p ^. span predicate) discard) + p' +{-# INLINABLE dropWhile #-} -- | Intersperse a 'Char' in between the characters of stream of 'Text' intersperse @@ -720,27 +580,36 @@ intersperse c = go0 {-# INLINABLE intersperse #-} +-- | Improper lens from unpacked 'Word8's to packaged 'ByteString's +pack :: Monad m => Lens' (Producer Char m r) (Producer Text m r) +pack k p = fmap _unpack (k (_pack p)) +{-# INLINABLE pack #-} --- | Improper isomorphism between a 'Producer' of 'ByteString's and 'Word8's -packChars :: Monad m => Iso' (Producer Char m x) (Producer Text m x) -packChars = Data.Profunctor.dimap to (fmap from) - where - -- to :: Monad m => Producer Char m x -> Producer Text m x - to p = PG.folds step id done (p^.PG.chunksOf defaultChunkSize) +-- | Improper lens from packed 'ByteString's to unpacked 'Word8's +unpack :: Monad m => Lens' (Producer Text m r) (Producer Char m r) +unpack k p = fmap _pack (k (_unpack p)) +{-# INLINABLE unpack #-} - step diffAs c = diffAs . (c:) +_pack :: Monad m => Producer Char m r -> Producer Text m r +_pack p = folds step id done (p^.PG.chunksOf defaultChunkSize) + where + step diffAs w8 = diffAs . (w8:) done diffAs = T.pack (diffAs []) +{-# INLINABLE _pack #-} + +_unpack :: Monad m => Producer Text m r -> Producer Char m r +_unpack p = for p (each . T.unpack) +{-# INLINABLE _unpack #-} - -- from :: Monad m => Producer Text m x -> Producer Char m x - from p = for p (each . T.unpack) -{-# INLINABLE packChars #-} +defaultChunkSize :: Int +defaultChunkSize = 16384 - (sizeOf (undefined :: Int) `shiftL` 1) -- | Split a text stream into 'FreeT'-delimited text streams of fixed size chunksOf :: (Monad m, Integral n) - => n -> Lens' (Producer Text m r) + => n -> Lens' (Producer Text m r) (FreeT (Producer Text m) m r) chunksOf n k p0 = fmap concats (k (FreeT (go p0))) where @@ -749,7 +618,7 @@ chunksOf n k p0 = fmap concats (k (FreeT (go p0))) return $ case x of Left r -> Pure r Right (txt, p') -> Free $ do - p'' <- (yield txt >> p') ^. splitAt n + p'' <- (yield txt >> p') ^. splitAt n return $ FreeT (go p'') {-# INLINABLE chunksOf #-} @@ -760,8 +629,7 @@ chunksOf n k p0 = fmap concats (k (FreeT (go p0))) splitsWith :: (Monad m) => (Char -> Bool) - -> Producer Text m r - -> FreeT (Producer Text m) m r + -> Producer Text m r -> FreeT (Producer Text m) m r splitsWith predicate p0 = FreeT (go0 p0) where go0 p = do @@ -779,7 +647,7 @@ splitsWith predicate p0 = FreeT (go0 p0) return $ case x of Left r -> Pure r Right (_, p') -> Free $ do - p'' <- p' ^. span (not . predicate) + p'' <- p' ^. span (not . 
predicate) return $ FreeT (go1 p'') {-# INLINABLE splitsWith #-} @@ -789,7 +657,7 @@ splits :: (Monad m) -> Lens' (Producer Text m r) (FreeT (Producer Text m) m r) splits c k p = - fmap (PG.intercalates (yield (T.singleton c))) (k (splitsWith (c ==) p)) + fmap (intercalates (yield (T.singleton c))) (k (splitsWith (c ==) p)) {-# INLINABLE splits #-} {-| Isomorphism between a stream of 'Text' and groups of equivalent 'Char's , using the @@ -799,7 +667,7 @@ groupsBy :: Monad m => (Char -> Char -> Bool) -> Lens' (Producer Text m x) (FreeT (Producer Text m) m x) -groupsBy equals k p0 = fmap concats (k (FreeT (go p0))) where +groupsBy equals k p0 = fmap concats (k (FreeT (go p0))) where go p = do x <- next p case x of Left r -> return (Pure r) Right (bs, p') -> case T.uncons bs of @@ -822,10 +690,19 @@ groups = groupsBy (==) {-| Split a text stream into 'FreeT'-delimited lines -} lines - :: (Monad m) => Iso' (Producer Text m r) (FreeT (Producer Text m) m r) -lines = Data.Profunctor.dimap _lines (fmap _unlines) - where - _lines p0 = FreeT (go0 p0) + :: (Monad m) => Lens' (Producer Text m r) (FreeT (Producer Text m) m r) +lines k p = fmap _unlines (k (_lines p)) +{-# INLINABLE lines #-} + +unlines + :: Monad m + => Lens' (FreeT (Producer Text m) m r) (Producer Text m r) +unlines k p = fmap _lines (k (_unlines p)) +{-# INLINABLE unlines #-} + +_lines :: Monad m + => Producer Text m r -> FreeT (Producer Text m) m r +_lines p0 = FreeT (go0 p0) where go0 p = do x <- next p @@ -842,30 +719,40 @@ lines = Data.Profunctor.dimap _lines (fmap _unlines) case x of Left r -> return $ Pure r Right (_, p'') -> go0 p'' - -- _unlines - -- :: Monad m - -- => FreeT (Producer Text m) m x -> Producer Text m x - _unlines = concats . PG.maps (<* yield (T.singleton '\n')) - - -{-# INLINABLE lines #-} +{-# INLINABLE _lines #-} +_unlines :: Monad m + => FreeT (Producer Text m) m r -> Producer Text m r +_unlines = concats . maps (<* yield (T.singleton '\n')) +{-# INLINABLE _unlines #-} --- | Split a text stream into 'FreeT'-delimited words +-- | Split a text stream into 'FreeT'-delimited words. Note that +-- roundtripping with e.g. @over words id@ eliminates extra space +-- characters as with @Prelude.unwords . Prelude.words@ words - :: (Monad m) => Iso' (Producer Text m r) (FreeT (Producer Text m) m r) -words = Data.Profunctor.dimap go (fmap _unwords) - where - go p = FreeT $ do - x <- next (p >-> dropWhile isSpace) + :: (Monad m) => Lens' (Producer Text m r) (FreeT (Producer Text m) m r) +words k p = fmap _unwords (k (_words p)) +{-# INLINABLE words #-} + +unwords + :: Monad m + => Lens' (FreeT (Producer Text m) m r) (Producer Text m r) +unwords k p = fmap _words (k (_unwords p)) +{-# INLINABLE unwords #-} + +_words :: (Monad m) => Producer Text m r -> FreeT (Producer Text m) m r +_words p = FreeT $ do + x <- next (dropWhile isSpace p) return $ case x of Left r -> Pure r Right (bs, p') -> Free $ do p'' <- (yield bs >> p') ^. 
break isSpace - return (go p'') - _unwords = PG.intercalates (yield $ T.singleton ' ') - -{-# INLINABLE words #-} + return (_words p'') +{-# INLINABLE _words #-} + +_unwords :: (Monad m) => FreeT (Producer Text m) m r -> Producer Text m r +_unwords = intercalates (yield $ T.singleton ' ') +{-# INLINABLE _unwords #-} {-| 'intercalate' concatenates the 'FreeT'-delimited text streams after @@ -873,9 +760,7 @@ words = Data.Profunctor.dimap go (fmap _unwords) -} intercalate :: (Monad m) - => Producer Text m () - -> FreeT (Producer Text m) m r - -> Producer Text m r + => Producer Text m () -> FreeT (Producer Text m) m r -> Producer Text m r intercalate p0 = go0 where go0 f = do @@ -895,35 +780,14 @@ intercalate p0 = go0 go1 f' {-# INLINABLE intercalate #-} -{-| Join 'FreeT'-delimited lines into a text stream --} -unlines - :: (Monad m) => FreeT (Producer Text m) m r -> Producer Text m r -unlines = go - where - go f = do - x <- lift (runFreeT f) - case x of - Pure r -> return r - Free p -> do - f' <- p - yield $ T.singleton '\n' - go f' -{-# INLINABLE unlines #-} - -{-| Join 'FreeT'-delimited words into a text stream --} -unwords - :: (Monad m) => FreeT (Producer Text m) m r -> Producer Text m r -unwords = intercalate (yield $ T.singleton ' ') -{-# INLINABLE unwords #-} {- $reexports - + @Data.Text@ re-exports the 'Text' type. - @Pipes.Parse@ re-exports 'input', 'concat', 'FreeT' (the type) and the 'Parse' synonym. + @Pipes.Parse@ re-exports 'input', 'concat', 'FreeT' (the type) and the 'Parse' synonym. -} +type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
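
Usage sketch (editorial note, not part of the diff): the example below is adapted from the module-header comment that this diff removes (the tutorial text is being moved to Pipes.Text.Tutorial) and illustrates the lens-style `lines` exported by the new version of the module. It assumes `Text.stdin` and `Text.stdout` from Pipes.Text.IO, `takes'` from Pipes.Group, and `over` from Lens.Family, as in the removed haddock; treat it as a minimal sketch rather than part of the patch.

    -- Stream only the first three lines of stdin to stdout, never holding
    -- more than one strict Text chunk in memory.
    import Pipes
    import qualified Pipes.Text as Text
    import qualified Pipes.Text.IO as Text
    import Pipes.Group (takes')
    import Lens.Family (over)

    main :: IO ()
    main = runEffect $ takeLines 3 Text.stdin >-> Text.stdout
      where
        -- 'Text.lines' is now a Lens' from a text Producer to its
        -- FreeT-delimited lines, so limiting the number of lines is just
        -- 'over' that lens with pipes-group's takes'.
        takeLines n = over Text.lines (takes' n)

The same pattern (`over someLens someFreeTTransformation`) applies to the other splitters introduced above, e.g. `words` and `chunksOf`.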