author    michaelt <what_is_it_to_do_anything@yahoo.com>  2014-06-26 21:40:43 +0200
committer michaelt <what_is_it_to_do_anything@yahoo.com>  2014-06-26 21:40:43 +0200
commit    2f4a83f82d206217456a035b7e8a1e56c585bfd0 (patch)
tree      1a6882a4eece84bbf187273b81e4a04378da13df
parent    57454c33c13da1f07101bf096b010b4d2ce97292 (diff)
Bumped bounds for pipes-bytestring. Made types agree with pipes-bytestring where possible. Scrapped Iso and profunctor
-rw-r--r--  .gitignore             |   1
-rw-r--r--  Fu.sh                  |  22
-rw-r--r--  Pipes/Text.hs          | 509
-rw-r--r--  Pipes/Text/Encoding.hs |  12
-rw-r--r--  changelog              |   6
-rw-r--r--  pipes-text.cabal       |   1
6 files changed, 249 insertions, 302 deletions
diff --git a/.gitignore b/.gitignore
index 2e23740..3050479 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,5 +6,6 @@ cabal-dev
6*.chi 6*.chi
7*.chs.h 7*.chs.h
8*.py 8*.py
9*.sh
9.virthualenv 10.virthualenv
10 11
diff --git a/Fu.sh b/Fu.sh
deleted file mode 100644
index 4bf20d6..0000000
--- a/Fu.sh
+++ /dev/null
@@ -1,22 +0,0 @@
1#!/usr/bin/env bash
2cabal configure && cabal build && cabal haddock --hyperlink-source \
3 --html-location='/package/$pkg-version/docs' \
4 --contents-location='/package/$pkg'
5S=$?
6if [ "${S}" -eq "0" ]; then
7 cd "dist/doc/html"
8 DDIR="${1}-${2}-docs"
9 cp -r "${1}" "${DDIR}" && tar -c -v -z --format=ustar -f "${DDIR}.tar.gz" "${DDIR}"
10 CS=$?
11 if [ "${CS}" -eq "0" ]; then
12 echo "Uploading to Hackage…"
13 curl -X PUT -H 'Content-Type: application/x-tar' -H 'Content-Encoding: gzip' --data-binary "@${DDIR}.tar.gz" "http://${3}:${4}@hackage.haskell.org/package/${1}-${2}/docs"
14 exit $?
15 else
16 echo "Error when packaging the documentation"
17 exit $CS
18 fi
19else
20 echo "Error when trying to build the package."
21 exit $S
22fi \ No newline at end of file
diff --git a/Pipes/Text.hs b/Pipes/Text.hs
index 38811ed..45b9299 100644
--- a/Pipes/Text.hs
+++ b/Pipes/Text.hs
@@ -1,25 +1,24 @@
1{-# LANGUAGE RankNTypes, TypeFamilies, BangPatterns, Trustworthy #-} 1{-# LANGUAGE RankNTypes, TypeFamilies, BangPatterns, Trustworthy #-}
2 2
3
4module Pipes.Text ( 3module Pipes.Text (
5 -- * Effectful Text 4 -- * Effectful Text
6 -- $intro 5 -- $intro
7 6
8 -- * Lenses 7 -- * Lenses
9 -- $lenses 8 -- $lenses
10 9
11 -- ** @view@ \/ @(^.)@ 10 -- ** @view@ \/ @(^.)@
12 -- $view 11 -- $view
13 12
14 -- ** @over@ \/ @(%~)@ 13 -- ** @over@ \/ @(%~)@
15 -- $over 14 -- $over
16 15
17 -- ** @zoom@ 16 -- ** @zoom@
18 -- $zoom 17 -- $zoom
19 18
20 -- * Special types: @Producer Text m (Producer Text m r)@ and @FreeT (Producer Text m) m r@ 19 -- * Special types: @Producer Text m (Producer Text m r)@ and @FreeT (Producer Text m) m r@
21 -- $special 20 -- $special
22 21
23 -- * Producers 22 -- * Producers
24 fromLazy 23 fromLazy
25 24
@@ -27,17 +26,13 @@ module Pipes.Text (
27 , map 26 , map
28 , concatMap 27 , concatMap
29 , take 28 , take
30 , drop
31 , takeWhile 29 , takeWhile
32 , dropWhile
33 , filter 30 , filter
34 , scan
35 , pack
36 , unpack
37 , toCaseFold 31 , toCaseFold
38 , toLower 32 , toLower
39 , toUpper 33 , toUpper
40 , stripStart 34 , stripStart
35 , scan
41 36
42 -- * Folds 37 -- * Folds
43 , toLazy 38 , toLazy
@@ -53,7 +48,6 @@ module Pipes.Text (
53 , minimum 48 , minimum
54 , find 49 , find
55 , index 50 , index
56 , count
57 51
58 -- * Primitive Character Parsers 52 -- * Primitive Character Parsers
59 , nextChar 53 , nextChar
@@ -62,7 +56,7 @@ module Pipes.Text (
62 , peekChar 56 , peekChar
63 , isEndOfChars 57 , isEndOfChars
64 58
65 -- * Parsing Lenses 59 -- * Parsing Lenses
66 , splitAt 60 , splitAt
67 , span 61 , span
68 , break 62 , break
@@ -71,34 +65,34 @@ module Pipes.Text (
71 , word 65 , word
72 , line 66 , line
73 67
74 -- * FreeT Splitters 68 -- * Transforming Text and Character Streams
69 , drop
70 , dropWhile
71 , pack
72 , unpack
73 , intersperse
74
75 -- * FreeT Transformations
75 , chunksOf 76 , chunksOf
76 , splitsWith 77 , splitsWith
77 , splits 78 , splits
78 , groupsBy 79 , groupsBy
79 , groups 80 , groups
80 , lines 81 , lines
81 , words
82
83 -- * Transformations
84 , intersperse
85 , packChars
86
87 -- * Joiners
88 , intercalate
89 , unlines 82 , unlines
83 , words
90 , unwords 84 , unwords
85 , intercalate
91 86
92 -- * Re-exports 87 -- * Re-exports
93 -- $reexports 88 -- $reexports
94 , module Data.ByteString 89 , module Data.ByteString
95 , module Data.Text 90 , module Data.Text
96 , module Data.Profunctor
97 , module Pipes.Parse 91 , module Pipes.Parse
98 , module Pipes.Group 92 , module Pipes.Group
99 ) where 93 ) where
100 94
101import Control.Applicative ((<*)) 95import Control.Applicative ((<*))
102import Control.Monad (liftM, join) 96import Control.Monad (liftM, join)
103import Control.Monad.Trans.State.Strict (StateT(..), modify) 97import Control.Monad.Trans.State.Strict (StateT(..), modify)
104import qualified Data.Text as T 98import qualified Data.Text as T
@@ -107,10 +101,9 @@ import qualified Data.Text.Lazy as TL
107import Data.ByteString (ByteString) 101import Data.ByteString (ByteString)
108import Data.Functor.Constant (Constant(Constant, getConstant)) 102import Data.Functor.Constant (Constant(Constant, getConstant))
109import Data.Functor.Identity (Identity) 103import Data.Functor.Identity (Identity)
110import Data.Profunctor (Profunctor) 104
111import qualified Data.Profunctor
112import Pipes 105import Pipes
113import Pipes.Group (concats, intercalates, FreeT(..), FreeF(..)) 106import Pipes.Group (folds, maps, concats, intercalates, FreeT(..), FreeF(..))
114import qualified Pipes.Group as PG 107import qualified Pipes.Group as PG
115import qualified Pipes.Parse as PP 108import qualified Pipes.Parse as PP
116import Pipes.Parse (Parser) 109import Pipes.Parse (Parser)
@@ -149,30 +142,30 @@ import Prelude hiding (
149 writeFile ) 142 writeFile )
150 143
151{- $intro 144{- $intro
152 This package provides @pipes@ utilities for /text streams/ or /character streams/, 145 This package provides @pipes@ utilities for /text streams/ or /character streams/,
153 realized as streams of 'Text' chunks. The individual chunks are uniformly /strict/, 146 realized as streams of 'Text' chunks. The individual chunks are uniformly /strict/,
154 and thus you will generally want @Data.Text@ in scope. But the type 147 and thus you will generally want @Data.Text@ in scope. But the type
155 @Producer Text m r@, as we are using it, is a sort of /pipes/ equivalent of the lazy @Text@ type. 148 @Producer Text m r@, as we are using it, is a sort of /pipes/ equivalent of the lazy @Text@ type.
156 149
157 This particular module provides many functions equivalent in one way or another to 150 This particular module provides many functions equivalent in one way or another to
158 the pure functions in 151 the pure functions in
159 <https://hackage.haskell.org/package/text-1.1.0.0/docs/Data-Text-Lazy.html Data.Text.Lazy>. 152 <https://hackage.haskell.org/package/text-1.1.0.0/docs/Data-Text-Lazy.html Data.Text.Lazy>.
160 They transform, divide, group and fold text streams. Though @Producer Text m r@ 153 They transform, divide, group and fold text streams. Though @Producer Text m r@
161 is the type of \'effectful Text\', the functions in this module are \'pure\' 154 is the type of \'effectful Text\', the functions in this module are \'pure\'
162 in the sense that they are uniformly monad-independent. 155 in the sense that they are uniformly monad-independent.
163 Simple /IO/ operations are defined in @Pipes.Text.IO@ -- as lazy IO @Text@ 156 Simple /IO/ operations are defined in @Pipes.Text.IO@ -- as lazy IO @Text@
164 operations are in @Data.Text.Lazy.IO@. Inter-operation with @ByteString@ 157 operations are in @Data.Text.Lazy.IO@. Inter-operation with @ByteString@
165 is provided in @Pipes.Text.Encoding@, which parallels @Data.Text.Lazy.Encoding@. 158 is provided in @Pipes.Text.Encoding@, which parallels @Data.Text.Lazy.Encoding@.
166 159
167 The Text type exported by @Data.Text.Lazy@ is basically that of a lazy list of 160 The Text type exported by @Data.Text.Lazy@ is basically that of a lazy list of
168 strict Text: the implementation is arranged so that the individual strict 'Text' 161 strict Text: the implementation is arranged so that the individual strict 'Text'
169 chunks are kept to a reasonable size; the user is not aware of the divisions 162 chunks are kept to a reasonable size; the user is not aware of the divisions
170 between the connected 'Text' chunks. 163 between the connected 'Text' chunks.
171 So also here: the functions in this module are designed to operate on streams that 164 So also here: the functions in this module are designed to operate on streams that
172 are insensitive to text boundaries. This means that they may freely split 165 are insensitive to text boundaries. This means that they may freely split
173 text into smaller texts and /discard empty texts/. The objective, though, is 166 text into smaller texts and /discard empty texts/. The objective, though, is
174 that they should /never concatenate texts/ in order to provide strict upper 167 that they should /never concatenate texts/ in order to provide strict upper
175 bounds on memory usage. 168 bounds on memory usage.
176 169
177 For example, to stream only the first three lines of 'stdin' to 'stdout' you 170 For example, to stream only the first three lines of 'stdin' to 'stdout' you
178 might write: 171 might write:
@@ -181,10 +174,10 @@ import Prelude hiding (
181> import qualified Pipes.Text as Text 174> import qualified Pipes.Text as Text
182> import qualified Pipes.Text.IO as Text 175> import qualified Pipes.Text.IO as Text
183> import Pipes.Group (takes') 176> import Pipes.Group (takes')
184> import Lens.Family 177> import Lens.Family
185> 178>
186> main = runEffect $ takeLines 3 Text.stdin >-> Text.stdout 179> main = runEffect $ takeLines 3 Text.stdin >-> Text.stdout
187> where 180> where
188> takeLines n = Text.unlines . takes' n . view Text.lines 181> takeLines n = Text.unlines . takes' n . view Text.lines
189 182
190 The above program will never bring more than one chunk of text (~ 32 KB) into 183 The above program will never bring more than one chunk of text (~ 32 KB) into
@@ -192,49 +185,49 @@ import Prelude hiding (
192 185
193-} 186-}
194{- $lenses 187{- $lenses
195 As this example shows, one superficial difference from @Data.Text.Lazy@ 188 As this example shows, one superficial difference from @Data.Text.Lazy@
196 is that many of the operations, like 'lines', are \'lensified\'; this has a 189 is that many of the operations, like 'lines', are \'lensified\'; this has a
197 number of advantages (where it is possible); in particular it facilitates their 190 number of advantages (where it is possible); in particular it facilitates their
198 use with 'Parser's of Text (in the general <http://hackage.haskell.org/package/pipes-parse-3.0.1/docs/Pipes-Parse-Tutorial.html pipes-parse> 191 use with 'Parser's of Text (in the general <http://hackage.haskell.org/package/pipes-parse-3.0.1/docs/Pipes-Parse-Tutorial.html pipes-parse>
199 sense.) The disadvantage, famously, is that the messages you get for type errors can be 192 sense.) The disadvantage, famously, is that the messages you get for type errors can be
200 a little alarming. The remarks that follow in this section are for non-lens adepts. 193 a little alarming. The remarks that follow in this section are for non-lens adepts.
201 194
202 Each lens exported here, e.g. 'lines', 'chunksOf' or 'splitAt', reduces to the 195 Each lens exported here, e.g. 'lines', 'chunksOf' or 'splitAt', reduces to the
203 intuitively corresponding function when used with @view@ or @(^.)@. Instead of 196 intuitively corresponding function when used with @view@ or @(^.)@. Instead of
204 writing: 197 writing:
205 198
206 > splitAt 17 producer 199 > splitAt 17 producer
207 200
208 as we would with the Prelude or Text functions, we write 201 as we would with the Prelude or Text functions, we write
209 202
210 > view (splitAt 17) producer 203 > view (splitAt 17) producer
211 204
212 or equivalently 205 or equivalently
213 206
214 > producer ^. splitAt 17 207 > producer ^. splitAt 17
215 208
216 This may seem a little indirect, but note that many equivalents of 209 This may seem a little indirect, but note that many equivalents of
217 @Text -> Text@ functions are exported here as 'Pipe's. Here too we recover the intuitively 210 @Text -> Text@ functions are exported here as 'Pipe's. Here too we recover the intuitively
218 corresponding functions by prefixing them with @(>->)@. Thus something like 211 corresponding functions by prefixing them with @(>->)@. Thus something like
219 212
220> stripLines = Text.unlines . Group.maps (>-> Text.stripStart) . view Text.lines 213> stripLines = Text.unlines . Group.maps (>-> Text.stripStart) . view Text.lines
221 214
222 would drop the leading white space from each line. 215 would drop the leading white space from each line.
223 216
224 The lenses in this library are marked as /improper/; this just means that 217 The lenses in this library are marked as /improper/; this just means that
225 they don't admit all the operations of an ideal lens, but only /getting/ and /focusing/. 218 they don't admit all the operations of an ideal lens, but only /getting/ and /focusing/.
226 Just for this reason, though, the magnificent complexities of the lens libraries 219 Just for this reason, though, the magnificent complexities of the lens libraries
227 are a distraction. The lens combinators to keep in mind, the ones that make sense for 220 are a distraction. The lens combinators to keep in mind, the ones that make sense for
228 our lenses, are @view@ \/ @(^.)@, @over@ \/ @(%~)@, and @zoom@. 221 our lenses, are @view@ \/ @(^.)@, @over@ \/ @(%~)@, and @zoom@.
229 222
230 One need only keep in mind that if @l@ is a @Lens' a b@, then: 223 One need only keep in mind that if @l@ is a @Lens' a b@, then:
231 224
232-} 225-}
233{- $view 226{- $view
234 @view l@ is a function @a -> b@ . Thus @view l a@ (also written @a ^. l@ ) 227 @view l@ is a function @a -> b@ . Thus @view l a@ (also written @a ^. l@ )
235 is the corresponding @b@; as was said above, this function will be exactly the 228 is the corresponding @b@; as was said above, this function will be exactly the
236 function you think it is, given its name. Thus to uppercase the first n characters 229 function you think it is, given its name. Thus to uppercase the first n characters
237 of a Producer, leaving the rest the same, we could write: 230 of a Producer, leaving the rest the same, we could write:
238 231
239 232
240 > upper n p = do p' <- p ^. Text.splitAt n >-> Text.toUpper 233 > upper n p = do p' <- p ^. Text.splitAt n >-> Text.toUpper
@@ -242,11 +235,11 @@ import Prelude hiding (
242-} 235-}
243{- $over 236{- $over
244 @over l@ is a function @(b -> b) -> a -> a@. Thus, given a function that modifies 237 @over l@ is a function @(b -> b) -> a -> a@. Thus, given a function that modifies
245 @b@s, the lens lets us modify an @a@ by applying @f :: b -> b@ to 238 @b@s, the lens lets us modify an @a@ by applying @f :: b -> b@ to
246 the @b@ that we can \"see\" through the lens. So @over l f :: a -> a@ 239 the @b@ that we can \"see\" through the lens. So @over l f :: a -> a@
247 (it can also be written @l %~ f@). 240 (it can also be written @l %~ f@).
248 For any particular @a@, then, @over l f a@ or @(l %~ f) a@ is a revised @a@. 241 For any particular @a@, then, @over l f a@ or @(l %~ f) a@ is a revised @a@.
249 So above we might have written things like these: 242 So above we might have written things like these:
250 243
251 > stripLines = Text.lines %~ maps (>-> Text.stripStart) 244 > stripLines = Text.lines %~ maps (>-> Text.stripStart)
252 > stripLines = over Text.lines (maps (>-> Text.stripStart)) 245 > stripLines = over Text.lines (maps (>-> Text.stripStart))
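   For instance, the same @over@ idiom works with any of the parsing lenses. A minimal
   sketch, assuming @over@ from Lens.Family as in the examples above (@upperFirst3@ is a
   made-up name):

> upperFirst3 :: Monad m => Producer Text m r -> Producer Text m r
> upperFirst3 = over (Text.splitAt 3) (>-> Text.toUpper)   -- uppercase only the first three characters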
@@ -254,23 +247,23 @@ import Prelude hiding (
254 247
255-} 248-}
256{- $zoom 249{- $zoom
257 @zoom l@, finally, is a function from a @Parser b m r@ 250 @zoom l@, finally, is a function from a @Parser b m r@
258 to a @Parser a m r@ (or more generally a @StateT (Producer b m x) m r@). 251 to a @Parser a m r@ (or more generally a @StateT (Producer b m x) m r@).
259 Its use is easiest to see with a decoding lens like 'utf8', which 252 Its use is easiest to see with a decoding lens like 'utf8', which
260 \"sees\" a Text producer hidden inside a ByteString producer: 253 \"sees\" a Text producer hidden inside a ByteString producer:
261 @drawChar@ is a Text parser, returning a @Maybe Char@, @zoom utf8 drawChar@ is 254 @drawChar@ is a Text parser, returning a @Maybe Char@, @zoom utf8 drawChar@ is
262 a /ByteString/ parser, returning a @Maybe Char@. @drawAll@ is a Parser that returns 255 a /ByteString/ parser, returning a @Maybe Char@. @drawAll@ is a Parser that returns
263 a list of everything produced from a Producer, leaving only the return value; it would 256 a list of everything produced from a Producer, leaving only the return value; it would
264 usually be unreasonable to use it. But @zoom (splitAt 17) drawAll@ 257 usually be unreasonable to use it. But @zoom (splitAt 17) drawAll@
265 returns a list of Text chunks containing the first seventeen Chars, leaving the rest of 258 returns a list of Text chunks containing the first seventeen Chars, leaving the rest of
266 the Text Producer for further parsing. Suppose that we want, inexplicably, to 259 the Text Producer for further parsing. Suppose that we want, inexplicably, to
267 modify the casing of a Text Producer according to any instruction it might 260 modify the casing of a Text Producer according to any instruction it might
268 contain at the start. Then we might write something like this: 261 contain at the start. Then we might write something like this:
269 262
270> obey :: Monad m => Producer Text m b -> Producer Text m b 263> obey :: Monad m => Producer Text m b -> Producer Text m b
271> obey p = do (ts, p') <- lift $ runStateT (zoom (Text.splitAt 7) drawAll) p 264> obey p = do (ts, p') <- lift $ runStateT (zoom (Text.splitAt 7) drawAll) p
272> let seven = T.concat ts 265> let seven = T.concat ts
273> case T.toUpper seven of 266> case T.toUpper seven of
274> "TOUPPER" -> p' >-> Text.toUpper 267> "TOUPPER" -> p' >-> Text.toUpper
275> "TOLOWER" -> p' >-> Text.toLower 268> "TOLOWER" -> p' >-> Text.toLower
276> _ -> do yield seven 269> _ -> do yield seven
@@ -281,31 +274,31 @@ import Prelude hiding (
281> >>> runEffect $ obey doc >-> Text.stdout 274> >>> runEffect $ obey doc >-> Text.stdout
282> THIS DOCUMENT. 275> THIS DOCUMENT.
283 276
284 The purpose of exporting lenses is the mental economy achieved with this three-way 277 The purpose of exporting lenses is the mental economy achieved with this three-way
285 applicability. That one expression, e.g. @lines@ or @splitAt 17@ can have these 278 applicability. That one expression, e.g. @lines@ or @splitAt 17@ can have these
286 three uses is no more surprising than that a pipe can act as a function modifying 279 three uses is no more surprising than that a pipe can act as a function modifying
287 the output of a producer, namely by using @>->@ to its left: @producer >-> pipe@ 280 the output of a producer, namely by using @>->@ to its left: @producer >-> pipe@
288 -- but can /also/ modify the inputs to a consumer by using @>->@ to its right: 281 -- but can /also/ modify the inputs to a consumer by using @>->@ to its right:
289 @pipe >-> consumer@ 282 @pipe >-> consumer@
290 283
291 The three functions, @view@ \/ @(^.)@, @over@ \/ @(%~)@ and @zoom@ are supplied by 284 The three functions, @view@ \/ @(^.)@, @over@ \/ @(%~)@ and @zoom@ are supplied by
292 both <http://hackage.haskell.org/package/lens lens> and 285 both <http://hackage.haskell.org/package/lens lens> and
293 <http://hackage.haskell.org/package/lens-family lens-family>. The use of 'zoom' is explained 286 <http://hackage.haskell.org/package/lens-family lens-family>. The use of 'zoom' is explained
294 in <http://hackage.haskell.org/package/pipes-parse-3.0.1/docs/Pipes-Parse-Tutorial.html Pipes.Parse.Tutorial> 287 in <http://hackage.haskell.org/package/pipes-parse-3.0.1/docs/Pipes-Parse-Tutorial.html Pipes.Parse.Tutorial>
295 and to some extent in the @Pipes.Text.Encoding@ module here. 288 and to some extent in the @Pipes.Text.Encoding@ module here.
296 289
297-} 290-}
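   A minimal sketch of the @zoom@ idiom just described, assuming @utf8@ from
   @Pipes.Text.Encoding@ and @zoom@ from lens-family (@firstChar@ is a made-up name):

> -- parse one Char out of a UTF-8 encoded ByteString producer,
> -- returning the Char (if any) and the remaining byte stream
> firstChar :: Monad m => Producer ByteString m r -> m (Maybe Char, Producer ByteString m r)
> firstChar = runStateT (zoom utf8 drawChar)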
298{- $special 291{- $special
299 These simple 'lines' examples reveal a more important difference from @Data.Text.Lazy@ . 292 These simple 'lines' examples reveal a more important difference from @Data.Text.Lazy@ .
300 This is in the types that are most closely associated with our central text type, 293 This is in the types that are most closely associated with our central text type,
301 @Producer Text m r@. In @Data.Text@ and @Data.Text.Lazy@ we find functions like 294 @Producer Text m r@. In @Data.Text@ and @Data.Text.Lazy@ we find functions like
302 295
303> splitAt :: Int -> Text -> (Text, Text) 296> splitAt :: Int -> Text -> (Text, Text)
304> lines :: Text -> [Text] 297> lines :: Text -> [Text]
305> chunksOf :: Int -> Text -> [Text] 298> chunksOf :: Int -> Text -> [Text]
306 299
307 which relate a Text with a pair of Texts or a list of Texts. 300 which relate a Text with a pair of Texts or a list of Texts.
308 The corresponding functions here (taking account of \'lensification\') are 301 The corresponding functions here (taking account of \'lensification\') are
309 302
310> view . splitAt :: (Monad m, Integral n) => n -> Producer Text m r -> Producer Text m (Producer Text m r) 303> view . splitAt :: (Monad m, Integral n) => n -> Producer Text m r -> Producer Text m (Producer Text m r)
311> view lines :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r 304> view lines :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r
@@ -325,12 +318,12 @@ import Prelude hiding (
325 318
326 which brings one closer to the types of the similar functions in @Data.Text.Lazy@ 319 which brings one closer to the types of the similar functions in @Data.Text.Lazy@
327 320
328 In the type @Producer Text m (Producer Text m r)@ the second 321 In the type @Producer Text m (Producer Text m r)@ the second
329 element of the \'pair\' of effectful Texts cannot simply be retrieved 322 element of the \'pair\' of effectful Texts cannot simply be retrieved
330 with something like 'snd'. This is an \'effectful\' pair, and one must work 323 with something like 'snd'. This is an \'effectful\' pair, and one must work
331 through the effects of the first element to arrive at the second Text stream, even 324 through the effects of the first element to arrive at the second Text stream, even
332 if you are proposing to throw the Text in the first element away. 325 if you are proposing to throw the Text in the first element away.
333 Note that we use Control.Monad.join to fuse the pair back together, since it specializes to 326 Note that we use Control.Monad.join to fuse the pair back together, since it specializes to
334 327
335> join :: Monad m => Producer Text m (Producer Text m r) -> Producer Text m r 328> join :: Monad m => Producer Text m (Producer Text m r) -> Producer Text m r
336 329
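   A tiny illustration of that specialization (a sketch only; @rejoined@ is a made-up
   name): splitting a stream and immediately 'join'ing the two halves back together
   yields a stream equivalent to the original.

> rejoined :: Monad m => Producer Text m r -> Producer Text m r
> rejoined p = join (p ^. Text.splitAt 17)   -- the first seventeen characters, then the rest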
@@ -346,12 +339,12 @@ import Prelude hiding (
346> (Text, (Text, (Text, (Text, r)))) 339> (Text, (Text, (Text, (Text, r))))
347> ... 340> ...
348 341
349 (We might also have identified the sum of those types with @Free ((,) Text) r@ 342 (We might also have identified the sum of those types with @Free ((,) Text) r@
350 -- or, more absurdly, @FreeT ((,) Text) Identity r@.) 343 -- or, more absurdly, @FreeT ((,) Text) Identity r@.)
351 344
352 Similarly, our type @Texts m r@, or @FreeT (Text m) m r@ -- in fact called 345 Similarly, our type @Texts m r@, or @FreeT (Text m) m r@ -- in fact called
353 @FreeT (Producer Text m) m r@ here -- encompasses all the members of the sequence: 346 @FreeT (Producer Text m) m r@ here -- encompasses all the members of the sequence:
354 347
355> m r 348> m r
356> Text m r 349> Text m r
357> Text m (Text m r) 350> Text m (Text m r)
@@ -361,43 +354,43 @@ import Prelude hiding (
361 354
362 We might have used a more specialized type in place of @FreeT (Producer a m) m r@, 355 We might have used a more specialized type in place of @FreeT (Producer a m) m r@,
363 or indeed of @FreeT (Producer Text m) m r@, but it is clear that the correct 356 or indeed of @FreeT (Producer Text m) m r@, but it is clear that the correct
364 result type of 'lines' will be isomorphic to @FreeT (Producer Text m) m r@ . 357 result type of 'lines' will be isomorphic to @FreeT (Producer Text m) m r@ .
365 358
366 One might think that 359 One might think that
367 360
368> lines :: Monad m => Lens' (Producer Text m r) (FreeT (Producer Text m) m r) 361> lines :: Monad m => Lens' (Producer Text m r) (FreeT (Producer Text m) m r)
369> view . lines :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r 362> view . lines :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r
370 363
371 should really have the type 364 should really have the type
372 365
373> lines :: Monad m => Pipe Text Text m r 366> lines :: Monad m => Pipe Text Text m r
374 367
375 as e.g. 'toUpper' does. But this would spoil the control we are 368 as e.g. 'toUpper' does. But this would spoil the control we are
376 attempting to maintain over the size of chunks. It is in fact just 369 attempting to maintain over the size of chunks. It is in fact just
377 as unreasonable to want such a pipe as to want 370 as unreasonable to want such a pipe as to want
378 371
379> Data.Text.Lazy.lines :: Text -> Text 372> Data.Text.Lazy.lines :: Text -> Text
380 373
381 to 'rechunk' the strict Text chunks inside the lazy Text to respect 374 to 'rechunk' the strict Text chunks inside the lazy Text to respect
382 line boundaries. In fact we have 375 line boundaries. In fact we have
383 376
384> Data.Text.Lazy.lines :: Text -> [Text] 377> Data.Text.Lazy.lines :: Text -> [Text]
385> Prelude.lines :: String -> [String] 378> Prelude.lines :: String -> [String]
386 379
387 where the elements of the list are themselves lazy Texts or Strings; the use 380 where the elements of the list are themselves lazy Texts or Strings; the use
388 of @FreeT (Producer Text m) m r@ is simply the 'effectful' version of this. 381 of @FreeT (Producer Text m) m r@ is simply the 'effectful' version of this.
389 382
390 The @Pipes.Group@ module, which can generally be imported without qualification, 383 The @Pipes.Group@ module, which can generally be imported without qualification,
391 provides many functions for working with things of type @FreeT (Producer a m) m r@. 384 provides many functions for working with things of type @FreeT (Producer a m) m r@.
392 In particular it conveniently exports the constructors for @FreeT@ and the associated 385 In particular it conveniently exports the constructors for @FreeT@ and the associated
393 @FreeF@ type -- a fancy form of @Either@, namely 386 @FreeF@ type -- a fancy form of @Either@, namely
394 387
395> data FreeF f a b = Pure a | Free (f b) 388> data FreeF f a b = Pure a | Free (f b)
396 389
397 for pattern-matching. Consider the implementation of the 'words' function, or 390 for pattern-matching. Consider the implementation of the 'words' function, or
398 of the part of the lens that takes us to the words; it is compact but exhibits many 391 of the part of the lens that takes us to the words; it is compact but exhibits many
399 of the points under discussion, including explicit handling of the @FreeT@ and @FreeF@ 392 of the points under discussion, including explicit handling of the @FreeT@ and @FreeF@
400 constructors. Keep in mind that 393 constructors. Keep in mind that
401 394
402> newtype FreeT f m a = FreeT (m (FreeF f a (FreeT f m a))) 395> newtype FreeT f m a = FreeT (m (FreeF f a (FreeT f m a)))
403> next :: Monad m => Producer a m r -> m (Either r (a, Producer a m r)) 396> next :: Monad m => Producer a m r -> m (Either r (a, Producer a m r))
@@ -414,12 +407,12 @@ import Prelude hiding (
414> p'' <- view (break isSpace) -- When we apply 'break isSpace', we get a Producer that returns a Producer; 407> p'' <- view (break isSpace) -- When we apply 'break isSpace', we get a Producer that returns a Producer;
415> (yield txt >> p') -- so here we yield everything up to the next space, and get the rest back. 408> (yield txt >> p') -- so here we yield everything up to the next space, and get the rest back.
416> return (words p'') -- We then carry on with the rest, which is likely to begin with space. 409> return (words p'') -- We then carry on with the rest, which is likely to begin with space.
417 410
418-} 411-}
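A further, hedged illustration of pattern matching on @FreeT@ and @FreeF@ (not part of the module; @countGroups@ is a made-up helper that drains each segment and tallies them):

> countGroups :: Monad m => FreeT (Producer Text m) m r -> m Int
> countGroups = go 0
>   where
>     go n f = do
>       x <- runFreeT f
>       case x of
>         Pure _ -> return n
>         Free p -> do f' <- runEffect (for p discard)  -- drain this segment's Text
>                      go (n + 1) f'

So, for example, @countGroups (view Text.lines producer)@ would count the lines of a text stream without retaining them.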
419 412
420-- | Convert a lazy 'TL.Text' into a 'Producer' of strict 'Text's 413-- | Convert a lazy 'TL.Text' into a 'Producer' of strict 'Text's
421fromLazy :: (Monad m) => TL.Text -> Producer' Text m () 414fromLazy :: (Monad m) => TL.Text -> Producer' Text m ()
422fromLazy = TL.foldrChunks (\e a -> yield e >> a) (return ()) 415fromLazy = TL.foldrChunks (\e a -> yield e >> a) (return ())
423{-# INLINE fromLazy #-} 416{-# INLINE fromLazy #-}
424 417
425(^.) :: a -> ((b -> Constant b b) -> (a -> Constant b a)) -> b 418(^.) :: a -> ((b -> Constant b b) -> (a -> Constant b a)) -> b
@@ -436,44 +429,7 @@ concatMap
436concatMap f = P.map (T.concatMap f) 429concatMap f = P.map (T.concatMap f)
437{-# INLINABLE concatMap #-} 430{-# INLINABLE concatMap #-}
438 431
439-- | Transform a Pipe of 'String's into one of 'Text' chunks 432-- | @(take n)@ only allows @n@ individual characters to pass;
440pack :: Monad m => Pipe String Text m r
441pack = P.map T.pack
442{-# INLINEABLE pack #-}
443
444-- | Transform a Pipes of 'Text' chunks into one of 'String's
445unpack :: Monad m => Pipe Text String m r
446unpack = for cat (\t -> yield (T.unpack t))
447{-# INLINEABLE unpack #-}
448
449-- | @toCaseFold@, @toLower@, @toUpper@ and @stripStart@ are standard 'Text' utilities,
450-- here acting as 'Text' pipes, rather as they would on a lazy text
451toCaseFold :: Monad m => Pipe Text Text m r
452toCaseFold = P.map T.toCaseFold
453{-# INLINEABLE toCaseFold #-}
454
455-- | lowercase incoming 'Text'
456toLower :: Monad m => Pipe Text Text m r
457toLower = P.map T.toLower
458{-# INLINEABLE toLower #-}
459
460-- | uppercase incoming 'Text'
461toUpper :: Monad m => Pipe Text Text m r
462toUpper = P.map T.toUpper
463{-# INLINEABLE toUpper #-}
464
465-- | Remove leading white space from an incoming succession of 'Text's
466stripStart :: Monad m => Pipe Text Text m r
467stripStart = do
468 chunk <- await
469 let text = T.stripStart chunk
470 if T.null text
471 then stripStart
472 else do yield text
473 cat
474{-# INLINEABLE stripStart #-}
475
476-- | @(take n)@ only allows @n@ individual characters to pass;
477-- contrast @Pipes.Prelude.take@ which would let @n@ chunks pass. 433-- contrast @Pipes.Prelude.take@ which would let @n@ chunks pass.
478take :: (Monad m, Integral a) => a -> Pipe Text Text m () 434take :: (Monad m, Integral a) => a -> Pipe Text Text m ()
479take n0 = go n0 where 435take n0 = go n0 where
@@ -489,21 +445,6 @@ take n0 = go n0 where
489 go (n - len) 445 go (n - len)
490{-# INLINABLE take #-} 446{-# INLINABLE take #-}
491 447
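A quick usage sketch for 'take', reusing the qualified imports from the $intro example (@firstKilobyte@ is a made-up name):

> firstKilobyte :: Monad m => Producer Text m () -> Producer Text m ()
> firstKilobyte p = p >-> Text.take (1024 :: Int)   -- at most 1024 characters, not 1024 chunks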
492-- | @(drop n)@ drops the first @n@ characters
493drop :: (Monad m, Integral a) => a -> Pipe Text Text m r
494drop n0 = go n0 where
495 go n
496 | n <= 0 = cat
497 | otherwise = do
498 txt <- await
499 let len = fromIntegral (T.length txt)
500 if (len >= n)
501 then do
502 yield (T.drop (fromIntegral n) txt)
503 cat
504 else go (n - len)
505{-# INLINABLE drop #-}
506
507-- | Take characters until they fail the predicate 448-- | Take characters until they fail the predicate
508takeWhile :: (Monad m) => (Char -> Bool) -> Pipe Text Text m () 449takeWhile :: (Monad m) => (Char -> Bool) -> Pipe Text Text m ()
509takeWhile predicate = go 450takeWhile predicate = go
@@ -518,18 +459,6 @@ takeWhile predicate = go
518 else yield prefix 459 else yield prefix
519{-# INLINABLE takeWhile #-} 460{-# INLINABLE takeWhile #-}
520 461
521-- | Drop characters until they fail the predicate
522dropWhile :: (Monad m) => (Char -> Bool) -> Pipe Text Text m r
523dropWhile predicate = go where
524 go = do
525 txt <- await
526 case T.findIndex (not . predicate) txt of
527 Nothing -> go
528 Just i -> do
529 yield (T.drop i txt)
530 cat
531{-# INLINABLE dropWhile #-}
532
533-- | Only allows 'Char's to pass if they satisfy the predicate 462-- | Only allows 'Char's to pass if they satisfy the predicate
534filter :: (Monad m) => (Char -> Bool) -> Pipe Text Text m r 463filter :: (Monad m) => (Char -> Bool) -> Pipe Text Text m r
535filter predicate = P.map (T.filter predicate) 464filter predicate = P.map (T.filter predicate)
@@ -551,6 +480,33 @@ scan step begin = do
551 go c' 480 go c'
552{-# INLINABLE scan #-} 481{-# INLINABLE scan #-}
553 482
483-- | @toCaseFold@, @toLower@, @toUpper@ and @stripStart@ are standard 'Text' utilities,
484-- here acting as 'Text' pipes, rather as they would on a lazy text
485toCaseFold :: Monad m => Pipe Text Text m r
486toCaseFold = P.map T.toCaseFold
487{-# INLINEABLE toCaseFold #-}
488
489-- | lowercase incoming 'Text'
490toLower :: Monad m => Pipe Text Text m r
491toLower = P.map T.toLower
492{-# INLINEABLE toLower #-}
493
494-- | uppercase incoming 'Text'
495toUpper :: Monad m => Pipe Text Text m r
496toUpper = P.map T.toUpper
497{-# INLINEABLE toUpper #-}
498
499-- | Remove leading white space from an incoming succession of 'Text's
500stripStart :: Monad m => Pipe Text Text m r
501stripStart = do
502 chunk <- await
503 let text = T.stripStart chunk
504 if T.null text
505 then stripStart
506 else do yield text
507 cat
508{-# INLINEABLE stripStart #-}
509
554{-| Fold a pure 'Producer' of strict 'Text's into a lazy 510{-| Fold a pure 'Producer' of strict 'Text's into a lazy
555 'TL.Text' 511 'TL.Text'
556-} 512-}
@@ -576,6 +532,7 @@ foldChars
576foldChars step begin done = P.fold (T.foldl' step) begin done 532foldChars step begin done = P.fold (T.foldl' step) begin done
577{-# INLINABLE foldChars #-} 533{-# INLINABLE foldChars #-}
578 534
535
579-- | Retrieve the first 'Char' 536-- | Retrieve the first 'Char'
580head :: (Monad m) => Producer Text m () -> m (Maybe Char) 537head :: (Monad m) => Producer Text m () -> m (Maybe Char)
581head = go 538head = go
@@ -656,18 +613,13 @@ find predicate p = head (p >-> filter predicate)
656index 613index
657 :: (Monad m, Integral a) 614 :: (Monad m, Integral a)
658 => a-> Producer Text m () -> m (Maybe Char) 615 => a-> Producer Text m () -> m (Maybe Char)
659index n p = head (p >-> drop n) 616index n p = head (drop n p)
660{-# INLINABLE index #-} 617{-# INLINABLE index #-}
661 618
662 619
663-- | Store a tally of how many segments match the given 'Text'
664count :: (Monad m, Num n) => Text -> Producer Text m () -> m n
665count c p = P.fold (+) 0 id (p >-> P.map (fromIntegral . T.count c))
666{-# INLINABLE count #-}
667
668 620
669-- | Consume the first character from a stream of 'Text' 621-- | Consume the first character from a stream of 'Text'
670-- 622--
671-- 'next' either fails with a 'Left' if the 'Producer' has no more characters or 623-- 'next' either fails with a 'Left' if the 'Producer' has no more characters or
672-- succeeds with a 'Right' providing the next character and the remainder of the 624-- succeeds with a 'Right' providing the next character and the remainder of the
673-- 'Producer'. 625-- 'Producer'.
@@ -743,7 +695,6 @@ isEndOfChars = do
743 Just _-> False ) 695 Just _-> False )
744{-# INLINABLE isEndOfChars #-} 696{-# INLINABLE isEndOfChars #-}
745 697
746
747-- | Splits a 'Producer' after the given number of characters 698-- | Splits a 'Producer' after the given number of characters
748splitAt 699splitAt
749 :: (Monad m, Integral n) 700 :: (Monad m, Integral n)
@@ -822,11 +773,11 @@ groupBy equals k p0 = fmap join (k ((go p0))) where
822 Left r -> return (return r) 773 Left r -> return (return r)
823 Right (txt, p') -> case T.uncons txt of 774 Right (txt, p') -> case T.uncons txt of
824 Nothing -> go p' 775 Nothing -> go p'
825 Just (c, _) -> (yield txt >> p') ^. span (equals c) 776 Just (c, _) -> (yield txt >> p') ^. span (equals c)
826{-# INLINABLE groupBy #-} 777{-# INLINABLE groupBy #-}
827 778
828-- | Improper lens that splits after the first succession of identical 'Char' s 779-- | Improper lens that splits after the first succession of identical 'Char' s
829group :: Monad m 780group :: Monad m
830 => Lens' (Producer Text m r) 781 => Lens' (Producer Text m r)
831 (Producer Text m (Producer Text m r)) 782 (Producer Text m (Producer Text m r))
832group = groupBy (==) 783group = groupBy (==)
@@ -834,9 +785,9 @@ group = groupBy (==)
834 785
835{-| Improper lens that splits a 'Producer' after the first word 786{-| Improper lens that splits a 'Producer' after the first word
836 787
837 Unlike 'words', this does not drop leading whitespace 788 Unlike 'words', this does not drop leading whitespace
838-} 789-}
839word :: (Monad m) 790word :: (Monad m)
840 => Lens' (Producer Text m r) 791 => Lens' (Producer Text m r)
841 (Producer Text m (Producer Text m r)) 792 (Producer Text m (Producer Text m r))
842word k p0 = fmap join (k (to p0)) 793word k p0 = fmap join (k (to p0))
@@ -846,14 +797,27 @@ word k p0 = fmap join (k (to p0))
846 p'^.break isSpace 797 p'^.break isSpace
847{-# INLINABLE word #-} 798{-# INLINABLE word #-}
848 799
849 800line :: (Monad m)
850line :: (Monad m)
851 => Lens' (Producer Text m r) 801 => Lens' (Producer Text m r)
852 (Producer Text m (Producer Text m r)) 802 (Producer Text m (Producer Text m r))
853line = break (== '\n') 803line = break (== '\n')
854
855{-# INLINABLE line #-} 804{-# INLINABLE line #-}
856 805
806-- | @(drop n)@ drops the first @n@ characters
807drop :: (Monad m, Integral n)
808 => n -> Producer Text m r -> Producer Text m r
809drop n p = do
810 p' <- lift $ runEffect (for (p ^. splitAt n) discard)
811 p'
812{-# INLINABLE drop #-}
813
814-- | Drop characters until they fail the predicate
815dropWhile :: (Monad m)
816 => (Char -> Bool) -> Producer Text m r -> Producer Text m r
817dropWhile predicate p = do
818 p' <- lift $ runEffect (for (p ^. span predicate) discard)
819 p'
820{-# INLINABLE dropWhile #-}
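Note that 'drop' and 'dropWhile' now act directly on a producer rather than being composed with @>->@ as pipes. A hedged sketch, reusing the qualified @Pipes.Text@ and @Pipes.Text.IO@ imports from the $intro example (@skipLeadingSpace@ is a made-up name):

> import Data.Char (isSpace)
>
> -- skip any leading whitespace on stdin, then echo the rest
> skipLeadingSpace :: IO ()
> skipLeadingSpace = runEffect $ Text.dropWhile isSpace Text.stdin >-> Text.stdout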
857 821
858-- | Intersperse a 'Char' in between the characters of stream of 'Text' 822-- | Intersperse a 'Char' in between the characters of stream of 'Text'
859intersperse 823intersperse
@@ -878,28 +842,36 @@ intersperse c = go0
878{-# INLINABLE intersperse #-} 842{-# INLINABLE intersperse #-}
879 843
880 844
881-- | Improper isomorphism between a 'Producer' of 'ByteString's and 'Word8's 845-- | Improper lens from unpacked 'Char's to packed 'Text's
882packChars :: Monad m => Iso'_ (Producer Char m x) (Producer Text m x) 846pack :: Monad m => Lens' (Producer Char m r) (Producer Text m r)
883packChars = Data.Profunctor.dimap to (fmap from) 847pack k p = fmap _unpack (k (_pack p))
884 where 848{-# INLINABLE pack #-}
885 -- to :: Monad m => Producer Char m x -> Producer Text m x 849
886 to p = PG.folds step id done (p^.PG.chunksOf defaultChunkSize) 850-- | Improper lens from packed 'Text's to unpacked 'Char's
851unpack :: Monad m => Lens' (Producer Text m r) (Producer Char m r)
852unpack k p = fmap _pack (k (_unpack p))
853{-# INLINABLE unpack #-}
887 854
888 step diffAs c = diffAs . (c:) 855_pack :: Monad m => Producer Char m r -> Producer Text m r
856_pack p = folds step id done (p^.PG.chunksOf defaultChunkSize)
857 where
858 step diffAs w8 = diffAs . (w8:)
889 859
890 done diffAs = T.pack (diffAs []) 860 done diffAs = T.pack (diffAs [])
861{-# INLINABLE _pack #-}
891 862
892 -- from :: Monad m => Producer Text m x -> Producer Char m x 863_unpack :: Monad m => Producer Text m r -> Producer Char m r
893 from p = for p (each . T.unpack) 864_unpack p = for p (each . T.unpack)
894{-# INLINABLE packChars #-} 865{-# INLINABLE _unpack #-}
895 866
896defaultChunkSize :: Int 867defaultChunkSize :: Int
897defaultChunkSize = 16384 - (sizeOf (undefined :: Int) `shiftL` 1) 868defaultChunkSize = 16384 - (sizeOf (undefined :: Int) `shiftL` 1)
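These behave like the other improper lenses: @view@ sends a stream one way, and @over@ and @zoom@ work as usual. A small sketch, assuming @view@ from lens-family (@chars@ and @texts@ are made-up names):

> chars :: Monad m => Producer Text m r -> Producer Char m r
> chars = view Text.unpack
>
> texts :: Monad m => Producer Char m r -> Producer Text m r
> texts = view Text.pack   -- re-chunks the characters into Text of defaultChunkSize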
898 869
870
899-- | Split a text stream into 'FreeT'-delimited text streams of fixed size 871-- | Split a text stream into 'FreeT'-delimited text streams of fixed size
900chunksOf 872chunksOf
901 :: (Monad m, Integral n) 873 :: (Monad m, Integral n)
902 => n -> Lens' (Producer Text m r) 874 => n -> Lens' (Producer Text m r)
903 (FreeT (Producer Text m) m r) 875 (FreeT (Producer Text m) m r)
904chunksOf n k p0 = fmap concats (k (FreeT (go p0))) 876chunksOf n k p0 = fmap concats (k (FreeT (go p0)))
905 where 877 where
@@ -908,7 +880,7 @@ chunksOf n k p0 = fmap concats (k (FreeT (go p0)))
908 return $ case x of 880 return $ case x of
909 Left r -> Pure r 881 Left r -> Pure r
910 Right (txt, p') -> Free $ do 882 Right (txt, p') -> Free $ do
911 p'' <- (yield txt >> p') ^. splitAt n 883 p'' <- (yield txt >> p') ^. splitAt n
912 return $ FreeT (go p'') 884 return $ FreeT (go p'')
913{-# INLINABLE chunksOf #-} 885{-# INLINABLE chunksOf #-}
914 886
@@ -919,8 +891,7 @@ chunksOf n k p0 = fmap concats (k (FreeT (go p0)))
919splitsWith 891splitsWith
920 :: (Monad m) 892 :: (Monad m)
921 => (Char -> Bool) 893 => (Char -> Bool)
922 -> Producer Text m r 894 -> Producer Text m r -> FreeT (Producer Text m) m r
923 -> FreeT (Producer Text m) m r
924splitsWith predicate p0 = FreeT (go0 p0) 895splitsWith predicate p0 = FreeT (go0 p0)
925 where 896 where
926 go0 p = do 897 go0 p = do
@@ -938,7 +909,7 @@ splitsWith predicate p0 = FreeT (go0 p0)
938 return $ case x of 909 return $ case x of
939 Left r -> Pure r 910 Left r -> Pure r
940 Right (_, p') -> Free $ do 911 Right (_, p') -> Free $ do
941 p'' <- p' ^. span (not . predicate) 912 p'' <- p' ^. span (not . predicate)
942 return $ FreeT (go1 p'') 913 return $ FreeT (go1 p'')
943{-# INLINABLE splitsWith #-} 914{-# INLINABLE splitsWith #-}
944 915
@@ -948,7 +919,7 @@ splits :: (Monad m)
948 -> Lens' (Producer Text m r) 919 -> Lens' (Producer Text m r)
949 (FreeT (Producer Text m) m r) 920 (FreeT (Producer Text m) m r)
950splits c k p = 921splits c k p =
951 fmap (PG.intercalates (yield (T.singleton c))) (k (splitsWith (c ==) p)) 922 fmap (intercalates (yield (T.singleton c))) (k (splitsWith (c ==) p))
952{-# INLINABLE splits #-} 923{-# INLINABLE splits #-}
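A hedged sketch combining 'splitsWith' with 'intercalate' (defined further below) to re-delimit a stream, assuming @Data.Text@ is imported qualified as @T@ as in the examples above (@retab@ is a made-up name):

> retab :: Monad m => Producer Text m r -> Producer Text m r
> retab p = intercalate (yield (T.singleton '\t')) (splitsWith (== ',') p)   -- commas out, tabs in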
953 924
954{-| Isomorphism between a stream of 'Text' and groups of equivalent 'Char's , using the 925{-| Isomorphism between a stream of 'Text' and groups of equivalent 'Char's , using the
@@ -958,7 +929,7 @@ groupsBy
958 :: Monad m 929 :: Monad m
959 => (Char -> Char -> Bool) 930 => (Char -> Char -> Bool)
960 -> Lens' (Producer Text m x) (FreeT (Producer Text m) m x) 931 -> Lens' (Producer Text m x) (FreeT (Producer Text m) m x)
961groupsBy equals k p0 = fmap concats (k (FreeT (go p0))) where 932groupsBy equals k p0 = fmap concats (k (FreeT (go p0))) where
962 go p = do x <- next p 933 go p = do x <- next p
963 case x of Left r -> return (Pure r) 934 case x of Left r -> return (Pure r)
964 Right (bs, p') -> case T.uncons bs of 935 Right (bs, p') -> case T.uncons bs of
@@ -981,10 +952,19 @@ groups = groupsBy (==)
981{-| Split a text stream into 'FreeT'-delimited lines 952{-| Split a text stream into 'FreeT'-delimited lines
982-} 953-}
983lines 954lines
984 :: (Monad m) => Iso'_ (Producer Text m r) (FreeT (Producer Text m) m r) 955 :: (Monad m) => Lens' (Producer Text m r) (FreeT (Producer Text m) m r)
985lines = Data.Profunctor.dimap _lines (fmap _unlines) 956lines k p = fmap _unlines (k (_lines p))
986 where 957{-# INLINABLE lines #-}
987 _lines p0 = FreeT (go0 p0) 958
959unlines
960 :: Monad m
961 => Lens' (FreeT (Producer Text m) m r) (Producer Text m r)
962unlines k p = fmap _lines (k (_unlines p))
963{-# INLINABLE unlines #-}
964
965_lines :: Monad m
966 => Producer Text m r -> FreeT (Producer Text m) m r
967_lines p0 = FreeT (go0 p0)
988 where 968 where
989 go0 p = do 969 go0 p = do
990 x <- next p 970 x <- next p
@@ -1001,29 +981,40 @@ lines = Data.Profunctor.dimap _lines (fmap _unlines)
1001 case x of 981 case x of
1002 Left r -> return $ Pure r 982 Left r -> return $ Pure r
1003 Right (_, p'') -> go0 p'' 983 Right (_, p'') -> go0 p''
1004 -- _unlines 984{-# INLINABLE _lines #-}
1005 -- :: Monad m
1006 -- => FreeT (Producer Text m) m x -> Producer Text m x
1007 _unlines = concats . PG.maps (<* yield (T.singleton '\n'))
1008 985
1009{-# INLINABLE lines #-} 986_unlines :: Monad m
987 => FreeT (Producer Text m) m r -> Producer Text m r
988_unlines = concats . maps (<* yield (T.singleton '\n'))
989{-# INLINABLE _unlines #-}
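With 'unlines' now a lens opposing 'lines', @view@ goes in either direction. A brief sketch, assuming @view@ from lens-family (@toLines@ and @fromLines@ are made-up names):

> toLines :: Monad m => Producer Text m r -> FreeT (Producer Text m) m r
> toLines = view Text.lines
>
> fromLines :: Monad m => FreeT (Producer Text m) m r -> Producer Text m r
> fromLines = view Text.unlines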
1010 990
1011 991-- | Split a text stream into 'FreeT'-delimited words. Note that
1012-- | Split a text stream into 'FreeT'-delimited words 992-- roundtripping with e.g. @over words id@ eliminates extra space
993-- characters as with @Prelude.unwords . Prelude.words@
1013words 994words
1014 :: (Monad m) => Iso'_ (Producer Text m r) (FreeT (Producer Text m) m r) 995 :: (Monad m) => Lens' (Producer Text m r) (FreeT (Producer Text m) m r)
1015words = Data.Profunctor.dimap go (fmap _unwords) 996words k p = fmap _unwords (k (_words p))
1016 where 997{-# INLINABLE words #-}
1017 go p = FreeT $ do 998
1018 x <- next (p >-> dropWhile isSpace) 999unwords
1000 :: Monad m
1001 => Lens' (FreeT (Producer Text m) m r) (Producer Text m r)
1002unwords k p = fmap _words (k (_unwords p))
1003{-# INLINABLE unwords #-}
1004
1005_words :: (Monad m) => Producer Text m r -> FreeT (Producer Text m) m r
1006_words p = FreeT $ do
1007 x <- next (dropWhile isSpace p)
1019 return $ case x of 1008 return $ case x of
1020 Left r -> Pure r 1009 Left r -> Pure r
1021 Right (bs, p') -> Free $ do 1010 Right (bs, p') -> Free $ do
1022 p'' <- (yield bs >> p') ^. break isSpace 1011 p'' <- (yield bs >> p') ^. break isSpace
1023 return (go p'') 1012 return (_words p'')
1024 _unwords = PG.intercalates (yield $ T.singleton ' ') 1013{-# INLINABLE _words #-}
1025 1014
1026{-# INLINABLE words #-} 1015_unwords :: (Monad m) => FreeT (Producer Text m) m r -> Producer Text m r
1016_unwords = intercalates (yield $ T.singleton ' ')
1017{-# INLINABLE _unwords #-}
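The whitespace-normalizing round trip mentioned in the 'words' comment above can be written directly with @over@ from lens-family (@normalizeSpace@ is a made-up name):

> normalizeSpace :: Monad m => Producer Text m r -> Producer Text m r
> normalizeSpace = over Text.words id   -- splits on whitespace and re-joins with single spaces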
1027 1018
1028 1019
1029{-| 'intercalate' concatenates the 'FreeT'-delimited text streams after 1020{-| 'intercalate' concatenates the 'FreeT'-delimited text streams after
@@ -1031,9 +1022,7 @@ words = Data.Profunctor.dimap go (fmap _unwords)
1031-} 1022-}
1032intercalate 1023intercalate
1033 :: (Monad m) 1024 :: (Monad m)
1034 => Producer Text m () 1025 => Producer Text m () -> FreeT (Producer Text m) m r -> Producer Text m r
1035 -> FreeT (Producer Text m) m r
1036 -> Producer Text m r
1037intercalate p0 = go0 1026intercalate p0 = go0
1038 where 1027 where
1039 go0 f = do 1028 go0 f = do
@@ -1053,35 +1042,13 @@ intercalate p0 = go0
1053 go1 f' 1042 go1 f'
1054{-# INLINABLE intercalate #-} 1043{-# INLINABLE intercalate #-}
1055 1044
1056{-| Join 'FreeT'-delimited lines into a text stream
1057-}
1058unlines
1059 :: (Monad m) => FreeT (Producer Text m) m r -> Producer Text m r
1060unlines = go
1061 where
1062 go f = do
1063 x <- lift (runFreeT f)
1064 case x of
1065 Pure r -> return r
1066 Free p -> do
1067 f' <- p
1068 yield $ T.singleton '\n'
1069 go f'
1070{-# INLINABLE unlines #-}
1071
1072{-| Join 'FreeT'-delimited words into a text stream
1073-}
1074unwords
1075 :: (Monad m) => FreeT (Producer Text m) m r -> Producer Text m r
1076unwords = intercalate (yield $ T.singleton ' ')
1077{-# INLINABLE unwords #-}
1078 1045
1079 1046
1080{- $reexports 1047{- $reexports
1081 1048
1082 @Data.Text@ re-exports the 'Text' type. 1049 @Data.Text@ re-exports the 'Text' type.
1083 1050
1084 @Pipes.Parse@ re-exports 'input', 'concat', 'FreeT' (the type) and the 'Parse' synonym. 1051 @Pipes.Parse@ re-exports 'input', 'concat', 'FreeT' (the type) and the 'Parse' synonym.
1085-} 1052-}
1086 1053
1087 1054
diff --git a/Pipes/Text/Encoding.hs b/Pipes/Text/Encoding.hs
index 991000f..e00cd43 100644
--- a/Pipes/Text/Encoding.hs
+++ b/Pipes/Text/Encoding.hs
@@ -41,13 +41,10 @@ module Pipes.Text.Encoding
41 , decodeAscii 41 , decodeAscii
42 , encodeIso8859_1 42 , encodeIso8859_1
43 , decodeIso8859_1 43 , decodeIso8859_1
44 , Lens'_
45 , Iso'_
46 ) 44 )
47 where 45 where
48 46
49import Data.Functor.Constant (Constant(..)) 47import Data.Functor.Constant (Constant(..))
50import Data.Profunctor (Profunctor)
51import Data.Char (ord) 48import Data.Char (ord)
52import Data.ByteString as B 49import Data.ByteString as B
53import Data.ByteString (ByteString) 50import Data.ByteString (ByteString)
@@ -61,16 +58,15 @@ import Control.Monad (join)
61import Data.Word (Word8) 58import Data.Word (Word8)
62import Pipes 59import Pipes
63 60
64type Lens'_ a b = forall f . Functor f => (b -> f b) -> (a -> f a) 61type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
65type Iso'_ a b = forall f p . (Functor f, Profunctor p) => p b (f b) -> p a (f a)
66 62
67{- $lenses 63{- $lenses
68 The 'Codec' type is a simple specialization of 64 The 'Codec' type is a simple specialization of
69 the @Lens'_@ type synonym used by the standard lens libraries, 65 the @Lens'@ type synonym used by the standard lens libraries,
70 <http://hackage.haskell.org/package/lens lens> and 66 <http://hackage.haskell.org/package/lens lens> and
71 <http://hackage.haskell.org/package/lens-family lens-family>. That type, 67 <http://hackage.haskell.org/package/lens-family lens-family>. That type,
72 68
73> type Lens'_ a b = forall f . Functor f => (b -> f b) -> (a -> f a) 69> type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
74 70
75 is just an alias for a Prelude type. Thus you use any particular codec with 71 is just an alias for a Prelude type. Thus you use any particular codec with
76 the @view@ / @(^.)@, @zoom@ and @over@ functions from either of those libraries; 72 the @view@ / @(^.)@, @zoom@ and @over@ functions from either of those libraries;
@@ -81,7 +77,7 @@ type Iso'_ a b = forall f p . (Functor f, Profunctor p) => p b (f b) -> p a (f a
81type Codec 77type Codec
82 = forall m r 78 = forall m r
83 . Monad m 79 . Monad m
84 => Lens'_ (Producer ByteString m r) 80 => Lens' (Producer ByteString m r)
85 (Producer Text m (Producer ByteString m r)) 81 (Producer Text m (Producer ByteString m r))
86 82
87{- | 'decode' is just the ordinary @view@ or @(^.)@ of the lens libraries; 83{- | 'decode' is just the ordinary @view@ or @(^.)@ of the lens libraries;
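For example, a sketch assuming the @utf8@ codec exported by this module and @view@ from lens-family (@decoded@ is a made-up name):

> decoded :: Monad m => Producer ByteString m r -> Producer Text m (Producer ByteString m r)
> decoded = view utf8   -- decoded Text, returning the undecoded remainder of the byte stream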
diff --git a/changelog b/changelog
index d768a80..ed4b3d9 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,9 @@
1# Version 0.0.0.12
2
3* Opposing lenses for `lines` and `unlines` and `words` and `unwords`.
4 Brought closer in line with `pipes-bytestring` again. Removed `count`, which
5 was wrong. Scrapped `Iso` and the `profunctors` dependency.
6
1# Version 0.0.0.11 7# Version 0.0.0.11
2 8
3* Updated to use streaming-commons in place of text-stream-decoding. 9* Updated to use streaming-commons in place of text-stream-decoding.
diff --git a/pipes-text.cabal b/pipes-text.cabal
index 0b5db95..041f2c7 100644
--- a/pipes-text.cabal
+++ b/pipes-text.cabal
@@ -38,7 +38,6 @@ library
38 bytestring >= 0.9.2.1 && < 0.11, 38 bytestring >= 0.9.2.1 && < 0.11,
39 text >= 0.11.2 && < 1.2 , 39 text >= 0.11.2 && < 1.2 ,
40 streaming-commons >= 0.1 && < 0.2 , 40 streaming-commons >= 0.1 && < 0.2 ,
41 profunctors >= 3.1.1 && < 4.1 ,
42 pipes >= 4.0 && < 4.2 , 41 pipes >= 4.0 && < 4.2 ,
43 pipes-group >= 1.0.0 && < 1.1 , 42 pipes-group >= 1.0.0 && < 1.1 ,
44 pipes-parse >= 3.0.0 && < 3.1 , 43 pipes-parse >= 3.0.0 && < 3.1 ,