1 {-# LANGUAGE RankNTypes, TypeFamilies, BangPatterns, CPP #-}
2 #if __GLASGOW_HASKELL__ >= 702
3 {-# LANGUAGE Trustworthy #-}
5 {-| This module provides @pipes@ utilities for \"text streams\", which are
6 streams of 'Text' chunks. The individual chunks are uniformly @strict@, but
7 a 'Producer' can be converted to and from lazy 'Text's, though this is generally
8 unwise. Where pipes IO replaces lazy IO, 'Producer Text m r' replaces lazy 'Text'.
9 An 'IO.Handle' can be associated with a 'Producer' or 'Consumer' according as it is read or written to.
11 To stream to or from 'IO.Handle's, one can use 'fromHandle' or 'toHandle'. For
12 example, the following program copies a document from one file to another:
15 > import qualified Data.Text.Pipes as Text
19 > withFile "inFile.txt" ReadMode $ \hIn ->
20 > withFile "outFile.txt" WriteMode $ \hOut ->
21 > runEffect $ Text.fromHandle hIn >-> Text.toHandle hOut
23 To stream from files, the following is perhaps more Prelude-like (note that it uses Pipes.Safe):
26 > import qualified Data.Text.Pipes as Text
29 > main = runSafeT $ runEffect $ Text.readFile "inFile.txt" >-> Text.writeFile "outFile.txt"
31 You can stream to and from 'stdin' and 'stdout' using the predefined 'stdin'
32 and 'stdout' proxies, as with the following \"echo\" program:
34 > main = runEffect $ Text.stdin >-> Text.stdout
36 You can also translate pure lazy 'TL.Text's to and from proxies:
38 > main = runEffect $ Text.fromLazy (TL.pack "Hello, world!\n") >-> Text.stdout
40 In addition, this module provides many functions equivalent to lazy
41 'Text' functions so that you can transform or fold text streams. For
42 example, to stream only the first three lines of 'stdin' to 'stdout' you
46 > import qualified Pipes.Text as Text
47 > import qualified Pipes.Parse as Parse
49 > main = runEffect $ takeLines 3 Text.stdin >-> Text.stdout
51 > takeLines n = Text.unlines . Parse.takeFree n . Text.lines
53 The above program will never bring more than one chunk of text (~ 32 KB) into
54 memory, no matter how long the lines are.
Note that functions in this library are designed to operate on streams that
are insensitive to text boundaries. This means that they may freely split
text into smaller texts and /discard empty texts/. However, apart from the
special case of 'concatMap', they will /never concatenate texts/, in
order to provide strict upper bounds on memory usage.
110 -- * Primitive Character Parsers
149 , module Data.ByteString
151 , module Data.Profunctor
156 import Control.Exception (throwIO, try)
157 import Control.Monad (liftM, unless, join)
158 import Control.Monad.Trans.State.Strict (StateT(..))
159 import Data.Monoid ((<>))
160 import qualified Data.Text as T
161 import qualified Data.Text.IO as T
162 import qualified Data.Text.Encoding as TE
163 import qualified Data.Text.Encoding.Error as TE
164 import Data.Text (Text)
165 import qualified Data.Text.Lazy as TL
166 import qualified Data.Text.Lazy.IO as TL
167 import Data.Text.Lazy.Internal (foldrChunks, defaultChunkSize)
168 import Data.ByteString.Unsafe (unsafeTake, unsafeDrop)
169 import Data.ByteString (ByteString)
170 import qualified Data.ByteString as B
171 import Data.Char (ord, isSpace)
172 import Data.Functor.Constant (Constant(Constant, getConstant))
173 import Data.Functor.Identity (Identity)
174 import Data.Profunctor (Profunctor)
175 import qualified Data.Profunctor
176 import qualified Data.List as List
177 import Foreign.C.Error (Errno(Errno), ePIPE)
178 import qualified GHC.IO.Exception as G
180 import qualified Pipes.ByteString as PB
181 import qualified Pipes.Text.Internal as PE
182 import Pipes.Text.Internal (Codec(..))
183 import Pipes.Text.Parse (nextChar, drawChar, unDrawChar, peekChar, isEndOfChars )
184 import Pipes.Core (respond, Server')
185 import qualified Pipes.Parse as PP
186 import Pipes.Parse (Parser, concats, intercalates, FreeT)
187 import qualified Pipes.Safe.Prelude as Safe
188 import qualified Pipes.Safe as Safe
189 import Pipes.Safe (MonadSafe(..), Base(..))
190 import qualified Pipes.Prelude as P
191 import qualified System.IO as IO
192 import Data.Char (isSpace)
193 import Data.Word (Word8)
195 import Prelude hiding (
-- | Convert a lazy 'TL.Text' into a 'Producer' of its strict 'Text' chunks,
-- yielding each chunk in order without forcing the whole text into memory
fromLazy :: (Monad m) => TL.Text -> Producer' Text m ()
fromLazy lt = foldrChunks (\chunk rest -> yield chunk >> rest) (return ()) lt
{-# INLINE fromLazy #-}
-- | Stream text from 'stdin', chunk by chunk, by delegating to 'fromHandle'
-- (and therefore using the handle's configured encoding)
stdin :: MonadIO m => Producer Text m ()
stdin = fromHandle IO.stdin
234 {-| Convert a 'IO.Handle' into a text stream using a text size
235 determined by the good sense of the text library; note that this
is distinctly slower than @decodeUtf8 (Pipes.ByteString.fromHandle h)@
237 but uses the system encoding and has other `Data.Text.IO` features
240 fromHandle :: MonadIO m => IO.Handle -> Producer Text m ()
241 fromHandle h = go where
242 go = do txt <- liftIO (T.hGetChunk h)
243 unless (T.null txt) $ do yield txt
245 {-# INLINABLE fromHandle#-}
248 {-| Stream text from a file in the simple fashion of @Data.Text.IO@
250 >>> runSafeT $ runEffect $ Text.readFile "hello.hs" >-> Text.map toUpper >-> hoist lift Text.stdout
251 MAIN = PUTSTRLN "HELLO WORLD"
-- | Stream text from a file in the simple fashion of @Data.Text.IO@,
-- opening and closing the 'IO.Handle' safely via @pipes-safe@
readFile :: MonadSafe m => FilePath -> Producer Text m ()
readFile fp = Safe.withFile fp IO.ReadMode fromHandle
{-# INLINE readFile #-}
258 {-| Stream lines of text from stdin (for testing in ghci etc.)
260 >>> let safely = runSafeT . runEffect
261 >>> safely $ for Text.stdinLn (lift . lift . print . T.length)
268 stdinLn :: MonadIO m => Producer' Text m ()
271 eof <- liftIO (IO.hIsEOF IO.stdin)
273 txt <- liftIO (T.hGetLine IO.stdin)
276 {-# INLINABLE stdinLn #-}
278 {-| Stream text to 'stdout'
280 Unlike 'toHandle', 'stdout' gracefully terminates on a broken output pipe.
282 Note: For best performance, use @(for source (liftIO . putStr))@ instead of
283 @(source >-> stdout)@ in suitable cases.
285 stdout :: MonadIO m => Consumer' Text m ()
290 x <- liftIO $ try (T.putStr txt)
292 Left (G.IOError { G.ioe_type = G.ResourceVanished
293 , G.ioe_errno = Just ioe })
296 Left e -> liftIO (throwIO e)
298 {-# INLINABLE stdout #-}
300 stdoutLn :: (MonadIO m) => Consumer' Text m ()
305 x <- liftIO $ try (T.putStrLn str)
307 Left (G.IOError { G.ioe_type = G.ResourceVanished
308 , G.ioe_errno = Just ioe })
311 Left e -> liftIO (throwIO e)
313 {-# INLINABLE stdoutLn #-}
315 {-| Convert a text stream into a 'Handle'
317 Note: again, for best performance, where possible use
318 @(for source (liftIO . hPutStr handle))@ instead of @(source >-> toHandle handle)@.
-- | Convert a text stream into a 'IO.Handle', writing each incoming chunk
-- with 'T.hPutStr'.  Note: for best performance, where possible use
-- @(for source (liftIO . hPutStr handle))@ instead of @(source >-> toHandle handle)@.
toHandle :: MonadIO m => IO.Handle -> Consumer' Text m r
toHandle handle = for cat (\chunk -> liftIO (T.hPutStr handle chunk))
{-# INLINABLE toHandle #-}
324 {-# RULES "p >-> toHandle h" forall p h .
325 p >-> toHandle h = for p (\txt -> liftIO (T.hPutStr h txt))
-- | Stream text into a file, opening and closing the handle safely
-- via @pipes-safe@.
writeFile :: (MonadSafe m) => FilePath -> Consumer' Text m ()
writeFile fp = Safe.withFile fp IO.WriteMode toHandle
{-# INLINE writeFile #-}
-- | Minimal van Laarhoven lens alias, declared locally so the module does
-- not depend on a lens library
type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
-- | Minimal isomorphism alias in the profunctor encoding used by @lens@
type Iso' a b = forall f p . (Functor f, Profunctor p) => p b (f b) -> p a (f a)
-- | View through a lens: specializing the lens's functor to 'Constant'
-- discards the "setter" half and extracts the focus @b@ from @a@
(^.) :: a -> ((b -> Constant b b) -> (a -> Constant b a)) -> b
a ^. lens = getConstant (lens Constant a)
-- | Apply a transformation to each 'Char' in the stream,
-- chunk-wise via 'T.map'
map :: (Monad m) => (Char -> Char) -> Pipe Text Text m r
map = P.map . T.map
{-# INLINABLE map #-}
348 {-# RULES "p >-> map f" forall p f .
349 p >-> map f = for p (\txt -> yield (T.map f txt))
352 -- | Map a function over the characters of a text stream and concatenate the results
354 :: (Monad m) => (Char -> Text) -> Pipe Text Text m r
355 concatMap f = P.map (T.concatMap f)
356 {-# INLINABLE concatMap #-}
358 {-# RULES "p >-> concatMap f" forall p f .
359 p >-> concatMap f = for p (\txt -> yield (T.concatMap f txt))
-- | Transform a Pipe of 'Text' into a Pipe of 'ByteString's using UTF-8
-- encoding; @encodeUtf8 = Pipes.Prelude.map TE.encodeUtf8@ so more complex
-- encoding pipes can easily be constructed with the functions in @Data.Text.Encoding@
encodeUtf8 :: Monad m => Pipe Text ByteString m r
encodeUtf8 = for cat (\chunk -> yield (TE.encodeUtf8 chunk))
{-# INLINEABLE encodeUtf8 #-}
369 {-# RULES "p >-> encodeUtf8" forall p .
370 p >-> encodeUtf8 = for p (\txt -> yield (TE.encodeUtf8 txt))
373 -- | Transform a Pipe of 'String's into one of 'Text' chunks
374 pack :: Monad m => Pipe String Text m r
376 {-# INLINEABLE pack #-}
378 {-# RULES "p >-> pack" forall p .
379 p >-> pack = for p (\txt -> yield (T.pack txt))
-- | Transform a Pipe of 'Text' chunks into one of 'String's
unpack :: Monad m => Pipe Text String m r
unpack = P.map T.unpack
{-# INLINEABLE unpack #-}
387 {-# RULES "p >-> unpack" forall p .
388 p >-> unpack = for p (\txt -> yield (T.unpack txt))
-- | @toCaseFold@, @toLower@, @toUpper@ and @stripStart@ are standard 'Text' utilities,
-- here acting on a 'Text' pipe, rather as they would on a lazy text.
-- The return type is polymorphic in @r@ (like 'map' and 'filter'), so the
-- pipe can sit in the middle of a pipeline with any return value.
toCaseFold :: Monad m => Pipe Text Text m r
toCaseFold = P.map T.toCaseFold
{-# INLINEABLE toCaseFold #-}
397 {-# RULES "p >-> toCaseFold" forall p .
398 p >-> toCaseFold = for p (\txt -> yield (T.toCaseFold txt))
-- | lowercase incoming 'Text'; polymorphic in the return type @r@,
-- consistent with 'map' and the other character pipes
toLower :: Monad m => Pipe Text Text m r
toLower = P.map T.toLower
{-# INLINEABLE toLower #-}
407 {-# RULES "p >-> toLower" forall p .
408 p >-> toLower = for p (\txt -> yield (T.toLower txt))
-- | uppercase incoming 'Text'; polymorphic in the return type @r@,
-- consistent with 'map' and the other character pipes
toUpper :: Monad m => Pipe Text Text m r
toUpper = P.map T.toUpper
{-# INLINEABLE toUpper #-}
416 {-# RULES "p >-> toUpper" forall p .
417 p >-> toUpper = for p (\txt -> yield (T.toUpper txt))
420 -- | Remove leading white space from an incoming succession of 'Text's
421 stripStart :: Monad m => Pipe Text Text m r
424 let text = T.stripStart chunk
428 {-# INLINEABLE stripStart #-}
430 -- | @(take n)@ only allows @n@ individual characters to pass;
431 -- contrast @Pipes.Prelude.take@ which would let @n@ chunks pass.
432 take :: (Monad m, Integral a) => a -> Pipe Text Text m ()
433 take n0 = go n0 where
438 let len = fromIntegral (T.length txt)
440 then yield (T.take (fromIntegral n) txt)
444 {-# INLINABLE take #-}
446 -- | @(drop n)@ drops the first @n@ characters
447 drop :: (Monad m, Integral a) => a -> Pipe Text Text m r
448 drop n0 = go n0 where
453 let len = fromIntegral (T.length txt)
456 yield (T.drop (fromIntegral n) txt)
459 {-# INLINABLE drop #-}
461 -- | Take characters until they fail the predicate
462 takeWhile :: (Monad m) => (Char -> Bool) -> Pipe Text Text m ()
463 takeWhile predicate = go
467 let (prefix, suffix) = T.span predicate txt
473 {-# INLINABLE takeWhile #-}
475 -- | Drop characters until they fail the predicate
476 dropWhile :: (Monad m) => (Char -> Bool) -> Pipe Text Text m r
477 dropWhile predicate = go where
480 case T.findIndex (not . predicate) txt of
485 {-# INLINABLE dropWhile #-}
-- | Only allows 'Char's to pass if they satisfy the predicate,
-- filtering each chunk with 'T.filter'
filter :: (Monad m) => (Char -> Bool) -> Pipe Text Text m r
filter = P.map . T.filter
{-# INLINABLE filter #-}
492 {-# RULES "p >-> filter q" forall p q .
493 p >-> filter q = for p (\txt -> yield (T.filter q txt))
496 -- | Strict left scan over the characters
499 => (Char -> Char -> Char) -> Char -> Pipe Text Text m r
500 scan step begin = go begin
504 let txt' = T.scanl step c txt
508 {-# INLINABLE scan #-}
510 {-| Fold a pure 'Producer' of strict 'Text's into a lazy
-- | Collect the chunks of a pure 'Producer' and assemble them into a
-- lazy 'TL.Text'
toLazy :: Producer Text Identity () -> TL.Text
toLazy p = TL.fromChunks (P.toList p)
{-# INLINABLE toLazy #-}
517 {-| Fold an effectful 'Producer' of strict 'Text's into a lazy
520 Note: 'toLazyM' is not an idiomatic use of @pipes@, but I provide it for
521 simple testing purposes. Idiomatic @pipes@ style consumes the chunks
522 immediately as they are generated instead of loading them all into memory.
-- | Run an effectful 'Producer', collecting every chunk into memory and
-- assembling a lazy 'TL.Text'.  Intended for testing only: idiomatic
-- @pipes@ code consumes chunks as they are produced instead.
toLazyM :: (Monad m) => Producer Text m () -> m TL.Text
toLazyM p = do
    chunks <- P.toListM p
    return (TL.fromChunks chunks)
{-# INLINABLE toLazyM #-}
528 -- | Reduce the text stream using a strict left fold over characters
531 => (x -> Char -> x) -> x -> (x -> r) -> Producer Text m () -> m r
532 foldChars step begin done = P.fold (T.foldl' step) begin done
533 {-# INLINABLE foldChars #-}
535 -- | Retrieve the first 'Char'
536 head :: (Monad m) => Producer Text m () -> m (Maybe Char)
542 Left _ -> return Nothing
543 Right (c, _) -> return (Just c)
544 {-# INLINABLE head #-}
546 -- | Retrieve the last 'Char'
547 last :: (Monad m) => Producer Text m () -> m (Maybe Char)
557 else go (Just $ T.last txt) p'
558 {-# INLINABLE last #-}
560 -- | Determine if the stream is empty
561 null :: (Monad m) => Producer Text m () -> m Bool
563 {-# INLINABLE null #-}
-- | Count the number of characters in the stream by summing the
-- lengths of the individual chunks
length :: (Monad m, Num n) => Producer Text m () -> m n
length = P.fold step 0 id
  where
    step acc chunk = acc + fromIntegral (T.length chunk)
{-# INLINABLE length #-}
-- | Fold that returns whether any received 'Char' satisfies the predicate,
-- short-circuiting as soon as a chunk contains a match
any :: (Monad m) => (Char -> Bool) -> Producer Text m () -> m Bool
any = P.any . T.any
{-# INLINABLE any #-}
-- | Fold that returns whether all received 'Char's satisfy the predicate,
-- short-circuiting as soon as a chunk contains a counterexample
all :: (Monad m) => (Char -> Bool) -> Producer Text m () -> m Bool
all = P.all . T.all
{-# INLINABLE all #-}
580 -- | Return the maximum 'Char' within a text stream
581 maximum :: (Monad m) => Producer Text m () -> m (Maybe Char)
582 maximum = P.fold step Nothing id
587 else Just $ case mc of
588 Nothing -> T.maximum txt
589 Just c -> max c (T.maximum txt)
590 {-# INLINABLE maximum #-}
592 -- | Return the minimum 'Char' within a text stream (surely very useful!)
593 minimum :: (Monad m) => Producer Text m () -> m (Maybe Char)
594 minimum = P.fold step Nothing id
600 Nothing -> Just (T.minimum txt)
601 Just c -> Just (min c (T.minimum txt))
602 {-# INLINABLE minimum #-}
605 -- | Find the first element in the stream that matches the predicate
608 => (Char -> Bool) -> Producer Text m () -> m (Maybe Char)
609 find predicate p = head (p >-> filter predicate)
610 {-# INLINABLE find #-}
612 -- | Index into a text stream
614 :: (Monad m, Integral a)
615 => a-> Producer Text m () -> m (Maybe Char)
616 index n p = head (p >-> drop n)
617 {-# INLINABLE index #-}
-- | Store a tally of how many segments match the given 'Text',
-- counting matches per chunk with 'T.count' and summing the totals
count :: (Monad m, Num n) => Text -> Producer Text m () -> m n
count needle p = P.sum (p >-> P.map (fromIntegral . T.count needle))
{-# INLINABLE count #-}
625 -- | Transform a Pipe of 'ByteString's expected to be UTF-8 encoded into a Pipe of Text
626 -- returning a Pipe of ByteStrings that begins at the point of failure.
628 decodeUtf8 :: Monad m => Producer ByteString m r -> Producer Text m (Producer ByteString m r)
629 decodeUtf8 = go B.empty PE.streamDecodeUtf8 where
630 go !carry dec0 p = do
632 case x of Left r -> if B.null carry
633 then return (return r) -- all bytestrinput was consumed
634 else return (do yield carry -- a potentially valid fragment remains
637 Right (chunk, p') -> case dec0 chunk of
638 PE.Some text carry2 dec -> do yield text
640 PE.Other text bs -> do yield text
641 return (do yield bs -- an invalid blob remains
643 {-# INLINABLE decodeUtf8 #-}
646 -- | Splits a 'Producer' after the given number of characters
648 :: (Monad m, Integral n)
651 -> Producer' Text m (Producer Text m r)
658 Left r -> return (return r)
659 Right (txt, p') -> do
660 let len = fromIntegral (T.length txt)
666 let (prefix, suffix) = T.splitAt (fromIntegral n) txt
668 return (yield suffix >> p')
669 {-# INLINABLE splitAt #-}
671 -- | Split a text stream into 'FreeT'-delimited text streams of fixed size
673 :: (Monad m, Integral n)
674 => n -> Producer Text m r -> FreeT (Producer Text m) m r
675 chunksOf n p0 = PP.FreeT (go p0)
681 Right (txt, p') -> PP.Free $ do
682 p'' <- splitAt n (yield txt >> p')
683 return $ PP.FreeT (go p'')
684 {-# INLINABLE chunksOf #-}
686 {-| Split a text stream in two, where the first text stream is the longest
687 consecutive group of text that satisfy the predicate
693 -> Producer' Text m (Producer Text m r)
699 Left r -> return (return r)
700 Right (txt, p') -> do
701 let (prefix, suffix) = T.span predicate txt
708 return (yield suffix >> p')
709 {-# INLINABLE span #-}
711 {-| Split a text stream in two, where the first text stream is the longest
712 consecutive group of characters that don't satisfy the predicate
718 -> Producer Text m (Producer Text m r)
719 break predicate = span (not . predicate)
720 {-# INLINABLE break #-}
722 {-| Split a text stream into sub-streams delimited by characters that satisfy the
729 -> PP.FreeT (Producer Text m) m r
730 splitsWith predicate p0 = PP.FreeT (go0 p0)
735 Left r -> return (PP.Pure r)
739 else return $ PP.Free $ do
740 p'' <- span (not . predicate) (yield txt >> p')
741 return $ PP.FreeT (go1 p'')
746 Right (_, p') -> PP.Free $ do
747 p'' <- span (not . predicate) p'
748 return $ PP.FreeT (go1 p'')
749 {-# INLINABLE splitsWith #-}
751 -- | Split a text stream using the given 'Char' as the delimiter
755 -> FreeT (Producer Text m) m r
756 split c = splitsWith (c ==)
757 {-# INLINABLE split #-}
759 {-| Group a text stream into 'FreeT'-delimited text streams using the supplied
764 => (Char -> Char -> Bool)
766 -> FreeT (Producer Text m) m r
767 groupBy equal p0 = PP.FreeT (go p0)
772 Left r -> return (PP.Pure r)
773 Right (txt, p') -> case (T.uncons txt) of
776 return $ PP.Free $ do
777 p'' <- span (equal c) (yield txt >> p')
778 return $ PP.FreeT (go p'')
779 {-# INLINABLE groupBy #-}
781 -- | Group a text stream into 'FreeT'-delimited text streams of identical characters
783 :: (Monad m) => Producer Text m r -> FreeT (Producer Text m) m r
785 {-# INLINABLE group #-}
787 {-| Split a text stream into 'FreeT'-delimited lines
790 :: (Monad m) => Producer Text m r -> FreeT (Producer Text m) m r
791 lines p0 = PP.FreeT (go0 p0)
796 Left r -> return (PP.Pure r)
800 else return $ PP.Free $ go1 (yield txt >> p')
802 p' <- break ('\n' ==) p
803 return $ PP.FreeT $ do
806 Left r -> return $ PP.Pure r
807 Right (_, p'') -> go0 p''
808 {-# INLINABLE lines #-}
812 -- | Split a text stream into 'FreeT'-delimited words
814 :: (Monad m) => Producer Text m r -> FreeT (Producer Text m) m r
818 x <- next (p >-> dropWhile isSpace)
821 Right (bs, p') -> PP.Free $ do
822 p'' <- break isSpace (yield bs >> p')
824 {-# INLINABLE words #-}
827 -- | Intersperse a 'Char' in between the characters of the text stream
829 :: (Monad m) => Char -> Producer Text m r -> Producer Text m r
836 Right (txt, p') -> do
837 yield (T.intersperse c txt)
843 Right (txt, p') -> do
844 yield (T.singleton c)
845 yield (T.intersperse c txt)
847 {-# INLINABLE intersperse #-}
849 {-| 'intercalate' concatenates the 'FreeT'-delimited text streams after
850 interspersing a text stream in between them
854 => Producer Text m ()
855 -> FreeT (Producer Text m) m r
860 x <- lift (PP.runFreeT f)
862 PP.Pure r -> return r
867 x <- lift (PP.runFreeT f)
869 PP.Pure r -> return r
874 {-# INLINABLE intercalate #-}
876 {-| Join 'FreeT'-delimited lines into a text stream
879 :: (Monad m) => FreeT (Producer Text m) m r -> Producer Text m r
883 x <- lift (PP.runFreeT f)
885 PP.Pure r -> return r
888 yield $ T.singleton '\n'
890 {-# INLINABLE unlines #-}
892 {-| Join 'FreeT'-delimited words into a text stream
895 :: (Monad m) => FreeT (Producer Text m) m r -> Producer Text m r
896 unwords = intercalate (yield $ T.pack " ")
897 {-# INLINABLE unwords #-}
900 The following parsing utilities are single-character analogs of the ones found
905 @Pipes.Text.Parse@ re-exports 'nextChar', 'drawChar', 'unDrawChar', 'peekChar', and 'isEndOfChars'.
907 @Data.Text@ re-exports the 'Text' type.
909 @Pipes.Parse@ re-exports 'input', 'concat', and 'FreeT' (the type).
914 decode :: Monad m => PE.Decoding -> Producer ByteString m r -> Producer Text m (Producer ByteString m r)
915 -- decode codec = go B.empty where
917 -- do x <- lift (next p0)
918 -- case x of Right (chunk, p) ->
919 -- do let (text, stuff) = codecDecode codec (B.append extra chunk)
921 -- case stuff of Right extra' -> go extra' p
922 -- Left (exc,bs) -> do yield text
923 -- return (do yield bs
925 -- Left r -> return (do yield extra
928 decode d p0 = case d of
929 PE.Other txt bad -> do yield txt
932 PE.Some txt extra dec -> do yield txt
934 case x of Left r -> return (do yield extra
936 Right (chunk,p1) -> decode (dec chunk) p1
938 -- go !carry dec0 p = do
939 -- x <- lift (next p)
940 -- case x of Left r -> if B.null carry
941 -- then return (return r) -- all bytestrinput was consumed
942 -- else return (do yield carry -- a potentially valid fragment remains
945 -- Right (chunk, p') -> case dec0 chunk of
946 -- PE.Some text carry2 dec -> do yield text
948 -- PE.Other text bs -> do yield text
949 -- return (do yield bs -- an invalid blob remains
951 -- {-# INLINABLE decodeUtf8 #-}