
I want to group a Stream[F, A] into a Stream[F, Stream[F, A]], with at most n elements in each inner stream.

This is what I did: basically it pipes the chunks into a Queue[F, Queue[F, Chunk[A]]] and then emits the queue elements as the resulting stream.

import cats.effect.Concurrent
import cats.effect.concurrent.Ref
import cats.implicits._
import fs2.{Chunk, Pipe, Stream}
import fs2.concurrent.Queue

implicit class StreamSyntax[F[_], A](s: Stream[F, A])(
    implicit F: Concurrent[F]) {

  // Pushes chunks into an outer queue of inner queues; each inner queue holds
  // at most n elements and is drained downstream as one inner stream.
  def groupedPipe(
      lastQRef: Ref[F, Queue[F, Option[Chunk[A]]]],
      n: Int): Pipe[F, A, Stream[F, A]] = { in =>
    val initQs =
      Queue.unbounded[F, Option[Queue[F, Option[Chunk[A]]]]].flatMap { qq =>
        Queue.bounded[F, Option[Chunk[A]]](1).flatMap { q =>
          lastQRef.set(q) *> qq.enqueue1(Some(q)).as(qq -> q)
        }
      }

    Stream.eval(initQs).flatMap {
      case (qq, initQ) =>
        // Opens the next inner queue and publishes it on the outer queue.
        def newQueue = Queue.bounded[F, Option[Chunk[A]]](1).flatMap { q =>
          qq.enqueue1(Some(q)) *> lastQRef.set(q).as(q)
        }

        val evalStream = {
          in.chunks
            .evalMapAccumulate((0, initQ)) {
              case ((i, q), c) if i + c.size >= n =>
                // The group boundary falls inside this chunk: split it,
                // terminate the current inner queue and continue with a
                // fresh one holding the remainder.
                val (l, r) = c.splitAt(n - i)
                q.enqueue1(Some(l)) >> q.enqueue1(None) >>
                  q.enqueue1(None) >> newQueue.flatMap { nq =>
                    nq.enqueue1(Some(r)).as(((r.size, nq), c))
                  }
              case ((i, q), c) if (i + c.size) < n =>
                // Still below the limit: keep filling the current inner queue.
                q.enqueue1(Some(c)).as(((i + c.size, q), c))
            }
            .attempt ++ Stream.eval {
              // Source exhausted: terminate the last inner queue and the outer queue.
              lastQRef.get.flatMap { last =>
                last.enqueue1(None) *> last.enqueue1(None)
              } *> qq.enqueue1(None)
            }
        }

        qq.dequeue.unNoneTerminate
          .map(q =>
            q.dequeue.unNoneTerminate
              .flatMap(Stream.chunk)
              .onFinalize(
                q.dequeueChunk(Int.MaxValue).unNoneTerminate.compile.drain))
          .concurrently(evalStream)
    }
  }

  def grouped(n: Int) = {
    Stream.eval {
      Queue.unbounded[F, Option[Chunk[A]]].flatMap { empty =>
        Ref.of[F, Queue[F, Option[Chunk[A]]]](empty)
      }
    }.flatMap { ref =>
      val p = groupedPipe(ref, n)
      s.through(p)
    }
  }
}

But this is quite complicated. Is there a simpler way?


3 Answers


fs2 has methods that can help with grouping: chunkN and chunkLimit.

stream.chunkN(n).map(Stream.chunk)

stream.chunkLimit(n).map(Stream.chunk)

chunkN produces chunks of size n until the stream ends.

chunkLimit splits the existing chunks and can produce chunks of variable size.

scala> Stream(1,2,3).repeat.chunkN(2).take(5).toList
res0: List[Chunk[Int]] = List(Chunk(1, 2), Chunk(3, 1), Chunk(2, 3), Chunk(1, 2), Chunk(3, 1))

scala> (Stream(1) ++ Stream(2, 3) ++ Stream(4, 5, 6)).chunkLimit(2).toList
res0: List[Chunk[Int]] = List(Chunk(1), Chunk(2, 3), Chunk(4, 5), Chunk(6))
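
Combined with Stream.chunk, this gives the nested-stream shape from the question. A minimal sketch (groupedSimple is a made-up name, not an fs2 method; assumes fs2 1.x or later):

import fs2.Stream

// Groups a stream into inner streams of at most n elements each by
// re-wrapping every fixed-size chunk as a Stream.
def groupedSimple[F[_], A](s: Stream[F, A], n: Int): Stream[F, Stream[F, A]] =
  s.chunkN(n).map(c => Stream.chunk(c).covary[F])

scala> groupedSimple(Stream(1, 2, 3, 4, 5), 2).map(_.toList).toList
res0: List[List[Int]] = List(List(1, 2), List(3, 4), List(5))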
Answered 2018-10-23T14:43:30.713

Apart from the already mentioned chunkN, you can also consider using groupWithin (fs2 1.0.1):

def groupWithin[F2[x] >: F[x]](n: Int, d: FiniteDuration)(implicit timer: Timer[F2], F: Concurrent[F2]): Stream[F2, Chunk[O]]

Divides this stream into groups of elements received within a time window, or limited by the number of elements, whichever happens first. Empty groups, which can occur if no elements can be pulled from upstream in a given time window, will not be emitted.

Note: a time window is started each time downstream pulls.

I am not sure why you want nested streams here, since the requirement is "at most n elements" per batch, which means you are keeping track of a finite number of elements (which is exactly what a Chunk represents). Either way, a Chunk can always be represented as a Stream with Stream.chunk:

val chunks: Stream[F, Chunk[O]] = ???
val streamOfStreams:  Stream[F, Stream[F, O]] = chunks.map(Stream.chunk)

Here is a complete example of how to use groupWithin:

import cats.implicits._
import cats.effect.{ExitCode, IO, IOApp}
import fs2._
import scala.concurrent.duration._

object GroupingDemo extends IOApp {
  override def run(args: List[String]): IO[ExitCode] = {
    Stream('a, 'b, 'c).covary[IO]
      .groupWithin(2, 1.second)
      .map(_.toList)
      .showLinesStdOut
      .compile.drain
      .as(ExitCode.Success)
  }
}

Output:

List('a, 'b)

List('c)
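
If the nested-stream shape is still required, the same Stream.chunk mapping applies to the groupWithin output as well; roughly (inside the demo above, so the implicit Timer is in scope):

Stream('a, 'b, 'c).covary[IO]
  .groupWithin(2, 1.second)
  .map(Stream.chunk)   // Stream[IO, Stream[Pure, Symbol]]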

Answered 2019-02-05T04:57:05.460

In the end I used a more robust version (using Hotswap to make sure the queues get terminated), like this:

  // cats-effect 3 / fs2 3.x
  import cats.effect.{Async, Resource}
  import cats.effect.std.{Hotswap, Queue}
  import cats.implicits._
  import fs2.{Chunk, Stream}

  def grouped(
      innerSize: Int
  )(implicit F: Async[F]): Stream[F, Stream[F, A]] = {

    type InnerQueue = Queue[F, Option[Chunk[A]]]
    type OuterQueue = Queue[F, Option[InnerQueue]]

    // Swaps in a fresh inner queue and publishes it on the outer queue; the
    // Hotswap finalizer terminates the previous inner queue with a None.
    def swapperInner(swapper: Hotswap[F, InnerQueue], outer: OuterQueue) = {
      val innerRes =
        Resource.make(Queue.unbounded[F, Option[Chunk[A]]])(_.offer(None))
      swapper.swap(innerRes).flatTap(q => outer.offer(q.some))
    }

    // Feeds one upstream chunk into the current inner queue, splitting it and
    // rolling over to a new inner queue whenever the size limit is reached.
    def loopChunk(
        gathered: Int,
        curr: Queue[F, Option[Chunk[A]]],
        chunk: Chunk[A],
        newInnerQueue: F[InnerQueue]
    ): F[(Int, Queue[F, Option[Chunk[A]]])] = {
      if (gathered + chunk.size > innerSize) {
        val (left, right) = chunk.splitAt(innerSize - gathered)
        curr.offer(left.some) >> newInnerQueue.flatMap { nq =>
          loopChunk(0, nq, right, newInnerQueue)
        }
      } else if (gathered + chunk.size == innerSize) {
        curr.offer(chunk.some) >> newInnerQueue.tupleLeft(0)
      } else {
        curr.offer(chunk.some).as(gathered + chunk.size -> curr)
      }
    }

    val prepare = for {
      outer   <- Resource.eval(Queue.unbounded[F, Option[InnerQueue]])
      swapper <- Hotswap.create[F, InnerQueue]
    } yield outer -> swapper

    Stream.resource(prepare).flatMap {
      case (outer, swapper) =>
        val newInner = swapperInner(swapper, outer)
        // Background: pump the source into the inner queues, then terminate
        // the last inner queue (via Hotswap.clear) and the outer queue.
        val background = Stream.eval(newInner).flatMap { initQueue =>
          s.chunks
            .filter(_.nonEmpty)
            .evalMapAccumulate(0 -> initQueue) { (state, chunk) =>
              val (gathered, curr) = state
              loopChunk(gathered, curr, chunk, newInner).tupleRight(())
            }
            .onFinalize(swapper.clear *> outer.offer(None))
        }
        // Foreground: each inner queue becomes one inner stream of at most innerSize elements.
        val foreground = Stream
          .fromQueueNoneTerminated(outer)
          .map(i => Stream.fromQueueNoneTerminatedChunk(i))
        foreground.concurrently(background)
    }
  }
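
A minimal usage sketch, assuming the method above is exposed as syntax on Stream (for example via an implicit class like the StreamSyntax wrapper from the question, ported to cats-effect 3); GroupedDemo and the sample values are only for illustration:

import cats.effect.{IO, IOApp}
import fs2.Stream

object GroupedDemo extends IOApp.Simple {
  def run: IO[Unit] =
    Stream.range(0, 10).covary[IO]
      .grouped(3)                   // Stream[IO, Stream[IO, Int]], at most 3 elements each
      .evalMap(_.compile.toList)    // drain each inner stream into a List
      .evalMap(group => IO.println(group))
      .compile
      .drain
}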
Answered 2021-05-10T03:49:07.933