reduceByKeyAndWindow(func, windowLength, slideInterval, [numTasks])

When called on a DStream of (K, V) pairs, returns a new DStream of (K, V) pairs where the values for each key are aggregated using the given reduce function func over batches in a sliding window. Note: By default, this uses Spark's default number of parallel tasks (2 for local mode, and in cluster mode the number is determined by the config property spark.default.parallelism) to do the grouping. You can pass an optional numTasks argument to set a different number of tasks.

Example:

import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.streaming._
import org.apache.spark.streaming.StreamingContext._
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql
  .{Row, SaveMode, SparkSession}
import org.apache.spark.sql.SQLContext

// Silence Spark's verbose INFO logging so the streaming output is readable.
Logger.getLogger("org").setLevel(Level.ERROR)

// Local session with two cores: streaming needs at least two threads
// (one to receive data, one to process batches).
val spark = SparkSession.builder()
  .config("spark.master", "local[2]")
  .appName("streaming for book")
  .getOrCreate()
import spark.implicits._

val sc = spark.sparkContext
// One-second batch interval.
val ssc = new StreamingContext(sc, Seconds(1))

// Watch two directories for newly created text files.
val stream1 = ssc.textFileStream("/tmp/filestream1/")
val stream2 = ssc.textFileStream("/tmp/filestream2/")

// Merge both sources, drop blank lines, and pair each number with a count of 1.
// NOTE: assumes every non-empty line is a valid integer; a non-numeric line
// would make toInt throw at runtime.
val messages = stream1
  .union(stream2)
  .filter(line => line.nonEmpty)
  .map(line => (line.toInt, 1))
val msgs: org.apache.spark.streaming.dstream.DStream[(Int, Int)] = messages

// Associative, commutative reduce function: sums the per-key counts
// that fall inside one window.
val reduceFn: (Int, Int) => Int = _ + _

// Per-key sliding-window aggregation: for each key, sum the counts seen
// over the last 10 seconds, re-evaluated every 5 seconds.
val windowedMsgs: org.apache.spark.streaming.dstream.DStream[(Int, Int)] =
  msgs.reduceByKeyAndWindow(reduceFn, Seconds(10), Seconds(5))

// Print the first elements of each windowed batch to stdout.
windowedMsgs.print()

// Start the streaming computation and block the driver until it is stopped.
ssc.start()
ssc.awaitTermination()

/*
input:
cat cccc.txt
1
2
3
4
5

cat dddd.txt
1
2
3
4
5

cp cccc.txt /tmp/filestream1;cp dddd.txt /tmp/filestream2

output:


-------------------------------------------
Time: 1583633096000 ms
-------------------------------------------

-------------------------------------------
Time: 1583633101000 ms
-------------------------------------------

-------------------------------------------
Time: 1583633106000 ms
-------------------------------------------

-------------------------------------------
Time: 1583633111000 ms
-------------------------------------------

-------------------------------------------
Time: 1583633116000 ms
-------------------------------------------

-------------------------------------------
Time: 1583633121000 ms
-------------------------------------------

-------------------------------------------
Time: 1583633126000 ms
-------------------------------------------

-------------------------------------------
Time: 1583633131000 ms
-------------------------------------------

-------------------------------------------
Time: 1583633136000 ms
-------------------------------------------
(4,2)
(2,2)
(1,2)
(3,2)
(5,2)

-------------------------------------------
Time: 1583633141000 ms
-------------------------------------------
(4,2)
(2,2)
(1,2)
(3,2)
(5,2)





*/

Last updated