package org.hnsw
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
object DStream_ReduceByWindow {
  def main(args: Array[String]): Unit = {
    // 1. Initialize the streaming context. local[*] leaves the socket receiver its own
    // core plus at least one core for processing; Seconds(1) is the batch interval.
    val conf = new SparkConf().setAppName("jxq").setMaster("local[*]")
    val sc = new StreamingContext(conf, Seconds(1))
    // 2. Receive text lines from the source server's IP and port.
    // Checkpoint setup: a checkpoint directory must be set before using stateful
    // window operations such as countByWindow:
    // sc.checkpoint("out")
    val dStream = sc.socketTextStream("192.168.3.66", 8888)
    // 3. Business logic: reduce all elements in the sliding window into a single value.
    val dStream_win = dStream.reduceByWindow((x, y) => {
      // Concatenate the strings in the window.
      x + y
      // Alternative: parse the strings to Int, sum, then convert back to String:
      // val result = x.toInt + y.toInt
      // result.toString
    }, Seconds(4), Seconds(2)) // window duration and slide interval; each must be an integer multiple of the batch interval
    dStream_win.print()
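
    // A minimal sketch of the incremental variant (check your Spark version's
    // DStream API): reduceByWindow also has an overload that takes an inverse
    // reduce function, letting Spark subtract the batches sliding out of the
    // window instead of recomputing the whole window. It requires a checkpoint
    // directory (see sc.checkpoint above) and assumes here that every input
    // line parses as an integer:
    // val sums = dStream.map(_.toInt)
    //   .reduceByWindow(_ + _, _ - _, Seconds(4), Seconds(2))
    // sums.print()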
    // 4. Start the streaming computation.
    sc.start()
    // 5. Block until the computation is stopped or fails.
    sc.awaitTermination()
  }
}
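
// To try this out, start a simple socket server on the source host (the IP above,
// or localhost when testing) and type a few lines:
//   nc -lk 8888
// Every 2 seconds the job prints the concatenation of the last 4 seconds of input.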