DStream操作实战:3.SparkStreaming开窗函数reduceByKeyAndWindow,实现单词计数
2018-03-11 22:10
441 查看
package cn.testdemo.dstream.socket
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
//todo:利用sparkStreaming开窗函数reduceByKeyAndWindow实现单词计数
object SparkStreamingSocketWindow {

  /** State-update function for `updateStateByKey` — leftover from the previous
    * (non-windowed) example and not used by the windowed word count below.
    *
    * @param currentValues all the 1s emitted for one word in the current batch,
    *                      e.g. (hadoop,1)(hadoop,1)(hadoop,1) -> Seq(1, 1, 1)
    * @param historyValue  accumulated count of that word over all previous batches
    * @return the new accumulated count for the word
    */
  def updateFunc(currentValues: Seq[Int], historyValue: Option[Int]): Option[Int] = {
    val newValues: Int = currentValues.sum + historyValue.getOrElse(0)
    Some(newValues)
  }

  def main(args: Array[String]): Unit = {
    // 1. SparkConf — local[2]: one core for the socket receiver, one for processing
    val sparkConf: SparkConf = new SparkConf().setAppName("SparkStreamingSocketWindow").setMaster("local[2]")
    // 2. SparkContext
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")
    // 3. StreamingContext with a 5-second batch interval
    val ssc = new StreamingContext(sc, Seconds(5))
    // Checkpoint directory: required so window/state operations can persist intermediate data
    ssc.checkpoint("./ck2018")
    // 4. Read lines from the socket source
    val stream: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.216.128", 9999)
    // 5. Split each line into words and pair each word with 1
    val wordAndOne: DStream[(String, Int)] = stream.flatMap(_.split(" ")).map((_, 1))
    // 6. Windowed word count via reduceByKeyAndWindow.
    //    reduceFunc:     associative reduce applied per key within the window
    //    windowDuration: length of the window (must be a multiple of the batch interval)
    //    slideDuration:  how often the window is evaluated (must also be a multiple of the batch interval)
    // BUG FIX: the original passed (window = 5s, slide = 10s); a window shorter than
    // its slide interval silently drops every other batch. Use a 10s window that
    // slides every 5s, matching the intent described by the comments above.
    val result: DStream[(String, Int)] = wordAndOne.reduceByKeyAndWindow((x: Int, y: Int) => x + y, Seconds(10), Seconds(5))
    // 7. Print the counts for each evaluated window
    result.print()
    // 8. Start the computation and block until terminated
    ssc.start()
    ssc.awaitTermination()
  }
}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
//todo:利用sparkStreaming开窗函数reduceByKeyAndWindow实现单词计数
object SparkStreamingSocketWindow {

  /** State-update function for `updateStateByKey` — leftover from the previous
    * (non-windowed) example and not used by the windowed word count below.
    *
    * @param currentValues all the 1s emitted for one word in the current batch,
    *                      e.g. (hadoop,1)(hadoop,1)(hadoop,1) -> Seq(1, 1, 1)
    * @param historyValue  accumulated count of that word over all previous batches
    * @return the new accumulated count for the word
    */
  def updateFunc(currentValues: Seq[Int], historyValue: Option[Int]): Option[Int] = {
    val newValues: Int = currentValues.sum + historyValue.getOrElse(0)
    Some(newValues)
  }

  def main(args: Array[String]): Unit = {
    // 1. SparkConf — local[2]: one core for the socket receiver, one for processing
    val sparkConf: SparkConf = new SparkConf().setAppName("SparkStreamingSocketWindow").setMaster("local[2]")
    // 2. SparkContext
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")
    // 3. StreamingContext with a 5-second batch interval
    val ssc = new StreamingContext(sc, Seconds(5))
    // Checkpoint directory: required so window/state operations can persist intermediate data
    ssc.checkpoint("./ck2018")
    // 4. Read lines from the socket source
    val stream: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.216.128", 9999)
    // 5. Split each line into words and pair each word with 1
    val wordAndOne: DStream[(String, Int)] = stream.flatMap(_.split(" ")).map((_, 1))
    // 6. Windowed word count via reduceByKeyAndWindow.
    //    reduceFunc:     associative reduce applied per key within the window
    //    windowDuration: length of the window (must be a multiple of the batch interval)
    //    slideDuration:  how often the window is evaluated (must also be a multiple of the batch interval)
    // BUG FIX: the original passed (window = 5s, slide = 10s); a window shorter than
    // its slide interval silently drops every other batch. Use a 10s window that
    // slides every 5s, matching the intent described by the comments above.
    val result: DStream[(String, Int)] = wordAndOne.reduceByKeyAndWindow((x: Int, y: Int) => x + y, Seconds(10), Seconds(5))
    // 7. Print the counts for each evaluated window
    result.print()
    // 8. Start the computation and block until terminated
    ssc.start()
    ssc.awaitTermination()
  }
}
相关文章推荐
- DStream操作实战:2.SparkStreaming接受socket数据,实现单词计数累加
- DStream操作实战:1.SparkStreaming接受socket数据,实现单词计数WordCount
- Spark Streaming实现实时WordCount,DStream的使用,updateStateByKey(func)实现累计计算单词出现频率
- Sparkstreaming reduceByKeyAndWindow(_+_, _-_, Duration, Duration) 的源码/原理解析
- <转>Sparkstreaming reduceByKeyAndWindow(_+_, _-_, Duration, Duration) 的源码/原理解析
- reduceByKeyAndWindow实现基于滑动窗口的热点搜索词实时统计(Java版本)
- SparkStreaming之Transform、foreachRDD、updateStateByKey以及reduceByKeyAndWindow
- Spark API编程动手实战-04-以在Spark 1.2版本实现对union、groupByKey、join、reduce、lookup等操作实践
- Spark API编程动手实战-04-以在Spark 1.2版本实现对union、groupByKey、join、reduce、lookup等操作实践
- reduceByKeyAndWindow实现基于滑动窗口的热点搜索词实时统计(Java版本)
- 大数据10_02_SparkStreaming输入源、foreachRDD、transform、updateStateByKey、reduceByKeyAndWindow
- reduceByKeyAndWindow基于滑动窗口的热点搜索词实时统计(Scala版本)
- countByValueAndWindow 与countByWindow=reduceByWindow与reduceByKeyAndWindow
- 第93课:SparkStreaming updateStateByKey 基本操作综合案例实战和内幕源码解密
- 第110讲: Spark Streaming电商广告点击综合案例通过updateStateByKey等实现广告点击流量的在线更新统计
- Spark算子:RDD键值转换操作(3)–groupBy、keyBy、groupByKey、reduceByKey、reduceByKeyLocally
- RDD键值转换操作(3)–groupByKey、reduceByKey、reduceByKeyLocally
- Spark算子:RDD键值转换操作(3)–groupByKey、reduceByKey、reduceByKeyLocally
- Spark Streaming updateStateByKey案例实战和内幕源码解密
- iOS 【程序启动原理/手动创建UIWindow/UIApplicationMain做了些什么/makeKeyAndVisible底层实现】