import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

object H extends App {
  val conf = new SparkConf().setMaster("local[2]").setAppName("hello")
  val ss = new StreamingContext(conf, Seconds(5))

  // Broker list for the direct (receiver-less) Kafka stream
  val kafkaParams = Map[String, String]("metadata.broker.list" -> "myhadoop1:9092")
  // updateStateByKey requires a checkpoint directory to persist state between batches
  ss.checkpoint("hdfs://myhadoop1:8020/data")
  val topic = Set[String]("wordcount1")

  // Each record from the direct Kafka stream is a (key, value) pair; the value is the message body
  val lines = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ss, kafkaParams, topic)

  // Split each message into words, count them per batch, and keep a running total per word
  lines.flatMap(_._2.split(" "))
    .map((_, 1))
    .updateStateByKey((seqs: Seq[Int], state: Option[Int]) => {
      // Add this batch's counts to the previously accumulated total for the key
      Some(state.getOrElse(0) + seqs.sum)
    })
    .print()

  ss.start()
  ss.awaitTermination()
}
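
The behaviour of the state update function can be seen without running Spark at all. Below is a minimal, self-contained sketch (the object name UpdateStateDemo and the sample batch counts are made up for illustration) showing how the per-key total accumulates across two simulated batches:

// Minimal sketch of the update logic used above, applied to fabricated batch data.
object UpdateStateDemo extends App {
  // Same logic as the function passed to updateStateByKey: add the new counts to the old total
  def update(newCounts: Seq[Int], state: Option[Int]): Option[Int] =
    Some(state.getOrElse(0) + newCounts.sum)

  // Batch 1: the word appears twice; there is no previous state yet
  val afterBatch1 = update(Seq(1, 1), None)            // Some(2)
  // Batch 2: the word appears three more times; previous state is Some(2)
  val afterBatch2 = update(Seq(1, 1, 1), afterBatch1)  // Some(5)

  println(s"after batch 1: $afterBatch1, after batch 2: $afterBatch2")
}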