Task: use Flink to consume the ChangeRecord topic from Kafka and, every three minutes, count the number of records per device whose status is "预警" (warning) and that have not yet been handled. Append the results to the MySQL table shtd_industry.threemin_warning_state_agg (structure below) and also back up a copy to HBase using the same structure as the MySQL table. Take a screenshot of the job launch command; after the job has started and data has arrived, query threemin_warning_state_agg ordered by device id in descending order and take a screenshot, wait three minutes, query again and take a second screenshot, then paste the screenshots into the report.
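The original post does not reproduce the table structure it refers to. The sketch below is inferred from the column names used in the sink code later in the post; the column types and lengths are assumptions, as is the use of VARCHAR for the window end time. The verification query ordered by device id descending is also shown.

CREATE TABLE shtd_industry.threemin_warning_state_agg (
    change_machine_id INT,          -- device id
    totalwarning      INT,          -- count of unhandled "预警" records in the window
    window_end_time   VARCHAR(32)   -- window end time, formatted as yyyy-MM-dd HH:mm:ss
);

-- query used for the report screenshots
SELECT * FROM shtd_industry.threemin_warning_state_agg ORDER BY change_machine_id DESC;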
Connect to Kafka
val kafkaSource=KafkaSource.builder()
.setTopics("ChangeRecord")
.setBootstrapServers("bigdata1:9092")
.setValueOnlyDeserializer(new SimpleStringSchema())
.setStartingOffsets(OffsetsInitializer.earliest())
.build()
Set up the Flink stream execution environment
val env:StreamExecutionEnvironment=StreamExecutionEnvironment.getExecutionEnvironment
env.setParallelism(1)
Create a data stream from the Kafka source
val dataStream = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "03")
- fromSource: creates a data stream from an external source
- kafkaSource: the Kafka address and configuration built above
- WatermarkStrategy.noWatermarks(): no watermarks are generated, so the windows below run on processing time
- "03": the third argument of fromSource, the name of the source as shown in the Flink UI and logs (it is not a consumer group id)
Transform the Kafka data and print it
val transformDataStream = dataStream
  .map(line => {
    val data = line.split(",")
    // data(1): device id, data(3): record state, data(6): handle flag
    (data(1).toInt, data(3), data(6).toInt)
  })
  .filter(_._2 == "预警")  // keep only records in the "预警" (warning) state
  .filter(_._3 % 2 == 0)   // keep only records whose handle flag marks them as unhandled
  .map(x => (x._1, 1))     // one count per matching record
  .keyBy(_._1)
  .window(TumblingProcessingTimeWindows.of(Time.minutes(3))) // three-minute windows, as the task requires
  .sum(1)                  // total warnings per device within the window
  .map(x => SensorReading(x._1, x._2,
    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date))) // current system time used as the window end time
transformDataStream.print()
Write the results to MySQL
transformDataStream.addSink(new JDBCSink)
Execute the Flink job
env.execute()
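The task statement also asks for a screenshot of the job launch command, which the post does not show. Assuming the program has been packaged into a jar (the jar path below is hypothetical), a typical submission would be:

flink run -c g3 /path/to/warning-agg-job.jar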
Define a case class for the output records
case class SensorReading(change_machine_id: Int, totalwarning: Int, window_end_time: String)
Define the sink class that writes to MySQL
class JDBCSink() extends RichSinkFunction[SensorReading]{
//JDBC connection and prepared statements
var conn: sql.Connection = _
var insertStmt: sql.PreparedStatement = _
var updateStmt: sql.PreparedStatement = _
//_ is a placeholder meaning the variable has not been initialized yet
override def open(parameters: Configuration): Unit = {
super.open(parameters)
conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/databasename", "root", "password")
insertStmt = conn.prepareStatement("INSERT INTO threemin_warning_state_agg (change_machine_id, totalwarning, window_end_time) VALUES (?,?,?)")
//updateStmt = conn.prepareStatement("UPDATE threemin_warning_state_agg SET totalwarning = ?,window_end_time=? WHERE change_machine_id = ?")
}
//called for every record: execute the SQL
override def invoke(value: SensorReading, context: SinkFunction.Context): Unit = {
//bind the fields and execute the insert
insertStmt.setInt(1,value.change_machine_id)
insertStmt.setInt(2,value.totalwarning)
insertStmt.setString(3,value.window_end_time)
insertStmt.execute()
}
//clean up resources when the sink is closed
override def close(): Unit = {
  // updateStmt is never initialized in this version (its prepareStatement is commented out above), so guard against null
  if (insertStmt != null) insertStmt.close()
  if (updateStmt != null) updateStmt.close()
  if (conn != null) conn.close()
}
}
Complete code
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.configuration.Configuration
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import java.sql
import java.sql.DriverManager
import java.text.SimpleDateFormat
import java.util.Date
case class SensorReading(change_machine_id: Int, totalwarning: Int, window_end_time: String)
object g3 {
def main(args: Array[String]): Unit = {
val kafkaSource=KafkaSource.builder()
.setTopics("ChangeRecord")
.setBootstrapServers("bigdata1:9092")
.setValueOnlyDeserializer(new SimpleStringSchema())
.setStartingOffsets(OffsetsInitializer.earliest())
.build()
val env:StreamExecutionEnvironment=StreamExecutionEnvironment.getExecutionEnvironment
env.setParallelism(1)
// Read the Kafka data in real time without a watermark strategy
// No watermarks means no event-time timestamps are generated, so the windows below run on processing time
val dataStream = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "03")
val transformDataStream = dataStream
  // drop the header line injected by tail when the log file is streamed into Kafka
  .filter(_ != "==> /data_log/2024-01-23@20:34-changerecord.csv <==")
  .map(line => {
    val data = line.split(",")
    // data(1): device id, data(3): record state, data(6): handle flag
    (data(1).toInt, data(3), data(6).toInt)
  })
  .filter(_._2 == "预警")  // keep only records in the "预警" (warning) state
  .filter(_._3 % 2 == 0)   // keep only records whose handle flag marks them as unhandled
  .map(x => (x._1, 1))     // one count per matching record
  .keyBy(_._1)
  .window(TumblingProcessingTimeWindows.of(Time.minutes(3))) // three-minute windows, as the task requires
  .sum(1)                  // total warnings per device within the window
  .map(x => SensorReading(x._1, x._2,
    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date))) // current system time used as the window end time
transformDataStream.print()
transformDataStream.addSink(new JDBCSink)
//execute the Flink job
env.execute()
}
}
class JDBCSink() extends RichSinkFunction[SensorReading]{
//JDBC connection and prepared statements
//_ is a placeholder meaning the variable has not been initialized yet
var conn: sql.Connection = _
var insertStmt: sql.PreparedStatement = _
var updateStmt: sql.PreparedStatement = _
override def open(parameters: Configuration): Unit = {
super.open(parameters)
conn=DriverManager.getConnection("jdbc:mysql://10.2.60.156:3306/shtd_industry", "root", "123456")
insertStmt = conn.prepareStatement("INSERT INTO threemin_warning_state_agg (change_machine_id, totalwarning, window_end_time) VALUES (?,?,?)")
updateStmt = conn.prepareStatement("UPDATE threemin_warning_state_agg SET totalwarning = ?,window_end_time=? WHERE change_machine_id = ?")
}
//called for every record: execute the SQL
override def invoke(value: SensorReading, context: SinkFunction.Context): Unit = {
//bind the fields and execute the insert
insertStmt.setInt(1,value.change_machine_id)
insertStmt.setInt(2,value.totalwarning)
insertStmt.setString(3,value.window_end_time)
insertStmt.execute()
}
//clean up resources when the sink is closed
override def close(): Unit = {
insertStmt.close()
updateStmt.close()
conn.close()
}
}
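The task also requires backing the same results up to HBase, which the code above does not implement. Below is a minimal sketch of an additional sink under assumed settings: the HBase table name threemin_warning_state_agg, the column family info, and the ZooKeeper quorum bigdata1:2181 are assumptions rather than values from the original post. It can be attached to the same stream with transformDataStream.addSink(new HBaseSink).

import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
import org.apache.hadoop.hbase.util.Bytes
class HBaseSink() extends RichSinkFunction[SensorReading] {
  // HBase connection and table handle
  var hbaseConn: org.apache.hadoop.hbase.client.Connection = _
  var table: org.apache.hadoop.hbase.client.Table = _
  override def open(parameters: Configuration): Unit = {
    super.open(parameters)
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "bigdata1:2181") // assumed ZooKeeper address
    hbaseConn = ConnectionFactory.createConnection(conf)
    table = hbaseConn.getTable(TableName.valueOf("threemin_warning_state_agg")) // assumed table name
  }
  override def invoke(value: SensorReading, context: SinkFunction.Context): Unit = {
    // row key: device id + window end time, so results from different windows do not overwrite each other
    val put = new Put(Bytes.toBytes(s"${value.change_machine_id}_${value.window_end_time}"))
    put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("change_machine_id"), Bytes.toBytes(value.change_machine_id.toString))
    put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("totalwarning"), Bytes.toBytes(value.totalwarning.toString))
    put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("window_end_time"), Bytes.toBytes(value.window_end_time))
    table.put(put)
  }
  override def close(): Unit = {
    if (table != null) table.close()
    if (hbaseConn != null) hbaseConn.close()
  }
}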