如有更佳的保存到 MySQL 的方法，欢迎私信或留言分享，相互学习~
依赖
<dependencies>
<!-- NOTE(review): all Flink artifacts are unified to version 1.10.2 with the
     _2.11 Scala binary suffix. The original mixed 1.10.1/1.10.2 and
     _2.11/_2.12, which places binary-incompatible classes on one classpath
     and fails at runtime with NoSuchMethodError / ClassNotFoundException. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_2.11</artifactId>
<version>1.10.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_2.11</artifactId>
<version>1.10.2</version>
</dependency>
<!-- Kafka 0.11 connector used as the source in this example. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.11_2.11</artifactId>
<version>1.10.2</version>
</dependency>
<!-- Bahir Redis sink connector (versioned independently of Flink). -->
<dependency>
<groupId>org.apache.bahir</groupId>
<artifactId>flink-connector-redis_2.11</artifactId>
<version>1.0</version>
</dependency>
<!-- JDBC driver needed by the custom MySQL sink below. -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.25</version>
</dependency>
<!-- was flink-statebackend-rocksdb_2.12:1.10.1 — wrong Scala binary version. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-statebackend-rocksdb_2.11</artifactId>
<version>1.10.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner_2.11</artifactId>
<version>1.10.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_2.11</artifactId>
<version>1.10.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-scala-bridge_2.11</artifactId>
<version>1.10.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-csv</artifactId>
<version>1.10.2</version>
</dependency>
</dependencies>
<build>
<plugins> <!-- scala-maven-plugin: compiles the Scala sources to class files -->
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.4.6</version>
<executions>
<execution> <!-- bind the plugin's compile goal to Maven's compile phase -->
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- maven-assembly-plugin: builds a fat jar (jar-with-dependencies) at package time -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
代码
import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
// Attach the custom JDBC sink (defined below) to the stream.
dataStream.addSink( new JDBCSink() )
// Extend RichSinkFunction to get open()/close() lifecycle hooks, so the JDBC
// connection and prepared statements are created once per parallel instance
// instead of once per record. (`输入的数据类型` is the tutorial's placeholder
// for your element type — it must expose `count`, `url` and `windowEnd`.)
class JDBCSink extends RichSinkFunction[输入的数据类型]{
  // JDBC connection plus the insert/update prepared statements,
  // all initialized in open() and released in close().
  var conn: Connection = _
  var insertStatement: PreparedStatement = _
  var updateStatement: PreparedStatement = _

  // Called once before any invoke(): open the connection and prepare both statements.
  override def open(parameters: Configuration): Unit = {
    conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/数据库", "账号", "密码")
    // BUGFIX: the original SQL contained the literal text "?(占位符)" inside the
    // statement, which is invalid SQL — the placeholder list must be "?, ?, ?".
    insertStatement = conn.prepareStatement("INSERT INTO 表名 VALUES (?, ?, ?);")
    updateStatement = conn.prepareStatement("UPDATE 表名 SET 字段 = ? WHERE 字段 = ?;")
  }

  // Release resources; null-guarded so a failed open() doesn't NPE here.
  override def close(): Unit = {
    if (insertStatement != null) insertStatement.close()
    if (updateStatement != null) updateStatement.close()
    if (conn != null) conn.close()
  }

  // Per-record upsert: try UPDATE first, INSERT when no row matched.
  override def invoke(value: 输入的数据类型, context: SinkFunction.Context[_]): Unit = {
    // BUGFIX: the UPDATE statement has exactly TWO '?' placeholders, but the
    // original bound a third parameter (setDouble(3, value.windowEnd)), which
    // throws SQLException "Parameter index out of range" on every record.
    updateStatement.setInt(1, value.count.toInt)
    updateStatement.setString(2, value.url)
    // executeUpdate() returns the affected-row count directly — clearer than
    // execute() followed by getUpdateCount.
    if (updateStatement.executeUpdate() == 0) {
      // No existing row matched: insert a fresh one.
      insertStatement.setDouble(1, value.windowEnd)
      insertStatement.setString(2, value.url)
      insertStatement.setInt(3, value.count.toInt)
      insertStatement.executeUpdate()
    }
  }
}