Exception in thread "main" org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:144)
at org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$3(MiniClusterJobClient.java:137)
at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:602)
at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:577)
at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
at org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$1(AkkaInvocationHandler.java:258)
at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
at org.apache.flink.util.concurrent.FutureUtils.doForward(FutureUtils.java:1389)
at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$null$1(ClassLoadingUtils.java:93)
at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$guardCompletionWithContextClassLoader$2(ClassLoadingUtils.java:92)
at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
at org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$1.onComplete(AkkaFutureUtils.java:47)
at akka.dispatch.OnComplete.internal(Future.scala:300)
at akka.dispatch.OnComplete.internal(Future.scala:297)
at akka.dispatch.japi$CallbackBridge.apply(Future.scala:224)
at akka.dispatch.japi$CallbackBridge.apply(Future.scala:221)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60)
at org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$DirectExecutionContext.execute(AkkaFutureUtils.java:65)
at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:68)
at scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:284)
at scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:284)
at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)
at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:621)
at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:24)
at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:23)
at scala.concurrent.Future.$anonfun$andThen$1(Future.scala:532)
at scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:29)
at scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:29)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60)
at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:63)
at akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:100)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:81)
at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:100)
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:49)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:48)
at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by FixedDelayRestartBackoffTimeStrategy(maxNumberRestartAttempts=3, backoffTimeMS=10000)
at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:252)
at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:242)
at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:233)
at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:684)
at org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:444)
at sun.reflect.GeneratedMethodAccessor13.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.lambda$handleRpcInvocation$1(AkkaRpcActor.java:316)
at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:83)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:314)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:217)
at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:78)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:163)
at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24)
at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20)
at scala.PartialFunction.applyOrElse(PartialFunction.scala:123)
at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122)
at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
at akka.actor.Actor.aroundReceive(Actor.scala:537)
at akka.actor.Actor.aroundReceive$(Actor.scala:535)
at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:580)
at akka.actor.ActorCell.invoke(ActorCell.scala:548)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270)
at akka.dispatch.Mailbox.run(Mailbox.scala:231)
at akka.dispatch.Mailbox.exec(Mailbox.scala:243)
... 4 more
Caused by: java.lang.RuntimeException: One or more fetchers have encountered exception
at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcherManager.checkErrors(SplitFetcherManager.java:225)
at org.apache.flink.connector.base.source.reader.SourceReaderBase.getNextFetch(SourceReaderBase.java:169)
at org.apache.flink.connector.base.source.reader.SourceReaderBase.pollNext(SourceReaderBase.java:130)
at org.apache.flink.streaming.api.operators.SourceOperator.emitNext(SourceOperator.java:354)
at org.apache.flink.streaming.runtime.io.StreamTaskSourceInput.emitNext(StreamTaskSourceInput.java:68)
at org.apache.flink.streaming.runtime.io.StreamOneInputProcessor.processInput(StreamOneInputProcessor.java:65)
at org.apache.flink.streaming.runtime.tasks.StreamTask.processInput(StreamTask.java:496)
at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:203)
at org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761)
at org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937)
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.RuntimeException: SplitFetcher thread 0 received unexpected exception while polling the records
at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.runOnce(SplitFetcher.java:150)
at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.run(SplitFetcher.java:105)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
... 1 more
Caused by: com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.errors.ConnectException: An exception occurred in the change event producer. This connector will be stopped.
at io.debezium.pipeline.ErrorHandler.setProducerThrowable(ErrorHandler.java:50)
at com.ververica.cdc.connectors.mysql.debezium.task.context.MySqlErrorHandler.setProducerThrowable(MySqlErrorHandler.java:85)
at io.debezium.connector.mysql.MySqlStreamingChangeEventSource$ReaderThreadLifecycleListener.onCommunicationFailure(MySqlStreamingChangeEventSource.java:1545)
at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:1079)
at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:631)
at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:932)
... 1 more
Caused by: io.debezium.DebeziumException: A slave with the same server_uuid/server_id as this slave has connected to the master; the first event '' at 4, the last event read from './mysql-bin.000001' at 482020, the last byte read from './mysql-bin.000001' at 482020. Error code: 1236; SQLSTATE: HY000.
The 'server-id' in the mysql cdc connector should be globally unique, but conflicts happen now.
The server id conflict may happen in the following situations:
1. The server id has been used by other mysql cdc table in the current job.
2. The server id has been used by the mysql cdc table in other jobs.
3. The server id has been used by other sync tools like canal, debezium and so on.
at io.debezium.connector.mysql.MySqlStreamingChangeEventSource.wrap(MySqlStreamingChangeEventSource.java:1489)
... 5 more
Caused by: com.github.shyiko.mysql.binlog.network.ServerException: A slave with the same server_uuid/server_id as this slave has connected to the master; the first event '' at 4, the last event read from './mysql-bin.000001' at 482020, the last byte read from './mysql-bin.000001' at 482020.
at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:1043)
... 3 more

I am now getting the error above. My calling code is:

package net.bwie.realtime.jtp.common.utils;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverterConfig;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import io.debezium.spi.converter.CustomConverter;
import io.debezium.spi.converter.RelationalColumn;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
//import org.apache.kafka.connect.data.SchemaBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
/**
* Utility for capturing MySQL table data in real time with Flink CDC.
* @author xuanyun
*/
public class MysqlCdcUtil {
/**
* Reads data with Flink CDC using the default deserialization, i.e. without
* any special handling for DECIMAL and DATETIME column types.
*/
public static DataStream<String> cdcMysqlRaw(StreamExecutionEnvironment env, String database, String table) throws UnknownHostException {
// a. source
MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
.hostname("node101")
.port(3306)
.databaseList(database)
.tableList(database + "." + table)
.username("root")
.password("123456")
.serverId(generateUniqueServerId())
.serverTimeZone("Asia/Shanghai")
.startupOptions(StartupOptions.earliest())
.deserializer(new JsonDebeziumDeserializationSchema())
.build();
// b. read data
DataStreamSource<String> stream = env.fromSource(
mySqlSource, WatermarkStrategy.noWatermarks(), "MysqlSource"
);
// c. return
return stream;
}
/**
* Pulls MySQL table data via Flink CDC, reading from the earliest binlog offset,
* with DECIMAL values deserialized in numeric form.
* @param env streaming execution environment
* @param database database name
* @param table table name
* @return data stream of JSON strings
*/
public static DataStream<String> cdcMysqlDeser(StreamExecutionEnvironment env, String database, String table) throws UnknownHostException {
// a. deserializer: DECIMAL values converted using the NUMERIC format
Map<String, Object> configs = new HashMap<>();
configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
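// with "numeric", the Debezium JSON converter writes DECIMAL columns as plain
// JSON numbers instead of the default Base64-encoded binary representation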
JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
// b. source
MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
.hostname("node101")
.port(3306)
.databaseList(database)
.tableList(database + "." + table)
.username("root")
.password("123456")
.serverId(generateUniqueServerId())
.serverTimeZone("Asia/Shanghai")
.startupOptions(StartupOptions.earliest())
.debeziumProperties(getDebeziumProperties())
.deserializer(schema)
.build();
// c. read data
DataStreamSource<String> stream = env.fromSource(
mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source"
);
// d. return
return stream;
}
/**
* Pulls MySQL table data via Flink CDC, reading from the earliest binlog offset.
* @param env streaming execution environment
* @param database database name
* @param tableList table names, more than one may be passed
* @return data stream of JSON strings
*/
public static DataStream<String> cdcMysqlEarliest(StreamExecutionEnvironment env, String database, String... tableList) throws UnknownHostException {
// a. deserializer
Map<String, Object> configs = new HashMap<>();
configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
StringBuilder buffer = new StringBuilder();
for (String table : tableList) {
buffer.append(database).append(".").append(table).append(",");
}
buffer.deleteCharAt(buffer.length() - 1);
// b. source
MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
.hostname("node101")
.port(3306)
.databaseList(database)
.tableList(buffer.toString())
.username("root")
.password("123456")
.serverId(generateUniqueServerId())
.serverTimeZone("Asia/Shanghai")
.startupOptions(StartupOptions.earliest())
.debeziumProperties(getDebeziumProperties())
.deserializer(schema)
.build();
// c. read data
DataStreamSource<String> stream = env.fromSource(
mySqlSource, WatermarkStrategy.noWatermarks(), "MysqlEarliestSource"
);
// d. return
return stream;
}
/**
* Pulls MySQL table data via Flink CDC, taking an initial snapshot of the tables
* and then reading the binlog from the snapshot position.
* @param env streaming execution environment
* @param database database name
* @param tableList table names, more than one may be passed
* @return data stream of JSON strings
*/
public static DataStream<String> cdcMysqlInitial(StreamExecutionEnvironment env, String database, String... tableList) throws UnknownHostException {
// a. deserializer
Map<String, Object> configs = new HashMap<>();
configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
StringBuilder buffer = new StringBuilder();
for (String table : tableList) {
buffer.append(database).append(".").append(table).append(",");
}
buffer.deleteCharAt(buffer.length() - 1);
// b. source
MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
.hostname("node101")
.port(3306)
.databaseList(database)
.tableList(buffer.toString())
.username("root")
.password("123456")
.serverId(generateUniqueServerId())
.serverTimeZone("Asia/Shanghai")
.startupOptions(StartupOptions.initial())
.debeziumProperties(getDebeziumProperties())
.deserializer(schema)
.build();
// c. read data
DataStreamSource<String> stream = env.fromSource(
mySqlSource, WatermarkStrategy.noWatermarks(), "MysqlInitialSource"
);
// d. return
return stream;
}
/**
* Pulls MySQL table data via Flink CDC for every table in the database,
* reading from the latest binlog offset.
* @param env streaming execution environment
* @param database database name
* @return data stream of JSON strings
*/
public static DataStream<String> cdcMysql(StreamExecutionEnvironment env, String database) throws UnknownHostException {
// a. deserializer
Map<String, Object> configs = new HashMap<>();
configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
// b. source
MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
.hostname("node101")
.port(3306)
.databaseList(database)
// match every table in the database; an argument-less tableList() sets an
// empty include list and may capture nothing
.tableList(database + ".*")
.username("root")
.password("123456")
.serverId(generateUniqueServerId())
.serverTimeZone("Asia/Shanghai")
.startupOptions(StartupOptions.latest())
.debeziumProperties(getDebeziumProperties())
.deserializer(schema)
.build();
// c. read data
DataStreamSource<String> stream = env.fromSource(
mySqlSource, WatermarkStrategy.noWatermarks(), "MysqlLatestSource"
);
// d. return
return stream;
}
/**
* Pulls MySQL table data via Flink CDC, reading from the latest binlog offset.
* @param env streaming execution environment
* @param database database name
* @param tableList table names, more than one may be passed
* @return data stream of JSON strings
*/
public static DataStream<String> cdcMysql(StreamExecutionEnvironment env, String database, String... tableList) throws UnknownHostException {
// a. deserializer
Map<String, Object> configs = new HashMap<>();
configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
StringBuilder buffer = new StringBuilder();
for (String table : tableList) {
buffer.append(database).append(".").append(table).append(",");
}
buffer.deleteCharAt(buffer.length() - 1);
// b. source
MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
.hostname("node101")
.port(3306)
.databaseList(database)
.tableList(buffer.toString())
.username("root")
.password("123456")
.serverId(generateUniqueServerId())
.serverTimeZone("Asia/Shanghai")
.startupOptions(StartupOptions.latest())
.debeziumProperties(getDebeziumProperties())
.deserializer(schema)
.build();
// c. read data
DataStreamSource<String> stream = env.fromSource(
mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source"
);
// d. return
return stream;
}
private static Properties getDebeziumProperties(){
Properties properties = new Properties();
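// Debezium convention: "converters" registers a logical converter name; the class
// given in "<name>.type" then receives the remaining "<name>.*" keys, with the
// prefix stripped, in its configure(Properties) callback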
properties.setProperty("converters", "dateConverters");
properties.setProperty("dateConverters.type", MySqlDateTimeConverter.class.getName());
properties.setProperty("dateConverters.format.date", "yyyy-MM-dd");
properties.setProperty("dateConverters.format.time", "HH:mm:ss");
properties.setProperty("dateConverters.format.datetime", "yyyy-MM-dd HH:mm:ss");
properties.setProperty("dateConverters.format.timestamp", "yyyy-MM-dd HH:mm:ss");
properties.setProperty("dateConverters.format.timestamp.zone", "UTC+8");
return properties;
}
/**
* Custom date/time conversion configuration.
*/
public static class MySqlDateTimeConverter implements CustomConverter<SchemaBuilder, RelationalColumn> {
private final static Logger logger = LoggerFactory.getLogger(MySqlDateTimeConverter.class);
private DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_DATE;
private DateTimeFormatter timeFormatter = DateTimeFormatter.ISO_TIME;
private DateTimeFormatter datetimeFormatter = DateTimeFormatter.ISO_DATE_TIME;
private DateTimeFormatter timestampFormatter = DateTimeFormatter.ISO_DATE_TIME;
private ZoneId timestampZoneId = ZoneId.systemDefault();
@Override
public void configure(Properties props) {
readProps(props, "format.date", p -> dateFormatter = DateTimeFormatter.ofPattern(p));
readProps(props, "format.time", p -> timeFormatter = DateTimeFormatter.ofPattern(p));
readProps(props, "format.datetime", p -> datetimeFormatter = DateTimeFormatter.ofPattern(p));
readProps(props, "format.timestamp", p -> timestampFormatter = DateTimeFormatter.ofPattern(p));
readProps(props, "format.timestamp.zone", z -> timestampZoneId = ZoneId.of(z));
}
private void readProps(Properties properties, String settingKey, Consumer<String> callback) {
String settingValue = (String) properties.get(settingKey);
if (settingValue == null || settingValue.isEmpty()) {
return;
}
try {
callback.accept(settingValue.trim());
} catch (IllegalArgumentException | DateTimeException e) {
logger.error("The {} setting is illegal: {}", settingKey, settingValue);
throw e;
}
}
@Override
public void converterFor(RelationalColumn column, ConverterRegistration<SchemaBuilder> registration) {
String sqlType = column.typeName().toUpperCase();
SchemaBuilder schemaBuilder = null;
Converter converter = null;
if ("DATE".equals(sqlType)) {
schemaBuilder = SchemaBuilder.string().optional().name("debezium.date.string");
converter = this::convertDate;
}
if ("TIME".equals(sqlType)) {
schemaBuilder = SchemaBuilder.string().optional().name("debezium.date.string");
converter = this::convertTime;
}
if ("DATETIME".equals(sqlType)) {
schemaBuilder = SchemaBuilder.string().optional().name("debezium.date.string");
converter = this::convertDateTime;
}
if ("TIMESTAMP".equals(sqlType)) {
schemaBuilder = SchemaBuilder.string().optional().name("debezium.date.string");
converter = this::convertTimestamp;
}
if (schemaBuilder != null) {
registration.register(schemaBuilder, converter);
}
}
private String convertDate(Object input) {
if (input == null) return null;
if (input instanceof LocalDate) {
return dateFormatter.format((LocalDate) input);
}
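// the binlog client may also deliver DATE as an int holding the epoch-day count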
if (input instanceof Integer) {
LocalDate date = LocalDate.ofEpochDay((Integer) input);
return dateFormatter.format(date);
}
return String.valueOf(input);
}
private String convertTime(Object input) {
if (input == null) return null;
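// Debezium hands MySQL TIME values over as a java.time.Duration since midnight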
if (input instanceof Duration) {
Duration duration = (Duration) input;
long seconds = duration.getSeconds();
int nano = duration.getNano();
LocalTime time = LocalTime.ofSecondOfDay(seconds).withNano(nano);
return timeFormatter.format(time);
}
return String.valueOf(input);
}
private String convertDateTime(Object input) {
if (input == null) return null;
if (input instanceof LocalDateTime) {
return datetimeFormatter.format((LocalDateTime) input).replaceAll("T", " ");
}
return String.valueOf(input);
}
private String convertTimestamp(Object input) {
if (input == null) return null;
if (input instanceof ZonedDateTime) {
// MySQL stores TIMESTAMP values in UTC, so the ZonedDateTime received here is in UTC
ZonedDateTime zonedDateTime = (ZonedDateTime) input;
LocalDateTime localDateTime = zonedDateTime.withZoneSameInstant(timestampZoneId).toLocalDateTime();
return timestampFormatter.format(localDateTime).replaceAll("T", " ");
}
return String.valueOf(input);
}
}
// ...
// monotonically increasing suffix so that two sources built within the same
// second on the same host still get different server ids
private static final AtomicInteger SERVER_ID_COUNTER = new AtomicInteger();
private static String generateUniqueServerId() {
long baseTimestamp = System.currentTimeMillis() / 1000;
String hostName;
try {
hostName = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
hostName = "unknown";
}
// derive the server id from hostname + timestamp + a per-JVM counter; hostname plus
// a second-resolution timestamp alone collides when several sources are created in
// the same second, which is exactly the duplicate server-id error (1236) shown above
int hash = (hostName + baseTimestamp + "#" + SERVER_ID_COUNTER.getAndIncrement()).hashCode();
// mask to a positive value and keep it within a sane range
return String.valueOf((hash & Integer.MAX_VALUE) % 100000000 + 5);
}
}
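The root cause in the trace is MySQL error 1236: two binlog readers registered with the same server id. Hashing hostname plus a second-resolution timestamp, as generateUniqueServerId() originally did, collides whenever several sources are built in the same second; the counter added above fixes that within one JVM, but ids must also stay unique across jobs and across other tools (canal, a standalone Debezium, and so on). Note as well that the incremental-snapshot MySqlSource expects a server-id range covering one id per parallel reader. Below is a minimal sketch of one way to hand out non-overlapping ranges; the class name and the BASE value are illustrative, and the base still has to be coordinated per deployment (e.g. via configuration), since a JVM-local counter cannot see other processes.

import java.util.concurrent.atomic.AtomicInteger;

/** Sketch: hands out non-overlapping MySQL server-id ranges within one JVM. */
public class ServerIdAllocator {
    // hypothetical base; choose it per deployment so different jobs never overlap
    private static final int BASE = 5400;
    private static final AtomicInteger NEXT = new AtomicInteger(BASE);

    /** Returns a range such as "5400-5403", one id per parallel reader. */
    public static String nextRange(int parallelism) {
        int start = NEXT.getAndAdd(parallelism);
        return start + "-" + (start + parallelism - 1);
    }
}

The builder accepts such a range directly, e.g. .serverId(ServerIdAllocator.nextRange(env.getParallelism())), so each reader subtask registers with its own id and no other job or tool may reuse ids from that range.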