Fixing java.lang.IllegalArgumentException at java.sql.Date.valueOf(Unknown Source)

This article describes a date-parameter exception commonly seen when a Java server processes data submitted by a client, and gives concrete fixes for both Java and C# clients.


When a client submits data to the server, server-side processing often fails with the error below. It took a long time of head-scratching to find the real cause.

SEVERE: Fault occurred!
java.lang.IllegalArgumentException
 at java.sql.Date.valueOf(Unknown Source)

 at com.yjt.dao.AssetsDao.insertMsgOne(AssetsDao.java:202)
 at com.yjt.service.InventoryImpl.InsertDataOne(InventoryImpl.java:27)
 at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
 at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
 at java.lang.reflect.Method.invoke(Unknown Source)
 at org.codehaus.xfire.service.invoker.AbstractInvoker.invoke(AbstractInvoker.java:59)
 at org.codehaus.xfire.service.invoker.ObjectInvoker.invoke(ObjectInvoker.java:45)
 at org.codehaus.xfire.service.binding.ServiceInvocationHandler.sendMessage(ServiceInvocationHandler.java:320)
 at org.codehaus.xfire.service.binding.ServiceInvocationHandler$1.run(ServiceInvocationHandler.java:86)
 at org.codehaus.xfire.service.binding.ServiceInvocationHandler.execute(ServiceInvocationHandler.java:134)
 at org.codehaus.xfire.service.binding.ServiceInvocationHandler.invoke(ServiceInvocationHandler.java:109)
 at org.codehaus.xfire.handler.HandlerPipeline.invoke(HandlerPipeline.java:131)
 at org.codehaus.xfire.transport.DefaultEndpoint.onReceive(DefaultEndpoint.java:64)
 at org.codehaus.xfire.transport.AbstractChannel.receive(AbstractChannel.java:38)
 at org.codehaus.xfire.transport.http.XFireServletController.invoke(XFireServletController.java:304)
 at org.codehaus.xfire.transport.http.XFireServletController.doService(XFireServletController.java:129)
 at org.codehaus.xfire.transport.http.XFireServlet.doPost(XFireServlet.java:116)
 at javax.servlet.http.HttpServlet.service(HttpServlet.java:637)
 at javax.servlet.http.HttpServlet.service(HttpServlet.java:717)
 at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:290)
 at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:206)
 at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:233)
 at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:191)
 at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:127)
 at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:102)
 at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:109)
 at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:291)
 at org.apache.coyote.http11.Http11Processor.process(Http11Processor.java:859)
 at org.apache.coyote.http11.Http11Protocol$Http11ConnectionHandler.process(Http11Protocol.java:602)
 at org.apache.tomcat.util.net.JIoEndpoint$Worker.run(JIoEndpoint.java:489)
 at java.lang.Thread.run(Unknown Source)

Analysis: the incoming parameter does not match the required format, so the conversion to the java.sql.Date type fails.

About java.sql.Date.valueOf(String date):

Here date must be a date string of the form "yyyy-MM-dd".
The API documentation describes the method as follows:

public static Date valueOf(String s) — converts a string in JDBC date escape format to a Date value.

Parameters:
s - a String object representing a date in the form "yyyy-mm-dd"
Returns:
a java.sql.Date object representing the given date
Throws:
IllegalArgumentException - if the given date is not in the JDBC date escape format (yyyy-mm-dd)
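
To see the contract in action, a minimal sketch (the literal dates here are just illustrations):

import java.sql.Date;

Date ok  = Date.valueOf("2013-05-28"); // JDBC date escape form: parses fine
Date bad = Date.valueOf("28/05/2013"); // not yyyy-MM-dd: throws IllegalArgumentException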

Once you know this, the fix is obvious: make sure the client always sends the date as a "yyyy-MM-dd" string.

For a date parameter sent from a Java client, format it before submitting:

import java.text.SimpleDateFormat;
import java.util.Date;

SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd"); // define the target format
Date dt = new Date();        // the java.util.Date value to send
String date = df.format(dt); // e.g. "2013-05-28"

Only then pass the formatted date string to the server.

For a date parameter sent from a C# client, do the same:

 DateTime cgsj = CGSJDatePicker.Value;  // value from the date picker control
 string format = "yyyy-MM-dd";
 string tm = cgsj.ToString(format);     // e.g. "2013-05-28"

and then pass the formatted string to the server.
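
On the server side it is also worth rejecting malformed input before calling Date.valueOf, so that a bad payload fails fast with a clear message instead of the bare stack trace above. A minimal sketch, with illustrative names (DateParam and parseOrFail are not from the original project, and the regex is deliberately stricter than valueOf itself):

import java.sql.Date;

public final class DateParam {
    // Parses a client-supplied date string, rejecting anything not shaped like yyyy-MM-dd.
    public static Date parseOrFail(String s) {
        if (s == null || !s.matches("\\d{4}-\\d{2}-\\d{2}")) {
            throw new IllegalArgumentException("expected yyyy-MM-dd, got: " + s);
        }
        return Date.valueOf(s); // now guaranteed to be in JDBC date escape form
    }
}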
