RuntimeException - Unmarshalling unknown type code **** at offset ****

This post analyzes a runtime exception, and its stack trace, that appears in an Android app after ProGuard obfuscation: deserialization of a Parcel fails with unknown type code 28 at offset 524. Possible fixes are discussed below.

http://proguard.sourceforge.net/#manual/retrace/examples.html

http://stackoverflow.com/questions/21342700/proguard-causing-runtimeexception-unmarshalling-unknown-type-code-in-parcelabl

06-21 12:47:52.445: E/AndroidRuntime(15426): FATAL EXCEPTION: main
06-21 12:47:52.445: E/AndroidRuntime(15426): Process: com.sohutv.tv, PID: 15426
06-21 12:47:52.445: E/AndroidRuntime(15426): java.lang.RuntimeException: Unable to start activity ComponentInfo{com.sohutv.tv/com.sohutv.tv.work.classification.activity.CategoryVideoListActivity}: java.lang.RuntimeException: Parcel android.os.Parcel@4194dbe8: Unmarshalling unknown type code 28 at offset 524
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2195)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2245)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.ActivityThread.access$800(ActivityThread.java:135)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1196)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Handler.dispatchMessage(Handler.java:102)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Looper.loop(Looper.java:136)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.ActivityThread.main(ActivityThread.java:5017)
06-21 12:47:52.445: E/AndroidRuntime(15426): at java.lang.reflect.Method.invokeNative(Native Method)
06-21 12:47:52.445: E/AndroidRuntime(15426): at java.lang.reflect.Method.invoke(Method.java:515)
06-21 12:47:52.445: E/AndroidRuntime(15426): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:787)
06-21 12:47:52.445: E/AndroidRuntime(15426): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:603)
06-21 12:47:52.445: E/AndroidRuntime(15426): at dalvik.system.NativeStart.main(Native Method)
06-21 12:47:52.445: E/AndroidRuntime(15426): Caused by: java.lang.RuntimeException: Parcel android.os.Parcel@4194dbe8: Unmarshalling unknown type code 28 at offset 524
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Parcel.readValue(Parcel.java:2080)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Parcel.readSparseArrayInternal(Parcel.java:2363)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Parcel.readSparseArray(Parcel.java:1735)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Parcel.readValue(Parcel.java:2070)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Parcel.readArrayMapInternal(Parcel.java:2314)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Bundle.unparcel(Bundle.java:249)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.os.Bundle.getSparseParcelableArray(Bundle.java:1273)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.FragmentManagerImpl.moveToState(FragmentManager.java:827)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.FragmentManagerImpl.moveToState(FragmentManager.java:1062)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.FragmentManagerImpl.moveToState(FragmentManager.java:1044)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.FragmentManagerImpl.dispatchCreate(FragmentManager.java:1848)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.Activity.onCreate(Activity.java:902)
06-21 12:47:52.445: E/AndroidRuntime(15426): at com.sohutv.tv.activity.SohuFragmentActivity.onCreate(SohuFragmentActivity.java:30)
06-21 12:47:52.445: E/AndroidRuntime(15426): at com.sohutv.tv.activity.BaseActivity.onCreate(BaseActivity.java:70)
06-21 12:47:52.445: E/AndroidRuntime(15426): at com.sohutv.tv.work.classification.activity.CategoryVideoListActivity.onCreate(CategoryVideoListActivity.java:48)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.Activity.performCreate(Activity.java:5273)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1101)
06-21 12:47:52.445: E/AndroidRuntime(15426): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2159)
06-21 12:47:52.445: E/AndroidRuntime(15426): ... 11 more 
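
Running this trace through ReTrace with the build's mapping.txt (see the ProGuard manual link above) resolves the obfuscated frames. A likely root cause, per the linked Stack Overflow question, is that ProGuard renames or strips members of Parcelable classes, so the bytes written by writeToParcel() no longer line up with what Parcel.readValue() expects and a stray int (here 28) is read as a type code. The commonly cited remedy is to exempt Parcelable implementations and their CREATOR fields from obfuscation; a minimal proguard.cfg sketch (the rule is the one from the linked thread, adapt to your project):

    -keep class * implements android.os.Parcelable {
        public static final android.os.Parcelable$Creator *;
    }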


Exception in thread "main" org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
    at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:144)
    at org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$3(MiniClusterJobClient.java:137)
    at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:602)
    at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:577)
    at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
    at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
    at org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$1(AkkaInvocationHandler.java:258)
    at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
    at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
    at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
    at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
    at org.apache.flink.util.concurrent.FutureUtils.doForward(FutureUtils.java:1389)
    at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$null$1(ClassLoadingUtils.java:93)
    at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
    at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$guardCompletionWithContextClassLoader$2(ClassLoadingUtils.java:92)
    at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
    at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
    at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
    at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
    at org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$1.onComplete(AkkaFutureUtils.java:47)
    at akka.dispatch.OnComplete.internal(Future.scala:300)
    at akka.dispatch.OnComplete.internal(Future.scala:297)
    at akka.dispatch.japi$CallbackBridge.apply(Future.scala:224)
    at akka.dispatch.japi$CallbackBridge.apply(Future.scala:221)
    at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60)
    at org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$DirectExecutionContext.execute(AkkaFutureUtils.java:65)
    at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:68)
    at scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:284)
    at scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:284)
    at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)
    at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:621)
    at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:24)
    at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:23)
    at scala.concurrent.Future.$anonfun$andThen$1(Future.scala:532)
    at scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:29)
    at scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:29)
    at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60)
    at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:63)
    at akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:100)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
    at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:81)
    at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:100)
    at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:49)
    at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:48)
    at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
    at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
    at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
    at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by FixedDelayRestartBackoffTimeStrategy(maxNumberRestartAttempts=3, backoffTimeMS=10000)
    at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
    at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:252)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:242)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:233)
    at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:684)
    at org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
    at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:444)
    at sun.reflect.GeneratedMethodAccessor13.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.lambda$handleRpcInvocation$1(AkkaRpcActor.java:316)
    at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:83)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:314)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:217)
    at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:78)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:163)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20)
    at scala.PartialFunction.applyOrElse(PartialFunction.scala:123)
    at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122)
    at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
    at akka.actor.Actor.aroundReceive(Actor.scala:537)
    at akka.actor.Actor.aroundReceive$(Actor.scala:535)
    at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:580)
    at akka.actor.ActorCell.invoke(ActorCell.scala:548)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270)
    at akka.dispatch.Mailbox.run(Mailbox.scala:231)
    at akka.dispatch.Mailbox.exec(Mailbox.scala:243)
    ... 4 more
Caused by: java.lang.RuntimeException: One or more fetchers have encountered exception
    at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcherManager.checkErrors(SplitFetcherManager.java:225)
    at org.apache.flink.connector.base.source.reader.SourceReaderBase.getNextFetch(SourceReaderBase.java:169)
    at org.apache.flink.connector.base.source.reader.SourceReaderBase.pollNext(SourceReaderBase.java:130)
    at org.apache.flink.streaming.api.operators.SourceOperator.emitNext(SourceOperator.java:354)
    at org.apache.flink.streaming.runtime.io.StreamTaskSourceInput.emitNext(StreamTaskSourceInput.java:68)
    at org.apache.flink.streaming.runtime.io.StreamOneInputProcessor.processInput(StreamOneInputProcessor.java:65)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.processInput(StreamTask.java:496)
    at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:203)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761)
    at org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
    at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937)
    at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766)
    at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.RuntimeException: SplitFetcher thread 0 received unexpected exception while polling the records
    at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.runOnce(SplitFetcher.java:150)
    at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.run(SplitFetcher.java:105)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    ... 1 more
Caused by: com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.errors.ConnectException: An exception occurred in the change event producer. This connector will be stopped.
    at io.debezium.pipeline.ErrorHandler.setProducerThrowable(ErrorHandler.java:50)
    at com.ververica.cdc.connectors.mysql.debezium.task.context.MySqlErrorHandler.setProducerThrowable(MySqlErrorHandler.java:85)
    at io.debezium.connector.mysql.MySqlStreamingChangeEventSource$ReaderThreadLifecycleListener.onCommunicationFailure(MySqlStreamingChangeEventSource.java:1545)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:1079)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:631)
    at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:932)
    ... 1 more
Caused by: io.debezium.DebeziumException: A slave with the same server_uuid/server_id as this slave has connected to the master; the first event '' at 4, the last event read from './mysql-bin.000001' at 482020, the last byte read from './mysql-bin.000001' at 482020. Error code: 1236; SQLSTATE: HY000. The 'server-id' in the mysql cdc connector should be globally unique, but conflicts happen now. The server id conflict may happen in the following situations: 1. The server id has been used by other mysql cdc table in the current job. 2. The server id has been used by the mysql cdc table in other jobs. 3. The server id has been used by other sync tools like canal, debezium and so on.
    at io.debezium.connector.mysql.MySqlStreamingChangeEventSource.wrap(MySqlStreamingChangeEventSource.java:1489)
    ... 5 more
Caused by: com.github.shyiko.mysql.binlog.network.ServerException: A slave with the same server_uuid/server_id as this slave has connected to the master; the first event '' at 4, the last event read from './mysql-bin.000001' at 482020, the last byte read from './mysql-bin.000001' at 482020.
    at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:1043)
    ... 3 more

I am now getting the error above; my calling code is:

package net.bwie.realtime.jtp.common.utils;

import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverterConfig;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import io.debezium.spi.converter.CustomConverter;
import io.debezium.spi.converter.RelationalColumn;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.function.Consumer;

/**
 * Captures MySQL table data in real time with Flink CDC.
 * @author xuanyun
 */
public class MysqlCdcUtil {

    /**
     * Reads with Flink CDC without any special deserialization settings,
     * i.e. no handling for DECIMAL and DATETIME columns.
     */
    public static DataStream<String> cdcMysqlRaw(StreamExecutionEnvironment env, String database, String table)
            throws UnknownHostException {
        // a. Source
        MySqlSource<String> mysqlSource = MySqlSource.<String>builder()
                .hostname("node101")
                .port(3306)
                .databaseList(database)
                .tableList(database + "." + table)
                .username("root")
                .password("123456")
                .serverId(generateUniqueServerId())
                .serverTimeZone("Asia/Shanghai")
                .startupOptions(StartupOptions.earliest())
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();
        // b. Read the data
        DataStreamSource<String> stream = env.fromSource(
                mysqlSource, WatermarkStrategy.noWatermarks(), "MysqlSource");
        // c. Return
        return stream;
    }

    /**
     * Pulls MySQL table data with Flink CDC, reading from the earliest binlog offset.
     * @param env      streaming execution environment
     * @param database database name
     * @param table    table name
     * @return data stream of JSON strings
     */
    public static DataStream<String> cdcMysqlDeser(StreamExecutionEnvironment env, String database, String table)
            throws UnknownHostException {
        // a. Deserialization: emit DECIMAL values as plain numbers
        Map<String, Object> configs = new HashMap<>();
        configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
        JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
        // b. Source
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("node101")
                .port(3306)
                .databaseList(database)
                .tableList(database + "." + table)
                .username("root")
                .password("123456")
                .serverId(generateUniqueServerId())
                .serverTimeZone("Asia/Shanghai")
                .startupOptions(StartupOptions.earliest())
                .debeziumProperties(getDebeziumProperties())
                .deserializer(schema)
                .build();
        // c. Read the data
        DataStreamSource<String> stream = env.fromSource(
                mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source");
        // d. Return
        return stream;
    }

    /**
     * Pulls MySQL table data with Flink CDC, reading from the earliest binlog offset.
     * @param env       streaming execution environment
     * @param database  database name
     * @param tableList one or more table names
     * @return data stream of JSON strings
     */
    public static DataStream<String> cdcMysqlEarliest(StreamExecutionEnvironment env, String database, String... tableList)
            throws UnknownHostException {
        // a. Deserialization
        Map<String, Object> configs = new HashMap<>();
        configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
        JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
        StringBuffer buffer = new StringBuffer();
        for (String table : tableList) {
            buffer.append(database).append(".").append(table).append(",");
        }
        buffer = buffer.deleteCharAt(buffer.length() - 1);
        // b. Source
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("node101")
                .port(3306)
                .databaseList(database)
                .tableList(buffer.toString())
                .username("root")
                .password("123456")
                .serverId(generateUniqueServerId())
                .serverTimeZone("Asia/Shanghai")
                .startupOptions(StartupOptions.earliest())
                .debeziumProperties(getDebeziumProperties())
                .deserializer(schema)
                .build();
        // c. Read the data
        DataStreamSource<String> stream = env.fromSource(
                mySqlSource, WatermarkStrategy.noWatermarks(), "MysqlEarliestSource");
        // d. Return
        return stream;
    }

    /**
     * Pulls MySQL table data with Flink CDC, taking an initial snapshot and then reading the binlog.
     * @param env       streaming execution environment
     * @param database  database name
     * @param tableList one or more table names
     * @return data stream of JSON strings
     */
    public static DataStream<String> cdcMysqlInitial(StreamExecutionEnvironment env, String database, String... tableList)
            throws UnknownHostException {
        // a. Deserialization
        Map<String, Object> configs = new HashMap<>();
        configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
        JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
        StringBuffer buffer = new StringBuffer();
        for (String table : tableList) {
            buffer.append(database).append(".").append(table).append(",");
        }
        buffer = buffer.deleteCharAt(buffer.length() - 1);
        // b. Source
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("node101")
                .port(3306)
                .databaseList(database)
                .tableList(buffer.toString())
                .username("root")
                .password("123456")
                .serverId(generateUniqueServerId())
                .serverTimeZone("Asia/Shanghai")
                .startupOptions(StartupOptions.initial())
                .debeziumProperties(getDebeziumProperties())
                .deserializer(schema)
                .build();
        // c. Read the data
        DataStreamSource<String> stream = env.fromSource(
                mySqlSource, WatermarkStrategy.noWatermarks(), "MysqlInitialSource");
        // d. Return
        return stream;
    }

    /**
     * Pulls MySQL table data with Flink CDC, reading from the latest binlog offset.
     * @param env      streaming execution environment
     * @param database database name
     * @return data stream of JSON strings
     */
    public static DataStream<String> cdcMysql(StreamExecutionEnvironment env, String database)
            throws UnknownHostException {
        // a. Deserialization
        Map<String, Object> configs = new HashMap<>();
        configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
        JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
        // b. Source
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("node101")
                .port(3306)
                .databaseList(database)
                .tableList()
                .username("root")
                .password("123456")
                .serverId(generateUniqueServerId())
                .serverTimeZone("Asia/Shanghai")
                .startupOptions(StartupOptions.latest())
                .debeziumProperties(getDebeziumProperties())
                .deserializer(schema)
                .build();
        // c. Read the data
        DataStreamSource<String> stream = env.fromSource(
                mySqlSource, WatermarkStrategy.noWatermarks(), "MysqlLatestSource");
        // d. Return
        return stream;
    }

    /**
     * Pulls MySQL table data with Flink CDC, reading from the latest binlog offset.
     * @param env       streaming execution environment
     * @param database  database name
     * @param tableList one or more table names
     * @return data stream of JSON strings
     */
    public static DataStream<String> cdcMysql(StreamExecutionEnvironment env, String database, String... tableList)
            throws UnknownHostException {
        // a. Deserialization
        Map<String, Object> configs = new HashMap<>();
        configs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
        JsonDebeziumDeserializationSchema schema = new JsonDebeziumDeserializationSchema(false, configs);
        StringBuffer buffer = new StringBuffer();
        for (String table : tableList) {
            buffer.append(database).append(".").append(table).append(",");
        }
        buffer = buffer.deleteCharAt(buffer.length() - 1);
        // b. Source
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("node101")
                .port(3306)
                .databaseList(database)
                .tableList(buffer.toString())
                .username("root")
                .password("123456")
                .serverId(generateUniqueServerId())
                .serverTimeZone("Asia/Shanghai")
                .startupOptions(StartupOptions.latest())
                .debeziumProperties(getDebeziumProperties())
                .deserializer(schema)
                .build();
        // c. Read the data
        DataStreamSource<String> stream = env.fromSource(
                mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source");
        // d. Return
        return stream;
    }

    private static Properties getDebeziumProperties() {
        Properties properties = new Properties();
        properties.setProperty("converters", "dateConverters");
        properties.setProperty("dateConverters.type", MySqlDateTimeConverter.class.getName());
        properties.setProperty("dateConverters.format.date", "yyyy-MM-dd");
        properties.setProperty("dateConverters.format.time", "HH:mm:ss");
        properties.setProperty("dateConverters.format.datetime", "yyyy-MM-dd HH:mm:ss");
        properties.setProperty("dateConverters.format.timestamp", "yyyy-MM-dd HH:mm:ss");
        properties.setProperty("dateConverters.format.timestamp.zone", "UTC+8");
        return properties;
    }

    /**
     * Custom date/time conversion configuration.
     */
    public static class MySqlDateTimeConverter implements CustomConverter<SchemaBuilder, RelationalColumn> {

        private final static Logger logger = LoggerFactory.getLogger(MySqlDateTimeConverter.class);

        private DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_DATE;
        private DateTimeFormatter timeFormatter = DateTimeFormatter.ISO_TIME;
        private DateTimeFormatter datetimeFormatter = DateTimeFormatter.ISO_DATE_TIME;
        private DateTimeFormatter timestampFormatter = DateTimeFormatter.ISO_DATE_TIME;
        private ZoneId timestampZoneId = ZoneId.systemDefault();

        @Override
        public void configure(Properties props) {
            readProps(props, "format.date", p -> dateFormatter = DateTimeFormatter.ofPattern(p));
            readProps(props, "format.time", p -> timeFormatter = DateTimeFormatter.ofPattern(p));
            readProps(props, "format.datetime", p -> datetimeFormatter = DateTimeFormatter.ofPattern(p));
            readProps(props, "format.timestamp", p -> timestampFormatter = DateTimeFormatter.ofPattern(p));
            readProps(props, "format.timestamp.zone", z -> timestampZoneId = ZoneId.of(z));
        }

        private void readProps(Properties properties, String settingKey, Consumer<String> callback) {
            String settingValue = (String) properties.get(settingKey);
            if (settingValue == null || settingValue.isEmpty()) {
                return;
            }
            try {
                callback.accept(settingValue.trim());
            } catch (IllegalArgumentException | DateTimeException e) {
                logger.error("The {} setting is illegal: {}", settingKey, settingValue);
                throw e;
            }
        }

        @Override
        public void converterFor(RelationalColumn column, ConverterRegistration<SchemaBuilder> registration) {
            String sqlType = column.typeName().toUpperCase();
            SchemaBuilder schemaBuilder = null;
            Converter converter = null;
            if ("DATE".equals(sqlType)) {
                schemaBuilder = SchemaBuilder.string().optional().name("debezium.date.string");
                converter = this::convertDate;
            }
            if ("TIME".equals(sqlType)) {
                schemaBuilder = SchemaBuilder.string().optional().name("debezium.date.string");
                converter = this::convertTime;
            }
            if ("DATETIME".equals(sqlType)) {
                schemaBuilder = SchemaBuilder.string().optional().name("debezium.date.string");
                converter = this::convertDateTime;
            }
            if ("TIMESTAMP".equals(sqlType)) {
                schemaBuilder = SchemaBuilder.string().optional().name("debezium.date.string");
                converter = this::convertTimestamp;
            }
            if (schemaBuilder != null) {
                registration.register(schemaBuilder, converter);
            }
        }

        private String convertDate(Object input) {
            if (input == null) return null;
            if (input instanceof LocalDate) {
                return dateFormatter.format((LocalDate) input);
            }
            if (input instanceof Integer) {
                LocalDate date = LocalDate.ofEpochDay((Integer) input);
                return dateFormatter.format(date);
            }
            return String.valueOf(input);
        }

        private String convertTime(Object input) {
            if (input == null) return null;
            if (input instanceof Duration) {
                Duration duration = (Duration) input;
                long seconds = duration.getSeconds();
                int nano = duration.getNano();
                LocalTime time = LocalTime.ofSecondOfDay(seconds).withNano(nano);
                return timeFormatter.format(time);
            }
            return String.valueOf(input);
        }

        private String convertDateTime(Object input) {
            if (input == null) return null;
            if (input instanceof LocalDateTime) {
                return datetimeFormatter.format((LocalDateTime) input).replaceAll("T", " ");
            }
            return String.valueOf(input);
        }

        private String convertTimestamp(Object input) {
            if (input == null) return null;
            if (input instanceof ZonedDateTime) {
                // MySQL stores TIMESTAMP as UTC; the ZonedDateTime here is in UTC
                ZonedDateTime zonedDateTime = (ZonedDateTime) input;
                LocalDateTime localDateTime = zonedDateTime.withZoneSameInstant(timestampZoneId).toLocalDateTime();
                return timestampFormatter.format(localDateTime).replaceAll("T", " ");
            }
            return String.valueOf(input);
        }
    }

    // ...

    private static String generateUniqueServerId() throws UnknownHostException {
        long baseTimestamp = System.currentTimeMillis() / 1000;
        String hostName;
        try {
            hostName = InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException e) {
            hostName = "unknown";
        }
        // Derive a "unique" serverId from hostname + timestamp
        int hash = (hostName + baseTimestamp).hashCode();
        // Keep it positive and within a reasonable range
        return String.valueOf(Math.abs(hash) % 100000000 + 5);
    }
}
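
The DebeziumException above already names the cause: every client that reads the MySQL binlog must present a globally unique server-id, and generateUniqueServerId() hands out a single hash-derived id that can collide across jobs started in the same second, with other sync tools (canal, standalone Debezium), or with real replicas, and it covers only one reader even when the source runs with parallelism greater than one. A common remedy is to give each source an explicit, disjoint server-id range at least as wide as the source parallelism. A minimal sketch of the builder call, assuming 5400-5499 is a range no other binlog client on node101 uses (the range is an assumption, not a value from the post):

// Sketch: reserve a disjoint server-id range per job; the MySQL CDC
// builder accepts a "start-end" string and assigns one id per reader.
MySqlSource<String> source = MySqlSource.<String>builder()
        .hostname("node101")
        .port(3306)
        .databaseList(database)
        .tableList(database + "." + table)
        .username("root")
        .password("123456")
        .serverId("5400-5499")   // assumed free range, wider than the parallelism
        .serverTimeZone("Asia/Shanghai")
        .startupOptions(StartupOptions.earliest())
        .deserializer(new JsonDebeziumDeserializationSchema())
        .build();

If several of the utility methods above run against the same MySQL instance at once, each call needs its own non-overlapping range rather than a shared hash.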
2025-05-12 09:28:16
org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy
    at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:139)
    at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:83)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.recordTaskFailure(DefaultScheduler.java:258)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:249)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.onTaskFailed(DefaultScheduler.java:242)
    at org.apache.flink.runtime.scheduler.SchedulerBase.onTaskExecutionStateUpdate(SchedulerBase.java:748)
    at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:725)
    at org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:80)
    at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:479)
    at sun.reflect.GeneratedMethodAccessor61.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.lambda$handleRpcInvocation$1(AkkaRpcActor.java:309)
    at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:83)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:307)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:222)
    at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:84)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:168)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20)
    at scala.PartialFunction.applyOrElse(PartialFunction.scala:127)
    at scala.PartialFunction.applyOrElse$(PartialFunction.scala:126)
    at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:175)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:176)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:176)
    at akka.actor.Actor.aroundReceive(Actor.scala:537)
    at akka.actor.Actor.aroundReceive$(Actor.scala:535)
    at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:579)
    at akka.actor.ActorCell.invoke(ActorCell.scala:547)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270)
    at akka.dispatch.Mailbox.run(Mailbox.scala:231)
    at akka.dispatch.Mailbox.exec(Mailbox.scala:243)
    at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
    at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
    at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
    at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
Caused by: java.lang.RuntimeException: One or more fetchers have encountered exception
    at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcherManager.checkErrors(SplitFetcherManager.java:261)
    at org.apache.flink.connector.base.source.reader.SourceReaderBase.getNextFetch(SourceReaderBase.java:169)
    at org.apache.flink.connector.base.source.reader.SourceReaderBase.pollNext(SourceReaderBase.java:131)
    at org.apache.flink.streaming.api.operators.SourceOperator.emitNext(SourceOperator.java:417)
    at org.apache.flink.streaming.runtime.io.StreamTaskSourceInput.emitNext(StreamTaskSourceInput.java:68)
    at org.apache.flink.streaming.runtime.io.StreamOneInputProcessor.processInput(StreamOneInputProcessor.java:65)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.processInput(StreamTask.java:550)
    at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:231)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:839)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:788)
    at org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:952)
    at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:931)
    at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:745)
    at org.apache.flink.runtime.taskmanager.Task.run(Task.java:562)
    at java.lang.Thread.run(Thread.java:834)
Caused by: java.lang.RuntimeException: SplitFetcher thread 0 received unexpected exception while polling the records
    at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.runOnce(SplitFetcher.java:165)
    at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.run(SplitFetcher.java:114)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
    at java.util.concurrent.FutureTask.run(FutureTask.java:264)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
    ... 1 more
Caused by: org.apache.flink.util.FlinkRuntimeException: Read split SnapshotSplit{tableId=tn_iot_v3.dbo.bs_material, splitId='tn_iot_v3.dbo.bs_material:0', splitKeyType=[`material_id` STRING NOT NULL], splitStart=null, splitEnd=[11.003.00001], highWatermark=null} error due to java.lang.IllegalArgumentException: Column 'is_feed_limit' not found in result set 'material_id, material_code, material_name, material_en_name, material_short_name, material_spec, material_model, material_config, material_unit, material_std, barcode_std, production_mode, material_color, custom_sort, material_type_id, material_type_code, material_class_id, material_group_id, location_id, lot_value, lot_value_unit, sublot_qty, sublot_qty_unit, pack_size, description, setup_time, cycle_time, beat_time, offset_time, status, pic_path, material_weight, gross_weight, wh_id, wh_min, wh_max, wh_unit, pic_code, erp_id, material_map, material_prop, sale_price, qty_low_alarm, raw_type, batch_type, barcode_code, box_rule, pkg_rule, box_barcode, pkg_barcode, batch_barcode, is_qc, is_stop, is_common, reserved1, reserved2, reserved3, reserved4, reserved5, reserved6, def01, def02, def03, def04, def05, def06, def07, def08, def09, def10, def11, def12, customer_group, is_delete, create_time, create_by, create_by_name, modified_time, modified_by, modified_by_name, check_eqpt, pressing_technology, material_max_weight, material_min_weight, temp_code_box, print_box_num, temp_code_pkg, print_pkg_num, is_box_print, is_pkg_print, is_grid, group_code_start, group_code_end, qc_sample_percent, is_assemble, is_die_trace, is_mutiple, is_pkg, need_weighing, process_route_temp_id, material_kind_id, fault_part_id, creator, trace_interval, die_casting, is_feed_limit' for table 'tn_iot_v3.dbo.bs_material', columns: { material_id varchar(50) NOT NULL material_code varchar(50) DEFAULT VALUE NULL material_name varchar(100) DEFAULT VALUE ('') material_en_name varchar(100) DEFAULT VALUE ('') material_short_name varchar(20) DEFAULT VALUE ('') material_spec varchar(100) DEFAULT VALUE ('') material_model varchar(50) DEFAULT VALUE ('') material_config varchar(50) DEFAULT VALUE ('') material_unit varchar(50) DEFAULT VALUE ('') material_std varchar(50) DEFAULT VALUE ('') barcode_std varchar(50) DEFAULT VALUE ('') production_mode int(10, 0) DEFAULT VALUE ((0)) material_color varchar(50) DEFAULT VALUE ('') custom_sort varchar(50) DEFAULT VALUE NULL material_type_id varchar(50) NOT NULL DEFAULT VALUE ('') material_type_code varchar(50) DEFAULT VALUE NULL material_class_id varchar(50) DEFAULT VALUE ('') material_group_id varchar(50) DEFAULT VALUE ('') location_id varchar(50) DEFAULT VALUE ('') lot_value numeric(18, 6) DEFAULT VALUE ((0)) lot_value_unit varchar(50) DEFAULT VALUE ('') sublot_qty numeric(18, 6) DEFAULT VALUE ((0)) sublot_qty_unit varchar(50) DEFAULT VALUE ('') pack_size varchar(50) DEFAULT VALUE ('') description varchar(500) DEFAULT VALUE ('') setup_time numeric(18, 6) DEFAULT VALUE ((0)) cycle_time numeric(18, 6) DEFAULT VALUE ((0)) beat_time numeric(18, 6) DEFAULT VALUE ((0)) offset_time int(10, 0) DEFAULT VALUE ((0)) status varchar(20) DEFAULT VALUE ((0)) pic_path varchar(400) DEFAULT VALUE ('') material_weight numeric(18, 4) DEFAULT VALUE ((0)) gross_weight numeric(18, 4) DEFAULT VALUE ((0)) wh_id varchar(50) DEFAULT VALUE ('') wh_min numeric(18, 4) DEFAULT VALUE ((0)) wh_max numeric(18, 4) DEFAULT VALUE ((0)) wh_unit varchar(50) DEFAULT VALUE ('') pic_code varchar(400) DEFAULT VALUE ('') erp_id varchar(60) DEFAULT VALUE ('') material_map varchar(60) DEFAULT VALUE ('') material_prop varchar(100) DEFAULT VALUE ('') sale_price numeric(30, 6) DEFAULT VALUE ((0)) qty_low_alarm numeric(16, 4) DEFAULT VALUE ((0)) raw_type varchar(50) DEFAULT VALUE ('1') batch_type varchar(50) DEFAULT VALUE ('') barcode_code varchar(100) DEFAULT VALUE ('') box_rule varchar(50) DEFAULT VALUE ('') pkg_rule varchar(50) DEFAULT VALUE ('') box_barcode varchar(100) DEFAULT VALUE ('') pkg_barcode varchar(100) DEFAULT VALUE ('') batch_barcode varchar(100) DEFAULT VALUE ('') is_qc int(10, 0) DEFAULT VALUE ((0)) is_stop int(10, 0) DEFAULT VALUE ((0)) is_common int(10, 0) DEFAULT VALUE ((0)) reserved1 varchar(100) DEFAULT VALUE ('') reserved2 varchar(100) DEFAULT VALUE ('') reserved3 varchar(100) DEFAULT VALUE ('') reserved4 varchar(100) DEFAULT VALUE ('') reserved5 varchar(100) DEFAULT VALUE ('') reserved6 varchar(100) DEFAULT VALUE ('') def01 varchar(100) DEFAULT VALUE ('') def02 varchar(100) DEFAULT VALUE ('') def03 varchar(100) DEFAULT VALUE ('') def04 varchar(100) DEFAULT VALUE ('') def05 varchar(100) DEFAULT VALUE ('') def06 varchar(100) DEFAULT VALUE ('') def07 varchar(100) DEFAULT VALUE ('') def08 varchar(100) DEFAULT VALUE ('') def09 varchar(100) DEFAULT VALUE ('') def10 varchar(100) DEFAULT VALUE ('') def11 varchar(100) DEFAULT VALUE ('') def12 varchar(100) DEFAULT VALUE ('') customer_group varchar(50) DEFAULT VALUE ('') is_delete int(10, 0) DEFAULT VALUE ((0)) create_time datetime(23, 3) DEFAULT VALUE (getdate()) create_by varchar(50) DEFAULT VALUE ('') create_by_name varchar(50) DEFAULT VALUE ('') modified_time datetime(23, 3) DEFAULT VALUE ('') modified_by varchar(50) DEFAULT VALUE ('') modified_by_name varchar(50) DEFAULT VALUE ('') check_eqpt varchar(50) DEFAULT VALUE NULL pressing_technology varchar(50) DEFAULT VALUE NULL material_max_weight varchar(50) DEFAULT VALUE NULL material_min_weight varchar(50) DEFAULT VALUE NULL temp_code_box varchar(50) DEFAULT VALUE NULL print_box_num varchar(1) DEFAULT VALUE NULL temp_code_pkg varchar(50) DEFAULT VALUE NULL print_pkg_num varchar(50) DEFAULT VALUE NULL is_box_print varchar(50) DEFAULT VALUE NULL is_pkg_print varchar(50) DEFAULT VALUE NULL is_grid varchar(50) DEFAULT VALUE NULL group_code_start varchar(50) DEFAULT VALUE NULL group_code_end varchar(50) DEFAULT VALUE NULL qc_sample_percent varchar(50) DEFAULT VALUE NULL is_assemble int(10, 0) DEFAULT VALUE ('') is_die_trace int(10, 0) DEFAULT VALUE NULL is_mutiple int(10, 0) DEFAULT VALUE (NULL) is_pkg int(10, 0) DEFAULT VALUE NULL need_weighing varchar(255) DEFAULT VALUE NULL process_route_temp_id varchar(50) DEFAULT VALUE NULL material_kind_id varchar(50) DEFAULT VALUE NULL fault_part_id varchar(50) DEFAULT VALUE NULL creator varchar(255) DEFAULT VALUE NULL trace_interval int(10, 0) DEFAULT VALUE NULL die_casting varchar(100) DEFAULT VALUE NULL } primary key: [material_id] default charset: null comment: null . This might be caused by DBZ-4350.
    at com.ververica.cdc.connectors.base.source.reader.external.IncrementalSourceScanFetcher.checkReadException(IncrementalSourceScanFetcher.java:182)
    at com.ververica.cdc.connectors.base.source.reader.external.IncrementalSourceScanFetcher.pollSplitRecords(IncrementalSourceScanFetcher.java:129)
    at com.ververica.cdc.connectors.base.source.reader.IncrementalSourceSplitReader.fetch(IncrementalSourceSplitReader.java:73)
    at org.apache.flink.connector.base.source.reader.fetcher.FetchTask.run(FetchTask.java:58)
    at org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.runOnce(SplitFetcher.java:162)
    ... 6 more
Caused by: io.debezium.DebeziumException: java.lang.IllegalArgumentException: Column 'is_feed_limit' not found in result set 'material_id, material_code, material_name, material_en_name, material_short_name, material_spec, material_model, material_config, material_unit, material_std, barcode_std, production_mode, material_color, custom_sort, material_type_id, material_type_code, material_class_id, material_group_id, location_id, lot_value, lot_value_unit, sublot_qty, sublot_qty_unit, pack_size, description, setup_time, cycle_time, beat_time, offset_time, status, pic_path, material_weight, gross_weight, wh_id, wh_min, wh_max, wh_unit, pic_code, erp_id, material_map, material_prop, sale_price, qty_low_alarm, raw_type, batch_type, barcode_code, box_rule, pkg_rule, box_barcode, pkg_barcode, batch_barcode, is_qc, is_stop, is_common, reserved1, reserved2, reserved3, reserved4, reserved5, reserved6, def01, def02, def03, def04, def05, def06, def07, def08, def09, def10, def11, def12, customer_group, is_delete, create_time, create_by, create_by_name, modified_time, modified_by, modified_by_name, check_eqpt, pressing_technology, material_max_weight, material_min_weight, temp_code_box, print_box_num, temp_code_pkg, print_pkg_num, is_box_print, is_pkg_print, is_grid, group_code_start, group_code_end, qc_sample_percent, is_assemble, is_die_trace, is_mutiple, is_pkg, need_weighing, process_route_temp_id, material_kind_id, fault_part_id, creator, trace_interval, die_casting, is_feed_limit' for table 'tn_iot_v3.dbo.bs_material', columns: { material_id varchar(50) NOT NULL material_code varchar(50) DEFAULT VALUE NULL material_name varchar(100) DEFAULT VALUE ('') material_en_name varchar(100) DEFAULT VALUE ('') material_short_name varchar(20) DEFAULT VALUE ('') material_spec varchar(100) DEFAULT VALUE ('') material_model varchar(50) DEFAULT VALUE ('') material_config varchar(50) DEFAULT VALUE ('') material_unit varchar(50) DEFAULT VALUE ('') material_std varchar(50) DEFAULT VALUE ('') barcode_std varchar(50) DEFAULT VALUE ('') production_mode int(10, 0) DEFAULT VALUE ((0)) material_color varchar(50) DEFAULT VALUE ('') custom_sort varchar(50) DEFAULT VALUE NULL material_type_id varchar(50) NOT NULL DEFAULT VALUE ('') material_type_code varchar(50) DEFAULT VALUE NULL material_class_id varchar(50) DEFAULT VALUE ('') material_group_id varchar(50) DEFAULT VALUE ('') location_id varchar(50) DEFAULT VALUE ('') lot_value numeric(18, 6) DEFAULT VALUE ((0)) lot_value_unit varchar(50) DEFAULT VALUE ('') sublot_qty numeric(18, 6) DEFAULT VALUE ((0)) sublot_qty_unit varchar(50) DEFAULT VALUE ('') pack_size varchar(50) DEFAULT VALUE ('') description varchar(500) DEFAULT VALUE ('') setup_time numeric(18, 6) DEFAULT VALUE ((0)) cycle_time numeric(18, 6) DEFAULT VALUE ((0)) beat_time numeric(18, 6) DEFAULT VALUE ((0)) offset_time int(10, 0) DEFAULT VALUE ((0)) status varchar(20) DEFAULT VALUE ((0)) pic_path varchar(400) DEFAULT VALUE ('') material_weight numeric(18, 4) DEFAULT VALUE ((0)) gross_weight numeric(18, 4) DEFAULT VALUE ((0)) wh_id varchar(50) DEFAULT VALUE ('') wh_min numeric(18, 4) DEFAULT VALUE ((0)) wh_max numeric(18, 4) DEFAULT VALUE ((0)) wh_unit varchar(50) DEFAULT VALUE ('') pic_code varchar(400) DEFAULT VALUE ('') erp_id varchar(60) DEFAULT VALUE ('') material_map varchar(60) DEFAULT VALUE ('') material_prop varchar(100) DEFAULT VALUE ('') sale_price numeric(30, 6) DEFAULT VALUE ((0)) qty_low_alarm numeric(16, 4) DEFAULT VALUE ((0)) raw_type varchar(50) DEFAULT VALUE ('1') batch_type varchar(50) DEFAULT VALUE ('') barcode_code varchar(100) DEFAULT VALUE ('') box_rule varchar(50) DEFAULT VALUE ('') pkg_rule varchar(50) DEFAULT VALUE ('') box_barcode varchar(100) DEFAULT VALUE ('') pkg_barcode varchar(100) DEFAULT VALUE ('') batch_barcode varchar(100) DEFAULT VALUE ('') is_qc int(10, 0) DEFAULT VALUE ((0)) is_stop int(10, 0) DEFAULT VALUE ((0)) is_common int(10, 0) DEFAULT VALUE ((0)) reserved1 varchar(100) DEFAULT VALUE ('') reserved2 varchar(100) DEFAULT VALUE ('') reserved3 varchar(100) DEFAULT VALUE ('') reserved4 varchar(100) DEFAULT VALUE ('') reserved5 varchar(100) DEFAULT VALUE ('') reserved6 varchar(100) DEFAULT VALUE ('') def01 varchar(100) DEFAULT VALUE ('') def02 varchar(100) DEFAULT VALUE ('') def03 varchar(100) DEFAULT VALUE ('') def04 varchar(100) DEFAULT VALUE ('') def05 varchar(100) DEFAULT VALUE ('') def06 varchar(100) DEFAULT VALUE ('') def07 varchar(100) DEFAULT VALUE ('') def08 varchar(100) DEFAULT VALUE ('') def09 varchar(100) DEFAULT VALUE ('') def10 varchar(100) DEFAULT VALUE ('') def11 varchar(100) DEFAULT VALUE ('') def12 varchar(100) DEFAULT VALUE ('') customer_group varchar(50) DEFAULT VALUE ('') is_delete int(10, 0) DEFAULT VALUE ((0)) create_time datetime(23, 3) DEFAULT VALUE (getdate()) create_by varchar(50) DEFAULT VALUE ('') create_by_name varchar(50) DEFAULT VALUE ('') modified_time datetime(23, 3) DEFAULT VALUE ('') modified_by varchar(50) DEFAULT VALUE ('') modified_by_name varchar(50) DEFAULT VALUE ('') check_eqpt varchar(50) DEFAULT VALUE NULL pressing_technology varchar(50) DEFAULT VALUE NULL material_max_weight varchar(50) DEFAULT VALUE NULL material_min_weight varchar(50) DEFAULT VALUE NULL temp_code_box varchar(50) DEFAULT VALUE NULL print_box_num varchar(1) DEFAULT VALUE NULL temp_code_pkg varchar(50) DEFAULT VALUE NULL print_pkg_num varchar(50) DEFAULT VALUE NULL is_box_print varchar(50) DEFAULT VALUE NULL is_pkg_print varchar(50) DEFAULT VALUE NULL is_grid varchar(50) DEFAULT VALUE NULL group_code_start varchar(50) DEFAULT VALUE NULL group_code_end varchar(50) DEFAULT VALUE NULL qc_sample_percent varchar(50) DEFAULT VALUE NULL is_assemble int(10, 0) DEFAULT VALUE ('') is_die_trace int(10, 0) DEFAULT VALUE NULL is_mutiple int(10, 0) DEFAULT VALUE (NULL) is_pkg int(10, 0) DEFAULT VALUE NULL need_weighing varchar(255) DEFAULT VALUE NULL process_route_temp_id varchar(50) DEFAULT VALUE NULL material_kind_id varchar(50) DEFAULT VALUE NULL fault_part_id varchar(50) DEFAULT VALUE NULL creator varchar(255) DEFAULT VALUE NULL trace_interval int(10, 0) DEFAULT VALUE NULL die_casting varchar(100) DEFAULT VALUE NULL } primary key: [material_id] default charset: null comment: null . This might be caused by DBZ-4350
    at com.ververica.cdc.connectors.sqlserver.source.reader.fetch.SqlServerScanFetchTask$SqlServerSnapshotSplitReadTask.execute(SqlServerScanFetchTask.java:256)
    at com.ververica.cdc.connectors.sqlserver.source.reader.fetch.SqlServerScanFetchTask.execute(SqlServerScanFetchTask.java:94)
    at com.ververica.cdc.connectors.base.source.reader.external.IncrementalSourceScanFetcher.lambda$submitTask$0(IncrementalSourceScanFetcher.java:95)
    ... 5 more
Caused by: java.lang.IllegalArgumentException: Column 'is_feed_limit' not found in result set 'material_id, material_code, material_name, material_en_name, material_short_name, material_spec, material_model, material_config, material_unit, material_std, barcode_std, production_mode, material_color, custom_sort, material_type_id, material_type_code, material_class_id, material_group_id, location_id, lot_value, lot_value_unit, sublot_qty, sublot_qty_unit, pack_size, description, setup_time, cycle_time, beat_time, offset_time, status, pic_path, material_weight, gross_weight, wh_id, wh_min, wh_max, wh_unit, pic_code, erp_id, material_map, material_prop, sale_price, qty_low_alarm, raw_type, batch_type, barcode_code, box_rule, pkg_rule, box_barcode, pkg_barcode, batch_barcode, is_qc, is_stop, is_common, reserved1, reserved2, reserved3, reserved4, reserved5, reserved6, def01, def02, def03, def04, def05, def06, def07, def08, def09, def10, def11, def12, customer_group, is_delete, create_time, create_by, create_by_name, modified_time, modified_by, modified_by_name, check_eqpt, pressing_technology, material_max_weight, material_min_weight, temp_code_box, print_box_num, temp_code_pkg, print_pkg_num, is_box_print, is_pkg_print, is_grid, group_code_start, group_code_end, qc_sample_percent, is_assemble, is_die_trace, is_mutiple, is_pkg, need_weighing, process_route_temp_id, material_kind_id, fault_part_id, creator, trace_interval, die_casting, is_feed_limit' for table 'tn_iot_v3.dbo.bs_material', columns: { material_id varchar(50) NOT NULL material_code varchar(50) DEFAULT VALUE NULL material_name varchar(100) DEFAULT VALUE ('') material_en_name varchar(100) DEFAULT VALUE ('') material_short_name varchar(20) DEFAULT VALUE ('') material_spec varchar(100) DEFAULT VALUE ('') material_model varchar(50) DEFAULT VALUE ('') material_config varchar(50) DEFAULT VALUE ('') material_unit varchar(50) DEFAULT VALUE ('') material_std varchar(50) DEFAULT VALUE ('') barcode_std varchar(50) DEFAULT VALUE ('') production_mode int(10, 0) DEFAULT VALUE ((0)) material_color varchar(50) DEFAULT VALUE ('') custom_sort varchar(50) DEFAULT VALUE NULL material_type_id varchar(50) NOT NULL DEFAULT VALUE ('') material_type_code varchar(50) DEFAULT VALUE NULL material_class_id varchar(50) DEFAULT VALUE ('') material_group_id varchar(50) DEFAULT VALUE ('') location_id varchar(50) DEFAULT VALUE ('') lot_value numeric(18, 6) DEFAULT VALUE ((0)) lot_value_unit varchar(50) DEFAULT VALUE ('') sublot_qty numeric(18, 6) DEFAULT VALUE ((0)) sublot_qty_unit varchar(50) DEFAULT VALUE ('') pack_size varchar(50) DEFAULT VALUE ('') description varchar(500) DEFAULT VALUE ('') setup_time numeric(18, 6) DEFAULT VALUE ((0)) cycle_time numeric(18, 6) DEFAULT VALUE ((0)) beat_time numeric(18, 6) DEFAULT VALUE ((0)) offset_time int(10, 0) DEFAULT VALUE ((0)) status varchar(20) DEFAULT VALUE ((0)) pic_path varchar(400) DEFAULT VALUE ('') material_weight numeric(18, 4) DEFAULT VALUE ((0)) gross_weight numeric(18, 4) DEFAULT VALUE ((0)) wh_id varchar(50) DEFAULT VALUE ('') wh_min numeric(18, 4) DEFAULT VALUE ((0)) wh_max numeric(18, 4) DEFAULT VALUE ((0)) wh_unit varchar(50) DEFAULT VALUE ('') pic_code varchar(400) DEFAULT VALUE ('') erp_id varchar(60) DEFAULT VALUE ('') material_map varchar(60) DEFAULT VALUE ('') material_prop varchar(100) DEFAULT VALUE ('') sale_price numeric(30, 6) DEFAULT VALUE ((0)) qty_low_alarm numeric(16, 4) DEFAULT VALUE ((0)) raw_type varchar(50) DEFAULT VALUE ('1') batch_type varchar(50) DEFAULT VALUE ('') barcode_code varchar(100) DEFAULT VALUE ('') box_rule varchar(50) DEFAULT VALUE ('') pkg_rule varchar(50) DEFAULT VALUE ('') box_barcode varchar(100) DEFAULT VALUE ('') pkg_barcode varchar(100) DEFAULT VALUE ('') batch_barcode varchar(100) DEFAULT VALUE ('') is_qc int(10, 0) DEFAULT VALUE ((0)) is_stop int(10, 0) DEFAULT VALUE ((0)) is_common int(10, 0) DEFAULT VALUE ((0)) reserved1 varchar(100) DEFAULT VALUE ('') reserved2 varchar(100) DEFAULT VALUE ('') reserved3 varchar(100) DEFAULT VALUE ('') reserved4 varchar(100) DEFAULT VALUE ('') reserved5 varchar(100) DEFAULT VALUE ('') reserved6 varchar(100) DEFAULT VALUE ('') def01 varchar(100) DEFAULT VALUE ('') def02 varchar(100) DEFAULT VALUE ('') def03 varchar(100) DEFAULT VALUE ('') def04 varchar(100) DEFAULT VALUE ('') def05 varchar(100) DEFAULT VALUE ('') def06 varchar(100) DEFAULT VALUE ('') def07 varchar(100) DEFAULT VALUE ('') def08 varchar(100) DEFAULT VALUE ('') def09 varchar(100) DEFAULT VALUE ('') def10 varchar(100) DEFAULT VALUE ('') def11 varchar(100) DEFAULT VALUE ('') def12 varchar(100) DEFAULT VALUE ('') customer_group varchar(50) DEFAULT VALUE ('') is_delete int(10, 0) DEFAULT VALUE ((0)) create_time datetime(23, 3) DEFAULT VALUE (getdate()) create_by varchar(50) DEFAULT VALUE ('') create_by_name varchar(50) DEFAULT VALUE ('') modified_time datetime(23, 3) DEFAULT VALUE ('') modified_by varchar(50) DEFAULT VALUE ('') modified_by_name varchar(50) DEFAULT VALUE ('') check_eqpt varchar(50) DEFAULT VALUE NULL pressing_technology varchar(50) DEFAULT VALUE NULL material_max_weight varchar(50) DEFAULT VALUE NULL material_min_weight varchar(50) DEFAULT VALUE NULL temp_code_box varchar(50) DEFAULT VALUE NULL print_box_num varchar(1) DEFAULT VALUE NULL temp_code_pkg varchar(50) DEFAULT VALUE NULL print_pkg_num varchar(50) DEFAULT VALUE NULL is_box_print varchar(50) DEFAULT VALUE NULL is_pkg_print varchar(50) DEFAULT VALUE NULL is_grid varchar(50) DEFAULT VALUE NULL group_code_start varchar(50) DEFAULT VALUE NULL group_code_end varchar(50) DEFAULT VALUE NULL qc_sample_percent varchar(50) DEFAULT VALUE NULL is_assemble int(10, 0) DEFAULT VALUE ('') is_die_trace int(10, 0) DEFAULT VALUE NULL is_mutiple int(10, 0) DEFAULT VALUE (NULL) is_pkg int(10, 0) DEFAULT VALUE NULL need_weighing varchar(255) DEFAULT VALUE NULL process_route_temp_id varchar(50) DEFAULT VALUE NULL material_kind_id varchar(50) DEFAULT VALUE NULL fault_part_id varchar(50) DEFAULT VALUE NULL creator varchar(255) DEFAULT VALUE NULL trace_interval int(10, 0) DEFAULT VALUE NULL die_casting varchar(100) DEFAULT VALUE NULL } primary key: [material_id] default charset: null comment: null . This might be caused by DBZ-4350
    at io.debezium.util.ColumnUtils.toArray(ColumnUtils.java:57)
    at com.ververica.cdc.connectors.sqlserver.source.reader.fetch.SqlServerScanFetchTask$SqlServerSnapshotSplitReadTask.createDataEventsForTable(SqlServerScanFetchTask.java:355)
    at com.ververica.cdc.connectors.sqlserver.source.reader.fetch.SqlServerScanFetchTask$SqlServerSnapshotSplitReadTask.createDataEvents(SqlServerScanFetchTask.java:313)
    at com.ververica.cdc.connectors.sqlserver.source.reader.fetch.SqlServerScanFetchTask$SqlServerSnapshotSplitReadTask.doExecute(SqlServerScanFetchTask.java:283)
    at com.ververica.cdc.connectors.sqlserver.source.reader.fetch.SqlServerScanFetchTask$SqlServerSnapshotSplitReadTask.execute(SqlServerScanFetchTask.java:251)
    ... 7 more
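
This second failure is unrelated to server-ids: the snapshot split reader sees the column is_feed_limit in the table metadata but not in the SELECT result set, the symptom tracked as DBZ-4350, which typically appears when a column was added to the table after SQL Server CDC was enabled, leaving the capture instance with a stale column list. A commonly suggested remedy (an inference from the DBZ-4350 discussion, not something stated in the original post) is to rebuild the capture instance so it matches the current schema and then restart the job without old state so the snapshot re-runs; a JDBC sketch, with placeholder connection details:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class RefreshCaptureInstance {
    public static void main(String[] args) throws Exception {
        // Placeholder host/credentials; requires db_owner on tn_iot_v3.
        String url = "jdbc:sqlserver://<host>:1433;databaseName=tn_iot_v3";
        try (Connection conn = DriverManager.getConnection(url, "<user>", "<password>");
             Statement st = conn.createStatement()) {
            // Drop every capture instance of the table (they carry the stale schema).
            st.execute("EXEC sys.sp_cdc_disable_table @source_schema = N'dbo', "
                    + "@source_name = N'bs_material', @capture_instance = N'all'");
            // Re-enable CDC so the fresh capture instance includes is_feed_limit.
            st.execute("EXEC sys.sp_cdc_enable_table @source_schema = N'dbo', "
                    + "@source_name = N'bs_material', @role_name = NULL");
        }
    }
}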