A small issue when loading db.properties through the util:properties element

This post walks through configuring a MySQL connection pool from a db.properties file: the URL, driver, username, password, and the initial and maximum number of active connections. Defining the pool as a Spring bean lets the application manage database resources effectively.


The standard contents of db.properties:
url=jdbc:mysql://localhost:3306/wyt_store?useUnicode=true&characterEncoding=utf8
driver=com.mysql.jdbc.Driver
user=root
password=root
initSize=2
maxActive=10
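
These values are exposed to the Spring container by loading the file through the util:properties element that gives this post its title. A minimal sketch of the declaration, assuming the file sits on the classpath and that the util namespace is registered on the root <beans> element (the id must match the "dbConfig" name used in the SpEL expressions below):

<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:util="http://www.springframework.org/schema/util"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans.xsd
           http://www.springframework.org/schema/util
           http://www.springframework.org/schema/util/spring-util.xsd">

    <!-- Loads db.properties as a java.util.Properties bean named "dbConfig";
         the classpath: location is an assumption, adjust to where the file lives -->
    <util:properties id="dbConfig" location="classpath:db.properties"/>

Forgetting the util namespace or the schemaLocation entry is a common cause of "unbound prefix" startup errors with this element.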

<bean id="dataSource"
      class="org.apache.commons.dbcp.BasicDataSource"
      destroy-method="close">
    <!-- The property names are strict: they must match BasicDataSource's
         setters exactly, and arbitrary names cause errors -->
    <property name="url"
        value="#{dbConfig.url}"/>
    <!-- The property name and the key suffix after the dot in value
         need not match: driverClassName vs. dbConfig.driver -->
    <property name="driverClassName"
        value="#{dbConfig.driver}"/>
    <property name="username"
        value="#{dbConfig.user}"/>
    <property name="password"
        value="#{dbConfig.password}"/>
    <!-- The two to watch are initialSize and driverClassName -->
    <property name="initialSize"
        value="#{dbConfig.initSize}"/>
    <property name="maxActive"
        value="#{dbConfig.maxActive}"/>
</bean>
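
Once the context starts, the pooled data source can be fetched and used like any other bean. A minimal Java sketch, assuming the configuration above lives in a hypothetical file named applicationContext.xml:

import java.sql.Connection;
import org.apache.commons.dbcp.BasicDataSource;
import org.springframework.context.support.ClassPathXmlApplicationContext;

public class DataSourceDemo {
    public static void main(String[] args) throws Exception {
        // "applicationContext.xml" is a hypothetical name; point this at the real config file
        ClassPathXmlApplicationContext ctx =
                new ClassPathXmlApplicationContext("applicationContext.xml");
        try {
            BasicDataSource ds = ctx.getBean("dataSource", BasicDataSource.class);
            // Borrow a connection from the pool; close() returns it to the pool
            try (Connection conn = ds.getConnection()) {
                System.out.println("Connected: " + !conn.isClosed());
            }
        } finally {
            ctx.close(); // triggers destroy-method="close" on the data source
        }
    }
}

If a property name in the bean definition is misspelled (say, initSize instead of initialSize), startup fails here with a BeanCreationException complaining about an invalid property, which is exactly the small issue this post is about.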
2025-06-11T09:14:50.485+0800 ERROR SplitRunner-20250611_011450_00344_fmh9x.1.1.0-11-2005 com.google.common.util.concurrent.AggregateFuture Got more than one input Future failure. Logging failures after the first io.trino.spi.TrinoException: Error opening Iceberg split hdfs://bigdata/user/hive/warehouse/chuzuche.db/ods_tocc_passenger_driver/data/00002-0-4232e133-6e41-47dd-b29a-dc3fd7f11dec-00048.parquet (offset=0, length=569): Cannot invoke "org.apache.hadoop.hdfs.BlockReader.available()" because "this.blockReader" is null at io.trino.plugin.iceberg.IcebergPageSourceProvider.createParquetPageSource(IcebergPageSourceProvider.java:1052) at io.trino.plugin.iceberg.IcebergPageSourceProvider.createDataPageSource(IcebergPageSourceProvider.java:553) at io.trino.plugin.iceberg.IcebergPageSourceProvider.openDeletes(IcebergPageSourceProvider.java:498) at io.trino.plugin.iceberg.IcebergPageSourceProvider.lambda$createPageSource$8(IcebergPageSourceProvider.java:395) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.readEqualityDeletesInternal(EqualityDeleteFilter.java:110) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.lambda$readEqualityDeletes$0(EqualityDeleteFilter.java:102) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:572) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.readEqualityDeletes(EqualityDeleteFilter.java:103) at io.trino.plugin.iceberg.delete.DeleteManager.createEqualityDeleteFilter(DeleteManager.java:200) at io.trino.plugin.iceberg.delete.DeleteManager.getDeletePredicate(DeleteManager.java:98) at io.trino.plugin.iceberg.IcebergPageSourceProvider.lambda$createPageSource$9(IcebergPageSourceProvider.java:388) at com.google.common.base.Suppliers$NonSerializableMemoizingSupplier.get(Suppliers.java:186) at io.trino.plugin.iceberg.IcebergPageSource.getNextPage(IcebergPageSource.java:132) at io.trino.operator.TableScanOperator.getOutput(TableScanOperator.java:268) at io.trino.operator.Driver.processInternal(Driver.java:403) at io.trino.operator.Driver.lambda$process$8(Driver.java:306) at io.trino.operator.Driver.tryWithLock(Driver.java:709) at io.trino.operator.Driver.process(Driver.java:298) at io.trino.operator.Driver.processForDuration(Driver.java:269) at io.trino.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:890) at io.trino.execution.executor.dedicated.SplitProcessor.run(SplitProcessor.java:77) at io.trino.execution.executor.dedicated.TaskEntry$VersionEmbedderBridge.lambda$run$0(TaskEntry.java:191) at io.trino.$gen.Trino_451____20250610_092944_2.run(Unknown Source) at io.trino.execution.executor.dedicated.TaskEntry$VersionEmbedderBridge.run(TaskEntry.java:192) at io.trino.execution.executor.scheduler.FairScheduler.runTask(FairScheduler.java:168) at io.trino.execution.executor.scheduler.FairScheduler.lambda$submit$0(FairScheduler.java:155) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:572) at com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:131) at com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:76) at com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:82) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) at java.base/java.lang.Thread.run(Thread.java:1570) Caused by: java.lang.NullPointerException: Cannot invoke "org.apache.hadoop.hdfs.BlockReader.available()" because "this.blockReader" is null at org.apache.hadoop.hdfs.DFSInputStream.seek(DFSInputStream.java:1575) at org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:73) at org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:73) at io.trino.hdfs.FSDataInputStreamTail.readTail(FSDataInputStreamTail.java:67) at io.trino.filesystem.hdfs.HdfsInput.readTail(HdfsInput.java:63) at io.trino.filesystem.TrinoInput.readTail(TrinoInput.java:43) at io.trino.filesystem.tracing.TracingInput.lambda$readTail$3(TracingInput.java:81) at io.trino.filesystem.tracing.Tracing.withTracing(Tracing.java:47) at io.trino.filesystem.tracing.TracingInput.readTail(TracingInput.java:81) at io.trino.plugin.hive.parquet.MemoryParquetDataSource.<init>(MemoryParquetDataSource.java:56) at io.trino.plugin.hive.parquet.ParquetPageSourceFactory.createDataSource(ParquetPageSourceFactory.java:321) at io.trino.plugin.iceberg.IcebergPageSourceProvider.createParquetPageSource(IcebergPageSourceProvider.java:914) ... 33 more 2025-06-11T09:14:50.485+0800 ERROR SplitRunner-20250611_011450_00344_fmh9x.1.1.0-7-2010 com.google.common.util.concurrent.AggregateFuture Got more than one input Future failure. Logging failures after the first io.trino.spi.TrinoException: Error opening Iceberg split hdfs://bigdata/user/hive/warehouse/chuzuche.db/ods_tocc_passenger_driver/data/00002-0-4232e133-6e41-47dd-b29a-dc3fd7f11dec-00048.parquet (offset=0, length=569): Cannot invoke "org.apache.hadoop.hdfs.BlockReader.available()" because "this.blockReader" is null at io.trino.plugin.iceberg.IcebergPageSourceProvider.createParquetPageSource(IcebergPageSourceProvider.java:1052) at io.trino.plugin.iceberg.IcebergPageSourceProvider.createDataPageSource(IcebergPageSourceProvider.java:553) at io.trino.plugin.iceberg.IcebergPageSourceProvider.openDeletes(IcebergPageSourceProvider.java:498) at io.trino.plugin.iceberg.IcebergPageSourceProvider.lambda$createPageSource$8(IcebergPageSourceProvider.java:395) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.readEqualityDeletesInternal(EqualityDeleteFilter.java:110) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.lambda$readEqualityDeletes$0(EqualityDeleteFilter.java:102) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:572) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.readEqualityDeletes(EqualityDeleteFilter.java:103) at io.trino.plugin.iceberg.delete.DeleteManager.createEqualityDeleteFilter(DeleteManager.java:200) at io.trino.plugin.iceberg.delete.DeleteManager.getDeletePredicate(DeleteManager.java:98) at io.trino.plugin.iceberg.IcebergPageSourceProvider.lambda$createPageSource$9(IcebergPageSourceProvider.java:388) at com.google.common.base.Suppliers$NonSerializableMemoizingSupplier.get(Suppliers.java:186) at io.trino.plugin.iceberg.IcebergPageSource.getNextPage(IcebergPageSource.java:132) at io.trino.operator.TableScanOperator.getOutput(TableScanOperator.java:268) at io.trino.operator.Driver.processInternal(Driver.java:403) at io.trino.operator.Driver.lambda$process$8(Driver.java:306) at io.trino.operator.Driver.tryWithLock(Driver.java:709) at 
io.trino.operator.Driver.process(Driver.java:298) at io.trino.operator.Driver.processForDuration(Driver.java:269) at io.trino.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:890) at io.trino.execution.executor.dedicated.SplitProcessor.run(SplitProcessor.java:77) at io.trino.execution.executor.dedicated.TaskEntry$VersionEmbedderBridge.lambda$run$0(TaskEntry.java:191) at io.trino.$gen.Trino_451____20250610_092944_2.run(Unknown Source) at io.trino.execution.executor.dedicated.TaskEntry$VersionEmbedderBridge.run(TaskEntry.java:192) at io.trino.execution.executor.scheduler.FairScheduler.runTask(FairScheduler.java:168) at io.trino.execution.executor.scheduler.FairScheduler.lambda$submit$0(FairScheduler.java:155) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:572) at com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:131) at com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:76) at com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:82) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) at java.base/java.lang.Thread.run(Thread.java:1570) Caused by: java.lang.NullPointerException: Cannot invoke "org.apache.hadoop.hdfs.BlockReader.available()" because "this.blockReader" is null at org.apache.hadoop.hdfs.DFSInputStream.seek(DFSInputStream.java:1575) at org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:73) at org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:73) at io.trino.hdfs.FSDataInputStreamTail.readTail(FSDataInputStreamTail.java:67) at io.trino.filesystem.hdfs.HdfsInput.readTail(HdfsInput.java:63) at io.trino.filesystem.TrinoInput.readTail(TrinoInput.java:43) at io.trino.filesystem.tracing.TracingInput.lambda$readTail$3(TracingInput.java:81) at io.trino.filesystem.tracing.Tracing.withTracing(Tracing.java:47) at io.trino.filesystem.tracing.TracingInput.readTail(TracingInput.java:81) at io.trino.plugin.hive.parquet.MemoryParquetDataSource.<init>(MemoryParquetDataSource.java:56) at io.trino.plugin.hive.parquet.ParquetPageSourceFactory.createDataSource(ParquetPageSourceFactory.java:321) at io.trino.plugin.iceberg.IcebergPageSourceProvider.createParquetPageSource(IcebergPageSourceProvider.java:914) ... 33 more 2025-06-11T09:14:50.485+0800 ERROR SplitRunner-20250611_011450_00344_fmh9x.1.1.0-10-1999 com.google.common.util.concurrent.AggregateFuture Got more than one input Future failure. 
Logging failures after the first io.trino.spi.TrinoException: Error opening Iceberg split hdfs://bigdata/user/hive/warehouse/chuzuche.db/ods_tocc_passenger_driver/data/00002-0-4232e133-6e41-47dd-b29a-dc3fd7f11dec-00048.parquet (offset=0, length=569): Cannot invoke "org.apache.hadoop.hdfs.BlockReader.available()" because "this.blockReader" is null at io.trino.plugin.iceberg.IcebergPageSourceProvider.createParquetPageSource(IcebergPageSourceProvider.java:1052) at io.trino.plugin.iceberg.IcebergPageSourceProvider.createDataPageSource(IcebergPageSourceProvider.java:553) at io.trino.plugin.iceberg.IcebergPageSourceProvider.openDeletes(IcebergPageSourceProvider.java:498) at io.trino.plugin.iceberg.IcebergPageSourceProvider.lambda$createPageSource$8(IcebergPageSourceProvider.java:395) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.readEqualityDeletesInternal(EqualityDeleteFilter.java:110) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.lambda$readEqualityDeletes$0(EqualityDeleteFilter.java:102) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:572) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at io.trino.plugin.iceberg.delete.EqualityDeleteFilter$EqualityDeleteFilterBuilder.readEqualityDeletes(EqualityDeleteFilter.java:103) at io.trino.plugin.iceberg.delete.DeleteManager.createEqualityDeleteFilter(DeleteManager.java:200) at io.trino.plugin.iceberg.delete.DeleteManager.getDeletePredicate(DeleteManager.java:98) at io.trino.plugin.iceberg.IcebergPageSourceProvider.lambda$createPageSource$9(IcebergPageSourceProvider.java:388) at com.google.common.base.Suppliers$NonSerializableMemoizingSupplier.get(Suppliers.java:186) at io.trino.plugin.iceberg.IcebergPageSource.getNextPage(IcebergPageSource.java:132) at io.trino.operator.TableScanOperator.getOutput(TableScanOperator.java:268) at io.trino.operator.Driver.processInternal(Driver.java:403) at io.trino.operator.Driver.lambda$process$8(Driver.java:306) at io.trino.operator.Driver.tryWithLock(Driver.java:709) at io.trino.operator.Driver.process(Driver.java:298) at io.trino.operator.Driver.processForDuration(Driver.java:269) at io.trino.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:890) at io.trino.execution.executor.dedicated.SplitProcessor.run(SplitProcessor.java:77) at io.trino.execution.executor.dedicated.TaskEntry$VersionEmbedderBridge.lambda$run$0(TaskEntry.java:191) at io.trino.$gen.Trino_451____20250610_092944_2.run(Unknown Source) at io.trino.execution.executor.dedicated.TaskEntry$VersionEmbedderBridge.run(TaskEntry.java:192) at io.trino.execution.executor.scheduler.FairScheduler.runTask(FairScheduler.java:168) at io.trino.execution.executor.scheduler.FairScheduler.lambda$submit$0(FairScheduler.java:155) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:572) at com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:131) at com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:76) at com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:82) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) at java.base/java.lang.Thread.run(Thread.java:1570) Caused by: 
java.lang.NullPointerException: Cannot invoke "org.apache.hadoop.hdfs.BlockReader.available()" because "this.blockReader" is null at org.apache.hadoop.hdfs.DFSInputStream.seek(DFSInputStream.java:1575) at org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:73) at org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:73) at io.trino.hdfs.FSDataInputStreamTail.readTail(FSDataInputStreamTail.java:67) at io.trino.filesystem.hdfs.HdfsInput.readTail(HdfsInput.java:63) at io.trino.filesystem.TrinoInput.readTail(TrinoInput.java:43) at io.trino.filesystem.tracing.TracingInput.lambda$readTail$3(TracingInput.java:81) at io.trino.filesystem.tracing.Tracing.withTracing(Tracing.java:47) at io.trino.filesystem.tracing.TracingInput.readTail(TracingInput.java:81) at io.trino.plugin.hive.parquet.MemoryParquetDataSource.<init>(MemoryParquetDataSource.java:56) at io.trino.plugin.hive.parquet.ParquetPageSourceFactory.createDataSource(ParquetPageSourceFactory.java:321) at io.trino.plugin.iceberg.IcebergPageSourceProvider.createParquetPageSource(IcebergPageSourceProvider.java:914) ... 33 more什么问题
06-12
Jun 04, 2025 4:11:52 PM org.apache.ranger.server.tomcat.EmbeddedServer start INFO: Webapp file =./webapp, webAppName = /kms Jun 04, 2025 4:11:52 PM org.apache.ranger.server.tomcat.EmbeddedServer start INFO: Adding webapp [/kms] = path [./webapp] ..... Jun 04, 2025 4:11:52 PM org.apache.ranger.server.tomcat.EmbeddedServer start INFO: Finished init of webapp [/kms] = path [./webapp]. Jun 04, 2025 4:11:52 PM org.apache.coyote.AbstractProtocol init INFO: Initializing ProtocolHandler ["http-bio-9292"] Jun 04, 2025 4:11:52 PM org.apache.catalina.core.StandardService startInternal INFO: Starting service Tomcat Jun 04, 2025 4:11:52 PM org.apache.catalina.core.StandardEngine startInternal INFO: Starting Servlet Engine: Apache Tomcat/7.0.81 Jun 04, 2025 4:11:52 PM org.apache.catalina.startup.ContextConfig getDefaultWebXmlFragment INFO: No global web.xml found Jun 04, 2025 4:11:55 PM org.apache.catalina.startup.TldConfig execute INFO: At least one JAR was scanned for TLDs yet contained no TLDs. Enable debug logging for this logger for a complete list of JARs that were scanned but no TLDs were found in them. Skipping unneeded JARs during scanning can improve startup time and JSP compilation time. log4j:WARN No such property [maxFileSize] in org.apache.log4j.DailyRollingFileAppender. log4j:WARN No such property [maxFileSize] in org.apache.log4j.DailyRollingFileAppender. java.lang.IllegalArgumentException: Can't get Kerberos realm at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:65) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:306) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:291) at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:846) at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:816) at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:689) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2954) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2944) at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2810) at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:390) at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.initFileSystem(JavaKeyStoreProvider.java:89) at org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider.<init>(AbstractJavaKeyStoreProvider.java:82) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:49) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:41) at org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory.createProvider(JavaKeyStoreProvider.java:100) at org.apache.hadoop.security.alias.CredentialProviderFactory.getProviders(CredentialProviderFactory.java:71) at org.apache.ranger.credentialapi.CredentialReader.getDecryptedString(CredentialReader.java:59) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.getFromJceks(RangerKeyStoreProvider.java:368) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.getDBKSConf(RangerKeyStoreProvider.java:118) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.<init>(RangerKeyStoreProvider.java:82) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider$Factory.createProvider(RangerKeyStoreProvider.java:399) at 
org.apache.hadoop.crypto.key.KeyProviderFactory.get(KeyProviderFactory.java:95) at org.apache.hadoop.crypto.key.kms.server.KMSWebApp.contextInitialized(KMSWebApp.java:177) at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:5110) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5633) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:145) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1694) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1684) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) Caused by: java.lang.reflect.InvocationTargetException at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm(KerberosUtil.java:88) at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:63) ... 32 more Caused by: KrbException: Cannot locate default realm at sun.security.krb5.Config.getDefaultRealm(Config.java:1029) ... 38 more java.lang.IllegalArgumentException: Can't get Kerberos realm at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:65) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:306) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:291) at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:846) at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:816) at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:689) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2954) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2944) at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2810) at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:390) at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.initFileSystem(JavaKeyStoreProvider.java:89) at org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider.<init>(AbstractJavaKeyStoreProvider.java:82) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:49) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:41) at org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory.createProvider(JavaKeyStoreProvider.java:100) at org.apache.hadoop.security.alias.CredentialProviderFactory.getProviders(CredentialProviderFactory.java:71) at org.apache.ranger.credentialapi.CredentialReader.getDecryptedString(CredentialReader.java:59) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.getFromJceks(RangerKeyStoreProvider.java:368) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.getDBKSConf(RangerKeyStoreProvider.java:119) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.<init>(RangerKeyStoreProvider.java:82) at 
org.apache.hadoop.crypto.key.RangerKeyStoreProvider$Factory.createProvider(RangerKeyStoreProvider.java:399) at org.apache.hadoop.crypto.key.KeyProviderFactory.get(KeyProviderFactory.java:95) at org.apache.hadoop.crypto.key.kms.server.KMSWebApp.contextInitialized(KMSWebApp.java:177) at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:5110) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5633) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:145) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1694) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1684) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) Caused by: java.lang.reflect.InvocationTargetException at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm(KerberosUtil.java:88) at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:63) ... 32 more Caused by: KrbException: Cannot locate default realm at sun.security.krb5.Config.getDefaultRealm(Config.java:1029) ... 38 more java.lang.IllegalArgumentException: Can't get Kerberos realm at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:65) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:306) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:291) at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:846) at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:816) at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:689) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2954) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2944) at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2810) at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:390) at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.initFileSystem(JavaKeyStoreProvider.java:89) at org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider.<init>(AbstractJavaKeyStoreProvider.java:82) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:49) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:41) at org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory.createProvider(JavaKeyStoreProvider.java:100) at org.apache.hadoop.security.alias.CredentialProviderFactory.getProviders(CredentialProviderFactory.java:71) at org.apache.ranger.credentialapi.CredentialReader.getDecryptedString(CredentialReader.java:59) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.getFromJceks(RangerKeyStoreProvider.java:368) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.<init>(RangerKeyStoreProvider.java:83) at 
org.apache.hadoop.crypto.key.RangerKeyStoreProvider$Factory.createProvider(RangerKeyStoreProvider.java:399) at org.apache.hadoop.crypto.key.KeyProviderFactory.get(KeyProviderFactory.java:95) at org.apache.hadoop.crypto.key.kms.server.KMSWebApp.contextInitialized(KMSWebApp.java:177) at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:5110) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5633) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:145) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1694) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1684) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) Caused by: java.lang.reflect.InvocationTargetException at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm(KerberosUtil.java:88) at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:63) ... 31 more Caused by: KrbException: Cannot locate default realm at sun.security.krb5.Config.getDefaultRealm(Config.java:1029) ... 37 more java.lang.IllegalArgumentException: Can't get Kerberos realm at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:65) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:306) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:291) at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:846) at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:816) at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:689) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2954) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2944) at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2810) at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:390) at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.initFileSystem(JavaKeyStoreProvider.java:89) at org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider.<init>(AbstractJavaKeyStoreProvider.java:82) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:49) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:41) at org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory.createProvider(JavaKeyStoreProvider.java:100) at org.apache.hadoop.security.alias.CredentialProviderFactory.getProviders(CredentialProviderFactory.java:71) at org.apache.ranger.credentialapi.CredentialReader.getDecryptedString(CredentialReader.java:59) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.getFromJceks(RangerKeyStoreProvider.java:368) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.<init>(RangerKeyStoreProvider.java:84) at 
org.apache.hadoop.crypto.key.RangerKeyStoreProvider$Factory.createProvider(RangerKeyStoreProvider.java:399) at org.apache.hadoop.crypto.key.KeyProviderFactory.get(KeyProviderFactory.java:95) at org.apache.hadoop.crypto.key.kms.server.KMSWebApp.contextInitialized(KMSWebApp.java:177) at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:5110) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5633) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:145) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1694) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1684) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) Caused by: java.lang.reflect.InvocationTargetException at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm(KerberosUtil.java:88) at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:63) ... 31 more Caused by: KrbException: Cannot locate default realm at sun.security.krb5.Config.getDefaultRealm(Config.java:1029) ... 37 more java.lang.IllegalArgumentException: Can't get Kerberos realm at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:65) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:306) at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:291) at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:846) at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:816) at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:689) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2954) at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:2944) at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2810) at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:390) at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.initFileSystem(JavaKeyStoreProvider.java:89) at org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider.<init>(AbstractJavaKeyStoreProvider.java:82) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:49) at org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:41) at org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory.createProvider(JavaKeyStoreProvider.java:100) at org.apache.hadoop.security.alias.CredentialProviderFactory.getProviders(CredentialProviderFactory.java:71) at org.apache.ranger.credentialapi.CredentialReader.getDecryptedString(CredentialReader.java:59) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.getFromJceks(RangerKeyStoreProvider.java:368) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.<init>(RangerKeyStoreProvider.java:85) at 
org.apache.hadoop.crypto.key.RangerKeyStoreProvider$Factory.createProvider(RangerKeyStoreProvider.java:399) at org.apache.hadoop.crypto.key.KeyProviderFactory.get(KeyProviderFactory.java:95) at org.apache.hadoop.crypto.key.kms.server.KMSWebApp.contextInitialized(KMSWebApp.java:177) at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:5110) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5633) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:145) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1694) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1684) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) Caused by: java.lang.reflect.InvocationTargetException at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm(KerberosUtil.java:88) at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:63) ... 31 more Caused by: KrbException: Cannot locate default realm at sun.security.krb5.Config.getDefaultRealm(Config.java:1029) ... 37 more [EL Severe]: ejb: 2025-06-04 16:11:57.703--ServerSession(987689800)--Exception [EclipseLink-4002] (Eclipse Persistence Services - 2.5.2.v20140319-9ad6abd): org.eclipse.persistence.exceptions.DatabaseException Internal Exception: java.sql.SQLException: Access denied for user 'rangerkms'@'uc-hb2-zqkj-hadoop-hdp-node01' (using password: YES) Error Code: 1045 javax.persistence.PersistenceException: Exception [EclipseLink-4002] (Eclipse Persistence Services - 2.5.2.v20140319-9ad6abd): org.eclipse.persistence.exceptions.DatabaseException Internal Exception: java.sql.SQLException: Access denied for user 'rangerkms'@'uc-hb2-zqkj-hadoop-hdp-node01' (using password: YES) Error Code: 1045 at org.eclipse.persistence.internal.jpa.EntityManagerSetupImpl.deploy(EntityManagerSetupImpl.java:766) at org.eclipse.persistence.internal.jpa.EntityManagerFactoryDelegate.getAbstractSession(EntityManagerFactoryDelegate.java:204) at org.eclipse.persistence.internal.jpa.EntityManagerFactoryDelegate.createEntityManagerImpl(EntityManagerFactoryDelegate.java:304) at org.eclipse.persistence.internal.jpa.EntityManagerFactoryImpl.createEntityManagerImpl(EntityManagerFactoryImpl.java:336) at org.eclipse.persistence.internal.jpa.EntityManagerFactoryImpl.createEntityManager(EntityManagerFactoryImpl.java:302) at org.apache.ranger.kms.dao.DaoManager.getEntityManager(DaoManager.java:44) at org.apache.hadoop.crypto.key.RangerKMSDB.initDBConnectivity(RangerKMSDB.java:116) at org.apache.hadoop.crypto.key.RangerKMSDB.<init>(RangerKMSDB.java:81) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.<init>(RangerKeyStoreProvider.java:86) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider$Factory.createProvider(RangerKeyStoreProvider.java:399) at org.apache.hadoop.crypto.key.KeyProviderFactory.get(KeyProviderFactory.java:95) at org.apache.hadoop.crypto.key.kms.server.KMSWebApp.contextInitialized(KMSWebApp.java:177) at 
org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:5110) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5633) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:145) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1694) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1684) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) Caused by: Exception [EclipseLink-4002] (Eclipse Persistence Services - 2.5.2.v20140319-9ad6abd): org.eclipse.persistence.exceptions.DatabaseException Internal Exception: java.sql.SQLException: Access denied for user 'rangerkms'@'uc-hb2-zqkj-hadoop-hdp-node01' (using password: YES) Error Code: 1045 at org.eclipse.persistence.exceptions.DatabaseException.sqlException(DatabaseException.java:331) at org.eclipse.persistence.exceptions.DatabaseException.sqlException(DatabaseException.java:326) at org.eclipse.persistence.sessions.DefaultConnector.connect(DefaultConnector.java:138) at org.eclipse.persistence.sessions.DatasourceLogin.connectToDatasource(DatasourceLogin.java:162) at org.eclipse.persistence.internal.sessions.DatabaseSessionImpl.setOrDetectDatasource(DatabaseSessionImpl.java:204) at org.eclipse.persistence.internal.sessions.DatabaseSessionImpl.loginAndDetectDatasource(DatabaseSessionImpl.java:741) at org.eclipse.persistence.internal.jpa.EntityManagerFactoryProvider.login(EntityManagerFactoryProvider.java:239) at org.eclipse.persistence.internal.jpa.EntityManagerSetupImpl.deploy(EntityManagerSetupImpl.java:685) ... 20 more Caused by: java.sql.SQLException: Access denied for user 'rangerkms'@'uc-hb2-zqkj-hadoop-hdp-node01' (using password: YES) at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:964) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3970) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3906) at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:873) at com.mysql.jdbc.MysqlIO.proceedHandshakeWithPluggableAuthentication(MysqlIO.java:1710) at com.mysql.jdbc.MysqlIO.doHandshake(MysqlIO.java:1226) at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2253) at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2284) at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2083) at com.mysql.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:806) at com.mysql.jdbc.JDBC4Connection.<init>(JDBC4Connection.java:47) at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at com.mysql.jdbc.Util.handleNewInstance(Util.java:425) at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:410) at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:328) at java.sql.DriverManager.getConnection(DriverManager.java:664) at java.sql.DriverManager.getConnection(DriverManager.java:208) at org.eclipse.persistence.sessions.DefaultConnector.connect(DefaultConnector.java:98) ... 
25 more java.io.IOException: Master Key Jceks does not exists at org.apache.hadoop.crypto.key.RangerKeyStoreProvider.<init>(RangerKeyStoreProvider.java:92) at org.apache.hadoop.crypto.key.RangerKeyStoreProvider$Factory.createProvider(RangerKeyStoreProvider.java:399) at org.apache.hadoop.crypto.key.KeyProviderFactory.get(KeyProviderFactory.java:95) at org.apache.hadoop.crypto.key.kms.server.KMSWebApp.contextInitialized(KMSWebApp.java:177) at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:5110) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5633) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:145) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1694) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1684) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) ERROR: Hadoop KMS could not be started REASON: java.lang.NullPointerException Stacktrace: --------------------------------------------------- java.lang.NullPointerException at org.apache.hadoop.crypto.key.kms.server.KMSWebApp.contextInitialized(KMSWebApp.java:178) at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:5110) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5633) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:145) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1694) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1684) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) ---------------------------------------------------
06-05
2025-07-15 15:23:26,572 ERROR Startup errors : org.springframework.context.ApplicationContextException: Unable to start web server; nested exception is org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:163) at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:577) at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:145) at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:745) at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:423) at org.springframework.boot.SpringApplication.run(SpringApplication.java:307) at org.springframework.boot.SpringApplication.run(SpringApplication.java:1317) at org.springframework.boot.SpringApplication.run(SpringApplication.java:1306) at com.alibaba.nacos.Nacos.main(Nacos.java:35) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:49) at org.springframework.boot.loader.Launcher.launch(Launcher.java:108) at org.springframework.boot.loader.Launcher.launch(Launcher.java:58) at org.springframework.boot.loader.PropertiesLauncher.main(PropertiesLauncher.java:467) Caused by: org.springframework.boot.web.server.WebServerException: Unable to start embedded Tomcat at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:142) at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.<init>(TomcatWebServer.java:104) at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getTomcatWebServer(TomcatServletWebServerFactory.java:479) at org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory.getWebServer(TomcatServletWebServerFactory.java:211) at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.createWebServer(ServletWebServerApplicationContext.java:182) at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.onRefresh(ServletWebServerApplicationContext.java:160) ... 
16 common frames omitted Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'distroFilterRegistration' defined in class path resource [com/alibaba/nacos/naming/web/NamingConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.springframework.boot.web.servlet.FilterRegistrationBean]: Factory method 'distroFilterRegistration' threw exception; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'distroFilter': Unsatisfied dependency expressed through field 'distroMapper'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'distroMapper' defined in URL [jar:file:/opt/package/aitransfer/16K_GPU_A/aizt/nacos/target/nacos-server.jar!/BOOT-INF/lib/nacos-naming-2.2.3.jar!/com/alibaba/nacos/naming/core/DistroMapper.class]: Unsatisfied dependency expressed through constructor parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'serverMemberManager' defined in URL [jar:file:/opt/package/aitransfer/16K_GPU_A/aizt/nacos/target/nacos-server.jar!/BOOT-INF/lib/nacos-core-2.2.3.jar!/com/alibaba/nacos/core/cluster/ServerMemberManager.class]: Bean instantiation via constructor failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [com.alibaba.nacos.core.cluster.ServerMemberManager]: Constructor threw exception; nested exception is ErrCode:500, ErrMsg:jmenv.tbsite.net at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:658) at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:486) at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352) at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195) at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582) at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542) at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335) at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234) at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333) at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:213) at org.springframework.boot.web.servlet.ServletContextInitializerBeans.getOrderedBeansOfType(ServletContextInitializerBeans.java:212) at org.springframework.boot.web.servlet.ServletContextInitializerBeans.getOrderedBeansOfType(ServletContextInitializerBeans.java:203) at org.springframework.boot.web.servlet.ServletContextInitializerBeans.addServletContextInitializerBeans(ServletContextInitializerBeans.java:97) at org.springframework.boot.web.servlet.ServletContextInitializerBeans.<init>(ServletContextInitializerBeans.java:86) at 
org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.getServletContextInitializerBeans(ServletWebServerApplicationContext.java:260) at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.selfInitialize(ServletWebServerApplicationContext.java:234) at org.springframework.boot.web.embedded.tomcat.TomcatStarter.onStartup(TomcatStarter.java:53) at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5211) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1393) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1383) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75) at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134) at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:916) at org.apache.catalina.core.StandardHost.startInternal(StandardHost.java:835) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1393) at org.apache.catalina.core.ContainerBase$StartChild.call(ContainerBase.java:1383) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at org.apache.tomcat.util.threads.InlineExecutorService.execute(InlineExecutorService.java:75) at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134) at org.apache.catalina.core.ContainerBase.startInternal(ContainerBase.java:916) at org.apache.catalina.core.StandardEngine.startInternal(StandardEngine.java:265) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183) at org.apache.catalina.core.StandardService.startInternal(StandardService.java:430) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183) at org.apache.catalina.core.StandardServer.startInternal(StandardServer.java:930) at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:183) at org.apache.catalina.startup.Tomcat.start(Tomcat.java:486) at org.springframework.boot.web.embedded.tomcat.TomcatWebServer.initialize(TomcatWebServer.java:123) ... 
21 common frames omitted Caused by: org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.springframework.boot.web.servlet.FilterRegistrationBean]: Factory method 'distroFilterRegistration' threw exception; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'distroFilter': Unsatisfied dependency expressed through field 'distroMapper'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'distroMapper' defined in URL [jar:file:/opt/package/aitransfer/16K_GPU_A/aizt/nacos/target/nacos-server.jar!/BOOT-INF/lib/nacos-naming-2.2.3.jar!/com/alibaba/nacos/naming/core/DistroMapper.class]: Unsatisfied dependency expressed through constructor parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'serverMemberManager' defined in URL [jar:file:/opt/package/aitransfer/16K_GPU_A/aizt/nacos/target/nacos-server.jar!/BOOT-INF/lib/nacos-core-2.2.3.jar!/com/alibaba/nacos/core/cluster/ServerMemberManager.class]: Bean instantiation via constructor failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [com.alibaba.nacos.core.cluster.ServerMemberManager]: Constructor threw exception; nested exception is ErrCode:500, ErrMsg:jmenv.tbsite.net at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:185) at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:653) ... 61 common frames omitted Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'distroFilter': Unsatisfied dependency expressed through field 'distroMapper'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'distroMapper' defined in URL [jar:file:/opt/package/aitransfer/16K_GPU_A/aizt/nacos/target/nacos-server.jar!/BOOT-INF/lib/nacos-naming-2.2.3.jar!/com/alibaba/nacos/naming/core/DistroMapper.class]: Unsatisfied dependency expressed through constructor parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'serverMemberManager' defined in URL [jar:file:/opt/package/aitransfer/16K_GPU_A/aizt/nacos/target/nacos-server.jar!/BOOT-INF/lib/nacos-core-2.2.3.jar!/com/alibaba/nacos/core/cluster/ServerMemberManager.class]: Bean instantiation via constructor failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [com.alibaba.nacos.core.cluster.ServerMemberManager]: Constructor threw exception; nested exception is ErrCode:500, ErrMsg:jmenv.tbsite.net at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:660) at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:640) at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119) at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:399) at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431) 
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619) at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542) at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335) at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234) at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333) at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208) at org.springframework.context.annotation.ConfigurationClassEnhancer$BeanMethodInterceptor.resolveBeanReference(ConfigurationClassEnhancer.java:362) 再次分析
最新发布
07-16
RuoYi startup error

17:09:55.211 [restartedMain] ERROR o.s.b.SpringApplication - [reportFailure,870] - Application run failed
org.yaml.snakeyaml.parser.ParserException: while parsing a block mapping
 in 'reader', line 105, column 3:
    typeAliasesPackage: com.ruoyi.** ...
    ^
expected <block end>, but found '<block mapping start>'
 in 'reader', line 111, column 5:
      global-config:
      ^
	at org.yaml.snakeyaml.parser.ParserImpl$ParseBlockMappingKey.produce(ParserImpl.java:617)
	at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:165)
	at org.yaml.snakeyaml.comments.CommentEventsCollector$1.peek(CommentEventsCollector.java:59)
	at org.yaml.snakeyaml.comments.CommentEventsCollector$1.peek(CommentEventsCollector.java:45)
	at org.yaml.snakeyaml.comments.CommentEventsCollector.collectEvents(CommentEventsCollector.java:140)
	at org.yaml.snakeyaml.comments.CommentEventsCollector.collectEvents(CommentEventsCollector.java:119)
	at org.yaml.snakeyaml.composer.Composer.composeScalarNode(Composer.java:221)
	at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:191)
	at org.yaml.snakeyaml.composer.Composer.composeValueNode(Composer.java:313)
	at org.yaml.snakeyaml.composer.Composer.composeMappingChildren(Composer.java:304)
	at org.yaml.snakeyaml.composer.Composer.composeMappingNode(Composer.java:288)
	at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:195)
	at org.yaml.snakeyaml.composer.Composer.composeValueNode(Composer.java:313)
	at org.yaml.snakeyaml.composer.Composer.composeMappingChildren(Composer.java:304)
	at org.yaml.snakeyaml.composer.Composer.composeMappingNode(Composer.java:288)
	at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:195)
	at org.yaml.snakeyaml.composer.Composer.getNode(Composer.java:115)
	at org.yaml.snakeyaml.constructor.BaseConstructor.getData(BaseConstructor.java:135)
	at org.springframework.boot.env.OriginTrackedYamlLoader$OriginTrackingConstructor.getData(OriginTrackedYamlLoader.java:103)
	at org.yaml.snakeyaml.Yaml$1.next(Yaml.java:512)
	at org.springframework.beans.factory.config.YamlProcessor.process(YamlProcessor.java:199)
	at org.springframework.beans.factory.config.YamlProcessor.process(YamlProcessor.java:166)
	at org.springframework.boot.env.OriginTrackedYamlLoader.load(OriginTrackedYamlLoader.java:88)
	at org.springframework.boot.env.YamlPropertySourceLoader.load(YamlPropertySourceLoader.java:50)
	at org.springframework.boot.context.config.StandardConfigDataLoader.load(StandardConfigDataLoader.java:54)
	at org.springframework.boot.context.config.StandardConfigDataLoader.load(StandardConfigDataLoader.java:36)
	at org.springframework.boot.context.config.ConfigDataLoaders.load(ConfigDataLoaders.java:107)
	at org.springframework.boot.context.config.ConfigDataImporter.load(ConfigDataImporter.java:128)
	at org.springframework.boot.context.config.ConfigDataImporter.resolveAndLoad(ConfigDataImporter.java:86)
	at org.springframework.boot.context.config.ConfigDataEnvironmentContributors.withProcessedImports(ConfigDataEnvironmentContributors.java:116)
	at org.springframework.boot.context.config.ConfigDataEnvironment.processInitial(ConfigDataEnvironment.java:240)
	at org.springframework.boot.context.config.ConfigDataEnvironment.processAndApply(ConfigDataEnvironment.java:227)
	at org.springframework.boot.context.config.ConfigDataEnvironmentPostProcessor.postProcessEnvironment(ConfigDataEnvironmentPostProcessor.java:102)
	at org.springframework.boot.context.config.ConfigDataEnvironmentPostProcessor.postProcessEnvironment(ConfigDataEnvironmentPostProcessor.java:94)
	at org.springframework.boot.env.EnvironmentPostProcessorApplicationListener.onApplicationEnvironmentPreparedEvent(EnvironmentPostProcessorApplicationListener.java:102)
	at org.springframework.boot.env.EnvironmentPostProcessorApplicationListener.onApplicationEvent(EnvironmentPostProcessorApplicationListener.java:87)
	at org.springframework.context.event.SimpleApplicationEventMulticaster.doInvokeListener(SimpleApplicationEventMulticaster.java:178)
	at org.springframework.context.event.SimpleApplicationEventMulticaster.invokeListener(SimpleApplicationEventMulticaster.java:171)
	at org.springframework.context.event.SimpleApplicationEventMulticaster.multicastEvent(SimpleApplicationEventMulticaster.java:145)
	at org.springframework.context.event.SimpleApplicationEventMulticaster.multicastEvent(SimpleApplicationEventMulticaster.java:133)
	at org.springframework.boot.context.event.EventPublishingRunListener.environmentPrepared(EventPublishingRunListener.java:82)
	at org.springframework.boot.SpringApplicationRunListeners.lambda$environmentPrepared$2(SpringApplicationRunListeners.java:63)
	at java.util.ArrayList.forEach(ArrayList.java:1249)
	at org.springframework.boot.SpringApplicationRunListeners.doWithListeners(SpringApplicationRunListeners.java:117)
	at org.springframework.boot.SpringApplicationRunListeners.doWithListeners(SpringApplicationRunListeners.java:111)
	at org.springframework.boot.SpringApplicationRunListeners.environmentPrepared(SpringApplicationRunListeners.java:62)
	at org.springframework.boot.SpringApplication.prepareEnvironment(SpringApplication.java:379)
	at org.springframework.boot.SpringApplication.run(SpringApplication.java:337)
	at org.springframework.boot.SpringApplication.run(SpringApplication.java:1370)
	at org.springframework.boot.SpringApplication.run(SpringApplication.java:1359)
	at com.ruoyi.RuoYiApplication.main(RuoYiApplication.java:21)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.springframework.boot.devtools.restart.RestartLauncher.run(RestartLauncher.java:49)
Process finished with exit code 0
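The two positions SnakeYAML reports tell the whole story: the mapping whose keys start at column 3 (typeAliasesPackage at line 105) is interrupted at line 111 by global-config:, which starts at column 5 and therefore opens an unexpected nested mapping. In other words, global-config is indented two spaces deeper than its sibling keys. A hedged reconstruction of the broken block and its fix; the parent key and the keys under global-config are illustrative, only the two lines quoted by the error come from the log:

# Broken: global-config sits at column 5 while its siblings sit at column 3
mybatis-plus:
  typeAliasesPackage: com.ruoyi.**
    global-config:
      db-config:
        id-type: auto

# Fixed: align global-config with the other keys of the same mapping
mybatis-plus:
  typeAliasesPackage: com.ruoyi.**
  global-config:
    db-config:
      id-type: auto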
Starting Job Manager
[ERROR] The execution result is empty.
[ERROR] Could not get JVM parameters and dynamic configurations properly.
[ERROR] Raw output from BashJavaUtils:
INFO  [] - Using standard YAML parser to load flink configuration file from /opt/flink/conf/config.yaml.
ERROR [] - Failed to parse YAML configuration
org.snakeyaml.engine.v2.exceptions.YamlEngineException: expected '<document start>', but found '<scalar>'
 in reader, line 1, column 9
	at org.snakeyaml.engine.v2.parser.ParserImpl$ParseDocumentStart.produce(ParserImpl.java:493) ~[flink-dist-2.0.0.jar:2.0.0]
	at org.snakeyaml.engine.v2.parser.ParserImpl.lambda$produce$1(ParserImpl.java:232) ~[flink-dist-2.0.0.jar:2.0.0]
	at java.util.Optional.ifPresent(Unknown Source) ~[?:?]
	at org.snakeyaml.engine.v2.parser.ParserImpl.produce(ParserImpl.java:232) ~[flink-dist-2.0.0.jar:2.0.0]
	at org.snakeyaml.engine.v2.parser.ParserImpl.peekEvent(ParserImpl.java:206) ~[flink-dist-2.0.0.jar:2.0.0]
	at org.snakeyaml.engine.v2.parser.ParserImpl.checkEvent(ParserImpl.java:198) ~[flink-dist-2.0.0.jar:2.0.0]
	at org.snakeyaml.engine.v2.composer.Composer.getSingleNode(Composer.java:131) ~[flink-dist-2.0.0.jar:2.0.0]
	at org.snakeyaml.engine.v2.api.Load.loadOne(Load.java:110) ~[flink-dist-2.0.0.jar:2.0.0]
	at org.snakeyaml.engine.v2.api.Load.loadFromInputStream(Load.java:123) ~[flink-dist-2.0.0.jar:2.0.0]
	at org.apache.flink.configuration.YamlParserUtils.loadYamlFile(YamlParserUtils.java:100) [flink-dist-2.0.0.jar:2.0.0]
	at org.apache.flink.configuration.GlobalConfiguration.loadYAMLResource(GlobalConfiguration.java:252) [flink-dist-2.0.0.jar:2.0.0]
	at org.apache.flink.configuration.GlobalConfiguration.loadConfiguration(GlobalConfiguration.java:150) [flink-dist-2.0.0.jar:2.0.0]
	at org.apache.flink.runtime.util.ConfigurationParserUtils.loadCommonConfiguration(ConfigurationParserUtils.java:153) [flink-dist-2.0.0.jar:2.0.0]
	at org.apache.flink.runtime.util.bash.FlinkConfigLoader.loadConfiguration(FlinkConfigLoader.java:41) [flink-dist-2.0.0.jar:2.24.1]
	at org.apache.flink.runtime.util.bash.BashJavaUtils.runCommand(BashJavaUtils.java:66) [bash-java-utils.jar:2.24.1]
	at org.apache.flink.runtime.util.bash.BashJavaUtils.main(BashJavaUtils.java:54) [bash-java-utils.jar:2.24.1]
Exception in thread "main" java.lang.RuntimeException: Error parsing YAML configuration.
	at org.apache.flink.configuration.GlobalConfiguration.loadYAMLResource(GlobalConfiguration.java:257)
	at org.apache.flink.configuration.GlobalConfiguration.loadConfiguration(GlobalConfiguration.java:150)
	at org.apache.flink.runtime.util.ConfigurationParserUtils.loadCommonConfiguration(ConfigurationParserUtils.java:153)
	at org.apache.flink.runtime.util.bash.FlinkConfigLoader.loadConfiguration(FlinkConfigLoader.java:41)

The docker-compose.yml that produced this deployment:

---
services:
  kafka-0:
    image: apache/kafka:3.9.1
    container_name: kafka-0
    ports:
      - "19092:9092"
      - "19093:9093"
    environment:
      KAFKA_NODE_ID: 1
      KAFKA_PROCESS_ROLES: broker,controller
      KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:19093
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:19092,CONTROLLER://localhost:19093
      KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
      KAFKA_LOG_DIRS: /var/lib/kafka/data
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
    volumes:
      - ./kafka/conf/log4j.properties:/opt/kafka/config/log4j.properties
      - ./kafka/0/conf/kraft/server.properties:/opt/kafka/config/kraft/server.properties
      - ./kafka/0/data:/var/lib/kafka/data
    command:
      - sh
      - -c
      - |
        if [ ! -f /var/lib/kafka/data/meta.properties ]; then
          # Generate a random cluster UUID and format the storage (-c points at the config file)
          /opt/kafka/bin/kafka-storage.sh format \
            -t $(/opt/kafka/bin/kafka-storage.sh random-uuid) \
            -c /opt/kafka/config/kraft/server.properties
        fi
        exec /opt/kafka/bin/kafka-server-start.sh /opt/kafka/config/kraft/server.properties
    healthcheck:
      test:
        - CMD
        - kafka-broker-api-versions.sh
        - --bootstrap-server
        - localhost:19092
      interval: 10s
      timeout: 10s
      retries: 5
    networks:
      - datacamp-net
  flink-jobmanager-0:
    image: flink:2.0.0-java17
    container_name: flink-jobmanager-0
    ports:
      - "18081:8081"
    environment:
      FLINK_PROPERTIES: |
        jobmanager.rpc.address: flink-jobmanager-0
        state.backend: filesystem
        state.checkpoints.dir: file:///tmp/flink-checkpoints
        heartbeat.interval: 1000
        heartbeat.timeout: 5000
        rest.flamegraph.enabled: true
        web.upload.dir: /opt/flink/usrlib
    volumes:
      - ./flink/jobmanager/conf:/opt/flink/conf
      - ./flink/jobmanager/0/flink-checkpoints:/tmp/flink-checkpoints
      - ./flink/jobmanager/0/usrlib:/opt/flink/usrlib
    command: jobmanager
    healthcheck:
      test:
        - CMD
        - curl
        - -f
        - http://localhost:8081
      interval: 15s
      timeout: 5s
      retries: 10
    networks:
      - datacamp-net
  flink-taskmanager-0:
    image: flink:2.0.0-java17
    container_name: flink-taskmanager-0
    environment:
      FLINK_PROPERTIES: |
        jobmanager.rpc.address: flink-jobmanager-0
        taskmanager.numberOfTaskSlots: 2
        state.backend: filesystem
        state.checkpoints.dir: file:///tmp/flink-checkpoints
        heartbeat.interval: 1000
        heartbeat.timeout: 5000
    volumes:
      - ./flink/taskmanager/conf:/opt/flink/conf
      - ./flink/taskmanager/0/flink-checkpoints:/tmp/flink-checkpoints
      - ./flink/taskmanager/0/usrlib:/opt/flink/usrlib
    command: taskmanager
    depends_on:
      flink-jobmanager-0:
        condition: service_healthy
    networks:
      - datacamp-net
  flink-taskmanager-1:
    image: flink:2.0.0-java17
    container_name: flink-taskmanager-1
    environment:
      FLINK_PROPERTIES: |
        jobmanager.rpc.address: flink-jobmanager-0
        taskmanager.numberOfTaskSlots: 2
        state.backend: filesystem
        state.checkpoints.dir: file:///tmp/flink-checkpoints
        heartbeat.interval: 1000
        heartbeat.timeout: 5000
    volumes:
      - ./flink/taskmanager/conf:/opt/flink/conf
      - ./flink/taskmanager/1/flink-checkpoints:/tmp/flink-checkpoints
      - ./flink/taskmanager/1/usrlib:/opt/flink/usrlib
    command: taskmanager
    depends_on:
      flink-jobmanager-0:
        condition: service_healthy
    networks:
      - datacamp-net
networks:
  datacamp-net:
    driver: bridge
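Two things are worth noting about the Flink failure above. First, the log itself says Flink 2.0 reads /opt/flink/conf/config.yaml with the standard YAML parser, so any line that is not a well-formed mapping (a stray token, a tab used for indentation, leftover content in the old loose format) aborts JobManager startup; "line 1, column 9" means the very first line of the file is already rejected. Second, the compose file mounts ./flink/jobmanager/conf and ./flink/taskmanager/conf over /opt/flink/conf, so it is the mounted config.yaml, not the image default, that must parse. A minimal sketch of a config.yaml matching this setup, mirroring the FLINK_PROPERTIES values above; the dotted flat keys shown are an assumption about acceptable style (the nested YAML form should work as well):

# ./flink/jobmanager/conf/config.yaml — every line a plain `key: value` pair
jobmanager.rpc.address: flink-jobmanager-0
state.backend: filesystem
state.checkpoints.dir: file:///tmp/flink-checkpoints
heartbeat.interval: 1000
heartbeat.timeout: 5000
rest.flamegraph.enabled: true
web.upload.dir: /opt/flink/usrlib

Once the mounted file parses, `docker compose up -d` should bring the JobManager healthcheck green before the TaskManagers start, thanks to the service_healthy conditions in depends_on.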