2025-10-14 14:27:05.536 INFO 32308 — [ main] c.t.n.demo.basicspringboot.DelayConsume : Starting DelayConsume using Java 1.8.0_462-462 on 18088363-BG with PID 32308 (D:\r\idmdemo\target\classes started by admin in D:\r\idmdemo)
2025-10-14 14:27:05.537 INFO 32308 — [ main] c.t.n.demo.basicspringboot.DelayConsume : No active profile set, falling back to 1 default profile: "default"
2025-10-14 14:27:06.129 INFO 32308 — [ main] o.s.b.w.embedded.tomcat.TomcatWebServer : Tomcat initialized with port(s): 8080 (http)
2025-10-14 14:27:06.135 INFO 32308 — [ main] o.apache.catalina.core.StandardService : Starting service [Tomcat]
2025-10-14 14:27:06.135 INFO 32308 — [ main] org.apache.catalina.core.StandardEngine : Starting Servlet engine: [Apache Tomcat/9.0.71]
2025-10-14 14:27:06.195 INFO 32308 — [ main] o.a.c.c.C.[Tomcat].[localhost].[/] : Initializing Spring embedded WebApplicationContext
2025-10-14 14:27:06.195 INFO 32308 — [ main] w.s.c.ServletWebServerApplicationContext : Root WebApplicationContext: initialization completed in 635 ms
2025-10-14 14:27:06.625 INFO 32308 — [ main] o.s.b.a.e.web.EndpointLinksResolver : Exposing 1 endpoint(s) beneath base path '/actuator'
2025-10-14 14:27:06.649 INFO 32308 — [ main] o.s.b.w.embedded.tomcat.TomcatWebServer : Tomcat started on port(s): 8080 (http) with context path ''
2025-10-14 14:27:06.656 INFO 32308 — [ main] c.t.n.demo.basicspringboot.DelayConsume : Started DelayConsume in 1.325 seconds (JVM running for 1.661)
2025-10-14 14:27:06.671 INFO 32308 — [ main] c.t.s.e.port.kafka.KafkaEventCenter : Registering delay consumer
2025-10-14 14:27:06.672 INFO 32308 — [ main] c.t.s.e.port.kafka.KafkaEventCenter : start to register topic: delay_topic_level_2, groupId: delay_consumer_group
2025-10-14 14:27:06.677 INFO 32308 — [ main] c.t.s.e.port.kafka.KafkaEventCenter : Delay consumer registered successfully
2025-10-14 14:27:06.677 INFO 32308 — [ main] c.t.s.e.port.kafka.KafkaEventCenter : start to register topic: vms_dlq_hello-topic, groupId: delay
2025-10-14 14:27:06.678 INFO 32308 — [ main] c.t.s.e.port.kafka.KafkaEventCenter : DelayMs=2000, level=2, expirationTime=1760423228678
2025-10-14 14:27:06.688 INFO 32308 — [llo-topic_delay] o.a.k.clients.consumer.ConsumerConfig : ConsumerConfig values:
allow.auto.create.topics = true
auto.commit.interval.ms = 5000
auto.offset.reset = latest
bootstrap.servers = [192.168.203.128:9092]
check.crcs = true
client.dns.lookup = use_all_dns_ips
client.id = consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38
client.rack =
connections.max.idle.ms = 540000
default.api.timeout.ms = 60000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = delay
group.instance.id = null
heartbeat.interval.ms = 3000
interceptor.classes = []
internal.leave.group.on.close = true
internal.throw.on.fetch.stable.offset.unsupported = false
isolation.level = read_uncommitted
key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 200
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [org.apache.kafka.clients.consumer.CooperativeStickyAssignor]
receive.buffer.bytes = -1
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
security.providers = null
send.buffer.bytes = 131072
session.timeout.ms = 45000
socket.connection.setup.timeout.max.ms = 30000
socket.connection.setup.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2]
ssl.endpoint.identification.algorithm = https
ssl.engine.factory.class = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.certificate.chain = null
ssl.keystore.key = null
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLSv1.2
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.certificates = null
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
2025-10-14 14:27:06.688 INFO 32308 — [_consumer_group] o.a.k.clients.consumer.ConsumerConfig : ConsumerConfig values:
allow.auto.create.topics = true
auto.commit.interval.ms = 5000
auto.offset.reset = latest
bootstrap.servers = [192.168.203.128:9092]
check.crcs = true
client.dns.lookup = use_all_dns_ips
client.id = consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e
client.rack =
connections.max.idle.ms = 540000
default.api.timeout.ms = 60000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = delay_consumer_group
group.instance.id = null
heartbeat.interval.ms = 3000
interceptor.classes = []
internal.leave.group.on.close = true
internal.throw.on.fetch.stable.offset.unsupported = false
isolation.level = read_uncommitted
key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 200
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [org.apache.kafka.clients.consumer.CooperativeStickyAssignor]
receive.buffer.bytes = -1
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
security.providers = null
send.buffer.bytes = 131072
session.timeout.ms = 45000
socket.connection.setup.timeout.max.ms = 30000
socket.connection.setup.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2]
ssl.endpoint.identification.algorithm = https
ssl.engine.factory.class = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.certificate.chain = null
ssl.keystore.key = null
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLSv1.2
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.certificates = null
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
2025-10-14 14:27:06.701 INFO 32308 — [ main] o.a.k.clients.producer.ProducerConfig : ProducerConfig values:
acks = -1
batch.size = 4096
bootstrap.servers = [192.168.203.128:9092]
buffer.memory = 33554432
client.dns.lookup = use_all_dns_ips
client.id = producer-1
compression.type = none
connections.max.idle.ms = 540000
delivery.timeout.ms = 120000
enable.idempotence = false
interceptor.classes = []
internal.auto.downgrade.txn.commit = false
key.serializer = class org.apache.kafka.common.serialization.StringSerializer
linger.ms = 1
max.block.ms = 60000
max.in.flight.requests.per.connection = 5
max.request.size = 1048576
metadata.max.age.ms = 300000
metadata.max.idle.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
receive.buffer.bytes = 32768
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retries = 2147483647
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
security.providers = null
send.buffer.bytes = 131072
socket.connection.setup.timeout.max.ms = 30000
socket.connection.setup.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2]
ssl.endpoint.identification.algorithm = https
ssl.engine.factory.class = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.certificate.chain = null
ssl.keystore.key = null
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLSv1.2
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.certificates = null
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
transaction.timeout.ms = 60000
transactional.id = null
value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
2025-10-14 14:27:06.723 INFO 32308 — [ main] o.a.kafka.common.utils.AppInfoParser : Kafka version: 2.8.0
2025-10-14 14:27:06.723 INFO 32308 — [ main] o.a.kafka.common.utils.AppInfoParser : Kafka commitId: ebb1d6e21cc92130
2025-10-14 14:27:06.723 INFO 32308 — [ main] o.a.kafka.common.utils.AppInfoParser : Kafka startTimeMs: 1760423226722
2025-10-14 14:27:06.732 INFO 32308 — [llo-topic_delay] o.a.kafka.common.utils.AppInfoParser : Kafka version: 2.8.0
2025-10-14 14:27:06.732 INFO 32308 — [llo-topic_delay] o.a.kafka.common.utils.AppInfoParser : Kafka commitId: ebb1d6e21cc92130
2025-10-14 14:27:06.732 INFO 32308 — [llo-topic_delay] o.a.kafka.common.utils.AppInfoParser : Kafka startTimeMs: 1760423226732
2025-10-14 14:27:06.733 INFO 32308 — [llo-topic_delay] c.t.s.e.p.k.consumer.KafkaConsumerTask : start to consumer kafka topic: vms_dlq_hello-topic
2025-10-14 14:27:06.733 INFO 32308 — [_consumer_group] o.a.kafka.common.utils.AppInfoParser : Kafka version: 2.8.0
2025-10-14 14:27:06.733 INFO 32308 — [_consumer_group] o.a.kafka.common.utils.AppInfoParser : Kafka commitId: ebb1d6e21cc92130
2025-10-14 14:27:06.733 INFO 32308 — [_consumer_group] o.a.kafka.common.utils.AppInfoParser : Kafka startTimeMs: 1760423226732
2025-10-14 14:27:06.733 INFO 32308 — [llo-topic_delay] c.t.s.e.p.k.c.AbstractTaskService : KafkaConsumerTask is running! topic:vms_dlq_hello-topic
2025-10-14 14:27:06.734 INFO 32308 — [_consumer_group] c.t.s.e.p.k.c.GenericKafkaConsumerTask : start to consumer kafka topic: delay_topic_level_2
2025-10-14 14:27:06.734 INFO 32308 — [_consumer_group] c.t.s.e.p.k.c.AbstractTaskService : KafkaConsumerTask is running! topic:delay_topic_level_2
2025-10-14 14:27:06.734 INFO 32308 — [_consumer_group] o.a.k.clients.consumer.KafkaConsumer : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Subscribed to topic(s): delay_topic_level_2
2025-10-14 14:27:06.734 INFO 32308 — [llo-topic_delay] o.a.k.clients.consumer.KafkaConsumer : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Subscribed to topic(s): vms_dlq_hello-topic
2025-10-14 14:27:06.889 INFO 32308 — [_consumer_group] org.apache.kafka.clients.Metadata : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Cluster ID: Cp8MopI8TC6QJ8pHpIkP9A
2025-10-14 14:27:06.889 INFO 32308 — [llo-topic_delay] org.apache.kafka.clients.Metadata : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Cluster ID: Cp8MopI8TC6QJ8pHpIkP9A
2025-10-14 14:27:06.889 INFO 32308 — [ad | producer-1] org.apache.kafka.clients.Metadata : [Producer clientId=producer-1] Cluster ID: Cp8MopI8TC6QJ8pHpIkP9A
2025-10-14 14:27:06.890 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Discovered group coordinator admin1-virtual-machine:9092 (id: 2147483647 rack: null)
2025-10-14 14:27:06.890 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Discovered group coordinator admin1-virtual-machine:9092 (id: 2147483647 rack: null)
2025-10-14 14:27:07.319 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] (Re-)joining group
2025-10-14 14:27:07.319 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] (Re-)joining group
2025-10-14 14:27:07.340 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] (Re-)joining group
2025-10-14 14:27:07.340 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] (Re-)joining group
2025-10-14 14:27:07.342 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Successfully joined group with generation Generation{generationId=57, memberId='consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e-f9f25a29-db98-4de4-882c-f19cfd2b2579', protocol='cooperative-sticky'}
2025-10-14 14:27:07.342 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Successfully joined group with generation Generation{generationId=135, memberId='consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38-d305c0d3-8886-4596-bfdc-81ae28db78a1', protocol='cooperative-sticky'}
2025-10-14 14:27:07.343 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Finished assignment for group at generation 57: {consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e-f9f25a29-db98-4de4-882c-f19cfd2b2579=Assignment(partitions=[delay_topic_level_2-0])}
2025-10-14 14:27:07.343 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Finished assignment for group at generation 135: {consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38-d305c0d3-8886-4596-bfdc-81ae28db78a1=Assignment(partitions=[vms_dlq_hello-topic-0])}
2025-10-14 14:27:07.345 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Successfully synced group in generation Generation{generationId=57, memberId='consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e-f9f25a29-db98-4de4-882c-f19cfd2b2579', protocol='cooperative-sticky'}
2025-10-14 14:27:07.345 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Updating assignment with
Assigned partitions: [delay_topic_level_2-0]
Current owned partitions: []
Added partitions (assigned - owned): [delay_topic_level_2-0]
Revoked partitions (owned - assigned): []
2025-10-14 14:27:07.345 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Notifying assignor about the new Assignment(partitions=[delay_topic_level_2-0])
2025-10-14 14:27:07.346 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Adding newly assigned partitions: delay_topic_level_2-0
2025-10-14 14:27:07.346 INFO 32308 — [_consumer_group] com.tplink.smb.eventcenter.api.Handler : ending rebalance!
2025-10-14 14:27:07.347 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.AbstractCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Successfully synced group in generation Generation{generationId=135, memberId='consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38-d305c0d3-8886-4596-bfdc-81ae28db78a1', protocol='cooperative-sticky'}
2025-10-14 14:27:07.347 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Updating assignment with
Assigned partitions: [vms_dlq_hello-topic-0]
Current owned partitions: []
Added partitions (assigned - owned): [vms_dlq_hello-topic-0]
Revoked partitions (owned - assigned): []
2025-10-14 14:27:07.347 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Notifying assignor about the new Assignment(partitions=[vms_dlq_hello-topic-0])
2025-10-14 14:27:07.347 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Adding newly assigned partitions: vms_dlq_hello-topic-0
2025-10-14 14:27:07.347 INFO 32308 — [llo-topic_delay] com.tplink.smb.eventcenter.api.Handler : ending rebalance!
2025-10-14 14:27:07.351 INFO 32308 — [llo-topic_delay] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_7815f9f9-5b41-47f5-9be8-6aa2b46e0d38, groupId=delay] Setting offset for partition vms_dlq_hello-topic-0 to the committed offset FetchPosition{offset=0, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=Optional[admin1-virtual-machine:9092 (id: 0 rack: null)], epoch=absent}}
2025-10-14 14:27:07.351 INFO 32308 — [_consumer_group] o.a.k.c.c.internals.ConsumerCoordinator : [Consumer clientId=consumer_10.13.35.30_eb89af4b-074d-4f77-86e7-c603baae8c7e, groupId=delay_consumer_group] Setting offset for partition delay_topic_level_2-0 to the committed offset FetchPosition{offset=377, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=Optional[admin1-virtual-machine:9092 (id: 0 rack: null)], epoch=absent}}

@Slf4j
@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class})
@ComponentScan(basePackages = {
        "com.tplink.nbu.demo.basicspringboot",
        "com.tplink.smb.eventcenter.port.kafka.deadletter",
        "com.tplink.smb.eventcenter.api.config"
})
public class DelayConsume implements CommandLineRunner {

    @Autowired
    private KafkaEventCenter eventCenter;

    @Autowired
    private DLQConfig deadLetterConfig;

    private static final String EVENT_TOPIC = "delay_topic_level_2";

    public static void main(String[] args) {
        SpringApplication.run(DelayConsume.class, args);
    }

    @Override
    public void run(String... args) throws Exception {
        registerDelayConsumer();
        registerEventConsumer();
        eventCenter.sendDelay("vms_dlq_hello-topic", "key1", 0,
                new Event("key1", "delayed trigger message"), 2000, new ForceMatchStrategy());
    }

    private void registerDelayConsumer() {
        eventCenter.registerDelayConsumer(EVENT_TOPIC, Executors.newSingleThreadExecutor());
    }

    private void registerEventConsumer() {
        EventHandler eventHandler = event -> {
            // Log the message content and the consumption timestamp to make the consumption time explicit
            log.info("[Consumed at: {}] Consumption triggered, message content: {}",
                    LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME),
                    event.getMessage());
        };
        eventCenter.registerUnicast(
                "vms_dlq_hello-topic",
                "delay",
                eventHandler,
                ForkJoinPool.commonPool()
        );
    }
}
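For context: the sendDelay(..., 2000, ...) call above lines up with the earlier log line "DelayMs=2000, level=2, expirationTime=1760423228678"; the expiry appears to be an absolute timestamp of send time plus delay. A minimal arithmetic sketch of that relationship (variable names are illustrative, not taken from KafkaEventCenter):

// Illustrative only: reproduces the arithmetic implied by the log line
// "DelayMs=2000, level=2, expirationTime=1760423228678"; this is not the actual sendDelay code.
long sendTimeMs = 1760423226678L;           // ~2025-10-14 14:27:06.678, when the event was sent
long delayMs = 2000L;                       // the delay passed to sendDelay
long expirationTime = sendTimeMs + delayMs; // 1760423228678, matching the logged expirationTime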
/**
 * Forward the original event to the target topic.
 */
public void forwardToTargetTopic(DelayEvent delayEvent) {
    try {
        log.info("Preparing to forward delayed message: ");
        send(delayEvent.getTopic(), delayEvent.getPartition(), delayEvent.getEvent());
        log.info("Delayed event forwarded, targetTopic: {}", delayEvent.getTopic());
    } catch (Exception e) {
        log.error("Failed to forward delayed event, targetTopic: {}", delayEvent.getTopic(), e);
    }
}

/**
 * Register the delay consumer (using an existing scheduler).
 *
 * @param delayTopic      delay topic (e.g. "delay_topic_level_2")
 * @param executorService thread pool that runs the consumer task
 */
public void registerDelayConsumer(String delayTopic, ExecutorService executorService) {
    String delayGroupId = "delay_consumer_group";
    // When constructing the delay handler, inject the existing scheduledExecutorService
    log.info("Registering delay consumer");
    DelayHandler delayHandler = new DelayHandler(this, Executors.newScheduledThreadPool(5, r -> {
        Thread t = new Thread(r, "Forwarding-Worker");
        t.setDaemon(true);
        return t;
    }));
    registerUnicastGenerically(
            delayTopic,       // delay topic to listen on
            delayGroupId,     // fixed group id for the delay consumer
            delayHandler,     // use the delay handler
            executorService,
            DelayEvent.class
    );
    log.info("Delay consumer registered successfully");
}
@Slf4j
@RequiredArgsConstructor
public class DelayHandler implements GenericEventHandler {

    private final KafkaEventCenter eventCenter;
    private final ScheduledExecutorService delayScheduler;
    private final Map<String, Boolean> pausedTopics = new ConcurrentHashMap<>();

    @Override
    public void handleEvent(DelayEvent delayEvent) {
        log.info("Entered handleEvent");
        long currentTime = System.currentTimeMillis();
        long expirationTime = delayEvent.getExpirationTime();
        String targetTopic = delayEvent.getTopic();
        // Forward immediately if the event has already expired
        if (expirationTime <= currentTime) {
            eventCenter.forwardToTargetTopic(delayEvent);
            return;
        } else {
            // Not yet expired: pause consumption of the target topic
            pauseTargetTopic(targetTopic);
            // Compute how long to wait
            long delayMs = expirationTime - currentTime;
            // Schedule a delayed task: resume the topic and forward the event once it expires
            delayScheduler.schedule(() -> {
                log.info("Delayed task fired, targetTopic={}, currentTime={}", targetTopic, System.currentTimeMillis());
                resumeTargetTopic(targetTopic);
                eventCenter.forwardToTargetTopic(delayEvent);
            }, delayMs, TimeUnit.MILLISECONDS);
        }
    }

    /**
     * Pause the consumer of the target topic.
     */
    private void pauseTargetTopic(String targetTopic) {
        if (pausedTopics.putIfAbsent(targetTopic, true) == null) {
            try {
                eventCenter.pauseTopic(targetTopic);
                log.info("Paused consumption of delayed topic, targetTopic: {}", targetTopic);
            } catch (Exception e) {
                log.error("Failed to pause topic, targetTopic: {}", targetTopic, e);
                pausedTopics.remove(targetTopic);
            }
        }
    }

    /**
     * Resume the consumer of the target topic.
     */
    private void resumeTargetTopic(String targetTopic) {
        if (pausedTopics.remove(targetTopic) != null) {
            try {
                eventCenter.resumeTopic(targetTopic);
                log.info("Resumed consumption of delayed topic, targetTopic: {}", targetTopic);
            } catch (Exception e) {
                log.error("Failed to resume topic, targetTopic: {}", targetTopic, e);
            }
        }
    }
}
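The delay mechanism above ultimately hinges on what KafkaEventCenter.pauseTopic / resumeTopic do to the underlying consumer, and that code is not included in this paste. On the plain Kafka client, the equivalent operations would look roughly like the sketch below; this is an assumed wiring for orientation, not the event center's real implementation. Note that KafkaConsumer is not thread-safe, so calls like these normally have to run on the polling thread rather than on a separate scheduler thread.

import java.util.Set;
import java.util.stream.Collectors;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

// Sketch only: how pauseTopic/resumeTopic could map onto the raw consumer API.
class TopicPauseSupport {
    private final KafkaConsumer<String, byte[]> consumer;

    TopicPauseSupport(KafkaConsumer<String, byte[]> consumer) {
        this.consumer = consumer;
    }

    // Stop fetching from every currently assigned partition of the given topic.
    void pauseTopic(String topic) {
        Set<TopicPartition> partitions = consumer.assignment().stream()
                .filter(tp -> tp.topic().equals(topic))
                .collect(Collectors.toSet());
        consumer.pause(partitions);
    }

    // Resume fetching from the paused partitions of the given topic.
    void resumeTopic(String topic) {
        Set<TopicPartition> partitions = consumer.paused().stream()
                .filter(tp -> tp.topic().equals(topic))
                .collect(Collectors.toSet());
        consumer.resume(partitions);
    }
}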
Please continue the analysis: why is the DelayHandler not working as expected?