删除 debezium 连接器后,PostgreSQL 复制槽仍处于活动状态

时间:2021-01-06 02:34:10

标签: postgresql apache-kafka-connect debezium

3 个 Debezium 连接器连接到同一个 PostgreSQL(每个连接器都有自己的复制槽)。我使用 REST API(DELETE)删除了所有连接器。在连接器列表(GET)上,显示它们都已被删除。但是,其中一个连接器似乎没有被真正删除:它的复制槽仍处于活动状态,并且我可以在 kafka-connect 日志中找到以下日志消息。


[2021-01-06 09:56:35,038] WARN Error getting JMX attribute 'connector-unassigned-task-count' (org.apache.kafka.common.metrics.JmxReporter:258)
org.apache.kafka.connect.errors.NotFoundException: No status found for task lake_test0088_expired-0
        at org.apache.kafka.connect.runtime.AbstractHerder.taskStatus(AbstractHerder.java:305)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$null$2(Worker.java:1024)
        at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:174)
        at java.util.concurrent.ConcurrentHashMap$ValueSpliterator.forEachRemaining(ConcurrentHashMap.java:3566)
        at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
        at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
org.apache.kafka.connect.errors.NotFoundException: No status found for task lake_test0088_expired-0
        at org.apache.kafka.connect.runtime.AbstractHerder.taskStatus(AbstractHerder.java:305)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$null$2(Worker.java:1024)
        at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:174)
        at java.util.concurrent.ConcurrentHashMap$ValueSpliterator.forEachRemaining(ConcurrentHashMap.java:3566)
        at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
        at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
        at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
        at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
        at java.util.stream.LongPipeline.reduce(LongPipeline.java:461)
        at java.util.stream.LongPipeline.sum(LongPipeline.java:419)
        at java.util.stream.ReferencePipeline.count(ReferencePipeline.java:593)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$taskStatusCounter$3(Worker.java:1025)
        at org.apache.kafka.connect.runtime.ConnectMetrics$MetricGroup.lambda$addValueMetric$0(ConnectMetrics.java:322)
        at org.apache.kafka.common.metrics.KafkaMetric.metricValue(KafkaMetric.java:68)
        at org.apache.kafka.common.metrics.JmxReporter$KafkaMbean.getAttribute(JmxReporter.java:246)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.common.TextFormat.write004(TextFormat.java:22)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.HTTPServer$HTTPMetricHandler.handle(HTTPServer.java:68)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:79)
        at sun.net.httpserver.AuthFilter.doFilter(AuthFilter.java:83)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:82)
        at sun.net.httpserver.ServerImpl$Exchange.run(ServerImpl.java:647)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
org.apache.kafka.connect.errors.NotFoundException: No status found for task lake_test0088_expired-0
        at org.apache.kafka.connect.runtime.AbstractHerder.taskStatus(AbstractHerder.java:305)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$null$2(Worker.java:1024)
        at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:174)
        at java.util.concurrent.ConcurrentHashMap$ValueSpliterator.forEachRemaining(ConcurrentHashMap.java:3566)
        at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
        at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
        at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
        at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
        at java.util.stream.LongPipeline.reduce(LongPipeline.java:461)
        at java.util.stream.LongPipeline.sum(LongPipeline.java:419)
        at java.util.stream.ReferencePipeline.count(ReferencePipeline.java:593)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$taskStatusCounter$3(Worker.java:1025)
        at org.apache.kafka.connect.runtime.ConnectMetrics$MetricGroup.lambda$addValueMetric$0(ConnectMetrics.java:322)
        at org.apache.kafka.common.metrics.KafkaMetric.metricValue(KafkaMetric.java:68)
        at org.apache.kafka.common.metrics.JmxReporter$KafkaMbean.getAttribute(JmxReporter.java:246)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.common.TextFormat.write004(TextFormat.java:22)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.HTTPServer$HTTPMetricHandler.handle(HTTPServer.java:68)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:79)
        at sun.net.httpserver.AuthFilter.doFilter(AuthFilter.java:83)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:82)
        at sun.net.httpserver.ServerImpl$Exchange.run(ServerImpl.java:647)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
org.apache.kafka.connect.errors.NotFoundException: No status found for task lake_test0088_expired-0
        at org.apache.kafka.connect.runtime.AbstractHerder.taskStatus(AbstractHerder.java:305)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$null$2(Worker.java:1024)
        at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:174)
        at java.util.concurrent.ConcurrentHashMap$ValueSpliterator.forEachRemaining(ConcurrentHashMap.java:3566)
        at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
        at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
        at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
        at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
        at java.util.stream.LongPipeline.reduce(LongPipeline.java:461)
        at java.util.stream.LongPipeline.sum(LongPipeline.java:419)
        at java.util.stream.ReferencePipeline.count(ReferencePipeline.java:593)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$taskStatusCounter$3(Worker.java:1025)
        at org.apache.kafka.connect.runtime.ConnectMetrics$MetricGroup.lambda$addValueMetric$0(ConnectMetrics.java:322)
        at org.apache.kafka.common.metrics.KafkaMetric.metricValue(KafkaMetric.java:68)
        at org.apache.kafka.common.metrics.JmxReporter$KafkaMbean.getAttribute(JmxReporter.java:246)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.common.TextFormat.write004(TextFormat.java:22)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.HTTPServer$HTTPMetricHandler.handle(HTTPServer.java:68)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:79)
        at sun.net.httpserver.AuthFilter.doFilter(AuthFilter.java:83)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:82)
        at sun.net.httpserver.ServerImpl$Exchange.run(ServerImpl.java:647)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
org.apache.kafka.connect.errors.NotFoundException: No status found for task lake_test0088_expired-0
        at org.apache.kafka.connect.runtime.AbstractHerder.taskStatus(AbstractHerder.java:305)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$null$2(Worker.java:1024)
        at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:174)
        at java.util.concurrent.ConcurrentHashMap$ValueSpliterator.forEachRemaining(ConcurrentHashMap.java:3566)
        at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
        at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
        at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
        at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
        at java.util.stream.LongPipeline.reduce(LongPipeline.java:461)
        at java.util.stream.LongPipeline.sum(LongPipeline.java:419)
        at java.util.stream.ReferencePipeline.count(ReferencePipeline.java:593)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$taskStatusCounter$3(Worker.java:1025)
        at org.apache.kafka.connect.runtime.ConnectMetrics$MetricGroup.lambda$addValueMetric$0(ConnectMetrics.java:322)
        at org.apache.kafka.common.metrics.KafkaMetric.metricValue(KafkaMetric.java:68)
        at org.apache.kafka.common.metrics.JmxReporter$KafkaMbean.getAttribute(JmxReporter.java:246)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.common.TextFormat.write004(TextFormat.java:22)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.HTTPServer$HTTPMetricHandler.handle(HTTPServer.java:68)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:79)
        at sun.net.httpserver.AuthFilter.doFilter(AuthFilter.java:83)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:82)
        at java.util.stream.LongPipeline.sum(LongPipeline.java:419)
        at java.util.stream.ReferencePipeline.count(ReferencePipeline.java:593)
        at org.apache.kafka.connect.runtime.Worker$ConnectorStatusMetricsGroup.lambda$taskStatusCounter$3(Worker.java:1025)
        at org.apache.kafka.connect.runtime.ConnectMetrics$MetricGroup.lambda$addValueMetric$0(ConnectMetrics.java:322)
        at org.apache.kafka.common.metrics.KafkaMetric.metricValue(KafkaMetric.java:68)
        at org.apache.kafka.common.metrics.JmxReporter$KafkaMbean.getAttribute(JmxReporter.java:246)
        at org.apache.kafka.common.metrics.JmxReporter$KafkaMbean.getAttributes(JmxReporter.java:256)
        at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getAttributes(DefaultMBeanServerInterceptor.java:709)
        at com.sun.jmx.mbeanserver.JmxMBeanServer.getAttributes(JmxMBeanServer.java:705)
        at io.prometheus.jmx.shaded.io.prometheus.jmx.JmxScraper.scrapeBean(JmxScraper.java:151)
        at io.prometheus.jmx.shaded.io.prometheus.jmx.JmxScraper.doScrape(JmxScraper.java:117)
        at io.prometheus.jmx.shaded.io.prometheus.jmx.JmxCollector.collect(JmxCollector.java:473)
        at io.prometheus.jmx.shaded.io.prometheus.client.CollectorRegistry$MetricFamilySamplesEnumeration.findNextElement(CollectorRegistry.java:190)
        at io.prometheus.jmx.shaded.io.prometheus.client.CollectorRegistry$MetricFamilySamplesEnumeration.nextElement(CollectorRegistry.java:223)
        at io.prometheus.jmx.shaded.io.prometheus.client.CollectorRegistry$MetricFamilySamplesEnumeration.nextElement(CollectorRegistry.java:144)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.common.TextFormat.write004(TextFormat.java:22)
        at io.prometheus.jmx.shaded.io.prometheus.client.exporter.HTTPServer$HTTPMetricHandler.handle(HTTPServer.java:68)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:79)
        at sun.net.httpserver.AuthFilter.doFilter(AuthFilter.java:83)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:82)
        at sun.net.httpserver.ServerImpl$Exchange$LinkHandler.handle(ServerImpl.java:675)
        at com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:79)
        at sun.net.httpserver.ServerImpl$Exchange.run(ServerImpl.java:647)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)

我在 PG 服务器上根据 pid 杀死了对应的进程(walsender),但它会自动重新创建。我认为 kafka connect 并没有真正删除该连接器的任务。我该如何解决这个问题?

0 个答案:

没有答案