Github user Aitozi commented on a diff in the pull request:

    https://github.com/apache/flink/pull/4935#discussion_r148738495

    --- Diff: flink-connectors/flink-connector-kafka-0.9/src/main/java/org/apache/flink/streaming/connectors/kafka/internal/KafkaConsumerThread.java ---
    @@ -245,6 +238,23 @@ public void run() {

        if (records == null) {
            try {
                records = consumer.poll(pollTimeout);
    +           // register Kafka's very own metrics in Flink's metric reporters
    +           if (useMetrics && !records.isEmpty()) {
    +               // register Kafka metrics to Flink
    +               Map<MetricName, ? extends Metric> metrics = consumer.metrics();
    +               if (metrics == null) {
    +                   // MapR's Kafka implementation returns null here.
    +                   log.info("Consumer implementation does not support metrics");
    +               } else {
    +                   // we have Kafka metrics, register them
    +                   for (Map.Entry<MetricName, ? extends Metric> metric: metrics.entrySet()) {
    --- End diff --

    I noticed the registration was being attempted several times, so it now skips the register after it has been done once, and the related metrics can then be registered successfully. Please let me know if you have any suggestions, thanks~ The commits have also been squashed :)
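    For illustration, a minimal sketch of the "register once, then skip" idea described above. The flag `metricsRegistered`, the helper class name, and the `kafkaMetricGroup` parameter are assumptions made for this example and are not the exact code in the PR; only `useMetrics`, `consumer.metrics()`, and the null check for MapR's implementation come from the diff itself.

    ```java
    import java.util.Map;

    import org.apache.flink.metrics.Gauge;
    import org.apache.flink.metrics.MetricGroup;

    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.Metric;
    import org.apache.kafka.common.MetricName;

    // Hypothetical helper showing how duplicate registration inside the poll
    // loop could be avoided; names are placeholders, not the PR's code.
    final class KafkaMetricsRegistrar {

        // becomes true after the first successful registration, so later
        // polls skip the (otherwise repeated) registration
        private boolean metricsRegistered = false;

        void maybeRegister(KafkaConsumer<?, ?> consumer, MetricGroup kafkaMetricGroup) {
            if (metricsRegistered) {
                return; // already registered once; skip
            }

            Map<MetricName, ? extends Metric> metrics = consumer.metrics();
            if (metrics == null) {
                // some implementations (e.g. MapR's Kafka) return null here
                return;
            }

            for (Map.Entry<MetricName, ? extends Metric> entry : metrics.entrySet()) {
                final Metric kafkaMetric = entry.getValue();
                // expose each Kafka metric as a Flink gauge
                kafkaMetricGroup.gauge(entry.getKey().name(), new Gauge<Double>() {
                    @Override
                    public Double getValue() {
                        return kafkaMetric.value();
                    }
                });
            }

            metricsRegistered = true;
        }
    }
    ```

    The boolean guard means the method can be called on every poll without re-registering the gauges; only the first call that actually sees a non-null metrics map does the work.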
---