dajac commented on a change in pull request #11456:
URL: https://github.com/apache/kafka/pull/11456#discussion_r786181034



##########
File path: core/src/main/scala/kafka/tools/ConsoleProducer.scala
##########
@@ -264,39 +277,89 @@ object ConsoleProducer {
     var reader: BufferedReader = null
     var parseKey = false
     var keySeparator = "\t"
+    var parseHeader = false
+    var headersDelimiter = "\t"
+    var headersSeparator = ","
+    var headerKeySeparator = ":"
     var ignoreError = false
     var lineNumber = 0
     var printPrompt = System.console != null
+    var headerSeparatorPattern: Pattern = _
 
     override def init(inputStream: InputStream, props: Properties): Unit = {
       topic = props.getProperty("topic")
       if (props.containsKey("parse.key"))
         parseKey = props.getProperty("parse.key").trim.equalsIgnoreCase("true")
       if (props.containsKey("key.separator"))
         keySeparator = props.getProperty("key.separator")
+      if (props.containsKey("parse.headers"))
+        parseHeader = props.getProperty("parse.headers").trim.equalsIgnoreCase("true")
+      if (props.containsKey("headers.delimiter"))
+        headersDelimiter = props.getProperty("headers.delimiter")
+      if (props.containsKey("headers.separator"))
+        headersSeparator = props.getProperty("headers.separator")
+        headerSeparatorPattern = Pattern.compile(headersSeparator)
+      if (props.containsKey("headers.key.separator"))
+        headerKeySeparator = props.getProperty("headers.key.separator")
       if (props.containsKey("ignore.error"))
         ignoreError = props.getProperty("ignore.error").trim.equalsIgnoreCase("true")
+      if (headersDelimiter.equals(headersSeparator))
+        throw new KafkaException("headers.delimiter and headers.separator may not be equal")
+      if (headersDelimiter.equals(headerKeySeparator))
+        throw new KafkaException("headers.delimiter and headers.key.separator may not be equal")
+      if (headersSeparator.equals(headerKeySeparator))
+        throw new KafkaException("headers.separator and headers.key.separator may not be equal")
       reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))
     }
 
-    override def readMessage() = {
+    override def readMessage(): ProducerRecord[Array[Byte], Array[Byte]] = {
       lineNumber += 1
-      if (printPrompt)
-        print(">")
-      (reader.readLine(), parseKey) match {
-        case (null, _) => null
-        case (line, true) =>
-          line.indexOf(keySeparator) match {
-            case -1 =>
-              if (ignoreError) new ProducerRecord(topic, line.getBytes(StandardCharsets.UTF_8))
-              else throw new KafkaException(s"No key found on line $lineNumber: $line")
-            case n =>
-              val value = (if (n + keySeparator.size > line.size) "" else line.substring(n + keySeparator.size)).getBytes(StandardCharsets.UTF_8)
-              new ProducerRecord(topic, line.substring(0, n).getBytes(StandardCharsets.UTF_8), value)
+      if (printPrompt) print(">")
+      val line = reader.readLine()
+      line match {
+        case null => null
+        case line =>
+          val headers = parse(parseHeader, line, headersDelimiter, "headers delimiter")
+          val key = parse(parseKey, line.substring(offset(headers)), keySeparator, "key separator")
+          val value = line.substring(offset(headers) + offset(key))
+
+          val record = new ProducerRecord[Array[Byte], Array[Byte]](
+            topic,
+            if (key != null) key.getBytes(StandardCharsets.UTF_8) else null,
+            if (value != null) value.getBytes(StandardCharsets.UTF_8) else null,
+          )
+
+          if (headers != null) {
+            splitHeaders(headers)
+              .foreach(header => record.headers().add(header._1, header._2))
           }
-        case (line, false) =>
-          new ProducerRecord(topic, line.getBytes(StandardCharsets.UTF_8))
+
+          record
       }
     }
+
+    private def parse(enabled: Boolean, toParse: String, demarcation: String, demarcationName: String): String = {
+      (enabled, toParse.indexOf(demarcation)) match {
+        case (false, _) => null
+        case (_, -1) =>
+          if (ignoreError) null
+          else throw new KafkaException(s"No $demarcationName found in '$toParse' on line number $lineNumber")
+        case (_, index) => toParse.substring(0, index)
+      }
+    }
+
+    private def splitHeaders(headers: String): Array[(String, Array[Byte])] = {
+      headerSeparatorPattern.split(headers).map { pair =>
+        (pair.indexOf(headerKeySeparator), ignoreError) match {
+          case (-1, false) => throw new KafkaException(s"No header key separator found in pair '$pair' on line number $lineNumber")
+          case (-1, true) => (pair, null)
+          case (i, _) => (pair.substring(0, i), pair.substring(i + 1).getBytes(StandardCharsets.UTF_8))
+        }
+      }
+    }
+
+    private def offset(segment: String) = {
+      if (segment == null) 0 else segment.length + 1
+    }
   }
-}
+}

Review comment:
       nit: Could we add a newline at the end of the file, as there was before?

##########
File path: core/src/main/scala/kafka/tools/ConsoleProducer.scala
##########
@@ -264,39 +277,89 @@ object ConsoleProducer {
     var reader: BufferedReader = null
     var parseKey = false
     var keySeparator = "\t"
+    var parseHeader = false
+    var headersDelimiter = "\t"
+    var headersSeparator = ","
+    var headerKeySeparator = ":"
     var ignoreError = false
     var lineNumber = 0
     var printPrompt = System.console != null
+    var headerSeparatorPattern: Pattern = _
 
     override def init(inputStream: InputStream, props: Properties): Unit = {
       topic = props.getProperty("topic")
       if (props.containsKey("parse.key"))
         parseKey = props.getProperty("parse.key").trim.equalsIgnoreCase("true")
       if (props.containsKey("key.separator"))
         keySeparator = props.getProperty("key.separator")
+      if (props.containsKey("parse.headers"))
+        parseHeader = props.getProperty("parse.headers").trim.equalsIgnoreCase("true")
+      if (props.containsKey("headers.delimiter"))
+        headersDelimiter = props.getProperty("headers.delimiter")
+      if (props.containsKey("headers.separator"))
+        headersSeparator = props.getProperty("headers.separator")
+        headerSeparatorPattern = Pattern.compile(headersSeparator)
+      if (props.containsKey("headers.key.separator"))
+        headerKeySeparator = props.getProperty("headers.key.separator")
       if (props.containsKey("ignore.error"))
         ignoreError = props.getProperty("ignore.error").trim.equalsIgnoreCase("true")
+      if (headersDelimiter.equals(headersSeparator))

Review comment:
       nit: In Scala, we can use `==` to compare strings. It is a bit more 
readable than using `equals`.
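   
   For illustration, a minimal sketch of how the comparison above could read with that style (same fields and exception as in the PR):
   ```scala
   // Scala's == on String delegates to equals and is null-safe, so the guard reads a bit more naturally.
   if (headersDelimiter == headersSeparator)
     throw new KafkaException("headers.delimiter and headers.separator may not be equal")
   ```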

##########
File path: core/src/test/scala/unit/kafka/tools/LineMessageReaderTest.scala
##########
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License") you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package unit.kafka.tools

Review comment:
       nit: I think we usually don't use the `unit` prefix in the package name. Could you check the other classes in the same package?
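   
   Assuming the other test classes under that directory follow the usual convention, the declaration would presumably become:
   ```scala
   package kafka.tools
   ```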

##########
File path: core/src/main/scala/kafka/tools/ConsoleProducer.scala
##########
@@ -264,39 +277,89 @@ object ConsoleProducer {
     var reader: BufferedReader = null
     var parseKey = false
     var keySeparator = "\t"
+    var parseHeader = false
+    var headersDelimiter = "\t"
+    var headersSeparator = ","
+    var headerKeySeparator = ":"
     var ignoreError = false
     var lineNumber = 0
     var printPrompt = System.console != null
+    var headerSeparatorPattern: Pattern = _
 
     override def init(inputStream: InputStream, props: Properties): Unit = {
       topic = props.getProperty("topic")
       if (props.containsKey("parse.key"))
         parseKey = props.getProperty("parse.key").trim.equalsIgnoreCase("true")
       if (props.containsKey("key.separator"))
         keySeparator = props.getProperty("key.separator")
+      if (props.containsKey("parse.headers"))
+        parseHeader = props.getProperty("parse.headers").trim.equalsIgnoreCase("true")
+      if (props.containsKey("headers.delimiter"))
+        headersDelimiter = props.getProperty("headers.delimiter")
+      if (props.containsKey("headers.separator"))
+        headersSeparator = props.getProperty("headers.separator")
+        headerSeparatorPattern = Pattern.compile(headersSeparator)
+      if (props.containsKey("headers.key.separator"))
+        headerKeySeparator = props.getProperty("headers.key.separator")
       if (props.containsKey("ignore.error"))
         ignoreError = props.getProperty("ignore.error").trim.equalsIgnoreCase("true")
+      if (headersDelimiter.equals(headersSeparator))
+        throw new KafkaException("headers.delimiter and headers.separator may not be equal")
+      if (headersDelimiter.equals(headerKeySeparator))
+        throw new KafkaException("headers.delimiter and headers.key.separator may not be equal")
+      if (headersSeparator.equals(headerKeySeparator))
+        throw new KafkaException("headers.separator and headers.key.separator may not be equal")
       reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))
     }
 
-    override def readMessage() = {
+    override def readMessage(): ProducerRecord[Array[Byte], Array[Byte]] = {
       lineNumber += 1
-      if (printPrompt)
-        print(">")
-      (reader.readLine(), parseKey) match {
-        case (null, _) => null
-        case (line, true) =>
-          line.indexOf(keySeparator) match {
-            case -1 =>
-              if (ignoreError) new ProducerRecord(topic, line.getBytes(StandardCharsets.UTF_8))
-              else throw new KafkaException(s"No key found on line $lineNumber: $line")
-            case n =>
-              val value = (if (n + keySeparator.size > line.size) "" else line.substring(n + keySeparator.size)).getBytes(StandardCharsets.UTF_8)
-              new ProducerRecord(topic, line.substring(0, n).getBytes(StandardCharsets.UTF_8), value)
+      if (printPrompt) print(">")
+      val line = reader.readLine()
+      line match {
+        case null => null
+        case line =>
+          val headers = parse(parseHeader, line, headersDelimiter, "headers delimiter")
+          val key = parse(parseKey, line.substring(offset(headers)), keySeparator, "key separator")
+          val value = line.substring(offset(headers) + offset(key))
+
+          val record = new ProducerRecord[Array[Byte], Array[Byte]](
+            topic,
+            if (key != null) key.getBytes(StandardCharsets.UTF_8) else null,
+            if (value != null) value.getBytes(StandardCharsets.UTF_8) else null,
+          )
+
+          if (headers != null) {
+            splitHeaders(headers)
+              .foreach(header => record.headers().add(header._1, header._2))

Review comment:
       nit: `()` could be omitted here as well.
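   
   For example (a sketch; `record.headers` is the Java accessor on `ProducerRecord`, which Scala lets us call without the empty parens):
   ```scala
   splitHeaders(headers).foreach(header => record.headers.add(header._1, header._2))
   ```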

##########
File path: core/src/main/scala/kafka/tools/ConsoleProducer.scala
##########
@@ -264,39 +278,92 @@ object ConsoleProducer {
     var reader: BufferedReader = null
     var parseKey = false
     var keySeparator = "\t"
+    var parseHeader = false
+    var headersDelimiter = "\t"
+    var headersSeparator = ","
+    var headerKeySeparator = ":"
     var ignoreError = false
     var lineNumber = 0
     var printPrompt = System.console != null
+    var headerSeparatorPattern: Pattern = _
 
     override def init(inputStream: InputStream, props: Properties): Unit = {
       topic = props.getProperty("topic")
       if (props.containsKey("parse.key"))
         parseKey = props.getProperty("parse.key").trim.equalsIgnoreCase("true")
       if (props.containsKey("key.separator"))
         keySeparator = props.getProperty("key.separator")
+      if (props.containsKey("parse.headers"))
+        parseHeader = props.getProperty("parse.headers").trim.equalsIgnoreCase("true")
+      if (props.containsKey("headers.delimiter"))
+        headersDelimiter = props.getProperty("headers.delimiter")
+      if (props.containsKey("headers.separator"))
+        headersSeparator = props.getProperty("headers.separator")
+        headerSeparatorPattern = Pattern.compile(headersSeparator)
+      if (props.containsKey("headers.key.separator"))
+        headerKeySeparator = props.getProperty("headers.key.separator")
       if (props.containsKey("ignore.error"))
         ignoreError = props.getProperty("ignore.error").trim.equalsIgnoreCase("true")
+      if (headersDelimiter.equals(headersSeparator))
+        throw new KafkaException("headers.delimiter and headers.separator may not be equal")
+      if (headersDelimiter.equals(headerKeySeparator))
+        throw new KafkaException("headers.delimiter and headers.key.separator may not be equal")
+      if (headersSeparator.equals(headerKeySeparator))
+        throw new KafkaException("headers.separator and headers.key.separator may not be equal")
       reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))
     }
 
-    override def readMessage() = {
+    override def readMessage(): ProducerRecord[Array[Byte], Array[Byte]] = {
       lineNumber += 1
-      if (printPrompt)
-        print(">")
-      (reader.readLine(), parseKey) match {
-        case (null, _) => null
-        case (line, true) =>
-          line.indexOf(keySeparator) match {
-            case -1 =>
-              if (ignoreError) new ProducerRecord(topic, line.getBytes(StandardCharsets.UTF_8))
-              else throw new KafkaException(s"No key found on line $lineNumber: $line")
-            case n =>
-              val value = (if (n + keySeparator.size > line.size) "" else line.substring(n + keySeparator.size)).getBytes(StandardCharsets.UTF_8)
-              new ProducerRecord(topic, line.substring(0, n).getBytes(StandardCharsets.UTF_8), value)
+      if (printPrompt) print(">")
+      val line = reader.readLine()
+      line match {
+        case null => null
+        case line =>
+
+          val headers = parse(parseHeader, line, headersDelimiter, "headers delimiter")
+          val key = parse(parseKey, line.substring(offset(headers)), keySeparator, "key separator")

Review comment:
       Yeah, this is exactly what I had in mind. You're right. We would need to pass both the `line` and the start index to `parse`. For the headers' call, it would always be `0`. For the key's call, it would be `offset(headers)`.
   
   This is basically a minor optimization: it avoids unnecessarily allocating a `String` with `line.substring(offset(headers))`, since we could work on `line` the whole time. I don't feel strongly about this, so I leave it up to you. A rough sketch of this variant is below.
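   
   For reference, a rough sketch of that index-based variant, meant to slot into `LineMessageReader` (the signature and names are illustrative; it reuses the existing `ignoreError` and `lineNumber` fields):
   ```scala
   private def parse(enabled: Boolean, line: String, startIndex: Int, demarcation: String, demarcationName: String): String = {
     if (!enabled) null
     else line.indexOf(demarcation, startIndex) match {
       case -1 =>
         if (ignoreError) null
         else throw new KafkaException(s"No $demarcationName found in '${line.substring(startIndex)}' on line number $lineNumber")
       case index => line.substring(startIndex, index)
     }
   }
   
   // Call sites: the headers' call always starts at 0, the key's call at offset(headers).
   val headers = parse(parseHeader, line, 0, headersDelimiter, "headers delimiter")
   val key = parse(parseKey, line, offset(headers), keySeparator, "key separator")
   ```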

##########
File path: core/src/main/scala/kafka/tools/ConsoleProducer.scala
##########
@@ -264,39 +277,89 @@ object ConsoleProducer {
     var reader: BufferedReader = null
     var parseKey = false
     var keySeparator = "\t"
+    var parseHeader = false
+    var headersDelimiter = "\t"
+    var headersSeparator = ","
+    var headerKeySeparator = ":"
     var ignoreError = false
     var lineNumber = 0
     var printPrompt = System.console != null
+    var headerSeparatorPattern: Pattern = _
 
     override def init(inputStream: InputStream, props: Properties): Unit = {
       topic = props.getProperty("topic")
       if (props.containsKey("parse.key"))
         parseKey = props.getProperty("parse.key").trim.equalsIgnoreCase("true")
       if (props.containsKey("key.separator"))
         keySeparator = props.getProperty("key.separator")
+      if (props.containsKey("parse.headers"))
+        parseHeader = props.getProperty("parse.headers").trim.equalsIgnoreCase("true")
+      if (props.containsKey("headers.delimiter"))
+        headersDelimiter = props.getProperty("headers.delimiter")
+      if (props.containsKey("headers.separator"))
+        headersSeparator = props.getProperty("headers.separator")
+        headerSeparatorPattern = Pattern.compile(headersSeparator)
+      if (props.containsKey("headers.key.separator"))
+        headerKeySeparator = props.getProperty("headers.key.separator")
       if (props.containsKey("ignore.error"))
         ignoreError = props.getProperty("ignore.error").trim.equalsIgnoreCase("true")
+      if (headersDelimiter.equals(headersSeparator))
+        throw new KafkaException("headers.delimiter and headers.separator may not be equal")
+      if (headersDelimiter.equals(headerKeySeparator))
+        throw new KafkaException("headers.delimiter and headers.key.separator may not be equal")
+      if (headersSeparator.equals(headerKeySeparator))
+        throw new KafkaException("headers.separator and headers.key.separator may not be equal")
       reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))
     }
 
-    override def readMessage() = {
+    override def readMessage(): ProducerRecord[Array[Byte], Array[Byte]] = {
       lineNumber += 1
-      if (printPrompt)
-        print(">")
-      (reader.readLine(), parseKey) match {
-        case (null, _) => null
-        case (line, true) =>
-          line.indexOf(keySeparator) match {
-            case -1 =>
-              if (ignoreError) new ProducerRecord(topic, line.getBytes(StandardCharsets.UTF_8))
-              else throw new KafkaException(s"No key found on line $lineNumber: $line")
-            case n =>
-              val value = (if (n + keySeparator.size > line.size) "" else line.substring(n + keySeparator.size)).getBytes(StandardCharsets.UTF_8)

Review comment:
       * It seems that we supported a key separator of arbitrary size before, whereas now we always assume a size of 1 (see the sketch below).
   * I am curious about this check: `if (n + keySeparator.size > line.size) ""`. We don't have it anymore with your changes. Do we still need it?

##########
File path: core/src/test/scala/unit/kafka/tools/LineMessageReaderTest.scala
##########
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License") you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package unit.kafka.tools
+
+import kafka.tools.ConsoleProducer.LineMessageReader
+import org.apache.kafka.clients.producer.ProducerRecord
+import org.apache.kafka.common.KafkaException
+import org.junit.jupiter.api.Assertions.{assertEquals, assertThrows}
+import org.junit.jupiter.api.Test
+
+import java.io.ByteArrayInputStream
+import java.util.Properties
+
+class LineMessageReaderTest {
+
+  private def defaultTestProps = {
+    val props = new Properties
+    props.put("topic", "topic")
+    props.put("parse.key", "true")
+    props.put("parse.headers", "true")
+    props
+  }
+
+  @Test
+  def testLineReader(): Unit = {
+    val input = "key0\tvalue0\nkey1\tvalue1"
+
+    val props = defaultTestProps
+    props.put("parse.headers", "false")
+
+    runTest(props, input, record("key0", "value0"), record("key1", "value1"))
+  }
+
+  @Test
+  def testLineReaderHeader(): Unit = {
+    val input = "headerKey0:headerValue0,headerKey1:headerValue1\tkey0\tvalue0\n"
+    val expected = record("key0", "value0", List("headerKey0" -> "headerValue0", "headerKey1" -> "headerValue1"))
+    runTest(defaultTestProps, input, expected)
+  }
+
+  @Test
+  def minimalValidInputWithHeaderKeyAndValue(): Unit = {
+    runTest(defaultTestProps, ":\t\t", record("", "", List("" -> "")))
+  }
+
+  @Test
+  def testLineReaderHeaderNoKey(): Unit = {
+    val input = "headerKey:headerValue\tvalue\n"
+
+    val props = defaultTestProps
+    props.put("parse.key", "false")
+
+    runTest(props, input, record(null, "value", List("headerKey" -> "headerValue")))
+  }
+
+  @Test
+  def testLineReaderOnlyValue(): Unit = {
+    val props = defaultTestProps
+    props.put("parse.key", "false")
+    props.put("parse.headers", "false")
+
+    runTest(props, "value\n", record(null, "value"))
+  }
+
+  @Test
+  def testParseHeaderEnabledWithCustomDelimiterAndVaryingNumberOfKeyValueHeaderPairs(): Unit = {
+    val props = defaultTestProps
+    props.put("key.separator", "#")
+    props.put("parse.headers", "true")
+    props.put("headers.delimiter", "!")
+    props.put("headers.separator", "&")
+    props.put("headers.key.separator", ":")
+
+    val input =
+      "headerKey0.0:headerValue0.0&headerKey0.1:headerValue0.1!key0#value0\n" +
+      "headerKey1.0:headerValue1.0!key1#value1"
+
+    val record0 = record("key0", "value0", List("headerKey0.0" -> "headerValue0.0", "headerKey0.1" -> "headerValue0.1"))
+    val record1 = record("key1", "value1", List("headerKey1.0" -> "headerValue1.0"))
+
+    runTest(props, input, record0, record1)
+  }
+
+  @Test
+  def testMissingKeySeparator(): Unit = {
+    val lineReader = new LineMessageReader
+    val input =
+      "headerKey0.0:headerValue0.0,headerKey0.1:headerValue0.1\tkey0\tvalue0\n" +
+      "headerKey1.0:headerValue1.0\tkey1[MISSING-DELIMITER]value1"
+
+    lineReader.init(new ByteArrayInputStream(input.getBytes), defaultTestProps)
+    lineReader.readMessage()
+
+    val expectedException = assertThrows(classOf[KafkaException], () => lineReader.readMessage())
+
+    assertEquals(
+      "No key separator found in 'key1[MISSING-DELIMITER]value1' on line number 2",
+      expectedException.getMessage
+    )
+  }
+
+  @Test
+  def testMissingHeaderKeySeparator(): Unit = {
+    val lineReader = new LineMessageReader()
+    val input = "key[MISSING-DELIMITER]val\tkey0\tvalue0\n"
+    lineReader.init(new ByteArrayInputStream(input.getBytes), defaultTestProps)
+
+    val expectedException = assertThrows(classOf[KafkaException], () => lineReader.readMessage())
+
+    assertEquals(
+      "No header key separator found in pair 'key[MISSING-DELIMITER]val' on line number 1",
+      expectedException.getMessage
+    )
+  }
+
+  @Test
+  def testHeaderDemarcationCollision(): Unit = {
+    val props = defaultTestProps
+    props.put("headers.delimiter", "\t")
+    props.put("headers.separator", "\t")
+    props.put("headers.key.separator", "\t")
+
+    assertThrowsOnInvalidPatternConfig(props, "headers.delimiter and headers.separator may not be equal")
+
+    props.put("headers.separator", ",")
+    assertThrowsOnInvalidPatternConfig(props, "headers.delimiter and headers.key.separator may not be equal")
+
+    props.put("headers.key.separator", ",")
+    assertThrowsOnInvalidPatternConfig(props, "headers.separator and headers.key.separator may not be equal")
+  }
+
+  private def assertThrowsOnInvalidPatternConfig(props: Properties, expectedMessage: String): Unit = {
+    val exception = assertThrows(classOf[KafkaException], () => new LineMessageReader().init(null, props))
+    assertEquals(
+      expectedMessage,
+      exception.getMessage
+    )
+  }
+
+  @Test
+  def testIgnoreErrorInInput(): Unit = {
+    val input =
+      "headerKey0.0:headerValue0.0\tkey0\tvalue0\n" +
+      "headerKey1.0:headerValue1.0,headerKey1.1:headerValue1.1[MISSING-HEADER-DELIMITER]key1\tvalue1\n" +
+      "headerKey2.0:headerValue2.0\tkey2[MISSING-KEY-DELIMITER]value2\n" +
+      "headerKey3.0:headerValue3.0[MISSING-HEADER-DELIMITER]key3[MISSING-KEY-DELIMITER]value3\n"
+
+    val props = defaultTestProps
+    props.put("ignore.error", "true")
+
+    val validRecord = record("key0", "value0", List("headerKey0.0" -> "headerValue0.0"))
+
+    val missingHeaderDelimiter: ProducerRecord[String, String] =
+      record(
+        null,
+        "value1",
+        List("headerKey1.0" -> "headerValue1.0", "headerKey1.1" -> "headerValue1.1[MISSING-HEADER-DELIMITER]key1")
+      )
+
+    val missingKeyDelimiter: ProducerRecord[String, String] =
+      record(
+        null,
+        "key2[MISSING-KEY-DELIMITER]value2",
+        List("headerKey2.0" -> "headerValue2.0")
+      )
+
+    val missingKeyHeaderDelimiter: ProducerRecord[String, String] =
+      record(
+        null,
+        "headerKey3.0:headerValue3.0[MISSING-HEADER-DELIMITER]key3[MISSING-KEY-DELIMITER]value3",
+        List()
+      )
+
+    runTest(props, input, validRecord, missingHeaderDelimiter, missingKeyDelimiter, missingKeyHeaderDelimiter)
+  }
+
+  @Test
+  def testMalformedHeaderIgnoreError(): Unit = {
+    val input = "key-val\tkey0\tvalue0\n"
+
+    val props = defaultTestProps
+    props.put("ignore.error", "true")
+
+    val expected = record("key0", "value0", List("key-val" -> null))
+
+    runTest(props, input, expected)
+  }
+
+  def runTest(props: Properties, input: String, expectedRecords: ProducerRecord[String, String]*): Unit = {
+    val lineReader = new LineMessageReader
+    lineReader.init(new ByteArrayInputStream(input.getBytes), props)
+    expectedRecords.foreach(r => assertEquality(r, lineReader.readMessage()))
+  }
+
+  //  The equality method of ProducerRecord compares memory references for the header iterator, this is why this custom equality check is used.
+  private def assertEquality[K, V](expected: ProducerRecord[K, V], actual: ProducerRecord[Array[Byte], Array[Byte]]): Unit = {

Review comment:
       nit: Would it make sense to call this one `assertRecordEquals`? 




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: jira-unsubscr...@kafka.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

