sigram commented on code in PR #87:
URL: https://github.com/apache/solr-sandbox/pull/87#discussion_r1399005175
########## crossdc-producer/src/main/java/org/apache/solr/update/processor/MirroringUpdateProcessor.java: ##########
@@ -107,18 +110,24 @@ UpdateRequest createMirrorRequest() {
       doc.removeField(CommonParams.VERSION_FIELD); // strip internal doc version
       final long estimatedDocSizeInBytes = ObjectSizeEstimator.estimate(doc);
       log.info("estimated doc size is {} bytes, max size is {}", estimatedDocSizeInBytes, maxMirroringDocSizeBytes);
+      producerMirroringMetrics.getDocumentsSize().update(estimatedDocSizeInBytes);

Review Comment:
   Since it's a histogram it should be singular, ie. "documentSize".



########## crossdc-producer/src/main/java/org/apache/solr/update/processor/MirroringUpdateProcessor.java: ##########
@@ -107,18 +110,24 @@ UpdateRequest createMirrorRequest() {
       doc.removeField(CommonParams.VERSION_FIELD); // strip internal doc version
       final long estimatedDocSizeInBytes = ObjectSizeEstimator.estimate(doc);
       log.info("estimated doc size is {} bytes, max size is {}", estimatedDocSizeInBytes, maxMirroringDocSizeBytes);
+      producerMirroringMetrics.getDocumentsSize().update(estimatedDocSizeInBytes);
       final boolean tooLargeForKafka = estimatedDocSizeInBytes > maxMirroringDocSizeBytes;
       if (tooLargeForKafka && !indexUnmirrorableDocs) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Update exceeds the doc-size limit and is unmirrorable. id=" + cmd.getPrintableId() + " doc size=" + estimatedDocSizeInBytes + " maxDocSize=" + maxMirroringDocSizeBytes);
       } else if (tooLargeForKafka) {
+        producerMirroringMetrics.getTooLargeDocuments().inc();

Review Comment:
   Maybe "documentTooLarge" ?



########## crossdc-producer/src/main/java/org/apache/solr/update/processor/MirroringUpdateProcessor.java: ##########
@@ -107,18 +110,24 @@ UpdateRequest createMirrorRequest() {
       doc.removeField(CommonParams.VERSION_FIELD); // strip internal doc version
       final long estimatedDocSizeInBytes = ObjectSizeEstimator.estimate(doc);
       log.info("estimated doc size is {} bytes, max size is {}", estimatedDocSizeInBytes, maxMirroringDocSizeBytes);
+      producerMirroringMetrics.getDocumentsSize().update(estimatedDocSizeInBytes);
       final boolean tooLargeForKafka = estimatedDocSizeInBytes > maxMirroringDocSizeBytes;
       if (tooLargeForKafka && !indexUnmirrorableDocs) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Update exceeds the doc-size limit and is unmirrorable. id=" + cmd.getPrintableId() + " doc size=" + estimatedDocSizeInBytes + " maxDocSize=" + maxMirroringDocSizeBytes);
       } else if (tooLargeForKafka) {
+        producerMirroringMetrics.getTooLargeDocuments().inc();
         log.warn(
             "Skipping mirroring of doc {} as it exceeds the doc-size limit ({} bytes) and is unmirrorable. doc size={}",
             cmd.getPrintableId(), maxMirroringDocSizeBytes, estimatedDocSizeInBytes);
       }
-      super.processAdd(cmd); // let this throw to prevent mirroring invalid reqs
+      try {
+        super.processAdd(cmd); // let this throw to prevent mirroring invalid reqs
+      } catch (IOException exception) {
+        producerMirroringMetrics.getSavedDocuments().inc();

Review Comment:
   This should re-throw, that's the point of calling it first so that we avoid sending documents that will likely cause the same error after mirroring. Also, "savedDocuments" seems to be wrong, they are likely not "saved" locally because we got an exception. Maybe "errors.local" ?
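To illustrate the last point, a minimal sketch of what the catch block could look like with the re-throw. The `getLocalErrors()` accessor is a hypothetical name derived from the "errors.local" suggestion above, not code from the PR:

```java
// Sketch only: the counter name/accessor is an assumption based on the review suggestion.
try {
  super.processAdd(cmd); // let this throw to prevent mirroring invalid reqs
} catch (IOException exception) {
  producerMirroringMetrics.getLocalErrors().inc(); // count the local indexing failure...
  throw exception;                                 // ...and propagate so the doc is not mirrored
}
```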
########## crossdc-producer/src/main/java/org/apache/solr/update/processor/KafkaRequestMirroringHandler.java: ##########
@@ -51,6 +53,7 @@ public void mirror(UpdateRequest request) throws MirroringException {
         final MirroredSolrRequest mirroredRequest = new MirroredSolrRequest(MirroredSolrRequest.Type.UPDATE, 1, request, TimeUnit.MILLISECONDS.toNanos(System.currentTimeMillis()));
         try {
             sink.submit(mirroredRequest);
+            producerMirroringMetrics.getMessages().inc();

Review Comment:
   "Messages" sounds cryptic... maybe "submitted" ?



########## crossdc-producer/src/main/java/org/apache/solr/update/processor/ProducerMirroringMetrics.java: ##########
@@ -0,0 +1,43 @@
+package org.apache.solr.update.processor;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Histogram;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.metrics.SolrMetricsContext;
+
+public class ProducerMirroringMetrics {
+
+    private final Counter savedDocuments;
+    private final Counter messages;
+    private final Counter tooLargeDocuments;
+    private final Counter mirrorFailures;
+    private final Histogram documentsSize;
+
+    public ProducerMirroringMetrics(SolrMetricsContext solrMetricsContext, SolrCore solrCore) {

Review Comment:
   Please add the class-level and property-level javadocs. Some of the metric names are unclear or redundant (eg. "errors.mirroredFailures").
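Along those lines, a rough sketch of how the metrics holder could read with class- and field-level javadocs and the clearer names suggested above. The wiring uses a plain Codahale MetricRegistry purely for illustration (the PR registers through SolrMetricsContext), and every name here, accessors and metric paths alike, is an assumption rather than something settled in the PR:

```java
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;

/**
 * Producer-side mirroring metrics: counts of submitted requests, documents
 * skipped as too large, local indexing errors, and mirroring failures, plus
 * a histogram of estimated document sizes.
 */
public class ProducerMirroringMetrics {

  /** Documents that failed local indexing before mirroring was attempted. */
  private final Counter localErrors;

  /** Requests successfully submitted to the mirroring sink. */
  private final Counter submitted;

  /** Documents skipped because they exceed the configured size limit. */
  private final Counter documentTooLarge;

  /** Requests that could not be mirrored. */
  private final Counter mirroringErrors;

  /** Estimated size, in bytes, of each document considered for mirroring. */
  private final Histogram documentSize;

  public ProducerMirroringMetrics(MetricRegistry registry) {
    // Metric paths are placeholders; the real class would register via SolrMetricsContext.
    this.localErrors = registry.counter("crossdc.producer.errors.local");
    this.submitted = registry.counter("crossdc.producer.submitted");
    this.documentTooLarge = registry.counter("crossdc.producer.documentTooLarge");
    this.mirroringErrors = registry.counter("crossdc.producer.errors.mirroring");
    this.documentSize = registry.histogram("crossdc.producer.documentSize");
  }

  public Counter getLocalErrors() { return localErrors; }
  public Counter getSubmitted() { return submitted; }
  public Counter getDocumentTooLarge() { return documentTooLarge; }
  public Counter getMirroringErrors() { return mirroringErrors; }
  public Histogram getDocumentSize() { return documentSize; }
}
```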