Copilot commented on code in PR #3908:
URL: https://github.com/apache/solr/pull/3908#discussion_r2573770351
##########
solr/webapp/web/js/angular/controllers/cloud.js:
##########
@@ -394,118 +394,134 @@ var nodesSubController = function($scope, Collections, System, Metrics) {
     Fetch metrics for all selected nodes. Only pull the metrics that we'll show to save bandwidth
     Pick the data we want to display and add it to the node-centric data structure
   */
-    Metrics.get({
-      "nodes": nodesParam,
-      "prefix": "CONTAINER.fs,org.eclipse.jetty.server.handler.DefaultHandler.get-requests,INDEX.sizeInBytes,SEARCHER.searcher.numDocs,SEARCHER.searcher.deletedDocs,SEARCHER.searcher.warmupTime"
-    },
-    function (metricsResponse) {
-      for (var node in metricsResponse) {
-        if (node in nodes) {
-          var m = metricsResponse[node];
-          nodes[node]['metrics'] = m;
-          var diskTotal = m.metrics['solr.node']['CONTAINER.fs.totalSpace'];
-          var diskFree = m.metrics['solr.node']['CONTAINER.fs.usableSpace'];
-          var diskPercentage = Math.floor((diskTotal - diskFree) / diskTotal * 100);
-          nodes[node]['diskUsedPct'] = diskPercentage;
-          nodes[node]['diskUsedPctStyle'] = styleForPct(diskPercentage);
-          nodes[node]['diskTotal'] = bytesToSize(diskTotal);
-          nodes[node]['diskFree'] = bytesToSize(diskFree);
-
-          var r = m.metrics['solr.jetty']['org.eclipse.jetty.server.handler.DefaultHandler.get-requests'];
-          nodes[node]['req'] = r.count;
-          nodes[node]['req1minRate'] = Math.floor(r['1minRate'] * 100) / 100;
-          nodes[node]['req5minRate'] = Math.floor(r['5minRate'] * 100) / 100;
-          nodes[node]['req15minRate'] = Math.floor(r['15minRate'] * 100) / 100;
-          nodes[node]['reqp75_ms'] = Math.floor(r['p75_ms']);
-          nodes[node]['reqp95_ms'] = Math.floor(r['p95_ms']);
-          nodes[node]['reqp99_ms'] = Math.floor(r['p99_ms']);
-
-          // These are the cores we _expect_ to find on this node according to the CLUSTERSTATUS
-          var cores = nodes[node]['cores'];
-          var indexSizeTotal = 0;
-          var indexSizeMax = 0;
-          var docsTotal = 0;
-          var graphData = [];
-          for (let coreId in cores) {
-            var core = cores[coreId];
-            if (core['shard_state'] !== 'active' || core['state'] !== 'active') {
-              // If core state is not active, display the real state, or if shard is inactive, display that
-              var labelState = (core['state'] !== 'active') ? core['state'] : core['shard_state'];
-              core['label'] += "_(" + labelState + ")";
-            }
-            var coreMetricName = "solr.core." + core['collection'] + "." + core['shard'] + "." + core['replica'];
-            var coreMetric = m.metrics[coreMetricName];
-            // we may not actually get metrics back for every expected core (the core may be down)
-            if (coreMetric) {
-              var size = coreMetric['INDEX.sizeInBytes'];
-              size = (typeof size !== 'undefined') ? size : 0;
-              core['sizeInBytes'] = size;
-              core['size'] = bytesToSize(size);
-              indexSizeTotal = indexSizeTotal + size;
-              indexSizeMax = size > indexSizeMax ? size : indexSizeMax;
-              var numDocs = coreMetric['SEARCHER.searcher.numDocs'];
-              numDocs = (typeof numDocs !== 'undefined') ? numDocs : 0;
-              core['numDocs'] = numDocs;
-              core['numDocsHuman'] = numDocsHuman(numDocs);
-              core['avgSizePerDoc'] = bytesToSize(numDocs === 0 ? 0 : size / numDocs);
-              var deletedDocs = coreMetric['SEARCHER.searcher.deletedDocs'];
-              deletedDocs = (typeof deletedDocs !== 'undefined') ? deletedDocs : 0;
-              core['deletedDocs'] = deletedDocs;
-              core['deletedDocsHuman'] = numDocsHuman(deletedDocs);
-              var warmupTime = coreMetric['SEARCHER.searcher.warmupTime'];
-              warmupTime = (typeof warmupTime !== 'undefined') ? warmupTime : 0;
-              core['warmupTime'] = warmupTime;
-              docsTotal += core['numDocs'];
-            }
-          }
-          for (let coreId in cores) {
-            var core = cores[coreId];
-            var graphObj = {};
-            graphObj['label'] = core['label'];
-            graphObj['size'] = core['sizeInBytes'];
-            graphObj['sizeHuman'] = core['size'];
-            graphObj['pct'] = (core['sizeInBytes'] / indexSizeMax) * 100;
-            graphData.push(graphObj);
-          }
-          if (cores) {
-            cores.sort(function (a, b) {
-              return b.sizeInBytes - a.sizeInBytes
-            });
-          }
-          graphData.sort(function (a, b) {
-            return b.size - a.size
-          });
-          nodes[node]['graphData'] = graphData;
-          nodes[node]['numDocs'] = numDocsHuman(docsTotal);
-          nodes[node]['sizeInBytes'] = indexSizeTotal;
-          nodes[node]['size'] = bytesToSize(indexSizeTotal);
-          nodes[node]['sizePerDoc'] = docsTotal === 0 ? '0b' : bytesToSize(indexSizeTotal / docsTotal);
-
-          // Build the d3 powered bar chart
-          $('#chart' + nodes[node]['id']).empty();
-          var chart = d3.select('#chart' + nodes[node]['id']).append('div').attr('class', 'chart');
-
-          // Add one div per bar which will group together both labels and bars
-          var g = chart.selectAll('div')
-            .data(nodes[node]['graphData']).enter()
-            .append('div');
-
-          // Add the bars
-          var bars = g.append("div")
-            .attr("class", "rect")
-            .text(function (d) {
-              return d.label + ':\u00A0\u00A0' + d.sizeHuman;
-            });
-
-          // Execute the transition to show the bars
-          bars.transition()
-            .ease('elastic')
-            .style('width', function (d) {
-              return d.pct + '%';
-            });
+    Metrics.get(
+      {
+        nodes: nodesParam,
+        name: "solr_disk_space_megabytes,solr_core_index_size_megabytes,solr_core_indexsearcher_index_num_docs,solr_core_indexsearcher_index_docs,solr_core_indexsearcher_open_time_milliseconds"
+      },
+      function (response) {
+        // response.metrics contains the parsed merged Prometheus data with node labels
+        var parsedMetrics = response.metrics;
Review Comment:
Missing null/undefined check for `response.metrics`. If the response object
doesn't contain a `metrics` property, the code will proceed with `undefined`,
which could cause issues downstream when iterating through nodes or accessing
metrics.
Consider adding a check:
```javascript
var parsedMetrics = response.metrics;
if (!parsedMetrics) {
  console.error('No metrics in response');
  return;
}
```
```suggestion
        var parsedMetrics = response.metrics;
        if (!parsedMetrics) {
          console.error('No metrics in response');
          return;
        }
```
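For illustration, a minimal sketch of the guard in place (the lookup below is hypothetical; the actual downstream extraction code is not part of this hunk):

```javascript
var parsedMetrics = response.metrics;
if (!parsedMetrics) {
  console.error('No metrics in response');
  return;
}
// Safe to index into the parsed structure from here on (hypothetical lookup):
var diskMetric = parsedMetrics['solr_disk_space_megabytes'];
var diskSamples = (diskMetric && diskMetric.samples) || [];
```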
##########
solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java:
##########
@@ -129,9 +135,198 @@ public static CompletableFuture<NamedList<Object>> callRemoteNode(
URI baseUri = URI.create(zkController.zkStateReader.getBaseUrlForNodeName(nodeName));
SolrRequest<?> proxyReq = new GenericSolrRequest(SolrRequest.METHOD.GET, uriPath, params);
+ // Set response parser based on wt parameter to ensure correct format is used
+ String wt = params.get("wt");
+ if ("prometheus".equals(wt) || "openmetrics".equals(wt)) {
+ proxyReq.setResponseParser(new InputStreamResponseParser(wt));
+ }
+
return zkController
.getCoreContainer()
.getDefaultHttpSolrClient()
.requestWithBaseUrl(baseUri.toString(), c -> c.requestAsync(proxyReq));
}
+
+ /**
+ * Resolve node names from the "nodes" parameter into a set of live node names.
+ *
+ * @param nodeNames the value of the "nodes" parameter ("all" or comma-separated node names)
+ * @param container the CoreContainer
+ * @return set of resolved node names
+ * @throws SolrException if node format is invalid or node is not in cluster
+ */
+ private static Set<String> resolveNodes(String nodeNames, CoreContainer container) {
+ Set<String> liveNodes =
+ container.getZkController().zkStateReader.getClusterState().getLiveNodes();
+
+ if (nodeNames.equals("all")) {
+ log.debug("All live nodes requested");
+ return liveNodes;
+ }
+
+ Set<String> nodes = new HashSet<>(Arrays.asList(nodeNames.split(",")));
+ for (String nodeName : nodes) {
+ if (!nodeName.matches("^[^/:]+:\\d+_[\\w/]+$")) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST, "Parameter " + PARAM_NODES + " has wrong format");
+ }
+ if (!liveNodes.contains(nodeName)) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "Requested node " + nodeName + " is not part of cluster");
+ }
+ }
+ log.debug("Nodes requested: {}", nodes);
+ return nodes;
+ }
+
+ /** Handle Prometheus format by fetching from nodes and merging text responses. */
+ private static void handlePrometheusFormat(
+ Set<String> nodes,
+ String pathStr,
+ SolrParams params,
+ CoreContainer container,
+ SolrQueryResponse rsp)
+ throws IOException, SolrServerException, InterruptedException {
+
+ ZkController zkController = container.getZkController();
+ Map<String, Future<NamedList<Object>>> responses = new LinkedHashMap<>();
+
+ // Ensure wt=prometheus for all requests
+ ModifiableSolrParams prometheusParams = new ModifiableSolrParams(params);
+ if (!prometheusParams.get("wt", "").equals("prometheus")) {
+ prometheusParams.set("wt", "prometheus");
+ }
+
+ // Submit all requests (already async via callRemoteNode)
+ for (String node : nodes) {
+ responses.put(node, callRemoteNode(node, pathStr, prometheusParams, zkController));
+ }
+
+ // Collect all Prometheus text responses
+ StringBuilder mergedText = new StringBuilder();
+ int successCount = 0;
+ int failureCount = 0;
+ for (Map.Entry<String, Future<NamedList<Object>>> entry : responses.entrySet()) {
+ try {
+ NamedList<Object> resp =
+ entry.getValue().get(PROMETHEUS_FETCH_TIMEOUT_SECONDS, TimeUnit.SECONDS);
+
+ // Extract text from InputStream response
+ Object streamObj = resp.get("stream");
+ if (streamObj instanceof InputStream) {
+ try (InputStream stream = (InputStream) streamObj) {
+ String prometheusText = new String(stream.readAllBytes(), StandardCharsets.UTF_8);
+ if (!prometheusText.isEmpty()) {
+ // Inject node label into each metric line
+ String labeledText = injectNodeLabelIntoText(prometheusText, entry.getKey());
+ mergedText.append(labeledText);
+ successCount++;
+ }
Review Comment:
If `readAllBytes()` throws an `IOException`, it is not caught here. Because `handlePrometheusFormat` declares `throws IOException`, a read failure from a single node would abort the entire multi-node fetch instead of just being counted as a failed node. (The try-with-resources block does close the stream even when `readAllBytes()` or `injectNodeLabelIntoText()` throws, so this is about error isolation rather than a resource leak.)
Consider catching `IOException` around the stream processing so one bad node only increments `failureCount`:
```java
try (InputStream stream = (InputStream) streamObj) {
  String prometheusText = new String(stream.readAllBytes(), StandardCharsets.UTF_8);
  if (!prometheusText.isEmpty()) {
    String labeledText = injectNodeLabelIntoText(prometheusText, entry.getKey());
    mergedText.append(labeledText);
    successCount++;
  }
} catch (IOException ioe) {
  log.warn("IOException when reading stream from node {}", entry.getKey(), ioe);
  failureCount++;
}
```
```suggestion
          }
        } catch (IOException ioe) {
          log.warn("IOException when reading stream from node {}", entry.getKey(), ioe);
          failureCount++;
```
##########
solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java:
##########
@@ -129,9 +135,198 @@ public static CompletableFuture<NamedList<Object>> callRemoteNode(
URI baseUri = URI.create(zkController.zkStateReader.getBaseUrlForNodeName(nodeName));
SolrRequest<?> proxyReq = new GenericSolrRequest(SolrRequest.METHOD.GET, uriPath, params);
+ // Set response parser based on wt parameter to ensure correct format is used
+ String wt = params.get("wt");
+ if ("prometheus".equals(wt) || "openmetrics".equals(wt)) {
+ proxyReq.setResponseParser(new InputStreamResponseParser(wt));
+ }
+
return zkController
.getCoreContainer()
.getDefaultHttpSolrClient()
.requestWithBaseUrl(baseUri.toString(), c -> c.requestAsync(proxyReq));
}
+
+ /**
+ * Resolve node names from the "nodes" parameter into a set of live node names.
+ *
+ * @param nodeNames the value of the "nodes" parameter ("all" or comma-separated node names)
+ * @param container the CoreContainer
+ * @return set of resolved node names
+ * @throws SolrException if node format is invalid or node is not in cluster
+ */
+ private static Set<String> resolveNodes(String nodeNames, CoreContainer container) {
+ Set<String> liveNodes =
+ container.getZkController().zkStateReader.getClusterState().getLiveNodes();
+
+ if (nodeNames.equals("all")) {
+ log.debug("All live nodes requested");
+ return liveNodes;
+ }
+
+ Set<String> nodes = new HashSet<>(Arrays.asList(nodeNames.split(",")));
+ for (String nodeName : nodes) {
+ if (!nodeName.matches("^[^/:]+:\\d+_[\\w/]+$")) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST, "Parameter " + PARAM_NODES + " has wrong format");
+ }
+ if (!liveNodes.contains(nodeName)) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "Requested node " + nodeName + " is not part of cluster");
+ }
+ }
+ log.debug("Nodes requested: {}", nodes);
+ return nodes;
+ }
+
+ /** Handle Prometheus format by fetching from nodes and merging text responses. */
+ private static void handlePrometheusFormat(
+ Set<String> nodes,
+ String pathStr,
+ SolrParams params,
+ CoreContainer container,
+ SolrQueryResponse rsp)
+ throws IOException, SolrServerException, InterruptedException {
+
+ ZkController zkController = container.getZkController();
+ Map<String, Future<NamedList<Object>>> responses = new LinkedHashMap<>();
+
+ // Ensure wt=prometheus for all requests
+ ModifiableSolrParams prometheusParams = new ModifiableSolrParams(params);
+ if (!prometheusParams.get("wt", "").equals("prometheus")) {
+ prometheusParams.set("wt", "prometheus");
+ }
+
+ // Submit all requests (already async via callRemoteNode)
+ for (String node : nodes) {
+ responses.put(node, callRemoteNode(node, pathStr, prometheusParams, zkController));
+ }
+
+ // Collect all Prometheus text responses
+ StringBuilder mergedText = new StringBuilder();
+ int successCount = 0;
+ int failureCount = 0;
+ for (Map.Entry<String, Future<NamedList<Object>>> entry : responses.entrySet()) {
+ try {
+ NamedList<Object> resp =
+ entry.getValue().get(PROMETHEUS_FETCH_TIMEOUT_SECONDS, TimeUnit.SECONDS);
+
+ // Extract text from InputStream response
+ Object streamObj = resp.get("stream");
+ if (streamObj instanceof InputStream) {
+ try (InputStream stream = (InputStream) streamObj) {
+ String prometheusText = new String(stream.readAllBytes(), StandardCharsets.UTF_8);
+ if (!prometheusText.isEmpty()) {
+ // Inject node label into each metric line
+ String labeledText = injectNodeLabelIntoText(prometheusText, entry.getKey());
+ mergedText.append(labeledText);
+ successCount++;
+ }
+ }
+ } else {
+ log.warn("No stream in response from node {}", entry.getKey());
+ failureCount++;
+ }
+ } catch (ExecutionException ee) {
+ log.warn("Exception when fetching Prometheus result from node {}",
entry.getKey(), ee);
+ failureCount++;
+ } catch (TimeoutException te) {
+ log.warn("Timeout when fetching Prometheus result from node {}",
entry.getKey(), te);
+ failureCount++;
+ }
+ }
+
+ // Add metadata comment to indicate success/failure counts
+ if (failureCount > 0 || successCount > 0) {
+ StringBuilder header = new StringBuilder();
+ header
+ .append("# Solr multi-node metrics aggregation: ")
+ .append(successCount)
+ .append(" of ")
+ .append(responses.size())
+ .append(" nodes succeeded");
+ if (failureCount > 0) {
+ header.append(" (").append(failureCount).append(" failed)");
+ }
+ header.append("\n");
+ mergedText.insert(0, header);
+ }
+
+ // Store the merged text in response - will be written as-is
+ rsp.add("prometheusText", mergedText.toString());
+ }
+
+ /**
+ * Escape special characters in Prometheus label values according to Prometheus specification.
+ * Escapes backslash, double quote, and newline characters.
+ */
+ private static String escapePrometheusLabelValue(String value) {
+ return value.replace("\\", "\\\\").replace("\"", "\\\"").replace("\n", "\\n");
+ }
+
+ /**
+ * Inject node="nodeName" label into Prometheus text format. Each metric line gets the node label added.
+ */
+ private static String injectNodeLabelIntoText(String prometheusText, String nodeName) {
+ StringBuilder result = new StringBuilder();
+ String[] lines = prometheusText.split("\n");
+ String escapedNodeName = escapePrometheusLabelValue(nodeName);
+
+ for (String line : lines) {
+ // Skip comments and empty lines
+ if (line.startsWith("#") || line.trim().isEmpty()) {
+ result.append(line).append("\n");
+ continue;
+ }
+
+ // Metric line format: metric_name{labels} value timestamp
+ // or: metric_name value timestamp
+ int braceIndex = line.indexOf('{');
+ int spaceIndex = line.indexOf(' ');
+
+ if (braceIndex == -1) {
+ // No labels, add node label before value
+ // Format: metric_name value timestamp
+ if (spaceIndex > 0) {
+ String metricName = line.substring(0, spaceIndex);
+ String valueAndTime = line.substring(spaceIndex);
+ result
+ .append(metricName)
+ .append("{node=\"")
+ .append(escapedNodeName)
+ .append("\"}")
+ .append(valueAndTime)
+ .append("\n");
+ } else {
+ result.append(line).append("\n");
+ }
+ } else {
+ // Has labels, inject node label
+ // Format: metric_name{existing_labels} value timestamp
+ int closeBraceIndex = line.indexOf('}', braceIndex);
+ if (closeBraceIndex > braceIndex) {
+ String before = line.substring(0, closeBraceIndex);
+ String after = line.substring(closeBraceIndex);
+
+ // Add comma if there are existing labels
+ String separator = (closeBraceIndex > braceIndex + 1) ? "," : "";
+
+ result
+ .append(before)
+ .append(separator)
+ .append("node=\"")
+ .append(escapedNodeName)
+ .append("\"")
+ .append(after)
+ .append("\n");
+ } else {
+ result.append(line).append("\n");
+ }
Review Comment:
The label injection splits a label-less metric line at the first space. That is fine for well-formed exposition output (metric values such as `1.23e-10`, `NaN`, or `+Inf` contain no spaces, and an optional timestamp is simply carried along in `valueAndTime`), but it is brittle against trailing whitespace, tab separators, or extra spaces between the value and the timestamp.
Consider trimming each line or matching the whole line with a regex so malformed spacing is handled explicitly:
```suggestion
    // Regex to match Prometheus metric lines:
    //   ^([a-zA-Z_:][a-zA-Z0-9_:]*)(\{[^}]*\})?\s+([^\s]+)(?:\s+([^\s]+))?\s*$
    // Group 1: metric name
    // Group 2: labels (optional)
    // Group 3: value
    // Group 4: timestamp (optional)
    java.util.regex.Pattern metricLinePattern =
        java.util.regex.Pattern.compile(
            "^([a-zA-Z_:][a-zA-Z0-9_:]*)(\\{[^}]*\\})?\\s+([^\\s]+)(?:\\s+([^\\s]+))?\\s*$");
    for (String line : lines) {
      String trimmedLine = line.trim();
      // Skip comments and empty lines
      if (trimmedLine.startsWith("#") || trimmedLine.isEmpty()) {
        result.append(line).append("\n");
        continue;
      }

      java.util.regex.Matcher matcher = metricLinePattern.matcher(trimmedLine);
      if (matcher.matches()) {
        String metricName = matcher.group(1);
        String labels = matcher.group(2);
        String value = matcher.group(3);
        String timestamp = matcher.group(4);

        if (labels == null) {
          // No labels, add node label
          result
              .append(metricName)
              .append("{node=\"")
              .append(escapedNodeName)
              .append("\"}")
              .append(" ")
              .append(value);
          if (timestamp != null) {
            result.append(" ").append(timestamp);
          }
          result.append("\n");
        } else {
          // Has labels, inject node label
          String trimmedLabels = labels.substring(1, labels.length() - 1).trim();
          String separator = trimmedLabels.isEmpty() ? "" : ",";
          result
              .append(metricName)
              .append("{")
              .append(trimmedLabels)
              .append(separator)
              .append("node=\"")
              .append(escapedNodeName)
              .append("\"}")
              .append(" ")
              .append(value);
          if (timestamp != null) {
            result.append(" ").append(timestamp);
          }
          result.append("\n");
        }
      } else {
        // Not a metric line, preserve as-is
        result.append(line).append("\n");
```
##########
solr/webapp/web/js/angular/prometheus-parser.js:
##########
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Prometheus text format parser for Solr Admin UI
+ *
+ * Parses Prometheus exposition format (text-based format for metrics)
+ * into a structured JavaScript object for consumption by the Admin UI.
+ */
+
+(function() {
+ 'use strict';
+
+ angular.module('solrAdminApp').factory('PrometheusParser', function() {
+
+ /**
+ * Parse Prometheus text format into structured JavaScript object
+ * @param {string} prometheusText - Raw Prometheus format text
+ * @returns {Object} Parsed metrics object keyed by metric name
+ */
+ function parsePrometheusFormat(prometheusText) {
+ if (!prometheusText || typeof prometheusText !== 'string') {
+ return {};
+ }
+
+ var metrics = {};
+ var lines = prometheusText.split('\n');
+ var currentMetricName = null;
+ var currentMetricType = null;
+ var currentMetricHelp = null;
+
+ for (var i = 0; i < lines.length; i++) {
+ var line = lines[i].trim();
+
+ // Skip empty lines
+ if (!line) continue;
+
+ // Parse HELP comments - use regex for robust parsing
+ if (line.indexOf('# HELP ') === 0) {
+ var helpMatch = line.match(/^# HELP ([a-zA-Z_:][a-zA-Z0-9_:]*)\s+(.*)$/);
+ if (helpMatch) {
+ currentMetricName = helpMatch[1];
+ currentMetricHelp = helpMatch[2];
+ }
+ }
+ // Parse TYPE comments
+ else if (line.indexOf('# TYPE ') === 0) {
+ var typeParts = line.substring(7).split(' ');
+ currentMetricName = typeParts[0];
+ currentMetricType = typeParts[1];
+
+ // Initialize metric entry
+ if (!metrics[currentMetricName]) {
+ metrics[currentMetricName] = {
+ type: currentMetricType,
+ help: currentMetricHelp || '',
+ samples: []
+ };
+ }
+ }
+ // Skip other comments
+ else if (line.charAt(0) === '#') {
+ continue;
+ }
+ // Parse metric sample
+ else {
+ var sample = parseMetricLine(line);
+ if (sample && sample.metricName) {
+ var baseMetricName = sample.metricName;
+ var metricSuffix = null;
+
+ // Only strip suffixes for histogram and summary types
+ // Check if metric name has known suffixes
+ if (sample.metricName.indexOf('_sum') === sample.metricName.length - 4) {
+ baseMetricName = sample.metricName.substring(0, sample.metricName.length - 4);
+ metricSuffix = '_sum';
+ } else if (sample.metricName.indexOf('_count') === sample.metricName.length - 6) {
+ baseMetricName = sample.metricName.substring(0, sample.metricName.length - 6);
+ metricSuffix = '_count';
+ } else if (sample.metricName.indexOf('_bucket') === sample.metricName.length - 7) {
+ baseMetricName = sample.metricName.substring(0, sample.metricName.length - 7);
+ metricSuffix = '_bucket';
+ } else if (sample.metricName.indexOf('_total') === sample.metricName.length - 6) {
+ // Handle _total suffix for summary metrics
+ baseMetricName = sample.metricName.substring(0, sample.metricName.length - 6);
+ metricSuffix = '_total';
+ }
+
+ // Check if base metric exists with histogram/summary type
+ var shouldGroup = false;
+ if (metricSuffix && metrics[baseMetricName]) {
+ var baseType = metrics[baseMetricName].type;
+ shouldGroup = (baseType === 'histogram' || baseType === 'summary');
+ }
+
+ // Use base name if we should group, otherwise use full name
+ var targetMetricName = (shouldGroup || metricSuffix) ? baseMetricName : sample.metricName;
+
+ if (!metrics[targetMetricName]) {
+ metrics[targetMetricName] = {
+ type: currentMetricType || 'unknown',
+ help: currentMetricHelp || '',
+ samples: []
+ };
+ }
+
+ // Add suffix info to sample if present
+ if (metricSuffix) {
+ sample.metricSuffix = metricSuffix;
+ }
+
+ metrics[targetMetricName].samples.push(sample);
+ }
+ }
+ }
+
+ return metrics;
+ }
+
+ /**
+ * Parse a single metric line
+ * @param {string} line - Metric line (e.g., 'metric_name{label1="val1"} 123.45' or with timestamp)
+ * @returns {Object|null} Parsed sample or null
+ */
+ function parseMetricLine(line) {
+ // Regex to match: metric_name{labels} value [timestamp]
+ // or: metric_name value [timestamp]
+ // The timestamp is optional and is a Unix timestamp in milliseconds
+ var match = line.match(/^([a-zA-Z_:][a-zA-Z0-9_:]*?)(?:\{(.*?)\})?\s+([^\s]+)(?:\s+\d+)?$/);
+
+ if (!match) return null;
+
+ var metricName = match[1];
+ var labelsStr = match[2] || '';
+ var value = parseFloat(match[3]);
+
+ // Parse labels
+ var labels = {};
+ if (labelsStr) {
+ // Match label="value" patterns - only allow valid Prometheus escape sequences (\\, \", \n)
+ var labelRegex = /([a-zA-Z_][a-zA-Z0-9_]*)="((?:[^"\\]|\\[\\"n])*)"/g;
+ var labelMatch;
+ while ((labelMatch = labelRegex.exec(labelsStr)) !== null) {
+ // Unescape label values - must unescape \\ first to avoid double-unescaping
+ var labelValue = labelMatch[2].replace(/\\\\/g, '\\').replace(/\\"/g, '"').replace(/\\n/g, '\n');
Review Comment:
The escape sequence unescaping logic has a bug: applying three global `replace` passes in sequence lets one pass create text that a later pass then mis-reads as a new escape sequence.
For example, the label value `C:\networks` is encoded in Prometheus text as `C:\\networks`. The first pass, `replace(/\\\\/g, '\\')`, turns `\\` back into `\`, leaving `C:\networks`; the last pass, `replace(/\\n/g, '\n')`, then sees `\n` and converts it into a real newline.
Reordering the passes does not fully fix this either: with `\\` unescaped last, the `\n` pass still consumes the backslash-plus-`n` pair formed by the tail of `\\` and the following literal `n`. The robust fix is to handle all escape sequences atomically in a single replacement with a callback:
```javascript
var labelValue = labelMatch[2].replace(/\\([\\"n])/g, function (m, ch) {
  return ch === 'n' ? '\n' : ch;
});
```
```suggestion
          var labelValue = labelMatch[2].replace(/\\([\\"n])/g, function (m, ch) {
            return ch === 'n' ? '\n' : ch;
          });
```
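For reference, a quick sanity check of the atomic unescape (hypothetical helper name and test values, not part of the PR):

```javascript
// Single-pass unescape: each \\ , \" , \n token is decoded exactly once.
function unescapeLabelValue(escaped) {
  return escaped.replace(/\\([\\"n])/g, function (m, ch) {
    return ch === 'n' ? '\n' : ch;
  });
}

console.assert(unescapeLabelValue('C:\\\\networks') === 'C:\\networks'); // escaped backslash stays a backslash
console.assert(unescapeLabelValue('line1\\nline2') === 'line1\nline2');  // \n becomes a real newline
console.assert(unescapeLabelValue('say \\"hi\\"') === 'say "hi"');       // \" becomes a quote
```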
##########
solr/webapp/web/js/angular/metrics-extractor.js:
##########
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Metrics extraction helper for Solr Admin UI
+ *
+ * Provides helper functions to extract specific metric values from
+ * parsed Prometheus metrics data.
+ */
+
+(function() {
+ 'use strict';
+
+ angular.module('solrAdminApp').factory('MetricsExtractor', function() {
+
+ /**
+ * Find a metric sample by label filters
+ * @param {Object} metric - Parsed metric object with samples array
+ * @param {Object} labelFilters - Object with label key-value pairs to
match
+ * @returns {Object|null} Matching sample or null
+ */
+ function findSample(metric, labelFilters) {
+ if (!metric || !metric.samples) return null;
+
+ for (var i = 0; i < metric.samples.length; i++) {
+ var sample = metric.samples[i];
+ var matches = true;
+
+ for (var key in labelFilters) {
+ if (labelFilters.hasOwnProperty(key)) {
+ if (sample.labels[key] !== labelFilters[key]) {
Review Comment:
Missing check for `sample.labels` before accessing it. If a sample doesn't
have a `labels` property, the code will throw an error when trying to access
`sample.labels[key]`.
Add a safety check:
```javascript
if (!sample.labels || sample.labels[key] !== labelFilters[key]) {
  matches = false;
  break;
}
```
```suggestion
if (!sample.labels || sample.labels[key] !== labelFilters[key]) {
```
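As a usage illustration, a hypothetical call with the guard in place (the metric shape is assumed from the parser above, and this assumes `findSample` returns the first matching sample, which is outside this hunk):

```javascript
// Hypothetical parsed metric: the first sample has no labels property at all.
var metric = {
  type: 'gauge',
  samples: [
    { metricName: 'solr_disk_space_megabytes', value: 1024.0 },
    { metricName: 'solr_disk_space_megabytes', labels: { node: 'host1:8983_solr' }, value: 2048.0 }
  ]
};

// With the `!sample.labels` check, the label-less sample is skipped instead of throwing a TypeError.
var sample = findSample(metric, { node: 'host1:8983_solr' });
console.assert(sample && sample.value === 2048.0);
```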
##########
solr/webapp/web/js/angular/prometheus-parser.js:
##########
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Prometheus text format parser for Solr Admin UI
+ *
+ * Parses Prometheus exposition format (text-based format for metrics)
+ * into a structured JavaScript object for consumption by the Admin UI.
+ */
+
+(function() {
+ 'use strict';
+
+ angular.module('solrAdminApp').factory('PrometheusParser', function() {
+
+ /**
+ * Parse Prometheus text format into structured JavaScript object
+ * @param {string} prometheusText - Raw Prometheus format text
+ * @returns {Object} Parsed metrics object keyed by metric name
+ */
+ function parsePrometheusFormat(prometheusText) {
+ if (!prometheusText || typeof prometheusText !== 'string') {
+ return {};
+ }
+
+ var metrics = {};
+ var lines = prometheusText.split('\n');
+ var currentMetricName = null;
+ var currentMetricType = null;
+ var currentMetricHelp = null;
+
+ for (var i = 0; i < lines.length; i++) {
+ var line = lines[i].trim();
+
+ // Skip empty lines
+ if (!line) continue;
+
+ // Parse HELP comments - use regex for robust parsing
+ if (line.indexOf('# HELP ') === 0) {
+ var helpMatch = line.match(/^# HELP ([a-zA-Z_:][a-zA-Z0-9_:]*)\s+(.*)$/);
+ if (helpMatch) {
+ currentMetricName = helpMatch[1];
+ currentMetricHelp = helpMatch[2];
+ }
+ }
+ // Parse TYPE comments
+ else if (line.indexOf('# TYPE ') === 0) {
+ var typeParts = line.substring(7).split(' ');
+ currentMetricName = typeParts[0];
+ currentMetricType = typeParts[1];
+
+ // Initialize metric entry
+ if (!metrics[currentMetricName]) {
+ metrics[currentMetricName] = {
+ type: currentMetricType,
+ help: currentMetricHelp || '',
+ samples: []
+ };
+ }
+ }
+ // Skip other comments
+ else if (line.charAt(0) === '#') {
+ continue;
+ }
+ // Parse metric sample
+ else {
+ var sample = parseMetricLine(line);
+ if (sample && sample.metricName) {
+ var baseMetricName = sample.metricName;
+ var metricSuffix = null;
+
+ // Only strip suffixes for histogram and summary types
+ // Check if metric name has known suffixes
+ if (sample.metricName.indexOf('_sum') === sample.metricName.length - 4) {
+ baseMetricName = sample.metricName.substring(0, sample.metricName.length - 4);
+ metricSuffix = '_sum';
+ } else if (sample.metricName.indexOf('_count') === sample.metricName.length - 6) {
+ baseMetricName = sample.metricName.substring(0, sample.metricName.length - 6);
+ metricSuffix = '_count';
+ } else if (sample.metricName.indexOf('_bucket') === sample.metricName.length - 7) {
+ baseMetricName = sample.metricName.substring(0, sample.metricName.length - 7);
+ metricSuffix = '_bucket';
+ } else if (sample.metricName.indexOf('_total') === sample.metricName.length - 6) {
+ // Handle _total suffix for summary metrics
+ baseMetricName = sample.metricName.substring(0, sample.metricName.length - 6);
+ metricSuffix = '_total';
+ }
+
+ // Check if base metric exists with histogram/summary type
+ var shouldGroup = false;
+ if (metricSuffix && metrics[baseMetricName]) {
+ var baseType = metrics[baseMetricName].type;
+ shouldGroup = (baseType === 'histogram' || baseType === 'summary');
+ }
+
+ // Use base name if we should group, otherwise use full name
+ var targetMetricName = (shouldGroup || metricSuffix) ? baseMetricName : sample.metricName;
+
+ if (!metrics[targetMetricName]) {
+ metrics[targetMetricName] = {
+ type: currentMetricType || 'unknown',
+ help: currentMetricHelp || '',
+ samples: []
+ };
+ }
+
+ // Add suffix info to sample if present
+ if (metricSuffix) {
+ sample.metricSuffix = metricSuffix;
+ }
+
+ metrics[targetMetricName].samples.push(sample);
+ }
+ }
+ }
+
+ return metrics;
+ }
+
+ /**
+ * Parse a single metric line
+ * @param {string} line - Metric line (e.g., 'metric_name{label1="val1"} 123.45' or with timestamp)
+ * @returns {Object|null} Parsed sample or null
+ */
+ function parseMetricLine(line) {
+ // Regex to match: metric_name{labels} value [timestamp]
+ // or: metric_name value [timestamp]
+ // The timestamp is optional and is a Unix timestamp in milliseconds
+ var match = line.match(/^([a-zA-Z_:][a-zA-Z0-9_:]*?)(?:\{(.*?)\})?\s+([^\s]+)(?:\s+\d+)?$/);
Review Comment:
The non-greedy label match `(?:\{(.*?)\})?` can mis-parse lines whose label values contain a `}` character; Prometheus does not escape braces inside quoted label values, so a line like `metric{path="/a} b"} 1` ends up with the wrong label and value groups.
The value pattern `[^\s]+` is fine as-is (it already covers scientific notation such as `1.23e-4` and the special values `NaN`, `+Inf`, `-Inf`), but that is worth a comment, and the label matcher should be made quote-aware:
```suggestion
      // The value pattern [^\s]+ matches scientific notation (e.g., 1.23e-4) and special values (NaN, +Inf, -Inf).
      // The label set is everything between { and }, matched quote-aware so '}' characters inside quoted label values are handled.
      var match = line.match(/^([a-zA-Z_:][a-zA-Z0-9_:]*)(?:\{((?:"(?:[^"\\]|\\.)*"|[^}])*)\})?\s+([^\s]+)(?:\s+\d+)?$/);
```
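A few hypothetical spot checks for the suggested pattern (example metric names, not taken from Solr's actual output):

```javascript
var metricLine = /^([a-zA-Z_:][a-zA-Z0-9_:]*)(?:\{((?:"(?:[^"\\]|\\.)*"|[^}])*)\})?\s+([^\s]+)(?:\s+\d+)?$/;

// Scientific notation plus a trailing timestamp
console.assert(metricLine.exec('solr_example_total 1.23e-4 1712345678901')[3] === '1.23e-4');
// Special value
console.assert(metricLine.exec('solr_example_seconds{quantile="0.99"} +Inf')[3] === '+Inf');
// '}' inside a quoted label value no longer truncates the label group
console.assert(metricLine.exec('solr_example_info{path="/select{x}"} NaN')[2] === 'path="/select{x}"');
```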
##########
solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java:
##########
@@ -129,9 +135,198 @@ public static CompletableFuture<NamedList<Object>> callRemoteNode(
URI baseUri = URI.create(zkController.zkStateReader.getBaseUrlForNodeName(nodeName));
SolrRequest<?> proxyReq = new GenericSolrRequest(SolrRequest.METHOD.GET, uriPath, params);
+ // Set response parser based on wt parameter to ensure correct format is used
+ String wt = params.get("wt");
+ if ("prometheus".equals(wt) || "openmetrics".equals(wt)) {
+ proxyReq.setResponseParser(new InputStreamResponseParser(wt));
+ }
+
return zkController
.getCoreContainer()
.getDefaultHttpSolrClient()
.requestWithBaseUrl(baseUri.toString(), c -> c.requestAsync(proxyReq));
}
+
+ /**
+ * Resolve node names from the "nodes" parameter into a set of live node names.
+ *
+ * @param nodeNames the value of the "nodes" parameter ("all" or comma-separated node names)
+ * @param container the CoreContainer
+ * @return set of resolved node names
+ * @throws SolrException if node format is invalid or node is not in cluster
+ */
+ private static Set<String> resolveNodes(String nodeNames, CoreContainer container) {
+ Set<String> liveNodes =
+ container.getZkController().zkStateReader.getClusterState().getLiveNodes();
+
+ if (nodeNames.equals("all")) {
+ log.debug("All live nodes requested");
+ return liveNodes;
+ }
+
+ Set<String> nodes = new HashSet<>(Arrays.asList(nodeNames.split(",")));
+ for (String nodeName : nodes) {
+ if (!nodeName.matches("^[^/:]+:\\d+_[\\w/]+$")) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST, "Parameter " + PARAM_NODES + " has wrong format");
+ }
+ if (!liveNodes.contains(nodeName)) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "Requested node " + nodeName + " is not part of cluster");
+ }
+ }
+ log.debug("Nodes requested: {}", nodes);
+ return nodes;
+ }
+
+ /** Handle Prometheus format by fetching from nodes and merging text responses. */
+ private static void handlePrometheusFormat(
+ Set<String> nodes,
+ String pathStr,
+ SolrParams params,
+ CoreContainer container,
+ SolrQueryResponse rsp)
+ throws IOException, SolrServerException, InterruptedException {
+
+ ZkController zkController = container.getZkController();
+ Map<String, Future<NamedList<Object>>> responses = new LinkedHashMap<>();
+
+ // Ensure wt=prometheus for all requests
+ ModifiableSolrParams prometheusParams = new ModifiableSolrParams(params);
+ if (!prometheusParams.get("wt", "").equals("prometheus")) {
+ prometheusParams.set("wt", "prometheus");
+ }
+
+ // Submit all requests (already async via callRemoteNode)
+ for (String node : nodes) {
+ responses.put(node, callRemoteNode(node, pathStr, prometheusParams, zkController));
+ }
+
+ // Collect all Prometheus text responses
+ StringBuilder mergedText = new StringBuilder();
+ int successCount = 0;
+ int failureCount = 0;
+ for (Map.Entry<String, Future<NamedList<Object>>> entry : responses.entrySet()) {
+ try {
+ NamedList<Object> resp =
+ entry.getValue().get(PROMETHEUS_FETCH_TIMEOUT_SECONDS, TimeUnit.SECONDS);
+
+ // Extract text from InputStream response
+ Object streamObj = resp.get("stream");
+ if (streamObj instanceof InputStream) {
+ try (InputStream stream = (InputStream) streamObj) {
+ String prometheusText = new String(stream.readAllBytes(), StandardCharsets.UTF_8);
+ if (!prometheusText.isEmpty()) {
+ // Inject node label into each metric line
+ String labeledText = injectNodeLabelIntoText(prometheusText, entry.getKey());
+ mergedText.append(labeledText);
+ successCount++;
+ }
+ }
+ } else {
+ log.warn("No stream in response from node {}", entry.getKey());
+ failureCount++;
+ }
+ } catch (ExecutionException ee) {
+ log.warn("Exception when fetching Prometheus result from node {}",
entry.getKey(), ee);
+ failureCount++;
+ } catch (TimeoutException te) {
+ log.warn("Timeout when fetching Prometheus result from node {}",
entry.getKey(), te);
+ failureCount++;
+ }
+ }
+
+ // Add metadata comment to indicate success/failure counts
+ if (failureCount > 0 || successCount > 0) {
+ StringBuilder header = new StringBuilder();
+ header
+ .append("# Solr multi-node metrics aggregation: ")
+ .append(successCount)
+ .append(" of ")
+ .append(responses.size())
+ .append(" nodes succeeded");
+ if (failureCount > 0) {
+ header.append(" (").append(failureCount).append(" failed)");
+ }
+ header.append("\n");
+ mergedText.insert(0, header);
+ }
+
+ // Store the merged text in response - will be written as-is
+ rsp.add("prometheusText", mergedText.toString());
+ }
+
+ /**
+ * Escape special characters in Prometheus label values according to Prometheus specification.
+ * Escapes backslash, double quote, and newline characters.
+ */
+ private static String escapePrometheusLabelValue(String value) {
+ return value.replace("\\", "\\\\").replace("\"", "\\\"").replace("\n", "\\n");
+ }
+
+ /**
+ * Inject node="nodeName" label into Prometheus text format. Each metric line gets the node label added.
+ */
+ private static String injectNodeLabelIntoText(String prometheusText, String nodeName) {
+ StringBuilder result = new StringBuilder();
+ String[] lines = prometheusText.split("\n");
+ String escapedNodeName = escapePrometheusLabelValue(nodeName);
+
+ for (String line : lines) {
+ // Skip comments and empty lines
+ if (line.startsWith("#") || line.trim().isEmpty()) {
+ result.append(line).append("\n");
+ continue;
+ }
+
+ // Metric line format: metric_name{labels} value timestamp
+ // or: metric_name value timestamp
+ int braceIndex = line.indexOf('{');
+ int spaceIndex = line.indexOf(' ');
+
+ if (braceIndex == -1) {
+ // No labels, add node label before value
+ // Format: metric_name value timestamp
+ if (spaceIndex > 0) {
+ String metricName = line.substring(0, spaceIndex);
+ String valueAndTime = line.substring(spaceIndex);
+ result
+ .append(metricName)
+ .append("{node=\"")
+ .append(escapedNodeName)
+ .append("\"}")
+ .append(valueAndTime)
+ .append("\n");
+ } else {
+ result.append(line).append("\n");
+ }
+ } else {
+ // Has labels, inject node label
+ // Format: metric_name{existing_labels} value timestamp
+ int closeBraceIndex = line.indexOf('}', braceIndex);
+ if (closeBraceIndex > braceIndex) {
+ String before = line.substring(0, closeBraceIndex);
+ String after = line.substring(closeBraceIndex);
+
+ // Add comma if there are existing labels
+ String separator = (closeBraceIndex > braceIndex + 1) ? "," : "";
+
+ result
+ .append(before)
+ .append(separator)
+ .append("node=\"")
+ .append(escapedNodeName)
+ .append("\"")
Review Comment:
The label injection logic doesn't properly handle metrics with existing
labels that already have a `node` label. If a metric already contains
`node="something"`, this code will add a second `node` label, resulting in
invalid Prometheus format (duplicate labels with the same name are not allowed).
Consider checking if the `node` label already exists and either skip adding
it, override it, or rename the injected label to avoid conflicts.
```suggestion
        String metricName = line.substring(0, braceIndex);
        String labelsStr = line.substring(braceIndex + 1, closeBraceIndex);
        String after = line.substring(closeBraceIndex + 1);
        // Parse labels into a map. Note: this simple split assumes label values contain no commas;
        // values are kept in their already-escaped form so they are not escaped twice.
        Map<String, String> labels = new LinkedHashMap<>();
        if (!labelsStr.trim().isEmpty()) {
          String[] labelPairs = labelsStr.split(",");
          for (String pair : labelPairs) {
            int eqIdx = pair.indexOf('=');
            if (eqIdx > 0) {
              String key = pair.substring(0, eqIdx).trim();
              String value = pair.substring(eqIdx + 1).trim();
              // Remove surrounding quotes from value
              if (value.startsWith("\"") && value.endsWith("\"") && value.length() >= 2) {
                value = value.substring(1, value.length() - 1);
              }
              labels.put(key, value);
            }
          }
        }
        // Override or add the node label (escaped here; existing values are already escaped)
        labels.put("node", escapePrometheusLabelValue(nodeName));
        // Reconstruct labels string
        StringBuilder newLabelsStr = new StringBuilder();
        boolean first = true;
        for (Map.Entry<String, String> entry : labels.entrySet()) {
          if (!first) newLabelsStr.append(",");
          newLabelsStr.append(entry.getKey()).append("=\"").append(entry.getValue()).append("\"");
          first = false;
        }
        result
            .append(metricName)
            .append("{")
            .append(newLabelsStr)
            .append("}")
```