Copilot commented on code in PR #3908:
URL: https://github.com/apache/solr/pull/3908#discussion_r2573656667
##########
solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java:
##########
@@ -129,9 +140,218 @@ public static CompletableFuture<NamedList<Object>>
callRemoteNode(
URI baseUri =
URI.create(zkController.zkStateReader.getBaseUrlForNodeName(nodeName));
SolrRequest<?> proxyReq = new GenericSolrRequest(SolrRequest.METHOD.GET,
uriPath, params);
+ // Set response parser based on wt parameter to ensure correct format is
used
+ String wt = params.get("wt");
+ if ("prometheus".equals(wt) || "openmetrics".equals(wt)) {
+ proxyReq.setResponseParser(new InputStreamResponseParser(wt));
+ }
+
return zkController
.getCoreContainer()
.getDefaultHttpSolrClient()
.requestWithBaseUrl(baseUri.toString(), c -> c.requestAsync(proxyReq));
}
+
+ /**
+ * Resolve node names from the "nodes" parameter into a set of live node
names.
+ *
+ * @param nodeNames the value of the "nodes" parameter ("all" or
comma-separated node names)
+ * @param container the CoreContainer
+ * @return set of resolved node names
+ * @throws SolrException if node format is invalid or node is not in cluster
+ */
+ private static Set<String> resolveNodes(String nodeNames, CoreContainer
container) {
+ Set<String> liveNodes =
+
container.getZkController().zkStateReader.getClusterState().getLiveNodes();
+
+ if (nodeNames.equals("all")) {
+ log.debug("All live nodes requested");
+ return liveNodes;
+ }
+
+ Set<String> nodes = new HashSet<>(Arrays.asList(nodeNames.split(",")));
+ for (String nodeName : nodes) {
+ if (!nodeName.matches("^[^/:]+:\\d+_[\\w/]+$")) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST, "Parameter " + PARAM_NODES +
" has wrong format");
+ }
+ if (!liveNodes.contains(nodeName)) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "Requested node " + nodeName + " is not part of cluster");
+ }
+ }
+ log.debug("Nodes requested: {}", nodes);
+ return nodes;
+ }
+
+ /** Handle Prometheus format by fetching from nodes and merging text
responses. */
+ private static void handlePrometheusFormat(
+ Set<String> nodes,
+ String pathStr,
+ SolrParams params,
+ CoreContainer container,
+ SolrQueryResponse rsp)
+ throws IOException, SolrServerException, InterruptedException {
+
+ // Bounded parallel executor - max concurrent fetches using Solr's
ExecutorUtil
+ ExecutorService executor =
+ new ExecutorUtil.MDCAwareThreadPoolExecutor(
+ PROMETHEUS_PROXY_THREAD_POOL_SIZE, // corePoolSize
+ PROMETHEUS_PROXY_THREAD_POOL_SIZE, // maximumPoolSize
+ 60L,
+ TimeUnit.SECONDS,
+ new LinkedBlockingQueue<>(),
+ new SolrNamedThreadFactory("metricsProxyExecutor"));
+
+ try {
+ // Submit all fetches at once - executor will handle bounded parallelism
+ Map<String, Future<String>> futures = new LinkedHashMap<>();
+ for (String node : nodes) {
+ futures.put(node, fetchNodePrometheusTextAsync(executor, node,
pathStr, params, container));
+ }
+
+ // Collect all Prometheus text responses
+ StringBuilder mergedText = new StringBuilder();
+ for (Map.Entry<String, Future<String>> entry : futures.entrySet()) {
+ try {
+ String prometheusText =
+ entry.getValue().get(PROMETHEUS_FETCH_TIMEOUT_SECONDS,
TimeUnit.SECONDS);
+ if (prometheusText != null && !prometheusText.isEmpty()) {
+ // Inject node label into each metric line
+ String labeledText = injectNodeLabelIntoText(prometheusText,
entry.getKey());
+ mergedText.append(labeledText);
+ }
+ } catch (ExecutionException ee) {
+ log.warn("Exception when fetching Prometheus result from node {}",
entry.getKey(), ee);
+ } catch (TimeoutException te) {
+ log.warn("Timeout when fetching Prometheus result from node {}",
entry.getKey(), te);
+ }
+ }
+
+ // Store the merged text in response - will be written as-is
+ rsp.add("prometheusText", mergedText.toString());
+
+ } finally {
+ ExecutorUtil.shutdownAndAwaitTermination(executor);
+ }
+ }
+
+ /** Fetch Prometheus text from a remote node asynchronously. */
+ private static Future<String> fetchNodePrometheusTextAsync(
+ ExecutorService executor,
+ String nodeName,
+ String pathStr,
+ SolrParams params,
+ CoreContainer container) {
+
+ return executor.submit(
+ () -> {
+ try {
+ ZkController zkController = container.getZkController();
+ if (zkController == null) {
+ log.warn("ZkController not available for node {}", nodeName);
+ return null;
+ }
+
+ // Ensure wt=prometheus is set for inter-node requests
+ ModifiableSolrParams prometheusParams = new
ModifiableSolrParams(params);
+ if (!prometheusParams.get("wt", "").equals("prometheus")) {
+ prometheusParams.set("wt", "prometheus");
+ }
+
+ // Use existing callRemoteNode() to fetch metrics
+ CompletableFuture<NamedList<Object>> future =
+ callRemoteNode(nodeName, pathStr, prometheusParams,
zkController);
+
+ NamedList<Object> response =
+ future.get(PROMETHEUS_FETCH_TIMEOUT_SECONDS, TimeUnit.SECONDS);
+
+ // Response has "stream" key with InputStream when using
InputStreamResponseParser
+ Object streamObj = response.get("stream");
+ if (streamObj == null) {
+ log.warn("No stream in response from node {}", nodeName);
+ return null;
+ }
+ if (!(streamObj instanceof InputStream)) {
+ log.warn(
+ "Invalid stream type in response from node {}: {}",
+ nodeName,
+ streamObj.getClass());
+ return null;
+ }
+ try (InputStream stream = (InputStream) streamObj) {
+ return new String(stream.readAllBytes(), StandardCharsets.UTF_8);
+ }
+
+ } catch (Exception e) {
+ log.warn("Error fetching metrics from node {}", nodeName, e);
+ return null;
+ }
+ });
+ }
+
+ /**
+ * Inject node="nodeName" label into Prometheus text format. Each metric
line gets the node label
+ * added.
+ */
+ private static String injectNodeLabelIntoText(String prometheusText, String
nodeName) {
+ StringBuilder result = new StringBuilder();
+ String[] lines = prometheusText.split("\n");
+
+ for (String line : lines) {
+ // Skip comments and empty lines
+ if (line.startsWith("#") || line.trim().isEmpty()) {
+ result.append(line).append("\n");
+ continue;
+ }
+
+ // Metric line format: metric_name{labels} value timestamp
+ // or: metric_name value timestamp
+ int braceIndex = line.indexOf('{');
+ int spaceIndex = line.indexOf(' ');
+
+ if (braceIndex == -1) {
+ // No labels, add node label before value
+ // Format: metric_name value timestamp
+ if (spaceIndex > 0) {
+ String metricName = line.substring(0, spaceIndex);
+ String valueAndTime = line.substring(spaceIndex);
+ result
+ .append(metricName)
+ .append("{node=\"")
+ .append(nodeName)
+ .append("\"}")
+ .append(valueAndTime)
+ .append("\n");
+ } else {
+ result.append(line).append("\n");
+ }
+ } else {
+ // Has labels, inject node label
+ // Format: metric_name{existing_labels} value timestamp
+ int closeBraceIndex = line.indexOf('}', braceIndex);
+ if (closeBraceIndex > braceIndex) {
+ String before = line.substring(0, closeBraceIndex);
+ String after = line.substring(closeBraceIndex);
+
+ // Add comma if there are existing labels
+ String separator = (closeBraceIndex > braceIndex + 1) ? "," : "";
+
+ result
+ .append(before)
+ .append(separator)
+ .append("node=\"")
+ .append(nodeName)
Review Comment:
Same escaping issue here: the node name needs to be escaped when it is injected into the label value, otherwise a node name containing a backslash, double quote, or newline breaks the Prometheus exposition format.
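A possible shape of the fix, reusing the `escapePrometheusLabelValue` helper proposed on the other occurrence of this code further down (a sketch, not the final patch):
```java
// Escape backslash, double quote and newline as required for Prometheus
// label values before embedding the node name.
private static String escapePrometheusLabelValue(String value) {
  return value.replace("\\", "\\\\").replace("\"", "\\\"").replace("\n", "\\n");
}
```
Both branches of `injectNodeLabelIntoText` would then append `escapePrometheusLabelValue(nodeName)` instead of the raw `nodeName`.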
##########
solr/webapp/web/js/angular/prometheus-parser.js:
##########
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Prometheus text format parser for Solr Admin UI
+ *
+ * Parses Prometheus exposition format (text-based format for metrics)
+ * into a structured JavaScript object for consumption by the Admin UI.
+ */
+
+(function() {
+ 'use strict';
+
+ angular.module('solrAdminApp').factory('PrometheusParser', function() {
+
+ /**
+ * Parse Prometheus text format into structured JavaScript object
+ * @param {string} prometheusText - Raw Prometheus format text
+ * @returns {Object} Parsed metrics object keyed by metric name
+ */
+ function parsePrometheusFormat(prometheusText) {
+ if (!prometheusText || typeof prometheusText !== 'string') {
+ return {};
+ }
+
+ var metrics = {};
+ var lines = prometheusText.split('\n');
+ var currentMetricName = null;
+ var currentMetricType = null;
+ var currentMetricHelp = null;
+
+ for (var i = 0; i < lines.length; i++) {
+ var line = lines[i].trim();
+
+ // Skip empty lines
+ if (!line) continue;
+
+ // Parse HELP comments
+ if (line.indexOf('# HELP ') === 0) {
+ var helpParts = line.substring(7).split(' ');
+ currentMetricName = helpParts[0];
+ currentMetricHelp = helpParts.slice(1).join(' ');
+ }
+ // Parse TYPE comments
+ else if (line.indexOf('# TYPE ') === 0) {
+ var typeParts = line.substring(7).split(' ');
+ currentMetricName = typeParts[0];
+ currentMetricType = typeParts[1];
+
+ // Initialize metric entry
+ if (!metrics[currentMetricName]) {
+ metrics[currentMetricName] = {
+ type: currentMetricType,
+ help: currentMetricHelp || '',
+ samples: []
+ };
+ }
+ }
+ // Skip other comments
+ else if (line.charAt(0) === '#') {
+ continue;
+ }
+ // Parse metric sample
+ else {
+ var sample = parseMetricLine(line);
+ if (sample && sample.metricName) {
+ // Handle histogram suffixes (_sum, _count, _bucket)
+ var baseMetricName =
sample.metricName.replace(/_sum$|_count$|_bucket$/, '');
+
+ if (!metrics[baseMetricName]) {
+ metrics[baseMetricName] = {
+ type: 'unknown',
+ help: '',
+ samples: []
+ };
+ }
+
+ // Add suffix info to sample
+ if (sample.metricName.indexOf('_sum') === sample.metricName.length
- 4) {
+ sample.metricSuffix = '_sum';
+ } else if (sample.metricName.indexOf('_count') ===
sample.metricName.length - 6) {
+ sample.metricSuffix = '_count';
+ } else if (sample.metricName.indexOf('_bucket') ===
sample.metricName.length - 7) {
+ sample.metricSuffix = '_bucket';
+ }
+
+ metrics[baseMetricName].samples.push(sample);
+ }
+ }
+ }
+
+ return metrics;
+ }
+
+ /**
+ * Parse a single metric line
+ * @param {string} line - Metric line (e.g., 'metric_name{label1="val1"}
123.45')
+ * @returns {Object|null} Parsed sample or null
+ */
+ function parseMetricLine(line) {
+ // Regex to match: metric_name{labels} value
+ // or: metric_name value
+ var match =
line.match(/^([a-zA-Z_:][a-zA-Z0-9_:]*?)(?:\{(.*?)\})?\s+([^\s]+)$/);
Review Comment:
The regex pattern doesn't handle optional timestamps in Prometheus metrics.
Prometheus metrics can have an optional timestamp after the value (e.g.,
`metric_name{labels} 123.45 1234567890`), but the current pattern `([^\s]+)$`
only captures the value and requires the line to end there.
Consider updating the pattern to:
```javascript
var match =
line.match(/^([a-zA-Z_:][a-zA-Z0-9_:]*?)(?:\{(.*?)\})?\s+([^\s]+)(?:\s+\d+)?$/);
```
This allows an optional timestamp at the end.
```suggestion
var match =
line.match(/^([a-zA-Z_:][a-zA-Z0-9_:]*?)(?:\{(.*?)\})?\s+([^\s]+)(?:\s+\d+)?$/);
```
##########
solr/webapp/web/js/angular/prometheus-parser.js:
##########
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Prometheus text format parser for Solr Admin UI
+ *
+ * Parses Prometheus exposition format (text-based format for metrics)
+ * into a structured JavaScript object for consumption by the Admin UI.
+ */
+
+(function() {
+ 'use strict';
+
+ angular.module('solrAdminApp').factory('PrometheusParser', function() {
+
+ /**
+ * Parse Prometheus text format into structured JavaScript object
+ * @param {string} prometheusText - Raw Prometheus format text
+ * @returns {Object} Parsed metrics object keyed by metric name
+ */
+ function parsePrometheusFormat(prometheusText) {
+ if (!prometheusText || typeof prometheusText !== 'string') {
+ return {};
+ }
+
+ var metrics = {};
+ var lines = prometheusText.split('\n');
+ var currentMetricName = null;
+ var currentMetricType = null;
+ var currentMetricHelp = null;
+
+ for (var i = 0; i < lines.length; i++) {
+ var line = lines[i].trim();
+
+ // Skip empty lines
+ if (!line) continue;
+
+ // Parse HELP comments
+ if (line.indexOf('# HELP ') === 0) {
+ var helpParts = line.substring(7).split(' ');
+ currentMetricName = helpParts[0];
+ currentMetricHelp = helpParts.slice(1).join(' ');
+ }
+ // Parse TYPE comments
+ else if (line.indexOf('# TYPE ') === 0) {
+ var typeParts = line.substring(7).split(' ');
+ currentMetricName = typeParts[0];
+ currentMetricType = typeParts[1];
+
+ // Initialize metric entry
+ if (!metrics[currentMetricName]) {
+ metrics[currentMetricName] = {
+ type: currentMetricType,
+ help: currentMetricHelp || '',
+ samples: []
+ };
+ }
+ }
+ // Skip other comments
+ else if (line.charAt(0) === '#') {
+ continue;
+ }
+ // Parse metric sample
+ else {
+ var sample = parseMetricLine(line);
+ if (sample && sample.metricName) {
+ // Handle histogram suffixes (_sum, _count, _bucket)
+ var baseMetricName =
sample.metricName.replace(/_sum$|_count$|_bucket$/, '');
+
+ if (!metrics[baseMetricName]) {
+ metrics[baseMetricName] = {
+ type: 'unknown',
+ help: '',
+ samples: []
+ };
+ }
+
+ // Add suffix info to sample
+ if (sample.metricName.indexOf('_sum') === sample.metricName.length
- 4) {
+ sample.metricSuffix = '_sum';
+ } else if (sample.metricName.indexOf('_count') ===
sample.metricName.length - 6) {
+ sample.metricSuffix = '_count';
+ } else if (sample.metricName.indexOf('_bucket') ===
sample.metricName.length - 7) {
+ sample.metricSuffix = '_bucket';
+ }
+
+ metrics[baseMetricName].samples.push(sample);
+ }
+ }
+ }
+
+ return metrics;
+ }
+
+ /**
+ * Parse a single metric line
+ * @param {string} line - Metric line (e.g., 'metric_name{label1="val1"}
123.45')
+ * @returns {Object|null} Parsed sample or null
+ */
+ function parseMetricLine(line) {
+ // Regex to match: metric_name{labels} value
+ // or: metric_name value
+ var match =
line.match(/^([a-zA-Z_:][a-zA-Z0-9_:]*?)(?:\{(.*?)\})?\s+([^\s]+)$/);
+
+ if (!match) return null;
+
+ var metricName = match[1];
+ var labelsStr = match[2] || '';
+ var value = parseFloat(match[3]);
+
+ // Parse labels
+ var labels = {};
+ if (labelsStr) {
+ // Match label="value" patterns
+ var labelRegex = /([a-zA-Z_][a-zA-Z0-9_]*)="((?:[^"\\]|\\.)*)"/g;
Review Comment:
The regex pattern `((?:[^"\\]|\\.)*)` for label values doesn't correctly
handle all Prometheus escape sequences. Specifically, it allows any character
after a backslash (`\\.`), but Prometheus only allows specific escape
sequences: `\\`, `\"`, and `\n`.
Consider using a more precise pattern:
```javascript
var labelRegex = /([a-zA-Z_][a-zA-Z0-9_]*)="((?:[^"\\]|\\[\\n"])*)"/g;
```
This ensures only valid escape sequences are matched.
```suggestion
var labelRegex =
/([a-zA-Z_][a-zA-Z0-9_]*)="((?:[^"\\]|\\[\\n"])*)"/g;
```
##########
solr/core/src/java/org/apache/solr/response/PrometheusResponseWriter.java:
##########
@@ -41,7 +41,18 @@ public void write(
OutputStream out, SolrQueryRequest request, SolrQueryResponse response,
String contentType)
throws IOException {
+ // Check if we have pre-merged Prometheus text (from multi-node requests)
+ var prometheusText = response.getValues().get("prometheusText");
+ if (prometheusText instanceof String) {
+ out.write(((String)
prometheusText).getBytes(java.nio.charset.StandardCharsets.UTF_8));
+ return;
+ }
+
+ // Otherwise handle normal MetricSnapshots
var metrics = response.getValues().get("metrics");
+ if (metrics == null) {
+ throw new IOException("No metrics or prometheusText found in response");
+ }
Review Comment:
The cast to `MetricSnapshots` on line 56 could throw a `ClassCastException`
if `metrics` is not of the expected type. While there's a null check, there's
no type check before casting.
Consider adding a type check:
```java
if (!(metrics instanceof MetricSnapshots)) {
throw new IOException("Invalid metrics type: " +
metrics.getClass().getName());
}
```
```suggestion
}
if (!(metrics instanceof MetricSnapshots)) {
throw new IOException("Invalid metrics type: " +
metrics.getClass().getName());
}
```
##########
solr/webapp/web/js/angular/controllers/cloud.js:
##########
@@ -394,118 +394,136 @@ var nodesSubController = function($scope, Collections,
System, Metrics) {
Fetch metrics for all selected nodes. Only pull the metrics that we'll
show to save bandwidth
Pick the data we want to display and add it to the node-centric data
structure
*/
- Metrics.get({
- "nodes": nodesParam,
- "prefix":
"CONTAINER.fs,org.eclipse.jetty.server.handler.DefaultHandler.get-requests,INDEX.sizeInBytes,SEARCHER.searcher.numDocs,SEARCHER.searcher.deletedDocs,SEARCHER.searcher.warmupTime"
- },
- function (metricsResponse) {
- for (var node in metricsResponse) {
- if (node in nodes) {
- var m = metricsResponse[node];
- nodes[node]['metrics'] = m;
- var diskTotal =
m.metrics['solr.node']['CONTAINER.fs.totalSpace'];
- var diskFree =
m.metrics['solr.node']['CONTAINER.fs.usableSpace'];
- var diskPercentage = Math.floor((diskTotal - diskFree) /
diskTotal * 100);
- nodes[node]['diskUsedPct'] = diskPercentage;
- nodes[node]['diskUsedPctStyle'] = styleForPct(diskPercentage);
- nodes[node]['diskTotal'] = bytesToSize(diskTotal);
- nodes[node]['diskFree'] = bytesToSize(diskFree);
-
- var r =
m.metrics['solr.jetty']['org.eclipse.jetty.server.handler.DefaultHandler.get-requests'];
- nodes[node]['req'] = r.count;
- nodes[node]['req1minRate'] = Math.floor(r['1minRate'] * 100) /
100;
- nodes[node]['req5minRate'] = Math.floor(r['5minRate'] * 100) /
100;
- nodes[node]['req15minRate'] = Math.floor(r['15minRate'] * 100) /
100;
- nodes[node]['reqp75_ms'] = Math.floor(r['p75_ms']);
- nodes[node]['reqp95_ms'] = Math.floor(r['p95_ms']);
- nodes[node]['reqp99_ms'] = Math.floor(r['p99_ms']);
-
- // These are the cores we _expect_ to find on this node
according to the CLUSTERSTATUS
- var cores = nodes[node]['cores'];
- var indexSizeTotal = 0;
- var indexSizeMax = 0;
- var docsTotal = 0;
- var graphData = [];
- for (let coreId in cores) {
- var core = cores[coreId];
- if (core['shard_state'] !== 'active' || core['state'] !==
'active') {
- // If core state is not active, display the real state, or
if shard is inactive, display that
- var labelState = (core['state'] !== 'active') ?
core['state'] : core['shard_state'];
- core['label'] += "_(" + labelState + ")";
- }
- var coreMetricName = "solr.core." + core['collection'] + "." +
core['shard'] + "." + core['replica'];
- var coreMetric = m.metrics[coreMetricName];
- // we may not actually get metrics back for every expected
core (the core may be down)
- if (coreMetric) {
- var size = coreMetric['INDEX.sizeInBytes'];
- size = (typeof size !== 'undefined') ? size : 0;
- core['sizeInBytes'] = size;
- core['size'] = bytesToSize(size);
- indexSizeTotal = indexSizeTotal + size;
- indexSizeMax = size > indexSizeMax ? size : indexSizeMax;
- var numDocs = coreMetric['SEARCHER.searcher.numDocs'];
- numDocs = (typeof numDocs !== 'undefined') ? numDocs : 0;
- core['numDocs'] = numDocs;
- core['numDocsHuman'] = numDocsHuman(numDocs);
- core['avgSizePerDoc'] = bytesToSize(numDocs === 0 ? 0 : size
/ numDocs);
- var deletedDocs =
coreMetric['SEARCHER.searcher.deletedDocs'];
- deletedDocs = (typeof deletedDocs !== 'undefined') ?
deletedDocs : 0;
- core['deletedDocs'] = deletedDocs;
- core['deletedDocsHuman'] = numDocsHuman(deletedDocs);
- var warmupTime = coreMetric['SEARCHER.searcher.warmupTime'];
- warmupTime = (typeof warmupTime !== 'undefined') ?
warmupTime : 0;
- core['warmupTime'] = warmupTime;
- docsTotal += core['numDocs'];
- }
- }
- for (let coreId in cores) {
- var core = cores[coreId];
- var graphObj = {};
- graphObj['label'] = core['label'];
- graphObj['size'] = core['sizeInBytes'];
- graphObj['sizeHuman'] = core['size'];
- graphObj['pct'] = (core['sizeInBytes'] / indexSizeMax) * 100;
- graphData.push(graphObj);
- }
- if (cores) {
- cores.sort(function (a, b) {
- return b.sizeInBytes - a.sizeInBytes
- });
- }
- graphData.sort(function (a, b) {
- return b.size - a.size
- });
- nodes[node]['graphData'] = graphData;
- nodes[node]['numDocs'] = numDocsHuman(docsTotal);
- nodes[node]['sizeInBytes'] = indexSizeTotal;
- nodes[node]['size'] = bytesToSize(indexSizeTotal);
- nodes[node]['sizePerDoc'] = docsTotal === 0 ? '0b' :
bytesToSize(indexSizeTotal / docsTotal);
-
- // Build the d3 powered bar chart
- $('#chart' + nodes[node]['id']).empty();
- var chart = d3.select('#chart' +
nodes[node]['id']).append('div').attr('class', 'chart');
-
- // Add one div per bar which will group together both labels and
bars
- var g = chart.selectAll('div')
- .data(nodes[node]['graphData']).enter()
- .append('div');
-
- // Add the bars
- var bars = g.append("div")
- .attr("class", "rect")
- .text(function (d) {
- return d.label + ':\u00A0\u00A0' + d.sizeHuman;
- });
-
- // Execute the transition to show the bars
- bars.transition()
- .ease('elastic')
- .style('width', function (d) {
- return d.pct + '%';
- });
+ Metrics.get(
+ {
+ nodes: nodesParam,
+ name:
"solr_disk_space_megabytes,solr_core_index_size_megabytes,solr_core_indexsearcher_index_num_docs,solr_core_indexsearcher_index_docs,solr_core_indexsearcher_open_time_milliseconds"
+ },
+ function (response) {
+ // response.metrics contains the parsed merged Prometheus data with
node labels
+ var parsedMetrics = response.metrics;
+
+ for (var i = 0; i < nodesToShow.length; i++) {
+ var node = nodesToShow[i];
+ if (!nodes[node]) continue;
+
+ nodes[node]['metrics'] = parsedMetrics;
+
+ // Extract disk metrics with node filter
+ var diskMetrics = MetricsExtractor.extractDiskMetrics(parsedMetrics,
{ node: node });
+ if (diskMetrics) {
+ var diskTotal = diskMetrics.totalSpace || 0;
+ var diskFree = diskMetrics.usableSpace || 0;
+ var diskPercentage = diskTotal > 0 ? Math.floor((diskTotal -
diskFree) / diskTotal * 100) : 0;
+ nodes[node]['diskUsedPct'] = diskPercentage;
+ nodes[node]['diskUsedPctStyle'] = styleForPct(diskPercentage);
+ nodes[node]['diskTotal'] = bytesToSize(diskTotal);
+ nodes[node]['diskFree'] = bytesToSize(diskFree);
+ }
+
+ // These are the cores we _expect_ to find on this node according to
the CLUSTERSTATUS
+ var cores = nodes[node]['cores'];
+ if (!cores || typeof cores !== 'object') {
+ cores = {};
Review Comment:
The `cores` variable is reassigned to an empty object `{}` if it doesn't
exist, but this assignment is local to the loop iteration and doesn't update
`nodes[node]['cores']`. This means subsequent operations on `cores` won't
persist to the node object.
Should be:
```javascript
var cores = nodes[node]['cores'];
if (!cores || typeof cores !== 'object') {
cores = {};
nodes[node]['cores'] = cores;
}
```
Or simply use `nodes[node]['cores']` directly throughout the code if cores
is expected to always exist.
```suggestion
cores = {};
nodes[node]['cores'] = cores;
```
##########
solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java:
##########
@@ -129,9 +140,218 @@ public static CompletableFuture<NamedList<Object>>
callRemoteNode(
URI baseUri =
URI.create(zkController.zkStateReader.getBaseUrlForNodeName(nodeName));
SolrRequest<?> proxyReq = new GenericSolrRequest(SolrRequest.METHOD.GET,
uriPath, params);
+ // Set response parser based on wt parameter to ensure correct format is
used
+ String wt = params.get("wt");
+ if ("prometheus".equals(wt) || "openmetrics".equals(wt)) {
+ proxyReq.setResponseParser(new InputStreamResponseParser(wt));
+ }
+
return zkController
.getCoreContainer()
.getDefaultHttpSolrClient()
.requestWithBaseUrl(baseUri.toString(), c -> c.requestAsync(proxyReq));
}
+
+ /**
+ * Resolve node names from the "nodes" parameter into a set of live node
names.
+ *
+ * @param nodeNames the value of the "nodes" parameter ("all" or
comma-separated node names)
+ * @param container the CoreContainer
+ * @return set of resolved node names
+ * @throws SolrException if node format is invalid or node is not in cluster
+ */
+ private static Set<String> resolveNodes(String nodeNames, CoreContainer
container) {
+ Set<String> liveNodes =
+
container.getZkController().zkStateReader.getClusterState().getLiveNodes();
+
+ if (nodeNames.equals("all")) {
+ log.debug("All live nodes requested");
+ return liveNodes;
+ }
+
+ Set<String> nodes = new HashSet<>(Arrays.asList(nodeNames.split(",")));
+ for (String nodeName : nodes) {
+ if (!nodeName.matches("^[^/:]+:\\d+_[\\w/]+$")) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST, "Parameter " + PARAM_NODES +
" has wrong format");
+ }
+ if (!liveNodes.contains(nodeName)) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "Requested node " + nodeName + " is not part of cluster");
+ }
+ }
+ log.debug("Nodes requested: {}", nodes);
+ return nodes;
+ }
+
+ /** Handle Prometheus format by fetching from nodes and merging text
responses. */
+ private static void handlePrometheusFormat(
+ Set<String> nodes,
+ String pathStr,
+ SolrParams params,
+ CoreContainer container,
+ SolrQueryResponse rsp)
+ throws IOException, SolrServerException, InterruptedException {
+
+ // Bounded parallel executor - max concurrent fetches using Solr's
ExecutorUtil
+ ExecutorService executor =
+ new ExecutorUtil.MDCAwareThreadPoolExecutor(
+ PROMETHEUS_PROXY_THREAD_POOL_SIZE, // corePoolSize
+ PROMETHEUS_PROXY_THREAD_POOL_SIZE, // maximumPoolSize
+ 60L,
+ TimeUnit.SECONDS,
+ new LinkedBlockingQueue<>(),
+ new SolrNamedThreadFactory("metricsProxyExecutor"));
+
+ try {
+ // Submit all fetches at once - executor will handle bounded parallelism
+ Map<String, Future<String>> futures = new LinkedHashMap<>();
+ for (String node : nodes) {
+ futures.put(node, fetchNodePrometheusTextAsync(executor, node,
pathStr, params, container));
+ }
+
+ // Collect all Prometheus text responses
+ StringBuilder mergedText = new StringBuilder();
+ for (Map.Entry<String, Future<String>> entry : futures.entrySet()) {
+ try {
+ String prometheusText =
+ entry.getValue().get(PROMETHEUS_FETCH_TIMEOUT_SECONDS,
TimeUnit.SECONDS);
+ if (prometheusText != null && !prometheusText.isEmpty()) {
+ // Inject node label into each metric line
+ String labeledText = injectNodeLabelIntoText(prometheusText,
entry.getKey());
+ mergedText.append(labeledText);
+ }
+ } catch (ExecutionException ee) {
+ log.warn("Exception when fetching Prometheus result from node {}",
entry.getKey(), ee);
+ } catch (TimeoutException te) {
+ log.warn("Timeout when fetching Prometheus result from node {}",
entry.getKey(), te);
+ }
Review Comment:
[nitpick] The error handling silently drops failed node responses without
providing any indication to the user. When a node times out or throws an
exception, the merged response simply excludes that node's metrics. This could
be misleading as users won't know if metrics are missing due to errors.
Consider adding error information to the response or at minimum logging at a
higher level than WARN for production visibility:
```java
} catch (ExecutionException ee) {
log.error("Failed to fetch Prometheus metrics from node {}",
entry.getKey(), ee);
// Consider adding error info to response
} catch (TimeoutException te) {
log.error("Timeout fetching Prometheus metrics from node {} after {}s",
entry.getKey(), PROMETHEUS_FETCH_TIMEOUT_SECONDS, te);
}
```
##########
solr/webapp/web/js/angular/prometheus-parser.js:
##########
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Prometheus text format parser for Solr Admin UI
+ *
+ * Parses Prometheus exposition format (text-based format for metrics)
+ * into a structured JavaScript object for consumption by the Admin UI.
+ */
+
+(function() {
+ 'use strict';
+
+ angular.module('solrAdminApp').factory('PrometheusParser', function() {
+
+ /**
+ * Parse Prometheus text format into structured JavaScript object
+ * @param {string} prometheusText - Raw Prometheus format text
+ * @returns {Object} Parsed metrics object keyed by metric name
+ */
+ function parsePrometheusFormat(prometheusText) {
+ if (!prometheusText || typeof prometheusText !== 'string') {
+ return {};
+ }
+
+ var metrics = {};
+ var lines = prometheusText.split('\n');
+ var currentMetricName = null;
+ var currentMetricType = null;
+ var currentMetricHelp = null;
+
+ for (var i = 0; i < lines.length; i++) {
+ var line = lines[i].trim();
+
+ // Skip empty lines
+ if (!line) continue;
+
+ // Parse HELP comments
+ if (line.indexOf('# HELP ') === 0) {
+ var helpParts = line.substring(7).split(' ');
+ currentMetricName = helpParts[0];
+ currentMetricHelp = helpParts.slice(1).join(' ');
Review Comment:
[nitpick] The parsing of HELP comments is fragile. If the help text itself
contains multiple consecutive spaces, using `split(' ')` will create empty
string elements in the array. The metric name extraction is fine, but the help
text reconstruction may have issues.
Consider using a more robust approach:
```javascript
var helpMatch = line.match(/^# HELP ([a-zA-Z_:][a-zA-Z0-9_:]*)\s+(.*)$/);
if (helpMatch) {
currentMetricName = helpMatch[1];
currentMetricHelp = helpMatch[2];
}
```
This ensures the metric name is properly validated and the help text is
captured as-is.
```suggestion
var helpMatch = line.match(/^# HELP ([a-zA-Z_:][a-zA-Z0-9_:]*)\s+(.*)$/);
if (helpMatch) {
currentMetricName = helpMatch[1];
currentMetricHelp = helpMatch[2];
}
```
##########
solr/core/src/java/org/apache/solr/handler/admin/AdminHandlersProxy.java:
##########
@@ -129,9 +140,218 @@ public static CompletableFuture<NamedList<Object>>
callRemoteNode(
URI baseUri =
URI.create(zkController.zkStateReader.getBaseUrlForNodeName(nodeName));
SolrRequest<?> proxyReq = new GenericSolrRequest(SolrRequest.METHOD.GET,
uriPath, params);
+ // Set response parser based on wt parameter to ensure correct format is
used
+ String wt = params.get("wt");
+ if ("prometheus".equals(wt) || "openmetrics".equals(wt)) {
+ proxyReq.setResponseParser(new InputStreamResponseParser(wt));
+ }
+
return zkController
.getCoreContainer()
.getDefaultHttpSolrClient()
.requestWithBaseUrl(baseUri.toString(), c -> c.requestAsync(proxyReq));
}
+
+ /**
+ * Resolve node names from the "nodes" parameter into a set of live node
names.
+ *
+ * @param nodeNames the value of the "nodes" parameter ("all" or
comma-separated node names)
+ * @param container the CoreContainer
+ * @return set of resolved node names
+ * @throws SolrException if node format is invalid or node is not in cluster
+ */
+ private static Set<String> resolveNodes(String nodeNames, CoreContainer
container) {
+ Set<String> liveNodes =
+
container.getZkController().zkStateReader.getClusterState().getLiveNodes();
+
+ if (nodeNames.equals("all")) {
+ log.debug("All live nodes requested");
+ return liveNodes;
+ }
+
+ Set<String> nodes = new HashSet<>(Arrays.asList(nodeNames.split(",")));
+ for (String nodeName : nodes) {
+ if (!nodeName.matches("^[^/:]+:\\d+_[\\w/]+$")) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST, "Parameter " + PARAM_NODES +
" has wrong format");
+ }
+ if (!liveNodes.contains(nodeName)) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "Requested node " + nodeName + " is not part of cluster");
+ }
+ }
+ log.debug("Nodes requested: {}", nodes);
+ return nodes;
+ }
+
+ /** Handle Prometheus format by fetching from nodes and merging text
responses. */
+ private static void handlePrometheusFormat(
+ Set<String> nodes,
+ String pathStr,
+ SolrParams params,
+ CoreContainer container,
+ SolrQueryResponse rsp)
+ throws IOException, SolrServerException, InterruptedException {
+
+ // Bounded parallel executor - max concurrent fetches using Solr's
ExecutorUtil
+ ExecutorService executor =
+ new ExecutorUtil.MDCAwareThreadPoolExecutor(
+ PROMETHEUS_PROXY_THREAD_POOL_SIZE, // corePoolSize
+ PROMETHEUS_PROXY_THREAD_POOL_SIZE, // maximumPoolSize
+ 60L,
+ TimeUnit.SECONDS,
+ new LinkedBlockingQueue<>(),
+ new SolrNamedThreadFactory("metricsProxyExecutor"));
+
+ try {
+ // Submit all fetches at once - executor will handle bounded parallelism
+ Map<String, Future<String>> futures = new LinkedHashMap<>();
+ for (String node : nodes) {
+ futures.put(node, fetchNodePrometheusTextAsync(executor, node,
pathStr, params, container));
+ }
+
+ // Collect all Prometheus text responses
+ StringBuilder mergedText = new StringBuilder();
+ for (Map.Entry<String, Future<String>> entry : futures.entrySet()) {
+ try {
+ String prometheusText =
+ entry.getValue().get(PROMETHEUS_FETCH_TIMEOUT_SECONDS,
TimeUnit.SECONDS);
+ if (prometheusText != null && !prometheusText.isEmpty()) {
+ // Inject node label into each metric line
+ String labeledText = injectNodeLabelIntoText(prometheusText,
entry.getKey());
+ mergedText.append(labeledText);
+ }
+ } catch (ExecutionException ee) {
+ log.warn("Exception when fetching Prometheus result from node {}",
entry.getKey(), ee);
+ } catch (TimeoutException te) {
+ log.warn("Timeout when fetching Prometheus result from node {}",
entry.getKey(), te);
+ }
+ }
+
+ // Store the merged text in response - will be written as-is
+ rsp.add("prometheusText", mergedText.toString());
+
+ } finally {
+ ExecutorUtil.shutdownAndAwaitTermination(executor);
+ }
+ }
+
+ /** Fetch Prometheus text from a remote node asynchronously. */
+ private static Future<String> fetchNodePrometheusTextAsync(
+ ExecutorService executor,
+ String nodeName,
+ String pathStr,
+ SolrParams params,
+ CoreContainer container) {
+
+ return executor.submit(
+ () -> {
+ try {
+ ZkController zkController = container.getZkController();
+ if (zkController == null) {
+ log.warn("ZkController not available for node {}", nodeName);
+ return null;
+ }
+
+ // Ensure wt=prometheus is set for inter-node requests
+ ModifiableSolrParams prometheusParams = new
ModifiableSolrParams(params);
+ if (!prometheusParams.get("wt", "").equals("prometheus")) {
+ prometheusParams.set("wt", "prometheus");
+ }
+
+ // Use existing callRemoteNode() to fetch metrics
+ CompletableFuture<NamedList<Object>> future =
+ callRemoteNode(nodeName, pathStr, prometheusParams,
zkController);
+
+ NamedList<Object> response =
+ future.get(PROMETHEUS_FETCH_TIMEOUT_SECONDS, TimeUnit.SECONDS);
+
+ // Response has "stream" key with InputStream when using
InputStreamResponseParser
+ Object streamObj = response.get("stream");
+ if (streamObj == null) {
+ log.warn("No stream in response from node {}", nodeName);
+ return null;
+ }
+ if (!(streamObj instanceof InputStream)) {
+ log.warn(
+ "Invalid stream type in response from node {}: {}",
+ nodeName,
+ streamObj.getClass());
+ return null;
+ }
+ try (InputStream stream = (InputStream) streamObj) {
+ return new String(stream.readAllBytes(), StandardCharsets.UTF_8);
+ }
+
+ } catch (Exception e) {
+ log.warn("Error fetching metrics from node {}", nodeName, e);
+ return null;
+ }
+ });
+ }
+
+ /**
+ * Inject node="nodeName" label into Prometheus text format. Each metric
line gets the node label
+ * added.
+ */
+ private static String injectNodeLabelIntoText(String prometheusText, String
nodeName) {
+ StringBuilder result = new StringBuilder();
+ String[] lines = prometheusText.split("\n");
+
+ for (String line : lines) {
+ // Skip comments and empty lines
+ if (line.startsWith("#") || line.trim().isEmpty()) {
+ result.append(line).append("\n");
+ continue;
+ }
+
+ // Metric line format: metric_name{labels} value timestamp
+ // or: metric_name value timestamp
+ int braceIndex = line.indexOf('{');
+ int spaceIndex = line.indexOf(' ');
+
+ if (braceIndex == -1) {
+ // No labels, add node label before value
+ // Format: metric_name value timestamp
+ if (spaceIndex > 0) {
+ String metricName = line.substring(0, spaceIndex);
+ String valueAndTime = line.substring(spaceIndex);
+ result
+ .append(metricName)
+ .append("{node=\"")
+ .append(nodeName)
+ .append("\"}")
Review Comment:
The node label needs to be escaped if it contains special characters. Node
names in Solr can contain characters like quotes or backslashes that need to be
escaped according to Prometheus label value rules.
Consider adding escaping:
```java
private static String escapePrometheusLabelValue(String value) {
return value.replace("\\", "\\\\").replace("\"", "\\\"").replace("\n",
"\\n");
}
```
And use it when injecting the node label:
```java
.append("node=\"")
.append(escapePrometheusLabelValue(nodeName))
.append("\"}")
```
##########
solr/webapp/web/js/angular/prometheus-parser.js:
##########
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Prometheus text format parser for Solr Admin UI
+ *
+ * Parses Prometheus exposition format (text-based format for metrics)
+ * into a structured JavaScript object for consumption by the Admin UI.
+ */
+
+(function() {
+ 'use strict';
+
+ angular.module('solrAdminApp').factory('PrometheusParser', function() {
+
+ /**
+ * Parse Prometheus text format into structured JavaScript object
+ * @param {string} prometheusText - Raw Prometheus format text
+ * @returns {Object} Parsed metrics object keyed by metric name
+ */
+ function parsePrometheusFormat(prometheusText) {
+ if (!prometheusText || typeof prometheusText !== 'string') {
+ return {};
+ }
+
+ var metrics = {};
+ var lines = prometheusText.split('\n');
+ var currentMetricName = null;
+ var currentMetricType = null;
+ var currentMetricHelp = null;
+
+ for (var i = 0; i < lines.length; i++) {
+ var line = lines[i].trim();
+
+ // Skip empty lines
+ if (!line) continue;
+
+ // Parse HELP comments
+ if (line.indexOf('# HELP ') === 0) {
+ var helpParts = line.substring(7).split(' ');
+ currentMetricName = helpParts[0];
+ currentMetricHelp = helpParts.slice(1).join(' ');
+ }
+ // Parse TYPE comments
+ else if (line.indexOf('# TYPE ') === 0) {
+ var typeParts = line.substring(7).split(' ');
+ currentMetricName = typeParts[0];
+ currentMetricType = typeParts[1];
+
+ // Initialize metric entry
+ if (!metrics[currentMetricName]) {
+ metrics[currentMetricName] = {
+ type: currentMetricType,
+ help: currentMetricHelp || '',
+ samples: []
+ };
+ }
+ }
+ // Skip other comments
+ else if (line.charAt(0) === '#') {
+ continue;
+ }
+ // Parse metric sample
+ else {
+ var sample = parseMetricLine(line);
+ if (sample && sample.metricName) {
+ // Handle histogram suffixes (_sum, _count, _bucket)
+ var baseMetricName =
sample.metricName.replace(/_sum$|_count$|_bucket$/, '');
Review Comment:
The suffix handling here is too aggressive. `_sum`, `_count`, and `_bucket` are series suffixes only for histogram and summary metrics; counters conventionally end in `_total`, and a counter or gauge whose name simply happens to end in `_sum` or `_count` would be stripped and grouped under a base name that was never declared.
In other words, the code assumes that any detected suffix means the sample belongs to a histogram/summary family, which can incorrectly group unrelated metrics. Consider checking the metric type recorded from the `# TYPE` comment before stripping a suffix, and only grouping under the base name when that base was declared as a histogram or summary; see the sketch below.
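A minimal sketch of type-aware grouping, assuming the `metrics` map already populated from the `# TYPE` comments earlier in this function (the helper name is illustrative):
```javascript
// Strip _sum/_count/_bucket only when the base name was declared as a
// histogram or summary via a preceding # TYPE comment; otherwise keep the
// full metric name so unrelated metrics are not grouped together.
function resolveBaseMetric(metricName, metrics) {
  var suffixes = ['_sum', '_count', '_bucket'];
  for (var i = 0; i < suffixes.length; i++) {
    var suffix = suffixes[i];
    if (metricName.length > suffix.length &&
        metricName.slice(-suffix.length) === suffix) {
      var base = metricName.substring(0, metricName.length - suffix.length);
      var declared = metrics[base];
      if (declared && (declared.type === 'histogram' || declared.type === 'summary')) {
        return { baseName: base, suffix: suffix };
      }
    }
  }
  return { baseName: metricName, suffix: null };
}
```
The sample-grouping branch could call such a helper instead of the unconditional `replace(/_sum$|_count$|_bucket$/, '')`, falling back to the full name for counters and gauges.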
##########
solr/webapp/web/js/angular/prometheus-parser.js:
##########
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Prometheus text format parser for Solr Admin UI
+ *
+ * Parses Prometheus exposition format (text-based format for metrics)
+ * into a structured JavaScript object for consumption by the Admin UI.
+ */
+
+(function() {
+ 'use strict';
+
+ angular.module('solrAdminApp').factory('PrometheusParser', function() {
+
+ /**
+ * Parse Prometheus text format into structured JavaScript object
+ * @param {string} prometheusText - Raw Prometheus format text
+ * @returns {Object} Parsed metrics object keyed by metric name
+ */
+ function parsePrometheusFormat(prometheusText) {
+ if (!prometheusText || typeof prometheusText !== 'string') {
+ return {};
+ }
+
+ var metrics = {};
+ var lines = prometheusText.split('\n');
+ var currentMetricName = null;
+ var currentMetricType = null;
+ var currentMetricHelp = null;
+
+ for (var i = 0; i < lines.length; i++) {
+ var line = lines[i].trim();
+
+ // Skip empty lines
+ if (!line) continue;
+
+ // Parse HELP comments
+ if (line.indexOf('# HELP ') === 0) {
+ var helpParts = line.substring(7).split(' ');
+ currentMetricName = helpParts[0];
+ currentMetricHelp = helpParts.slice(1).join(' ');
+ }
+ // Parse TYPE comments
+ else if (line.indexOf('# TYPE ') === 0) {
+ var typeParts = line.substring(7).split(' ');
+ currentMetricName = typeParts[0];
+ currentMetricType = typeParts[1];
+
+ // Initialize metric entry
+ if (!metrics[currentMetricName]) {
+ metrics[currentMetricName] = {
+ type: currentMetricType,
+ help: currentMetricHelp || '',
+ samples: []
+ };
+ }
+ }
+ // Skip other comments
+ else if (line.charAt(0) === '#') {
+ continue;
+ }
+ // Parse metric sample
+ else {
+ var sample = parseMetricLine(line);
+ if (sample && sample.metricName) {
+ // Handle histogram suffixes (_sum, _count, _bucket)
+ var baseMetricName =
sample.metricName.replace(/_sum$|_count$|_bucket$/, '');
+
+ if (!metrics[baseMetricName]) {
+ metrics[baseMetricName] = {
+ type: 'unknown',
+ help: '',
+ samples: []
+ };
+ }
+
+ // Add suffix info to sample
+ if (sample.metricName.indexOf('_sum') === sample.metricName.length
- 4) {
+ sample.metricSuffix = '_sum';
+ } else if (sample.metricName.indexOf('_count') ===
sample.metricName.length - 6) {
+ sample.metricSuffix = '_count';
+ } else if (sample.metricName.indexOf('_bucket') ===
sample.metricName.length - 7) {
+ sample.metricSuffix = '_bucket';
+ }
+
+ metrics[baseMetricName].samples.push(sample);
+ }
+ }
+ }
+
+ return metrics;
+ }
+
+ /**
+ * Parse a single metric line
+ * @param {string} line - Metric line (e.g., 'metric_name{label1="val1"}
123.45')
+ * @returns {Object|null} Parsed sample or null
+ */
+ function parseMetricLine(line) {
+ // Regex to match: metric_name{labels} value
+ // or: metric_name value
+ var match =
line.match(/^([a-zA-Z_:][a-zA-Z0-9_:]*?)(?:\{(.*?)\})?\s+([^\s]+)$/);
+
+ if (!match) return null;
+
+ var metricName = match[1];
+ var labelsStr = match[2] || '';
+ var value = parseFloat(match[3]);
+
+ // Parse labels
+ var labels = {};
+ if (labelsStr) {
+ // Match label="value" patterns
+ var labelRegex = /([a-zA-Z_][a-zA-Z0-9_]*)="((?:[^"\\]|\\.)*)"/g;
+ var labelMatch;
+ while ((labelMatch = labelRegex.exec(labelsStr)) !== null) {
+ // Unescape label values
+ var labelValue = labelMatch[2].replace(/\\"/g, '"').replace(/\\\\/g,
'\\');
Review Comment:
The unescape logic has two issues:
1. It has no support for the `\n` escape sequence.
2. Chained global `replace()` calls are fragile because one replacement can produce text that a later one then matches; for example, once `\n` handling is added, the raw sequence `\\n` (an escaped backslash followed by the letter n) ends up as a newline no matter how the three calls are ordered.
A single-pass replacement handles all three escape sequences safely:
```javascript
var labelValue = labelMatch[2].replace(/\\([\\"n])/g, function (m, c) {
  return c === 'n' ? '\n' : c;
});
```
This unescapes `\\`, `\"`, and `\n` in one pass, so a replacement can never create a new escape sequence for a later step to consume.
```suggestion
var labelValue = labelMatch[2].replace(/\\([\\"n])/g, function (m, c) {
  return c === 'n' ? '\n' : c;
});
```
##########
solr/webapp/web/js/angular/controllers/cloud.js:
##########
@@ -394,118 +394,136 @@ var nodesSubController = function($scope, Collections,
System, Metrics) {
Fetch metrics for all selected nodes. Only pull the metrics that we'll
show to save bandwidth
Pick the data we want to display and add it to the node-centric data
structure
*/
- Metrics.get({
- "nodes": nodesParam,
- "prefix":
"CONTAINER.fs,org.eclipse.jetty.server.handler.DefaultHandler.get-requests,INDEX.sizeInBytes,SEARCHER.searcher.numDocs,SEARCHER.searcher.deletedDocs,SEARCHER.searcher.warmupTime"
- },
- function (metricsResponse) {
- for (var node in metricsResponse) {
- if (node in nodes) {
- var m = metricsResponse[node];
- nodes[node]['metrics'] = m;
- var diskTotal =
m.metrics['solr.node']['CONTAINER.fs.totalSpace'];
- var diskFree =
m.metrics['solr.node']['CONTAINER.fs.usableSpace'];
- var diskPercentage = Math.floor((diskTotal - diskFree) /
diskTotal * 100);
- nodes[node]['diskUsedPct'] = diskPercentage;
- nodes[node]['diskUsedPctStyle'] = styleForPct(diskPercentage);
- nodes[node]['diskTotal'] = bytesToSize(diskTotal);
- nodes[node]['diskFree'] = bytesToSize(diskFree);
-
- var r =
m.metrics['solr.jetty']['org.eclipse.jetty.server.handler.DefaultHandler.get-requests'];
- nodes[node]['req'] = r.count;
- nodes[node]['req1minRate'] = Math.floor(r['1minRate'] * 100) /
100;
- nodes[node]['req5minRate'] = Math.floor(r['5minRate'] * 100) /
100;
- nodes[node]['req15minRate'] = Math.floor(r['15minRate'] * 100) /
100;
- nodes[node]['reqp75_ms'] = Math.floor(r['p75_ms']);
- nodes[node]['reqp95_ms'] = Math.floor(r['p95_ms']);
- nodes[node]['reqp99_ms'] = Math.floor(r['p99_ms']);
-
- // These are the cores we _expect_ to find on this node
according to the CLUSTERSTATUS
- var cores = nodes[node]['cores'];
- var indexSizeTotal = 0;
- var indexSizeMax = 0;
- var docsTotal = 0;
- var graphData = [];
- for (let coreId in cores) {
- var core = cores[coreId];
- if (core['shard_state'] !== 'active' || core['state'] !==
'active') {
- // If core state is not active, display the real state, or
if shard is inactive, display that
- var labelState = (core['state'] !== 'active') ?
core['state'] : core['shard_state'];
- core['label'] += "_(" + labelState + ")";
- }
- var coreMetricName = "solr.core." + core['collection'] + "." +
core['shard'] + "." + core['replica'];
- var coreMetric = m.metrics[coreMetricName];
- // we may not actually get metrics back for every expected
core (the core may be down)
- if (coreMetric) {
- var size = coreMetric['INDEX.sizeInBytes'];
- size = (typeof size !== 'undefined') ? size : 0;
- core['sizeInBytes'] = size;
- core['size'] = bytesToSize(size);
- indexSizeTotal = indexSizeTotal + size;
- indexSizeMax = size > indexSizeMax ? size : indexSizeMax;
- var numDocs = coreMetric['SEARCHER.searcher.numDocs'];
- numDocs = (typeof numDocs !== 'undefined') ? numDocs : 0;
- core['numDocs'] = numDocs;
- core['numDocsHuman'] = numDocsHuman(numDocs);
- core['avgSizePerDoc'] = bytesToSize(numDocs === 0 ? 0 : size
/ numDocs);
- var deletedDocs =
coreMetric['SEARCHER.searcher.deletedDocs'];
- deletedDocs = (typeof deletedDocs !== 'undefined') ?
deletedDocs : 0;
- core['deletedDocs'] = deletedDocs;
- core['deletedDocsHuman'] = numDocsHuman(deletedDocs);
- var warmupTime = coreMetric['SEARCHER.searcher.warmupTime'];
- warmupTime = (typeof warmupTime !== 'undefined') ?
warmupTime : 0;
- core['warmupTime'] = warmupTime;
- docsTotal += core['numDocs'];
- }
- }
- for (let coreId in cores) {
- var core = cores[coreId];
- var graphObj = {};
- graphObj['label'] = core['label'];
- graphObj['size'] = core['sizeInBytes'];
- graphObj['sizeHuman'] = core['size'];
- graphObj['pct'] = (core['sizeInBytes'] / indexSizeMax) * 100;
- graphData.push(graphObj);
- }
- if (cores) {
- cores.sort(function (a, b) {
- return b.sizeInBytes - a.sizeInBytes
- });
- }
- graphData.sort(function (a, b) {
- return b.size - a.size
- });
- nodes[node]['graphData'] = graphData;
- nodes[node]['numDocs'] = numDocsHuman(docsTotal);
- nodes[node]['sizeInBytes'] = indexSizeTotal;
- nodes[node]['size'] = bytesToSize(indexSizeTotal);
- nodes[node]['sizePerDoc'] = docsTotal === 0 ? '0b' :
bytesToSize(indexSizeTotal / docsTotal);
-
- // Build the d3 powered bar chart
- $('#chart' + nodes[node]['id']).empty();
- var chart = d3.select('#chart' +
nodes[node]['id']).append('div').attr('class', 'chart');
-
- // Add one div per bar which will group together both labels and
bars
- var g = chart.selectAll('div')
- .data(nodes[node]['graphData']).enter()
- .append('div');
-
- // Add the bars
- var bars = g.append("div")
- .attr("class", "rect")
- .text(function (d) {
- return d.label + ':\u00A0\u00A0' + d.sizeHuman;
- });
-
- // Execute the transition to show the bars
- bars.transition()
- .ease('elastic')
- .style('width', function (d) {
- return d.pct + '%';
- });
+ Metrics.get(
+ {
+ nodes: nodesParam,
+ name:
"solr_disk_space_megabytes,solr_core_index_size_megabytes,solr_core_indexsearcher_index_num_docs,solr_core_indexsearcher_index_docs,solr_core_indexsearcher_open_time_milliseconds"
+ },
+ function (response) {
+ // response.metrics contains the parsed merged Prometheus data with
node labels
+ var parsedMetrics = response.metrics;
+
+ for (var i = 0; i < nodesToShow.length; i++) {
+ var node = nodesToShow[i];
+ if (!nodes[node]) continue;
+
+ nodes[node]['metrics'] = parsedMetrics;
+
+ // Extract disk metrics with node filter
+ var diskMetrics = MetricsExtractor.extractDiskMetrics(parsedMetrics,
{ node: node });
+ if (diskMetrics) {
+ var diskTotal = diskMetrics.totalSpace || 0;
+ var diskFree = diskMetrics.usableSpace || 0;
+ var diskPercentage = diskTotal > 0 ? Math.floor((diskTotal -
diskFree) / diskTotal * 100) : 0;
+ nodes[node]['diskUsedPct'] = diskPercentage;
+ nodes[node]['diskUsedPctStyle'] = styleForPct(diskPercentage);
+ nodes[node]['diskTotal'] = bytesToSize(diskTotal);
+ nodes[node]['diskFree'] = bytesToSize(diskFree);
+ }
+
+ // These are the cores we _expect_ to find on this node according to
the CLUSTERSTATUS
+ var cores = nodes[node]['cores'];
+ if (!cores || typeof cores !== 'object') {
+ cores = {};
+ }
+ var indexSizeTotal = 0;
+ var indexSizeMax = 0;
+ var docsTotal = 0;
+ var graphData = [];
+
+ for (var coreId in cores) {
+ var core = cores[coreId];
+
+ if (core['shard_state'] !== 'active' || core['state'] !==
'active') {
+ // If core state is not active, display the real state, or if
shard is inactive, display that
+ var labelState = (core['state'] !== 'active') ? core['state'] :
core['shard_state'];
+ core['label'] += "_(" + labelState + ")";
}
+
+ // Build full core name for label matching
+ // Prometheus metrics use format: "collection_shard_replica"
+ var fullCoreName = core['collection'] + '_' + core['shard'] + '_'
+ core['replica'];
+ var coreLabels = { core: fullCoreName, node: node };
+
+ // Extract metrics using helpers (with node filter)
+ var size = MetricsExtractor.extractCoreIndexSize(parsedMetrics,
coreLabels);
+ var searcherMetrics =
MetricsExtractor.extractSearcherMetrics(parsedMetrics, coreLabels);
+
+ core['sizeInBytes'] = size;
+ core['size'] = bytesToSize(size);
+ indexSizeTotal = indexSizeTotal + size;
+ indexSizeMax = size > indexSizeMax ? size : indexSizeMax;
+
+ var numDocs = searcherMetrics.numDocs || 0;
+ core['numDocs'] = numDocs;
+ core['numDocsHuman'] = numDocsHuman(numDocs);
+ core['avgSizePerDoc'] = bytesToSize(numDocs === 0 ? 0 : size /
numDocs);
+
+ var deletedDocs = searcherMetrics.deletedDocs || 0;
+ core['deletedDocs'] = deletedDocs;
+ core['deletedDocsHuman'] = numDocsHuman(deletedDocs);
+
+ var warmupTime = searcherMetrics.warmupTime || 0;
+ core['warmupTime'] = warmupTime;
+
+ docsTotal += core['numDocs'];
}
- });
+
+ for (var coreId in cores) {
+ var core = cores[coreId];
+ var graphObj = {};
+ graphObj['label'] = core['label'];
+ graphObj['size'] = core['sizeInBytes'];
+ graphObj['sizeHuman'] = core['size'];
+ graphObj['pct'] = indexSizeMax > 0 ? (core['sizeInBytes'] /
indexSizeMax) * 100 : 0;
+ graphData.push(graphObj);
+ }
+
+ if (cores) {
+ cores.sort(function (a, b) {
+ return b.sizeInBytes - a.sizeInBytes
Review Comment:
The `cores` object is being sorted using array sort method, but `cores` is
an object (key-value pairs), not an array. The `sort()` method doesn't work on
plain objects in JavaScript.
If `cores` is expected to be an array, the initialization on line 427 should
use `[]` instead of `{}`. If it's an object, you need to convert it to an array
first:
```javascript
if (cores && typeof cores === 'object') {
Object.values(cores).sort(function (a, b) {
return b.sizeInBytes - a.sizeInBytes;
});
}
```
However, this sorted array wouldn't be stored anywhere. Consider clarifying
the data structure.
```suggestion
if (cores && typeof cores === 'object') {
var sortedCores = Object.values(cores).sort(function (a, b) {
return b.sizeInBytes - a.sizeInBytes;
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.