risdenk commented on a change in pull request #717:
URL: https://github.com/apache/solr/pull/717#discussion_r817878458



##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ParallelFacetStreamOverAliasTest.java
##########
@@ -69,7 +69,8 @@
   private static final String ALIAS_NAME = "SOME_ALIAS_WITH_MANY_COLLS";
 
   private static final String id = "id";
-  private static final int NUM_COLLECTIONS = 2; // this test requires at least 
2 collections, each with multiple shards
+  private static final int NUM_COLLECTIONS =
+      2; // this test requires at least 2 collections, each with multiple 
shards

Review comment:
       Fix this: shorten the trailing comment on `NUM_COLLECTIONS` (or move it to its own line above the field) so the formatter does not split the declaration and its comment across multiple lines.

##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ParallelFacetStreamOverAliasTest.java
##########
@@ -165,76 +193,89 @@ public static void after() throws Exception {
     }
   }
 
-  /**
-   * Test parallelized calls to facet expression, one for each collection in 
the alias
-   */
+  /** Test parallelized calls to facet expression, one for each collection in 
the alias */
   @Test
   public void testParallelFacetOverAlias() throws Exception {
 
-    String facetExprTmpl = "" +
-        "facet(\n" +
-        "  %s,\n" +
-        "  tiered=%s,\n" +
-        "  q=\"*:*\", \n" +
-        "  buckets=\"a_i\", \n" +
-        "  bucketSorts=\"a_i asc\", \n" +
-        "  bucketSizeLimit=" + BUCKET_SIZE_LIMIT + ", \n" +
-        "  sum(a_d), avg(a_d), min(a_d), max(a_d), count(*)\n" +
-        ")\n";
+    String facetExprTmpl =
+        ""
+            + "facet(\n"
+            + "  %s,\n"
+            + "  tiered=%s,\n"
+            + "  q=\"*:*\", \n"
+            + "  buckets=\"a_i\", \n"
+            + "  bucketSorts=\"a_i asc\", \n"
+            + "  bucketSizeLimit="
+            + BUCKET_SIZE_LIMIT
+            + ", \n"
+            + "  sum(a_d), avg(a_d), min(a_d), max(a_d), count(*)\n"
+            + ")\n";
 
     compareTieredStreamWithNonTiered(facetExprTmpl, 1);
   }
 
   /**
-   * Test parallelized calls to facet expression with multiple dimensions, one 
for each collection in the alias
+   * Test parallelized calls to facet expression with multiple dimensions, one 
for each collection
+   * in the alias
    */
   @Test
   public void testParallelFacetMultipleDimensionsOverAlias() throws Exception {
 
-    // notice we're sorting the stream by a metric, but internally, that 
doesn't work for parallelization
-    // so the rollup has to sort by dimensions and then apply a final re-sort 
once the parallel streams are merged
-    String facetExprTmpl = "" +
-        "facet(\n" +
-        "  %s,\n" +
-        "  tiered=%s,\n" +
-        "  q=\"*:*\", \n" +
-        "  buckets=\"a_i,b_i\", \n" + /* two dimensions here ~ doubles the 
number of tuples */
-        "  bucketSorts=\"sum(a_d) desc\", \n" +
-        "  bucketSizeLimit=" + BUCKET_SIZE_LIMIT + ", \n" +
-        "  sum(a_d), avg(a_d), min(a_d), max(a_d), count(*)\n" +
-        ")\n";
+    // notice we're sorting the stream by a metric, but internally, that 
doesn't work for
+    // parallelization
+    // so the rollup has to sort by dimensions and then apply a final re-sort 
once the parallel
+    // streams are merged
+    String facetExprTmpl =
+        ""
+            + "facet(\n"
+            + "  %s,\n"
+            + "  tiered=%s,\n"
+            + "  q=\"*:*\", \n"
+            + "  buckets=\"a_i,b_i\", \n"
+            + /* two dimensions here ~ doubles the number of tuples */ "  
bucketSorts=\"sum(a_d) desc\", \n"
+            + "  bucketSizeLimit="
+            + BUCKET_SIZE_LIMIT
+            + ", \n"
+            + "  sum(a_d), avg(a_d), min(a_d), max(a_d), count(*)\n"
+            + ")\n";
 
     compareTieredStreamWithNonTiered(facetExprTmpl, 2);
   }
 
   @Test
   public void testParallelFacetSortByDimensions() throws Exception {
-    // notice we're sorting the stream by a metric, but internally, that 
doesn't work for parallelization
-    // so the rollup has to sort by dimensions and then apply a final re-sort 
once the parallel streams are merged
-    String facetExprTmpl = "" +
-        "facet(\n" +
-        "  %s,\n" +
-        "  tiered=%s,\n" +
-        "  q=\"*:*\", \n" +
-        "  buckets=\"a_i,b_i\", \n" +
-        "  bucketSorts=\"a_i asc, b_i asc\", \n" +
-        "  bucketSizeLimit=" + BUCKET_SIZE_LIMIT + ", \n" +
-        "  sum(a_d), avg(a_d), min(a_d), max(a_d), count(*)\n" +
-        ")\n";
+    // notice we're sorting the stream by a metric, but internally, that 
doesn't work for
+    // parallelization
+    // so the rollup has to sort by dimensions and then apply a final re-sort 
once the parallel
+    // streams are merged

Review comment:
       Fix this: rewrap the multi-line comment so each line breaks at a natural phrase boundary instead of leaving a stray word like "parallelization" on its own line.

##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExplanationTest.java
##########
@@ -31,67 +31,78 @@
 import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
 import org.junit.Test;
 
-/**
- **/
-
+/** */
 public class StreamExpressionToExplanationTest extends SolrTestCase {
 
   private StreamFactory factory;
-  
+
   public StreamExpressionToExplanationTest() {
     super();
-    
-    factory = new StreamFactory()
-                    .withCollectionZkHost("collection1", "testhost:1234")
-                    .withCollectionZkHost("collection2", "testhost:1234")
-                    .withFunctionName("search", CloudSolrStream.class)
-                    .withFunctionName("select", SelectStream.class)
-                    .withFunctionName("merge", MergeStream.class)
-                    .withFunctionName("unique", UniqueStream.class)
-                    .withFunctionName("top", RankStream.class)
-                    .withFunctionName("reduce", ReducerStream.class)
-                    .withFunctionName("group", GroupOperation.class)
-                    .withFunctionName("update", UpdateStream.class)
-                    .withFunctionName("stats", StatsStream.class)
-                    .withFunctionName("facet", FacetStream.class)
-                    .withFunctionName("jdbc", JDBCStream.class)
-                    .withFunctionName("intersect", IntersectStream.class)
-                    .withFunctionName("complement", ComplementStream.class)
-                    .withFunctionName("count", CountMetric.class)
-                    .withFunctionName("sum", SumMetric.class)
-                    .withFunctionName("min", MinMetric.class)
-                    .withFunctionName("max", MaxMetric.class)
-                    .withFunctionName("avg", MeanMetric.class)
-                    .withFunctionName("daemon", DaemonStream.class)
-                    .withFunctionName("topic", TopicStream.class)
-                    ;
+
+    factory =
+        new StreamFactory()
+            .withCollectionZkHost("collection1", "testhost:1234")
+            .withCollectionZkHost("collection2", "testhost:1234")
+            .withFunctionName("search", CloudSolrStream.class)
+            .withFunctionName("select", SelectStream.class)
+            .withFunctionName("merge", MergeStream.class)
+            .withFunctionName("unique", UniqueStream.class)
+            .withFunctionName("top", RankStream.class)
+            .withFunctionName("reduce", ReducerStream.class)
+            .withFunctionName("group", GroupOperation.class)
+            .withFunctionName("update", UpdateStream.class)
+            .withFunctionName("stats", StatsStream.class)
+            .withFunctionName("facet", FacetStream.class)
+            .withFunctionName("jdbc", JDBCStream.class)
+            .withFunctionName("intersect", IntersectStream.class)
+            .withFunctionName("complement", ComplementStream.class)
+            .withFunctionName("count", CountMetric.class)
+            .withFunctionName("sum", SumMetric.class)
+            .withFunctionName("min", MinMetric.class)
+            .withFunctionName("max", MaxMetric.class)
+            .withFunctionName("avg", MeanMetric.class)
+            .withFunctionName("daemon", DaemonStream.class)
+            .withFunctionName("topic", TopicStream.class);
   }
-    
+
   @Test
   public void testCloudSolrStream() throws Exception {
     // Basic test
-    try (CloudSolrStream stream = new 
CloudSolrStream(StreamExpressionParser.parse("search(collection1, q=*:*, 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")"), factory)) {
+    try (CloudSolrStream stream =
+        new CloudSolrStream(
+            StreamExpressionParser.parse(
+                "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f 
asc, a_i asc\")"),
+            factory)) {
       Explanation explanation = stream.toExplanation(factory);
       Assert.assertEquals("search", explanation.getFunctionName());
       Assert.assertEquals(CloudSolrStream.class.getName(), 
explanation.getImplementingClass());
     }
   }
-  
+
   @Test
   public void testSelectStream() throws Exception {
     // Basic test
-    try (SelectStream stream = new 
SelectStream(StreamExpressionParser.parse("select(\"a_s as fieldA\", 
search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i 
asc\"))"), factory)) {
+    try (SelectStream stream =
+        new SelectStream(
+            StreamExpressionParser.parse(
+                "select(\"a_s as fieldA\", search(collection1, q=*:*, 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"))"),
+            factory)) {
       Explanation explanation = stream.toExplanation(factory);
       Assert.assertEquals("select", explanation.getFunctionName());
       Assert.assertEquals(SelectStream.class.getName(), 
explanation.getImplementingClass());
     }
   }
 
   @Test
-  // commented out on: 24-Dec-2018   
@BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028";) // added 
20-Sep-2018
+  // commented out on: 24-Dec-2018
+  // @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028";) // 
added 20-Sep-2018

Review comment:
       Fix this: delete the commented-out `@BadApple` annotation entirely (dead code since 24-Dec-2018) rather than letting the formatter rewrap it.

##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java
##########
@@ -79,34 +83,43 @@ public static void setupCluster() throws Exception {
     CollectionAdminRequest.createCollection(collection, "conf", 2, 1)
         .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE)
         .process(cluster.getSolrClient());
-    AbstractDistribZkTestBase.waitForRecoveriesToFinish(collection, 
cluster.getSolrClient().getZkStateReader(),
-        false, true, TIMEOUT);
+    AbstractDistribZkTestBase.waitForRecoveriesToFinish(
+        collection, cluster.getSolrClient().getZkStateReader(), false, true, 
TIMEOUT);
     if (useAlias) {
-      CollectionAdminRequest.createAlias(COLLECTIONORALIAS, 
collection).process(cluster.getSolrClient());
+      CollectionAdminRequest.createAlias(COLLECTIONORALIAS, collection)
+          .process(cluster.getSolrClient());
     }
   }
 
   @BeforeClass
   public static void setupDatabase() throws Exception {
-    
+
     // Initialize Database
-    // Ok, so.....hsqldb is doing something totally weird so I thought I'd 
take a moment to explain it.
-    // According to http://www.hsqldb.org/doc/1.8/guide/guide.html#N101EF, 
section "Components of SQL Expressions", clause "name",
-    // "When an SQL statement is issued, any lowercase characters in unquoted 
identifiers are converted to uppercase."
+    // Ok, so.....hsqldb is doing something totally weird so I thought I'd 
take a moment to explain
+    // it.
+    // According to http://www.hsqldb.org/doc/1.8/guide/guide.html#N101EF, 
section "Components of
+    // SQL Expressions", clause "name",
+    // "When an SQL statement is issued, any lowercase characters in unquoted 
identifiers are
+    // converted to uppercase."
     // :(   Like seriously....
-    // So, for this reason and to simplify writing these tests I've decided 
that in all statements all table and column names 
-    // will be in UPPERCASE. This is to ensure things look and behave 
consistently. Note that this is not a requirement of the 
+    // So, for this reason and to simplify writing these tests I've decided 
that in all statements
+    // all table and column names
+    // will be in UPPERCASE. This is to ensure things look and behave 
consistently. Note that this
+    // is not a requirement of the
     // JDBCStream and is only a carryover from the driver we are testing with.

Review comment:
       Fix this: rejoin and rewrap the explanatory comment so each sentence reads continuously, instead of the current mid-phrase splits produced by the formatter.

##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/ParallelFacetStreamOverAliasTest.java
##########
@@ -165,76 +193,89 @@ public static void after() throws Exception {
     }
   }
 
-  /**
-   * Test parallelized calls to facet expression, one for each collection in 
the alias
-   */
+  /** Test parallelized calls to facet expression, one for each collection in 
the alias */
   @Test
   public void testParallelFacetOverAlias() throws Exception {
 
-    String facetExprTmpl = "" +
-        "facet(\n" +
-        "  %s,\n" +
-        "  tiered=%s,\n" +
-        "  q=\"*:*\", \n" +
-        "  buckets=\"a_i\", \n" +
-        "  bucketSorts=\"a_i asc\", \n" +
-        "  bucketSizeLimit=" + BUCKET_SIZE_LIMIT + ", \n" +
-        "  sum(a_d), avg(a_d), min(a_d), max(a_d), count(*)\n" +
-        ")\n";
+    String facetExprTmpl =
+        ""
+            + "facet(\n"
+            + "  %s,\n"
+            + "  tiered=%s,\n"
+            + "  q=\"*:*\", \n"
+            + "  buckets=\"a_i\", \n"
+            + "  bucketSorts=\"a_i asc\", \n"
+            + "  bucketSizeLimit="
+            + BUCKET_SIZE_LIMIT
+            + ", \n"
+            + "  sum(a_d), avg(a_d), min(a_d), max(a_d), count(*)\n"
+            + ")\n";
 
     compareTieredStreamWithNonTiered(facetExprTmpl, 1);
   }
 
   /**
-   * Test parallelized calls to facet expression with multiple dimensions, one 
for each collection in the alias
+   * Test parallelized calls to facet expression with multiple dimensions, one 
for each collection
+   * in the alias
    */
   @Test
   public void testParallelFacetMultipleDimensionsOverAlias() throws Exception {
 
-    // notice we're sorting the stream by a metric, but internally, that 
doesn't work for parallelization
-    // so the rollup has to sort by dimensions and then apply a final re-sort 
once the parallel streams are merged
-    String facetExprTmpl = "" +
-        "facet(\n" +
-        "  %s,\n" +
-        "  tiered=%s,\n" +
-        "  q=\"*:*\", \n" +
-        "  buckets=\"a_i,b_i\", \n" + /* two dimensions here ~ doubles the 
number of tuples */
-        "  bucketSorts=\"sum(a_d) desc\", \n" +
-        "  bucketSizeLimit=" + BUCKET_SIZE_LIMIT + ", \n" +
-        "  sum(a_d), avg(a_d), min(a_d), max(a_d), count(*)\n" +
-        ")\n";
+    // notice we're sorting the stream by a metric, but internally, that 
doesn't work for
+    // parallelization
+    // so the rollup has to sort by dimensions and then apply a final re-sort 
once the parallel
+    // streams are merged

Review comment:
       Fix this: rewrap the multi-line comment so each line breaks at a natural phrase boundary instead of leaving a stray word like "parallelization" on its own line.

##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExplanationTest.java
##########
@@ -162,30 +192,37 @@ public void testRankStream() throws Exception {
   @Test
   public void testReducerStream() throws Exception {
     String expressionString;
-    
+
     // Basic test
-    try (ReducerStream stream = new 
ReducerStream(StreamExpressionParser.parse("reduce("
-                                                  + "search(collection1, 
q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_s desc, a_f asc\"),"
-                                                  + "by=\"a_s\", 
group(sort=\"a_i desc\", n=\"5\"))"), factory)) {
+    try (ReducerStream stream =
+        new ReducerStream(
+            StreamExpressionParser.parse(
+                "reduce("
+                    + "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", 
sort=\"a_s desc, a_f asc\"),"
+                    + "by=\"a_s\", group(sort=\"a_i desc\", n=\"5\"))"),
+            factory)) {
       Explanation explanation = stream.toExplanation(factory);
       Assert.assertEquals("reduce", explanation.getFunctionName());
       Assert.assertEquals(ReducerStream.class.getName(), 
explanation.getImplementingClass());
       Assert.assertEquals(1, ((StreamExplanation) 
explanation).getChildren().size());
     }
   }
-  
+
   @Test
-  // commented out on: 24-Dec-2018   
@BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028";) // added 
20-Sep-2018
+  // commented out on: 24-Dec-2018
+  // @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028";) // 
added 20-Sep-2018

Review comment:
       Fix this: delete the commented-out `@BadApple` annotation entirely (dead code since 24-Dec-2018) rather than letting the formatter rewrap it.

##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
##########
@@ -424,79 +487,103 @@ public void testFacetStream() throws Exception {
       assertEquals(stream.getOffset(), 100);
       assertEquals(stream.getOverfetch(), -1);
     }
-
   }
-  
+
   @Test
   public void testJDBCStream() throws Exception {
     String expressionString;
-    
+
     // Basic test
-    try (JDBCStream stream = new 
JDBCStream(StreamExpressionParser.parse("jdbc(connection=\"jdbc:hsqldb:mem:.\", 
sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner 
join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", 
sort=\"ID asc\")"), factory)) {
+    try (JDBCStream stream =
+        new JDBCStream(
+            StreamExpressionParser.parse(
+                "jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select 
PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES 
on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID 
asc\")"),
+            factory)) {
       expressionString = stream.toExpression(factory).toString();
       
assertTrue(expressionString.contains("jdbc(connection=\"jdbc:hsqldb:mem:.\","));
-      assertTrue(expressionString.contains("sql=\"select PEOPLE.ID, 
PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on 
PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\""));
+      assertTrue(
+          expressionString.contains(
+              "sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME 
from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order 
by PEOPLE.ID\""));
       assertTrue(expressionString.contains("sort=\"ID asc\""));
     }
   }
 
-  @Test 
+  @Test
   public void testIntersectStream() throws Exception {
     String expressionString;
-    
+
     // Basic test
-    try (IntersectStream stream = new 
IntersectStream(StreamExpressionParser.parse("intersect("
-                              + "search(collection1, q=\"id:(0 3 4)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "search(collection1, q=\"id:(1 2)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f, a_s\")"), factory)) {
+    try (IntersectStream stream =
+        new IntersectStream(
+            StreamExpressionParser.parse(
+                "intersect("
+                    + "search(collection1, q=\"id:(0 3 4)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+                    + "search(collection1, q=\"id:(1 2)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+                    + "on=\"a_f, a_s\")"),
+            factory)) {
       expressionString = stream.toExpression(factory).toString();
       assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
       assertTrue(expressionString.contains("q=\"id:(1 2)\""));
       assertTrue(expressionString.contains("on=\"a_f,a_s\""));
     }
   }
 
-  @Test 
+  @Test
   public void testComplementStream() throws Exception {
     String expressionString;
-    
+
     // Basic test
-    try (ComplementStream stream = new 
ComplementStream(StreamExpressionParser.parse("complement("
-                              + "search(collection1, q=\"id:(0 3 4)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "search(collection1, q=\"id:(1 2)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f, a_s\")"), factory)) {
+    try (ComplementStream stream =
+        new ComplementStream(
+            StreamExpressionParser.parse(
+                "complement("
+                    + "search(collection1, q=\"id:(0 3 4)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+                    + "search(collection1, q=\"id:(1 2)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+                    + "on=\"a_f, a_s\")"),
+            factory)) {
       expressionString = stream.toExpression(factory).toString();
       assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
       assertTrue(expressionString.contains("q=\"id:(1 2)\""));
       assertTrue(expressionString.contains("on=\"a_f,a_s\""));
     }
   }
-  
+
   @Test
   public void testCloudSolrStreamWithEscapedQuote() throws Exception {
 
-    // The purpose of this test is to ensure that a parameter with a contained 
" character is properly
-    // escaped when it is turned back into an expression. This is important 
when an expression is passed
+    // The purpose of this test is to ensure that a parameter with a contained 
" character is
+    // properly
+    // escaped when it is turned back into an expression. This is important 
when an expression is
+    // passed
     // to a worker (parallel stream) or even for other reasons when an 
expression is string-ified.

Review comment:
       Fix this: rewrap the comment so single words such as "properly" and "passed" do not end up stranded on their own lines.

##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
##########
@@ -162,75 +185,97 @@ public void testStatsStream() throws Exception {
   @Test
   public void testUniqueStream() throws Exception {
     String expressionString;
-    
+
     // Basic test
-    try (UniqueStream stream = new 
UniqueStream(StreamExpressionParser.parse("unique(search(collection1, q=*:*, 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f\")"), factory)) {
+    try (UniqueStream stream =
+        new UniqueStream(
+            StreamExpressionParser.parse(
+                "unique(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", 
sort=\"a_f asc, a_i asc\"), over=\"a_f\")"),
+            factory)) {
       expressionString = stream.toExpression(factory).toString();
       assertTrue(expressionString.contains("unique(search(collection1"));
       assertTrue(expressionString.contains("over=a_f"));
     }
   }
-  
+
   @Test
   public void testMergeStream() throws Exception {
     String expressionString;
-    
+
     // Basic test
-    try (MergeStream stream = new 
MergeStream(StreamExpressionParser.parse("merge("
-                              + "search(collection1, q=\"id:(0 3 4)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "search(collection1, q=\"id:(1 2)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-                              + "on=\"a_f asc, a_s asc\")"), factory)) {
+    try (MergeStream stream =
+        new MergeStream(
+            StreamExpressionParser.parse(
+                "merge("
+                    + "search(collection1, q=\"id:(0 3 4)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+                    + "search(collection1, q=\"id:(1 2)\", 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+                    + "on=\"a_f asc, a_s asc\")"),
+            factory)) {
       expressionString = stream.toExpression(factory).toString();
       assertTrue(expressionString.contains("q=\"id:(0 3 4)\""));
       assertTrue(expressionString.contains("q=\"id:(1 2)\""));
       assertTrue(expressionString.contains("on=\"a_f asc,a_s asc\""));
     }
   }
-  
+
   @Test
   public void testRankStream() throws Exception {
 
     String expressionString;
-    
+
     // Basic test
-    try (RankStream stream = new RankStream(StreamExpressionParser.parse("top("
-                                              + "n=3,"
-                                              + "search(collection1, q=*:*, 
fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc,a_i asc\"),"
-                                              + "sort=\"a_f asc, a_i asc\")"), 
factory)) {
+    try (RankStream stream =
+        new RankStream(
+            StreamExpressionParser.parse(
+                "top("
+                    + "n=3,"
+                    + "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", 
sort=\"a_f asc,a_i asc\"),"
+                    + "sort=\"a_f asc, a_i asc\")"),
+            factory)) {
       expressionString = stream.toExpression(factory).toString();
       assertTrue(expressionString.contains("top(n=3,search(collection1"));
       assertTrue(expressionString.contains("sort=\"a_f asc,a_i asc\""));
       // find 2nd instance of sort
-      
assertTrue(expressionString.substring(expressionString.indexOf("sort=\"a_f 
asc,a_i asc\"") + 1).contains("sort=\"a_f asc,a_i asc\""));
+      assertTrue(
+          expressionString
+              .substring(expressionString.indexOf("sort=\"a_f asc,a_i asc\"") 
+ 1)
+              .contains("sort=\"a_f asc,a_i asc\""));
     }
   }
 
   @Test
   public void testReducerStream() throws Exception {
     String expressionString;
-    
+
     // Basic test
-    try (ReducerStream stream = new 
ReducerStream(StreamExpressionParser.parse("reduce("
-                                                  + "search(collection1, 
q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_s desc, a_f asc\"),"
-                                                  + "by=\"a_s\", 
group(sort=\"a_i desc\", n=\"5\"))"), factory)) {
+    try (ReducerStream stream =
+        new ReducerStream(
+            StreamExpressionParser.parse(
+                "reduce("
+                    + "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", 
sort=\"a_s desc, a_f asc\"),"
+                    + "by=\"a_s\", group(sort=\"a_i desc\", n=\"5\"))"),
+            factory)) {
       expressionString = stream.toExpression(factory).toString();
       assertTrue(expressionString.contains("reduce(search(collection1"));
       assertTrue(expressionString.contains("by=a_s"));
     }
   }
-  
+
   @Test
-  // commented out on: 24-Dec-2018   
@BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028";) // added 
20-Sep-2018
+  // commented out on: 24-Dec-2018
+  // @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028";) // 
added 20-Sep-2018

Review comment:
       Fix this: delete the commented-out `@BadApple` annotation entirely (dead code since 24-Dec-2018) rather than letting the formatter rewrap it.

##########
File path: 
solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
##########
@@ -107,12 +117,17 @@ public void testSelectStream() throws Exception {
   }
 
   @Test
-  // commented out on: 24-Dec-2018   
@BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028";) // added 
20-Sep-2018
+  // commented out on: 24-Dec-2018
+  // @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028";) // 
added 20-Sep-2018

Review comment:
       Fix this: delete the commented-out `@BadApple` annotation entirely (dead code since 24-Dec-2018) rather than letting the formatter rewrap it.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@solr.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@solr.apache.org
For additional commands, e-mail: issues-h...@solr.apache.org

Reply via email to the sender.