Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3769/
Java: 64bit/jdk1.8.0 -XX:+UseCompressedOops -XX:+UseG1GC
1 tests failed.
FAILED: org.apache.solr.cloud.PeerSyncReplicationTest.test
Error Message:
timeout waiting to see all nodes active
Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
at
__randomizedtesting.SeedInfo.seed([4A39EBB288C3A0B6:C26DD468263FCD4E]:0)
at org.junit.Assert.fail(Assert.java:93)
at
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
at
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
at
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
at
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
at
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
at
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
at
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:811)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:462)
at
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
at
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
at
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
at
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
at
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
at
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
at
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
at
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
at java.lang.Thread.run(Thread.java:745)
Build Log:
[...truncated 11365 lines...]
[junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
[junit4] 2> Creating dataDir:
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/init-core-data-001
[junit4] 2> 1007049 INFO
(SUITE-PeerSyncReplicationTest-seed#[4A39EBB288C3A0B6]-worker) [ ]
o.a.s.SolrTestCaseJ4 Randomized ssl (false) and clientAuth (false) via:
@org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN)
w/ MAC_OS_X suppressed clientAuth
[junit4] 2> 1007049 INFO
(SUITE-PeerSyncReplicationTest-seed#[4A39EBB288C3A0B6]-worker) [ ]
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
[junit4] 2> 1007051 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
[junit4] 2> 1007052 INFO (Thread-1634) [ ] o.a.s.c.ZkTestServer client
port:0.0.0.0/0.0.0.0:0
[junit4] 2> 1007052 INFO (Thread-1634) [ ] o.a.s.c.ZkTestServer
Starting server
[junit4] 2> 1007158 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ZkTestServer start zk server on port:53202
[junit4] 2> 1007194 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
to /configs/conf1/solrconfig.xml
[junit4] 2> 1007198 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml
to /configs/conf1/schema.xml
[junit4] 2> 1007201 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
[junit4] 2> 1007204 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
to /configs/conf1/stopwords.txt
[junit4] 2> 1007207 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt
to /configs/conf1/protwords.txt
[junit4] 2> 1007210 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml
to /configs/conf1/currency.xml
[junit4] 2> 1007214 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
to /configs/conf1/enumsConfig.xml
[junit4] 2> 1007217 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
to /configs/conf1/open-exchange-rates.json
[junit4] 2> 1007220 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
to /configs/conf1/mapping-ISOLatin1Accent.txt
[junit4] 2> 1007223 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
to /configs/conf1/old_synonyms.txt
[junit4] 2> 1007226 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractZkTestCase put
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
to /configs/conf1/synonyms.txt
[junit4] 2> 1007758 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/control-001/cores/collection1
[junit4] 2> 1007761 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1007763 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@2802ca1a{/,null,AVAILABLE}
[junit4] 2> 1007765 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@73309173{HTTP/1.1,[http/1.1]}{127.0.0.1:53205}
[junit4] 2> 1007765 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.Server Started @1013347ms
[junit4] 2> 1007765 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/tempDir-001/control/data,
hostContext=/, hostPort=53205,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/control-001/cores}
[junit4] 2> 1007766 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1007766 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
7.0.0
[junit4] 2> 1007766 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1007766 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1007766 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-01-10T13:05:51.401Z
[junit4] 2> 1007771 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1007771 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/control-001/solr.xml
[junit4] 2> 1007800 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:53202/solr
[junit4] 2> 1007852 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53205_
] o.a.s.c.OverseerElectionContext I am going to be the leader
127.0.0.1:53205_
[junit4] 2> 1007854 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53205_
] o.a.s.c.Overseer Overseer
(id=97258933497954308-127.0.0.1:53205_-n_0000000000) starting
[junit4] 2> 1007875 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53205_
] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:53205_
[junit4] 2> 1007878 INFO
(zkCallback-2021-thread-1-processing-n:127.0.0.1:53205_) [n:127.0.0.1:53205_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1008032 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53205_
] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/control-001/cores
[junit4] 2> 1008032 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53205_
] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
[junit4] 2> 1008047 INFO
(OverseerStateUpdate-97258933497954308-127.0.0.1:53205_-n_0000000000)
[n:127.0.0.1:53205_ ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 1009065 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.c.SolrConfig
Using Lucene MatchVersion: 7.0.0
[junit4] 2> 1009081 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1009301 WARN
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1009304 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.s.IndexSchema
Loaded schema test/1.0 with uniqueid field id
[junit4] 2> 1009326 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection
control_collection
[junit4] 2> 1009327 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/control-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/control-001/cores/collection1/data/]
[junit4] 2> 1009327 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@5e983ee5
[junit4] 2> 1009330 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy:
maxMergeAtOnce=17, maxMergeAtOnceExplicit=11, maxMergedSegmentMB=48.986328125,
floorSegmentMB=1.4169921875, forceMergeDeletesPctAllowed=17.289735763156585,
segmentsPerTier=20.0, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.6146725899110337
[junit4] 2> 1009337 WARN
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type =
requestHandler,name = /dump,class = DumpRequestHandler,attributes =
{initParams=a, name=/dump, class=DumpRequestHandler},args =
{defaults={a=A,b=B}}}
[junit4] 2> 1009350 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.u.UpdateHandler
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1009350 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1009351 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.u.CommitTracker
Hard AutoCommit: disabled
[junit4] 2> 1009351 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.u.CommitTracker
Soft AutoCommit: disabled
[junit4] 2> 1009352 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy:
maxMergeAtOnce=43, maxMergeAtOnceExplicit=35, maxMergedSegmentMB=12.322265625,
floorSegmentMB=2.0947265625, forceMergeDeletesPctAllowed=18.392260164660186,
segmentsPerTier=37.0, maxCFSSegmentSizeMB=8.796093022207999E12,
noCFSRatio=0.6142252830410042
[junit4] 2> 1009353 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.s.SolrIndexSearcher Opening [Searcher@33f03f73[collection1] main]
[junit4] 2> 1009355 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1009356 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1009356 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.h.ReplicationHandler Commits will be reserved for 10000
[junit4] 2> 1009357 INFO
(searcherExecutor-3568-thread-1-processing-n:127.0.0.1:53205_ x:collection1
c:control_collection) [n:127.0.0.1:53205_ c:control_collection x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@33f03f73[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1009357 INFO
(coreLoadExecutor-3567-thread-1-processing-n:127.0.0.1:53205_)
[n:127.0.0.1:53205_ c:control_collection x:collection1] o.a.s.u.UpdateLog
Could not find max version in index or recent updates, using new clock
1556142938382139392
[junit4] 2> 1009368 INFO
(coreZkRegister-3560-thread-1-processing-n:127.0.0.1:53205_ x:collection1
c:control_collection) [n:127.0.0.1:53205_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas
found to continue.
[junit4] 2> 1009368 INFO
(coreZkRegister-3560-thread-1-processing-n:127.0.0.1:53205_ x:collection1
c:control_collection) [n:127.0.0.1:53205_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new
leader - try and sync
[junit4] 2> 1009368 INFO
(coreZkRegister-3560-thread-1-processing-n:127.0.0.1:53205_ x:collection1
c:control_collection) [n:127.0.0.1:53205_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to
http://127.0.0.1:53205/collection1/
[junit4] 2> 1009368 INFO
(coreZkRegister-3560-thread-1-processing-n:127.0.0.1:53205_ x:collection1
c:control_collection) [n:127.0.0.1:53205_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync
replicas to me
[junit4] 2> 1009368 INFO
(coreZkRegister-3560-thread-1-processing-n:127.0.0.1:53205_ x:collection1
c:control_collection) [n:127.0.0.1:53205_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.SyncStrategy
http://127.0.0.1:53205/collection1/ has no replicas
[junit4] 2> 1009376 INFO
(coreZkRegister-3560-thread-1-processing-n:127.0.0.1:53205_ x:collection1
c:control_collection) [n:127.0.0.1:53205_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new
leader: http://127.0.0.1:53205/collection1/ shard1
[junit4] 2> 1009483 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1009487 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:53202/solr ready
[junit4] 2> 1009487 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection
loss:false
[junit4] 2> 1009535 INFO
(coreZkRegister-3560-thread-1-processing-n:127.0.0.1:53205_ x:collection1
c:control_collection) [n:127.0.0.1:53205_ c:control_collection s:shard1
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery
necessary
[junit4] 2> 1009810 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-1-001/cores/collection1
[junit4] 2> 1009812 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-1-001
[junit4] 2> 1009812 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1009814 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@3b48078d{/,null,AVAILABLE}
[junit4] 2> 1009814 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@6d8bda7f{HTTP/1.1,[http/1.1]}{127.0.0.1:53210}
[junit4] 2> 1009814 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.Server Started @1015396ms
[junit4] 2> 1009815 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/tempDir-001/jetty1,
solrconfig=solrconfig.xml, hostContext=/, hostPort=53210,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-1-001/cores}
[junit4] 2> 1009815 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1009816 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr™ version
7.0.0
[junit4] 2> 1009816 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1009816 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1009816 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-01-10T13:05:53.451Z
[junit4] 2> 1009820 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1009820 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-1-001/solr.xml
[junit4] 2> 1009859 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:53202/solr
[junit4] 2> 1009875 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53210_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
[junit4] 2> 1009885 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53210_
] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:53210_
[junit4] 2> 1009888 INFO
(zkCallback-2021-thread-2-processing-n:127.0.0.1:53205_) [n:127.0.0.1:53205_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1009888 INFO (zkCallback-2025-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1009889 INFO
(zkCallback-2030-thread-1-processing-n:127.0.0.1:53210_) [n:127.0.0.1:53210_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
[junit4] 2> 1010002 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53210_
] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-1-001/cores
[junit4] 2> 1010002 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53210_
] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
[junit4] 2> 1010006 INFO
(OverseerStateUpdate-97258933497954308-127.0.0.1:53205_-n_0000000000)
[n:127.0.0.1:53205_ ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 1011022 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 7.0.0
[junit4] 2> 1011034 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1011126 WARN
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1011170 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 1011201 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1011202 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-1-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-1-001/cores/collection1/data/]
[junit4] 2> 1011202 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap JMX
monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e983ee5
[junit4] 2> 1011205 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=17, maxMergeAtOnceExplicit=11,
maxMergedSegmentMB=48.986328125, floorSegmentMB=1.4169921875,
forceMergeDeletesPctAllowed=17.289735763156585, segmentsPerTier=20.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6146725899110337
[junit4] 2> 1011228 WARN
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 1011247 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.u.UpdateHandler Using
UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1011247 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1011248 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 1011249 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 1011250 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=43, maxMergeAtOnceExplicit=35,
maxMergedSegmentMB=12.322265625, floorSegmentMB=2.0947265625,
forceMergeDeletesPctAllowed=18.392260164660186, segmentsPerTier=37.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6142252830410042
[junit4] 2> 1011251 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@693f7f83[collection1] main]
[junit4] 2> 1011253 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1011254 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1011254 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 1011255 INFO
(searcherExecutor-3579-thread-1-processing-n:127.0.0.1:53210_ x:collection1
c:collection1) [n:127.0.0.1:53210_ c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@693f7f83[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1011256 INFO
(coreLoadExecutor-3578-thread-1-processing-n:127.0.0.1:53210_)
[n:127.0.0.1:53210_ c:collection1 x:collection1] o.a.s.u.UpdateLog Could not
find max version in index or recent updates, using new clock 1556142940373385216
[junit4] 2> 1011267 INFO
(coreZkRegister-3573-thread-1-processing-n:127.0.0.1:53210_ x:collection1
c:collection1) [n:127.0.0.1:53210_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to
continue.
[junit4] 2> 1011267 INFO
(coreZkRegister-3573-thread-1-processing-n:127.0.0.1:53210_ x:collection1
c:collection1) [n:127.0.0.1:53210_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try
and sync
[junit4] 2> 1011267 INFO
(coreZkRegister-3573-thread-1-processing-n:127.0.0.1:53210_ x:collection1
c:collection1) [n:127.0.0.1:53210_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy Sync replicas to
http://127.0.0.1:53210/collection1/
[junit4] 2> 1011268 INFO
(coreZkRegister-3573-thread-1-processing-n:127.0.0.1:53210_ x:collection1
c:collection1) [n:127.0.0.1:53210_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
[junit4] 2> 1011268 INFO
(coreZkRegister-3573-thread-1-processing-n:127.0.0.1:53210_ x:collection1
c:collection1) [n:127.0.0.1:53210_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.SyncStrategy http://127.0.0.1:53210/collection1/ has no
replicas
[junit4] 2> 1011275 INFO
(coreZkRegister-3573-thread-1-processing-n:127.0.0.1:53210_ x:collection1
c:collection1) [n:127.0.0.1:53210_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader:
http://127.0.0.1:53210/collection1/ shard1
[junit4] 2> 1011435 INFO
(coreZkRegister-3573-thread-1-processing-n:127.0.0.1:53210_ x:collection1
c:collection1) [n:127.0.0.1:53210_ c:collection1 s:shard1 r:core_node1
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
[junit4] 2> 1011607 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-2-001/cores/collection1
[junit4] 2> 1011608 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-2-001
[junit4] 2> 1011609 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1011610 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@143ebb29{/,null,AVAILABLE}
[junit4] 2> 1011611 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@1f7ed30a{HTTP/1.1,[http/1.1]}{127.0.0.1:53214}
[junit4] 2> 1011611 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.Server Started @1017192ms
[junit4] 2> 1011611 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/tempDir-001/jetty2,
solrconfig=solrconfig.xml, hostContext=/, hostPort=53214,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-2-001/cores}
[junit4] 2> 1011612 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1011612 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version
7.0.0
[junit4] 2> 1011612 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1011612 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1011612 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-01-10T13:05:55.247Z
[junit4] 2> 1011616 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1011616 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-2-001/solr.xml
[junit4] 2> 1011672 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:53202/solr
[junit4] 2> 1011690 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53214_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
[junit4] 2> 1011701 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53214_
] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:53214_
[junit4] 2> 1011704 INFO (zkCallback-2025-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1011704 INFO
(zkCallback-2030-thread-1-processing-n:127.0.0.1:53210_) [n:127.0.0.1:53210_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1011704 INFO
(zkCallback-2021-thread-2-processing-n:127.0.0.1:53205_) [n:127.0.0.1:53205_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1011706 INFO
(zkCallback-2036-thread-1-processing-n:127.0.0.1:53214_) [n:127.0.0.1:53214_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
[junit4] 2> 1011789 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53214_
] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-2-001/cores
[junit4] 2> 1011789 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53214_
] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
[junit4] 2> 1011792 INFO
(OverseerStateUpdate-97258933497954308-127.0.0.1:53205_-n_0000000000)
[n:127.0.0.1:53205_ ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 1012815 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 7.0.0
[junit4] 2> 1012830 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1012931 WARN
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1012933 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 1012962 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1012962 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-2-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-2-001/cores/collection1/data/]
[junit4] 2> 1012962 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap JMX
monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e983ee5
[junit4] 2> 1012969 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=17, maxMergeAtOnceExplicit=11,
maxMergedSegmentMB=48.986328125, floorSegmentMB=1.4169921875,
forceMergeDeletesPctAllowed=17.289735763156585, segmentsPerTier=20.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6146725899110337
[junit4] 2> 1012992 WARN
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 1013012 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.u.UpdateHandler Using
UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1013012 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1013014 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 1013014 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 1013015 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=43, maxMergeAtOnceExplicit=35,
maxMergedSegmentMB=12.322265625, floorSegmentMB=2.0947265625,
forceMergeDeletesPctAllowed=18.392260164660186, segmentsPerTier=37.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6142252830410042
[junit4] 2> 1013016 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@3341879a[collection1] main]
[junit4] 2> 1013019 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1013026 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1013026 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 1013029 INFO
(searcherExecutor-3590-thread-1-processing-n:127.0.0.1:53214_ x:collection1
c:collection1) [n:127.0.0.1:53214_ c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@3341879a[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1013033 INFO
(coreLoadExecutor-3589-thread-1-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 x:collection1] o.a.s.u.UpdateLog Could not
find max version in index or recent updates, using new clock 1556142942236704768
[junit4] 2> 1013040 INFO
(coreZkRegister-3584-thread-1-processing-n:127.0.0.1:53214_ x:collection1
c:collection1) [n:127.0.0.1:53214_ c:collection1 s:shard1 r:core_node2
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
[junit4] 2> 1013041 INFO
(updateExecutor-2033-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
[junit4] 2> 1013041 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process.
recoveringAfterStartup=true
[junit4] 2> 1013042 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
[junit4] 2> 1013042 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates.
core=[collection1]
[junit4] 2> 1013042 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates.
FSUpdateLog{state=ACTIVE, tlog=null}
[junit4] 2> 1013043 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core
[collection1] as recovering, leader is [http://127.0.0.1:53210/collection1/]
and I am [http://127.0.0.1:53214/collection1/]
[junit4] 2> 1013046 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery
command to [http://127.0.0.1:53210]; [WaitForState:
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:53214_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
[junit4] 2> 1013050 INFO (qtp1595854443-9837) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state:
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
[junit4] 2> 1013051 INFO (qtp1595854443-9837) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1
(shard1 of collection1) have state: recovering
[junit4] 2> 1013051 INFO (qtp1595854443-9837) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=down, localState=active,
nodeName=127.0.0.1:53214_, coreNodeName=core_node2,
onlyIfActiveCheckResult=false, nodeProps:
core_node2:{"core":"collection1","base_url":"http://127.0.0.1:53214","node_name":"127.0.0.1:53214_","state":"down"}
[junit4] 2> 1013633 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.SolrTestCaseJ4 Writing core.properties file to
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-3-001/cores/collection1
[junit4] 2> 1013634 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-3-001
[junit4] 2> 1013635 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.Server jetty-9.3.14.v20161028
[junit4] 2> 1013637 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.h.ContextHandler Started
o.e.j.s.ServletContextHandler@25600c0d{/,null,AVAILABLE}
[junit4] 2> 1013638 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.AbstractConnector Started
ServerConnector@4111e0ca{HTTP/1.1,[http/1.1]}{127.0.0.1:53219}
[junit4] 2> 1013638 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.Server Started @1019219ms
[junit4] 2> 1013638 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.s.e.JettySolrRunner Jetty properties:
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/tempDir-001/jetty3,
solrconfig=solrconfig.xml, hostContext=/, hostPort=53219,
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-3-001/cores}
[junit4] 2> 1013639 ERROR
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be
missing or incomplete.
[junit4] 2> 1013639 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version
7.0.0
[junit4] 2> 1013639 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null
[junit4] 2> 1013639 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null
[junit4] 2> 1013639 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time:
2017-01-10T13:05:57.274Z
[junit4] 2> 1013644 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in
ZooKeeper)
[junit4] 2> 1013644 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.SolrXmlConfig Loading container configuration from
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-3-001/solr.xml
[junit4] 2> 1013658 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:53202/solr
[junit4] 2> 1013677 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53219_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
[junit4] 2> 1013687 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53219_
] o.a.s.c.ZkController Register node as live in
ZooKeeper:/live_nodes/127.0.0.1:53219_
[junit4] 2> 1013691 INFO (zkCallback-2025-thread-1) [ ]
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1013691 INFO
(zkCallback-2030-thread-1-processing-n:127.0.0.1:53210_) [n:127.0.0.1:53210_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1013691 INFO
(zkCallback-2021-thread-2-processing-n:127.0.0.1:53205_) [n:127.0.0.1:53205_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1013692 INFO
(zkCallback-2036-thread-1-processing-n:127.0.0.1:53214_) [n:127.0.0.1:53214_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1013693 INFO
(zkCallback-2043-thread-1-processing-n:127.0.0.1:53219_) [n:127.0.0.1:53219_
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
[junit4] 2> 1013731 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53219_
] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-3-001/cores
[junit4] 2> 1013731 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [n:127.0.0.1:53219_
] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
[junit4] 2> 1013735 INFO
(OverseerStateUpdate-97258933497954308-127.0.0.1:53205_-n_0000000000)
[n:127.0.0.1:53205_ ] o.a.s.c.o.ReplicaMutator Assigning new node to shard
shard=shard1
[junit4] 2> 1014059 INFO (qtp1595854443-9837) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=recovering, localState=active,
nodeName=127.0.0.1:53214_, coreNodeName=core_node2,
onlyIfActiveCheckResult=false, nodeProps:
core_node2:{"core":"collection1","base_url":"http://127.0.0.1:53214","node_name":"127.0.0.1:53214_","state":"recovering"}
[junit4] 2> 1014060 INFO (qtp1595854443-9837) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering,
checkLive: true, onlyIfLeader: true for: 1 seconds.
[junit4] 2> 1014060 INFO (qtp1595854443-9837) [n:127.0.0.1:53210_ ]
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:53214_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=0 QTime=1009
[junit4] 2> 1014752 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.c.SolrConfig Using
Lucene MatchVersion: 7.0.0
[junit4] 2> 1014764 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] Schema name=test
[junit4] 2> 1014847 WARN
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.s.IndexSchema
[collection1] default search field in schema is text. WARNING: Deprecated,
please use 'df' on request instead.
[junit4] 2> 1014849 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.s.IndexSchema Loaded
schema test/1.0 with uniqueid field id
[junit4] 2> 1014869 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.c.CoreContainer
Creating SolrCore 'collection1' using configuration from collection collection1
[junit4] 2> 1014870 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.c.SolrCore
[[collection1] ] Opening new SolrCore at
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-3-001/cores/collection1],
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001/shard-3-001/cores/collection1/data/]
[junit4] 2> 1014870 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.c.JmxMonitoredMap JMX
monitoring is enabled. Adding Solr mbeans to JMX Server:
com.sun.jmx.mbeanserver.JmxMBeanServer@5e983ee5
[junit4] 2> 1014873 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=17, maxMergeAtOnceExplicit=11,
maxMergedSegmentMB=48.986328125, floorSegmentMB=1.4169921875,
forceMergeDeletesPctAllowed=17.289735763156585, segmentsPerTier=20.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6146725899110337
[junit4] 2> 1014881 WARN
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.c.RequestHandlers
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class
= DumpRequestHandler,attributes = {initParams=a, name=/dump,
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
[junit4] 2> 1014896 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.u.UpdateHandler Using
UpdateLog implementation: org.apache.solr.update.UpdateLog
[junit4] 2> 1014897 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.u.UpdateLog
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000
maxNumLogsToKeep=10 numVersionBuckets=65536
[junit4] 2> 1014898 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.u.CommitTracker Hard
AutoCommit: disabled
[junit4] 2> 1014898 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.u.CommitTracker Soft
AutoCommit: disabled
[junit4] 2> 1014898 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.u.RandomMergePolicy
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy:
[TieredMergePolicy: maxMergeAtOnce=43, maxMergeAtOnceExplicit=35,
maxMergedSegmentMB=12.322265625, floorSegmentMB=2.0947265625,
forceMergeDeletesPctAllowed=18.392260164660186, segmentsPerTier=37.0,
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.6142252830410042
[junit4] 2> 1014900 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.s.SolrIndexSearcher
Opening [Searcher@39aea14f[collection1] main]
[junit4] 2> 1014902 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase:
/configs/conf1
[junit4] 2> 1014902 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1]
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using
ZooKeeperStorageIO:path=/configs/conf1
[junit4] 2> 1014902 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.h.ReplicationHandler
Commits will be reserved for 10000
[junit4] 2> 1014903 INFO
(searcherExecutor-3601-thread-1-processing-n:127.0.0.1:53219_ x:collection1
c:collection1) [n:127.0.0.1:53219_ c:collection1 x:collection1]
o.a.s.c.SolrCore [collection1] Registered new searcher
Searcher@39aea14f[collection1]
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
[junit4] 2> 1014904 INFO
(coreLoadExecutor-3600-thread-1-processing-n:127.0.0.1:53219_)
[n:127.0.0.1:53219_ c:collection1 x:collection1] o.a.s.u.UpdateLog Could not
find max version in index or recent updates, using new clock 1556142944198590464
[junit4] 2> 1014909 INFO
(coreZkRegister-3595-thread-1-processing-n:127.0.0.1:53219_ x:collection1
c:collection1) [n:127.0.0.1:53219_ c:collection1 s:shard1 r:core_node3
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
[junit4] 2> 1014910 INFO
(updateExecutor-2040-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
[junit4] 2> 1014910 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process.
recoveringAfterStartup=true
[junit4] 2> 1014911 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
[junit4] 2> 1014911 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates.
core=[collection1]
[junit4] 2> 1014911 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates.
FSUpdateLog{state=ACTIVE, tlog=null}
[junit4] 2> 1014911 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core
[collection1] as recovering, leader is [http://127.0.0.1:53210/collection1/]
and I am [http://127.0.0.1:53219/collection1/]
[junit4] 2> 1014913 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery
command to [http://127.0.0.1:53210]; [WaitForState:
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:53219_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
[junit4] 2> 1014915 INFO (qtp1595854443-9841) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state:
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
[junit4] 2> 1014915 INFO (qtp1595854443-9841) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1
(shard1 of collection1) have state: recovering
[junit4] 2> 1014915 INFO (qtp1595854443-9841) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=down, localState=active,
nodeName=127.0.0.1:53219_, coreNodeName=core_node3,
onlyIfActiveCheckResult=false, nodeProps:
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:53219","node_name":"127.0.0.1:53219_","state":"down"}
[junit4] 2> 1015074 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.SolrTestCaseJ4 ###Starting test
[junit4] 2> 1015074 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30
for each attempt
[junit4] 2> 1015074 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection:
collection1 failOnTimeout:true timeout (sec):30
[junit4] 2> 1015921 INFO (qtp1595854443-9841) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1,
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader?
true, live=true, checkLive=true, currentState=recovering, localState=active,
nodeName=127.0.0.1:53219_, coreNodeName=core_node3,
onlyIfActiveCheckResult=false, nodeProps:
core_node3:{"core":"collection1","base_url":"http://127.0.0.1:53219","node_name":"127.0.0.1:53219_","state":"recovering"}
[junit4] 2> 1015921 INFO (qtp1595854443-9841) [n:127.0.0.1:53210_ ]
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering,
checkLive: true, onlyIfLeader: true for: 1 seconds.
[junit4] 2> 1015921 INFO (qtp1595854443-9841) [n:127.0.0.1:53210_ ]
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores
params={nodeName=127.0.0.1:53219_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
status=0 QTime=1006
[junit4] 2> 1021061 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync
from [http://127.0.0.1:53210/collection1/] - recoveringAfterStartup=[true]
[junit4] 2> 1021061 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1
url=http://127.0.0.1:53214 START replicas=[http://127.0.0.1:53210/collection1/]
nUpdates=1000
[junit4] 2> 1021064 INFO (qtp1595854443-9838) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1021064 INFO (qtp1595854443-9838) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/get
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
status=0 QTime=1
[junit4] 2> 1021065 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint
millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1021065 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to
do a PeerSync
[junit4] 2> 1021066 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1021066 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted
changes. Skipping IW.commit.
[junit4] 2> 1021066 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
[junit4] 2> 1021066 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery
was successful.
[junit4] 2> 1021066 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered
during PeerSync.
[junit4] 2> 1021066 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
[junit4] 2> 1021066 INFO
(recoveryExecutor-2034-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active
after recovery.
[junit4] 2> 1022927 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync
from [http://127.0.0.1:53210/collection1/] - recoveringAfterStartup=[true]
[junit4] 2> 1022927 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1
url=http://127.0.0.1:53219 START replicas=[http://127.0.0.1:53210/collection1/]
nUpdates=1000
[junit4] 2> 1022929 INFO (qtp1595854443-9839) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1022929 INFO (qtp1595854443-9839) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/get
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
status=0 QTime=0
[junit4] 2> 1022930 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint
millis:0.0 result:{maxVersionSpecified=9223372036854775807,
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0,
maxDoc=0}
[junit4] 2> 1022931 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to
do a PeerSync
[junit4] 2> 1022931 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1022931 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted
changes. Skipping IW.commit.
[junit4] 2> 1022931 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
[junit4] 2> 1022931 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery
was successful.
[junit4] 2> 1022931 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered
during PeerSync.
[junit4] 2> 1022931 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
[junit4] 2> 1022931 INFO
(recoveryExecutor-2041-thread-1-processing-n:127.0.0.1:53219_ x:collection1
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:53219_ c:collection1 s:shard1
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active
after recovery.
[junit4] 2> 1023111 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
[junit4] 2> 1023114 INFO (qtp975395389-9800) [n:127.0.0.1:53205_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1023114 INFO (qtp975395389-9800) [n:127.0.0.1:53205_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1023115 INFO (qtp975395389-9800) [n:127.0.0.1:53205_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.DirectUpdateHandler2 end_commit_flush
[junit4] 2> 1023115 INFO (qtp975395389-9800) [n:127.0.0.1:53205_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
0 1
[junit4] 2> 1023119 INFO (qtp1595854443-9842) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1023119 INFO (qtp1595854443-9842) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1023119 INFO (qtp1595854443-9842) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1023119 INFO (qtp1595854443-9842) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:53210/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 0
[junit4] 2> 1023122 INFO (qtp1009627779-9900) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1023122 INFO (qtp1120343778-9865) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
start
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
[junit4] 2> 1023123 INFO (qtp1009627779-9900) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1023124 INFO (qtp1120343778-9865) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
No uncommitted changes. Skipping IW.commit.
[junit4] 2> 1023124 INFO (qtp1009627779-9900) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1023124 INFO (qtp1120343778-9865) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2
end_commit_flush
[junit4] 2> 1023125 INFO (qtp1009627779-9900) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:53210/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 2
[junit4] 2> 1023125 INFO (qtp1120343778-9865) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=http://127.0.0.1:53210/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
0 2
[junit4] 2> 1023125 INFO (qtp1595854443-9841) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
0 8
[junit4] 2> 1023128 INFO (qtp1595854443-9835) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1023132 INFO (qtp1120343778-9866) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1023139 INFO (qtp1009627779-9901) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request
[collection1] webapp= path=/select
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
hits=0 status=0 QTime=0
[junit4] 2> 1025145 INFO (qtp975395389-9801) [n:127.0.0.1:53205_
c:control_collection s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{deleteByQuery=*:* (-1556142954934960128)} 0 2
[junit4] 2> 1025150 INFO (qtp1120343778-9866) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&_version_=-1556142954938105856&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
(-1556142954938105856)} 0 1
[junit4] 2> 1025150 INFO (qtp1009627779-9901) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&_version_=-1556142954938105856&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
(-1556142954938105856)} 0 1
[junit4] 2> 1025150 INFO (qtp1595854443-9838) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{deleteByQuery=*:* (-1556142954938105856)} 0 4
[junit4] 2> 1025159 INFO (qtp1120343778-9868) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[0
(1556142954947543040)]} 0 1
[junit4] 2> 1025159 INFO (qtp1009627779-9903) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[0
(1556142954947543040)]} 0 1
[junit4] 2> 1025160 INFO (qtp1595854443-9839) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[0 (1556142954947543040)]} 0 5
[junit4] 2> 1025162 INFO (qtp1009627779-9904) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[1
(1556142954953834496)]} 0 0
[junit4] 2> 1025162 INFO (qtp1120343778-9869) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[1
(1556142954953834496)]} 0 0
[junit4] 2> 1025162 INFO (qtp1595854443-9840) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[1 (1556142954953834496)]} 0 1
[junit4] 2> 1025164 INFO (qtp1120343778-9869) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[2
(1556142954955931648)]} 0 0
[junit4] 2> 1025164 INFO (qtp1009627779-9904) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[2
(1556142954955931648)]} 0 0
[junit4] 2> 1025165 INFO (qtp1595854443-9837) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[2 (1556142954955931648)]} 0 1
[junit4] 2> 1025167 INFO (qtp1009627779-9898) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[3
(1556142954959077376)]} 0 0
[junit4] 2> 1025167 INFO (qtp1120343778-9863) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[3
(1556142954959077376)]} 0 0
[junit4] 2> 1025167 INFO (qtp1595854443-9842) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[3 (1556142954959077376)]} 0 1
[junit4] 2> 1025169 INFO (qtp1009627779-9900) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[4
(1556142954961174528)]} 0 0
[junit4] 2> 1025169 INFO (qtp1120343778-9865) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[4
(1556142954961174528)]} 0 0
[junit4] 2> 1025169 INFO (qtp1595854443-9841) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[4 (1556142954961174528)]} 0 1
[junit4] 2> 1025171 INFO (qtp1009627779-9902) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[5
(1556142954963271680)]} 0 0
[junit4] 2> 1025171 INFO (qtp1120343778-9867) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[5
(1556142954963271680)]} 0 0
[junit4] 2> 1025171 INFO (qtp1595854443-9835) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[5 (1556142954963271680)]} 0 1
[junit4] 2> 1025173 INFO (qtp1120343778-9866) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[6
(1556142954965368832)]} 0 0
[junit4] 2> 1025173 INFO (qtp1009627779-9902) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[6
(1556142954965368832)]} 0 0
[junit4] 2> 1025173 INFO (qtp1595854443-9838) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[6 (1556142954965368832)]} 0 1
[junit4] 2> 1025175 INFO (qtp1120343778-9866) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[7
(1556142954967465984)]} 0 0
[junit4] 2> 1025175 INFO (qtp1009627779-9902) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[7
(1556142954967465984)]} 0 0
[junit4] 2> 1025175 INFO (qtp1595854443-9839) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[7 (1556142954967465984)]} 0 1
[junit4] 2> 1025177 INFO (qtp1120343778-9866) [n:127.0.0.1:53214_
c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[8
(1556142954969563136)]} 0 0
[junit4] 2> 1025177 INFO (qtp1009627779-9902) [n:127.0.0.1:53219_
c:collection1 s:shard1 r:core_node3 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={update.distrib=FROMLEADER&distrib.from=http://127.0.0.1:53210/collection1/&wt=javabin&version=2}{add=[8
(1556142954969563136)]} 0 0
[junit4] 2> 1025178 INFO (qtp1595854443-9840) [n:127.0.0.1:53210_
c:collection1 s:shard1 r:core_node1 x:collection1]
o.a.s.u.p.LogUpdateProcessorFactory [collection1] webapp= path=/update
params={wt=javabin&version=2}{add=[8 (1556142954969563136)]} 0
[...truncated too long message...]
]) [ ] o.a.s.c.ChaosMonkey monkey: stop shard! 53214
[junit4] 2> 1210592 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.CoreContainer Shutting down CoreContainer instance=565377
[junit4] 2> 1210592 WARN
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.RecoveryStrategy Stopping recovery for core=[collection1]
coreNodeName=[core_node2]
[junit4] 2> 1210596 WARN
(updateExecutor-2047-thread-2-processing-n:127.0.0.1:53214_)
[n:127.0.0.1:53214_ c:collection1 s:shard1 r:core_node2 x:collection1]
o.a.s.u.DefaultSolrCoreState Skipping recovery because Solr is shutdown
[junit4] 2> 1214367 INFO
(recoveryExecutor-2048-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy RecoveryStrategy has been
closed
[junit4] 2> 1214367 INFO
(recoveryExecutor-2048-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Finished recovery process,
successful=[false]
[junit4] 2> 1214367 INFO
(recoveryExecutor-2048-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.SolrCore [collection1] CLOSING SolrCore
org.apache.solr.core.SolrCore@596060e9
[junit4] 2> 1214367 WARN
(recoveryExecutor-2048-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Stopping recovery for
core=[collection1] coreNodeName=[core_node2]
[junit4] 2> 1214431 INFO
(recoveryExecutor-2048-thread-1-processing-n:127.0.0.1:53214_ x:collection1
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:53214_ c:collection1 s:shard1
r:core_node2 x:collection1] o.a.s.m.SolrMetricManager Closing metric reporters
for: solr.core.collection1
[junit4] 2> 1214431 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.Overseer Overseer (id=97258933497954321-127.0.0.1:53214_-n_0000000004)
closing
[junit4] 2> 1214432 INFO
(OverseerStateUpdate-97258933497954321-127.0.0.1:53214_-n_0000000004)
[n:127.0.0.1:53214_ ] o.a.s.c.Overseer Overseer Loop exiting :
127.0.0.1:53214_
[junit4] 2> 1214435 WARN
(zkCallback-2050-thread-4-processing-n:127.0.0.1:53214_) [n:127.0.0.1:53214_
] o.a.s.c.c.ZkStateReader ZooKeeper watch triggered, but Solr cannot talk to
ZK: [KeeperErrorCode = Session expired for /live_nodes]
[junit4] 2> 1214435 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
[junit4] 2> 1214437 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.AbstractConnector Stopped
ServerConnector@193108a{HTTP/1.1,[http/1.1]}{127.0.0.1:53214}
[junit4] 2> 1214438 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.e.j.s.h.ContextHandler Stopped
o.e.j.s.ServletContextHandler@3f1784a9{/,null,UNAVAILABLE}
[junit4] 2> 1214441 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ChaosMonkey monkey: stop shard! 53219
[junit4] 2> 1214445 INFO
(TEST-PeerSyncReplicationTest.test-seed#[4A39EBB288C3A0B6]) [ ]
o.a.s.c.ZkTestServer connecting to 127.0.0.1:53202 53202
[junit4] 2> 1214547 INFO (Thread-1634) [ ] o.a.s.c.ZkTestServer
connecting to 127.0.0.1:53202 53202
[junit4] 2> 1217240 WARN (Thread-1634) [ ] o.a.s.c.ZkTestServer Watch
limit violations:
[junit4] 2> Maximum concurrent create/delete watches above limit:
[junit4] 2>
[junit4] 2> 6 /solr/aliases.json
[junit4] 2> 6 /solr/clusterprops.json
[junit4] 2> 5 /solr/security.json
[junit4] 2> 5 /solr/configs/conf1
[junit4] 2> 4 /solr/collections/collection1/state.json
[junit4] 2>
[junit4] 2> Maximum concurrent data watches above limit:
[junit4] 2>
[junit4] 2> 6 /solr/clusterstate.json
[junit4] 2> 2
/solr/overseer_elect/election/97258933497954312-127.0.0.1:53210_-n_0000000001
[junit4] 2> 2
/solr/collections/collection1/leader_elect/shard1/election/97258933497954312-core_node1-n_0000000000
[junit4] 2>
[junit4] 2> Maximum concurrent children watches above limit:
[junit4] 2>
[junit4] 2> 207 /solr/overseer/collection-queue-work
[junit4] 2> 45 /solr/overseer/queue
[junit4] 2> 7 /solr/overseer/queue-work
[junit4] 2> 6 /solr/collections
[junit4] 2> 5 /solr/live_nodes
[junit4] 2>
[junit4] 2> NOTE: reproduce with: ant test
-Dtestcase=PeerSyncReplicationTest -Dtests.method=test
-Dtests.seed=4A39EBB288C3A0B6 -Dtests.slow=true -Dtests.locale=es-PY
-Dtests.timezone=Europe/Vilnius -Dtests.asserts=true
-Dtests.file.encoding=ISO-8859-1
[junit4] FAILURE 210s J1 | PeerSyncReplicationTest.test <<<
[junit4] > Throwable #1: java.lang.AssertionError: timeout waiting to see
all nodes active
[junit4] > at
__randomizedtesting.SeedInfo.seed([4A39EBB288C3A0B6:C26DD468263FCD4E]:0)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
[junit4] > at
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
[junit4] > at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
[junit4] > at
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
[junit4] > at java.lang.Thread.run(Thread.java:745)
[junit4] 2> 1217245 INFO
(SUITE-PeerSyncReplicationTest-seed#[4A39EBB288C3A0B6]-worker) [ ]
o.a.s.SolrTestCaseJ4 ###deleteCore
[junit4] 2> NOTE: leaving temporary files on disk at:
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_4A39EBB288C3A0B6-001
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene70):
{other_tl1=Lucene50(blocksize=128),
range_facet_l_dv=PostingsFormat(name=Asserting), rnd_s=Lucene50(blocksize=128),
multiDefault=PostingsFormat(name=LuceneFixedGap),
intDefault=Lucene50(blocksize=128), a_i1=PostingsFormat(name=LuceneFixedGap),
range_facet_l=PostingsFormat(name=LuceneFixedGap),
_version_=Lucene50(blocksize=128), a_t=PostingsFormat(name=LuceneFixedGap),
id=PostingsFormat(name=Asserting),
range_facet_i_dv=PostingsFormat(name=LuceneFixedGap), text=FSTOrd50,
timestamp=PostingsFormat(name=LuceneFixedGap)},
docValues:{range_facet_l_dv=DocValuesFormat(name=Lucene70),
range_facet_i_dv=DocValuesFormat(name=Asserting),
timestamp=DocValuesFormat(name=Asserting)}, maxPointsInLeafNode=264,
maxMBSortInHeap=6.930022212166376, sim=RandomSimilarity(queryNorm=false): {},
locale=es-PY, timezone=Europe/Vilnius
[junit4] 2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_102
(64-bit)/cpus=3,threads=1,free=406568848,total=536870912
[junit4] 2> NOTE: All tests run in this JVM:
[DefaultValueUpdateProcessorTest, TestSchemaVersionResource,
TestSuggestSpellingConverter, TestRawTransformer, DataDrivenBlockJoinTest,
TestSolrConfigHandlerConcurrent, DirectUpdateHandlerOptimizeTest,
TestShardHandlerFactory, TestNRTOpen, CollectionTooManyReplicasTest,
TestReversedWildcardFilterFactory, TestDynamicLoading, TestTrie,
HdfsBasicDistributedZk2Test, TestCorePropertiesReload,
DistributedFacetPivotLargeTest, BasicDistributedZk2Test, TestSmileRequest,
TestRecoveryHdfs, TestBinaryResponseWriter, RequestHandlersTest,
TestLeaderElectionZkExpiry, AnalyticsMergeStrategyTest, TestFieldResource,
DistributedFacetPivotWhiteBoxTest, TestComplexPhraseQParserPlugin,
TestFieldSortValues, ShowFileRequestHandlerTest, TestOrdValues,
TestDistributedStatsComponentCardinality, TestRequestForwarding,
TestWriterPerf, ExternalFileFieldSortTest,
ClassificationUpdateProcessorIntegrationTest, TestRealTimeGet,
CollectionsAPIDistributedZkTest, TestMiniSolrCloudCluster,
TestAtomicUpdateErrorCases, AliasIntegrationTest,
DistributedQueryComponentOptimizationTest, TestScoreJoinQPScore,
TestUninvertingReader, HLLSerializationTest, HdfsRestartWhileUpdatingTest,
HdfsLockFactoryTest, TestAuthenticationFramework, TestSSLRandomization,
CoreAdminHandlerTest, StatsComponentTest, TestFieldCache, ScriptEngineTest,
FullSolrCloudDistribCmdsTest, DeleteShardTest,
UpdateRequestProcessorFactoryTest, LeaderElectionIntegrationTest,
RAMDirectoryFactoryTest, TestCloudPseudoReturnFields,
SuggestComponentContextFilterQueryTest, PKIAuthenticationIntegrationTest,
TestSQLHandlerNonCloud, TestRandomFaceting, TestChildDocTransformer,
TestRangeQuery, SolrJmxReporterTest, FacetPivotSmallTest,
TestCollationFieldDocValues, TestPHPSerializedResponseWriter,
BadIndexSchemaTest, HttpSolrCallGetCoreTest, TestJsonFacets,
TestQueryWrapperFilter, TestRebalanceLeaders, HighlighterConfigTest,
TestStressLiveNodes, TestTolerantUpdateProcessorRandomCloud, ReturnFieldsTest,
DeleteNodeTest, MinimalSchemaTest, StatsReloadRaceTest,
TestFieldCacheSanityChecker, TestRestoreCore, TestHdfsCloudBackupRestore,
TestConfigSetsAPIZkFailure, TestSolrIndexConfig, HdfsUnloadDistributedZkTest,
ConfigSetsAPITest, DistributedTermsComponentTest, TestBadConfig,
TestConfigReload, SpellCheckComponentTest, SecurityConfHandlerTest,
PeerSyncReplicationTest]
[junit4] Completed [221/677 (1!)] on J1 in 210.22s, 1 test, 1 failure <<<
FAILURES!
[...truncated 63905 lines...]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]