I just followed the upgrade guide to go from 3.1 to DSE 3.2, and now the logs are flooded with this exception. We are missing data when querying through Solr, but the data is there when querying with cqlsh or the CLI.
ERROR [IndexPool work thread-6] 2013-11-18 22:32:18,748 AbstractSolrSecondaryIndex.java (line 912) _yaqn8_Lucene41_0.tip
java.io.FileNotFoundException: _yaqn8_Lucene41_0.tip
at org.apache.lucene.store.bytebuffer.ByteBufferDirectory.fileLength(ByteBufferDirectory.java:129)
at org.apache.lucene.store.NRTCachingDirectory.sizeInBytes(NRTCachingDirectory.java:158)
at org.apache.lucene.store.NRTCachingDirectory.doCacheWrite(NRTCachingDirectory.java:289)
at org.apache.lucene.store.NRTCachingDirectory.createOutput(NRTCachingDirectory.java:199)
at org.apache.lucene.store.TrackingDirectoryWrapper.createOutput(TrackingDirectoryWrapper.java:62)
at org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.<init>(CompressingStoredFieldsWriter.java:107)
at com.datastax.bdp.cassandra.index.solr.CassandraStoredFieldsWriter.<init>(CassandraStoredFieldsWriter.java:25)
at com.datastax.bdp.cassandra.index.solr.CassandraStoredFieldsFormat.fieldsWriter(CassandraStoredFieldsFormat.java:39)
at org.apache.lucene.index.StoredFieldsProcessor.initFieldsWriter(StoredFieldsProcessor.java:86)
at org.apache.lucene.index.StoredFieldsProcessor.finishDocument(StoredFieldsProcessor.java:119)
at org.apache.lucene.index.TwoStoredFieldsConsumers.finishDocument(TwoStoredFieldsConsumers.java:65)
at org.apache.lucene.index.DocFieldProcessor.finishDocument(DocFieldProcessor.java:274)
at org.apache.lucene.index.DocumentsWriterPerThread.updateDocument(DocumentsWriterPerThread.java:274)
at org.apache.lucene.index.DocumentsWriter.updateDocument(DocumentsWriter.java:376)
at org.apache.lucene.index.IndexWriter.updateDocument(IndexWriter.java:1485)
at org.apache.solr.update.DirectUpdateHandler2.addDoc(DirectUpdateHandler2.java:201)
at com.datastax.bdp.cassandra.index.solr.CassandraDirectUpdateHandler2.addDoc(CassandraDirectUpdateHandler2.java:103)
at com.datastax.bdp.cassandra.index.solr.AbstractSolrSecondaryIndex.doIndex(AbstractSolrSecondaryIndex.java:929)
at com.datastax.bdp.cassandra.index.solr.AbstractSolrSecondaryIndex.doUpdateOrDelete(AbstractSolrSecondaryIndex.java:586)
at com.datastax.bdp.cassandra.index.solr.ThriftSolrSecondaryIndex.updateColumnFamilyIndex(ThriftSolrSecondaryIndex.java:114)
at com.datastax.bdp.cassandra.index.solr.AbstractSolrSecondaryIndex$3.run(AbstractSolrSecondaryIndex.java:896)
at com.datastax.bdp.cassandra.index.solr.concurrent.IndexWorker.run(IndexWorker.java:38)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
Also this:
ERROR 22:53:01,426 auto commit error...:org.apache.solr.common.SolrException: org.apache.solr.common.SolrException: Error opening new searcher
at com.datastax.bdp.cassandra.index.solr.CassandraDirectUpdateHandler2.commit(CassandraDirectUpdateHandler2.java:318)
at org.apache.solr.update.CommitTracker.run(CommitTracker.java:216)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:178)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:292)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.solr.common.SolrException: Error opening new searcher
at org.apache.solr.core.SolrCore.openNewSearcher(SolrCore.java:1457)
at org.apache.solr.core.SolrCore.getSearcher(SolrCore.java:1569)
at org.apache.solr.update.DirectUpdateHandler2.commit(DirectUpdateHandler2.java:557)
at com.datastax.bdp.cassandra.index.solr.CassandraDirectUpdateHandler2.commit(CassandraDirectUpdateHandler2.java:276)
... 9 more
Caused by: java.io.FileNotFoundException: _xfgfw_Lucene41_0.tim
at org.apache.lucene.store.bytebuffer.ByteBufferDirectory.fileLength(ByteBufferDirectory.java:129)
at org.apache.lucene.store.NRTCachingDirectory.sizeInBytes(NRTCachingDirectory.java:158)
at org.apache.lucene.store.NRTCachingDirectory.doCacheWrite(NRTCachingDirectory.java:289)
at org.apache.lucene.store.NRTCachingDirectory.createOutput(NRTCachingDirectory.java:199)
at org.apache.lucene.store.TrackingDirectoryWrapper.createOutput(TrackingDirectoryWrapper.java:62)
at org.apache.lucene.codecs.lucene42.Lucene42FieldInfosWriter.write(Lucene42FieldInfosWriter.java:49)
at org.apache.lucene.index.DocFieldProcessor.flush(DocFieldProcessor.java:88)
at org.apache.lucene.index.DocumentsWriterPerThread.flush(DocumentsWriterPerThread.java:493)
at org.apache.lucene.index.DocumentsWriter.doFlush(DocumentsWriter.java:422)
at org.apache.lucene.index.DocumentsWriter.flushAllThreads(DocumentsWriter.java:559)
at org.apache.lucene.index.IndexWriter.getReader(IndexWriter.java:365)
at org.apache.lucene.index.StandardDirectoryReader.doOpenFromWriter(StandardDirectoryReader.java:270)
at org.apache.lucene.index.StandardDirectoryReader.doOpenIfChanged(StandardDirectoryReader.java:255)
at org.apache.lucene.index.DirectoryReader.openIfChanged(DirectoryReader.java:250)
at org.apache.solr.core.SolrCore.openNewSearcher(SolrCore.java:1393)
... 12 more
Answer 0 (score: 5)
This is a known issue that is fixed in DSE 3.2.1.
We have just released 3.2.1, which should resolve your problem. Our developers were able to reproduce the stack trace and have fixed the issue. We also fixed a problem where indexes were not handled correctly after a restart.
Answer 1 (score: 3)
It looks like some files were not flushed properly on shutdown. You will have to perform a full re-index (with delete) on the nodes showing these errors to get the Lucene indexes rebuilt.
This page shows how to start a re-index: http://www.datastax.com/docs/datastax_enterprise3.2/solutions/dse_search_upload#reloading-a-solr-core
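As a rough sketch (host, port, and the keyspace.table core name are placeholders; check the linked docs page for the exact parameters on your version), the full re-index is triggered through the Solr core admin RELOAD action, something like:

# Reload the core and rebuild its index from scratch; deleteAll=true removes the old Lucene files first
curl "http://<solr-host>:8983/solr/admin/cores?action=RELOAD&name=<keyspace>.<table>&reindex=true&deleteAll=true"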
Answer 2 (score: 3)
As a workaround, you can change your Solr config to use the following (we are working on a proper fix):
<directoryFactory name="DirectoryFactory" class="solr.StandardDirectoryFactory"/>
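That element goes in solrconfig.xml for the affected core. A minimal sketch of applying the change, assuming the DSE Search resource-upload endpoint and placeholder host/core names (verify against your DSE docs before running), would be to re-upload the edited config and reload the core:

# Upload the edited solrconfig.xml for the core, then reload it so the new directoryFactory takes effect
curl "http://<solr-host>:8983/solr/resource/<keyspace>.<table>/solrconfig.xml" --data-binary @solrconfig.xml -H "Content-type: text/xml"
curl "http://<solr-host>:8983/solr/admin/cores?action=RELOAD&name=<keyspace>.<table>"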
Answer 3 (score: 0)
If the problem persists, you will need to re-index the CF.