I'd made just a few changes to my code to remove deprecations that occurred when I upgraded to Lucene 3.1. All tests work fine, but when I tried to build a real index it failed during the optimization stage with this error:

Exception in thread "main" java.nio.channels.OverlappingFileLockException
    at sun.nio.ch.FileChannelImpl$SharedFileLockTable.checkList(FileChannelImpl.java:1166)
    at sun.nio.ch.FileChannelImpl$SharedFileLockTable.add(FileChannelImpl.java:1068)
    at sun.nio.ch.FileChannelImpl.tryLock(FileChannelImpl.java:868)
    at java.nio.channels.FileChannel.tryLock(FileChannel.java:962)
    at org.apache.lucene.store.NativeFSLock.obtain(NativeFSLockFactory.java:216)
    at org.apache.lucene.store.Lock.obtain(Lock.java:72)
    at org.apache.lucene.index.IndexWriter.<init>(IndexWriter.java:1097)
    at org.musicbrainz.search.index.ThreadedIndexWriter.<init>(ThreadedIndexWriter.java:66)
    at org.musicbrainz.search.index.IndexBuilder.createIndexWriter(IndexBuilder.java:229)
    at org.musicbrainz.search.index.IndexBuilder.main(IndexBuilder.java:152)

A few minutes later I also got:

Exception in thread "Lucene Merge Thread #101" org.apache.lucene.index.MergePolicy$MergeException: java.io.IOException: File too large
    at org.apache.lucene.index.ConcurrentMergeScheduler.handleMergeException(ConcurrentMergeScheduler.java:517)
    at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:482)
Caused by: java.io.IOException: File too large
    at java.io.RandomAccessFile.writeBytes(Native Method)
    at java.io.RandomAccessFile.write(RandomAccessFile.java:482)
    at org.apache.lucene.store.FSDirectory$FSIndexOutput.flushBuffer(FSDirectory.java:469)
    at org.apache.lucene.store.BufferedIndexOutput.flushBuffer(BufferedIndexOutput.java:99)
    at org.apache.lucene.store.BufferedIndexOutput.flush(BufferedIndexOutput.java:88)
    at org.apache.lucene.store.BufferedIndexOutput.close(BufferedIndexOutput.java:113)
    at org.apache.lucene.store.FSDirectory$FSIndexOutput.close(FSDirectory.java:478)
    at org.apache.lucene.util.IOUtils.closeSafely(IOUtils.java:80)
    at org.apache.lucene.index.FieldsWriter.close(FieldsWriter.java:111)
    at org.apache.lucene.index.SegmentMerger.mergeFields(SegmentMerger.java:245)
    at org.apache.lucene.index.SegmentMerger.merge(SegmentMerger.java:110)
    at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:3938)
    at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:3614)
    at org.apache.lucene.index.ConcurrentMergeScheduler.doMerge(ConcurrentMergeScheduler.java:388)
    at org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:456)


I'm not sure whether one is a consequence of the other. Any ideas?

The idea behind the ThreadedIndexWriter class is that I can build a number of indexes one at a time from a database source, but once an index has been built I optimize it while the next index is being built (a rough sketch of the driver loop follows the class below).

package org.musicbrainz.search.index;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;

import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ThreadedIndexWriter extends IndexWriter {
    private ExecutorService threadPool;
    private Analyzer defaultAnalyzer;

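    // Runnable that performs the actual addDocument() on a pool thread,
    // delegating to the superclass so indexing happens off the calling thread.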
    private class Job implements Runnable {
        Document doc;
        Analyzer analyzer;

        public Job(Document doc, Analyzer analyzer) {
            this.doc = doc;
            this.analyzer = analyzer;
        }

        public void run() {
            try {
                ThreadedIndexWriter.super.addDocument(doc, analyzer);
            } catch (IOException ioe) {
                ioe.printStackTrace(System.err);
            }
        }
    }

    public ThreadedIndexWriter(Directory dir,
                               IndexWriterConfig config,
                               int numThreads,
                               int maxQueueSize) throws IOException {
        super(dir, config);
        defaultAnalyzer = config.getAnalyzer();
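        // Bounded queue + CallerRunsPolicy gives back-pressure: when the
        // queue is full, the submitting thread runs the job itself.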
        threadPool = new ThreadPoolExecutor(
                numThreads, numThreads, 0,
                TimeUnit.SECONDS,
                new ArrayBlockingQueue<Runnable>(maxQueueSize, false),
                new ThreadPoolExecutor.CallerRunsPolicy());
    }

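    // The add overrides just enqueue work; the real indexing happens in Job.run().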
    public void addDocument(Document doc) {
        threadPool.execute(new Job(doc, defaultAnalyzer));
    }

    public void addDocument(Document doc, Analyzer a) {
        threadPool.execute(new Job(doc, a));
    }

    public void updateDocument(Term term, Document doc) {
        throw new UnsupportedOperationException();
    }

    public void updateDocument(Term term, Document doc, Analyzer a) {
        throw new UnsupportedOperationException();
    }

    public void close() throws IOException {
        finish();
        super.close();
    }

    public void close(boolean doWait) throws IOException {
        finish();
        super.close(doWait);
    }

    public void rollback() throws IOException {
        finish();
        super.rollback();
    }

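    // Drains the thread pool so no add jobs are still running when
    // close()/rollback() proceed on the underlying IndexWriter.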
    private void finish() {
        threadPool.shutdown();
        while (true) {
            try {
                if (threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS)) {
                    break;
                }
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(ie);
            }
        }
    }
}
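
For context, the driver loop in IndexBuilder works roughly like the sketch below. This is illustrative only (buildOneIndex is a made-up stand-in for the database-reading code, and the real createIndexWriter signature may differ), but it shows how the previous index gets optimized on a background thread while the next one is being built:

    // Rough sketch of the driver, not the real IndexBuilder code.
    static void buildAll(java.util.List<String> indexNames) throws Exception {
        Thread pendingOptimize = null;
        for (String name : indexNames) {
            final ThreadedIndexWriter writer = createIndexWriter(name);
            buildOneIndex(writer, name);   // add documents from the database

            // wait for the previous optimize before starting another one
            if (pendingOptimize != null) {
                pendingOptimize.join();
            }
            pendingOptimize = new Thread() {
                public void run() {
                    try {
                        writer.optimize();  // Lucene 3.x
                        writer.close();
                    } catch (IOException e) {
                        e.printStackTrace(System.err);
                    }
                }
            };
            // optimize the finished index while the next one is being built
            pendingOptimize.start();
        }
        if (pendingOptimize != null) {
            pendingOptimize.join();
        }
    }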

Paul

