Posted to user@lucenenet.apache.org by Richard Wilde <ri...@wildesoft.net> on 2013/04/05 12:19:22 UTC

File locking

I have deployed my application on Rackspace Cloud (3-4 nodes running IIS, all sharing the same files) and am running into this issue:

The process cannot access the file
'\\fs1-n01\stor1wc1dfw1\381858\799525\somedomain.com\web\content\App_Data\LuceneIndex\write.lock' because it is being used by another process.

I am not sure why I am running into this issue. The code that produces the error is:

public void Delete(long id, string type)
{
    directory = FSDirectory.Open(new DirectoryInfo(Settings.LuceneBaseDir));
    var writer = new IndexWriter(directory, analyzer, false,
                                 IndexWriter.MaxFieldLength.LIMITED);
    var queryParser = new QueryParser(Version.LUCENE_29, "Extract", analyzer);
    var special = string.Format("Id:{0} AND Type:{1}", id, type);

    writer.DeleteDocuments(queryParser.Parse(special));
    writer.Commit();
    writer.Close();
    directory.Close();
}
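
(One thing I wonder about with the method above: if Parse or DeleteDocuments throws, writer.Close() never runs, so presumably write.lock could be left behind. I assume a safer shape would be a try/finally, something like this sketch:)

public void Delete(long id, string type)
{
    directory = FSDirectory.Open(new DirectoryInfo(Settings.LuceneBaseDir));
    var writer = new IndexWriter(directory, analyzer, false,
                                 IndexWriter.MaxFieldLength.LIMITED);
    try
    {
        var queryParser = new QueryParser(Version.LUCENE_29, "Extract", analyzer);
        var special = string.Format("Id:{0} AND Type:{1}", id, type);
        writer.DeleteDocuments(queryParser.Parse(special));
        writer.Commit();
    }
    finally
    {
        // Always release the writer (and its write.lock), even if parsing or deleting throws.
        writer.Close();
        directory.Close();
    }
}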

 

I also have code that indexes documents:

 

public void Index(Document document)
{
    directory = FSDirectory.Open(new DirectoryInfo(Settings.LuceneBaseDir));
    var writer = new IndexWriter(directory, analyzer, false,
                                 IndexWriter.MaxFieldLength.LIMITED);
    writer.AddDocument(document);
    writer.Close();
    directory.Close();
}

 

public void Index(Contact entity)
{
    indexer.Index(GetDocument(entity));
}

 

public Document GetDocument(Contact entity)
{
    var mytext = entity.ToStringExtension();
    var doc = new Document();

    doc.Add(new Field(IndexColumns.Extract.ToString(), mytext,
                      Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field(IndexColumns.Type.ToString(), DocumentType,
                      Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
    doc.Add(new Field(IndexColumns.ClientId.ToString(), entity.Client.Id.ToString(),
                      Field.Store.YES, Field.Index.ANALYZED_NO_NORMS));
    doc.Add(new Field(IndexColumns.Title.ToString(), entity.Firstname + " " + entity.Lastname,
                      Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(IndexColumns.Content.ToString(), entity.Company,
                      Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(IndexColumns.Misc1.ToString(), entity.Phone,
                      Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(IndexColumns.Misc2.ToString(), entity.Email.ToString(CultureInfo.InvariantCulture),
                      Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(IndexColumns.Category.ToString(), entity.Category,
                      Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(IndexColumns.Id.ToString(), entity.Id.ToString(CultureInfo.InvariantCulture),
                      Field.Store.YES, Field.Index.NOT_ANALYZED));

    return doc;
}

 

 

The error is intermittent but seems to happen when I delete or insert a document. I also have a background (web-based) service that may be writing documents to the index every 30 seconds or so.

I am on version 2.9.4.2. Should I upgrade to 3.0.3, bearing in mind I am running in Partial Trust? I recall reading an email about some problems with that release (I might be mistaken).

 

I suspect I am doing something stupid, but I am not sure how to diagnose the problem.

 

Thanks

Rippo

 

 


RE: File locking

Posted by Richard Wilde <ri...@wildesoft.net>.
Thanks for this. Trouble is, this is impossible on the current hosting provider.

Back to crappy SQL-based searching, I think!






RE: File locking

Posted by Kevin Miller <sc...@gmail.com>.
Yes, we use a Windows service (Topshelf) for indexing and a web service for searching from that index.  
—
Sent from my portable orbiting headquarters.
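
Roughly, the host looks something like this (a sketch with made-up names; the real service class just owns the IndexWriter and pulls work from a queue):

using Topshelf;

// Sketch of a Topshelf-hosted indexing service. IndexerService is a
// hypothetical class that would own the single IndexWriter and pull
// documents to index from a queue or database table.
public class IndexerService
{
    public void Start() { /* open the IndexWriter, start polling for work */ }
    public void Stop()  { /* stop polling, close the IndexWriter */ }
}

public class Program
{
    public static void Main()
    {
        HostFactory.Run(x =>
        {
            x.Service<IndexerService>(s =>
            {
                s.ConstructUsing(name => new IndexerService());
                s.WhenStarted(svc => svc.Start());
                s.WhenStopped(svc => svc.Stop());
            });
            x.RunAsLocalSystem();
            x.SetServiceName("LuceneIndexer");
            x.SetDisplayName("Lucene Indexing Service");
        });
    }
}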


RE: File locking

Posted by Richard Wilde <ri...@wildesoft.net>.
This is my problem with HTTP.

I am using Rackspace Cloud, which basically gives me 5 nodes (IIS servers) with only one set of web files, so I only have one Lucene index. I cannot install a Windows service.

WEB01 - locks
WEB02 - writes at same time but fails as there is a write lock...

So how do I resolve this? I can't really do a Thread.Sleep and retry, as this will slow down the request for the user.
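
(To be clear, the kind of retry I mean is roughly this - a sketch with made-up numbers - and every attempt just sits on the user's request:)

// Hypothetical retry-on-lock loop; each attempt blocks the web request.
// Needs: using Lucene.Net.Index; using Lucene.Net.Store; using System.Threading;
IndexWriter writer = null;
for (var attempt = 0; attempt < 5 && writer == null; attempt++)
{
    try
    {
        writer = new IndexWriter(directory, analyzer, false,
                                 IndexWriter.MaxFieldLength.LIMITED);
    }
    catch (LockObtainFailedException)
    {
        Thread.Sleep(200); // hope the other node has released write.lock by then
    }
}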

Or is using Lucene in my setup impossible?

Many Thanks
Richard





Re: File locking

Posted by Nicholas Paldino <ca...@caspershouse.com>.
Richard,

Doesn't matter really; you'll need to expose a communications endpoint from that process, which your other processes (the ones that would normally attach to Lucene directly) would communicate with.

An HTTP endpoint makes sense because there is lots of tooling to make communication between those processes easier using that protocol.
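
As a rough illustration (hypothetical URL and payload; self-hosting with HttpListener rather than ASP.NET), the indexing process could do something like:

using System.IO;
using System.Net;

// Sketch: the one process that owns the IndexWriter listens for HTTP
// requests from the web nodes and applies them to the index, so only
// this process ever touches write.lock.
public static class IndexEndpoint
{
    public static void Run()
    {
        var listener = new HttpListener();
        listener.Prefixes.Add("http://localhost:8080/index/"); // hypothetical address
        listener.Start();

        while (true)
        {
            var context = listener.GetContext(); // blocks until a request arrives
            string body;
            using (var reader = new StreamReader(context.Request.InputStream))
                body = reader.ReadToEnd();

            // ...parse body and hand it to the single shared IndexWriter here...

            context.Response.StatusCode = 200;
            context.Response.Close();
        }
    }
}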

However, if you do this in an ASP.NET application, you'll potentially have a problem when the application domain recycles.

- Nick


RE: File locking

Posted by Richard Wilde <ri...@wildesoft.net>.
Are you talking about an HTTP-based service or a Windows service?


Many Thanks
Richard




Re: File locking

Posted by Nicholas Paldino <ca...@caspershouse.com>.
Richard,

Lucene takes exclusive file locks on the index files when using the file directory, so having multiple processes access the same files is a problem.

The recommended course of action is to create a service which uses Lucene and then can be called from any number of processes.
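
In outline, something like this (a rough sketch with hypothetical names; it assumes this is the only process that ever opens the index for writing):

using System;
using System.IO;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Version = Lucene.Net.Util.Version;

// Sketch: one long-lived IndexWriter per index, owned by a single process,
// with all writes serialized through it. The web nodes call into this
// service instead of opening their own writers.
public class LuceneIndexService : IDisposable
{
    private readonly object _sync = new object();
    private readonly FSDirectory _directory;
    private readonly IndexWriter _writer;

    public LuceneIndexService(string indexPath)
    {
        _directory = FSDirectory.Open(new DirectoryInfo(indexPath));
        _writer = new IndexWriter(_directory,
                                  new StandardAnalyzer(Version.LUCENE_29),
                                  IndexWriter.MaxFieldLength.LIMITED);
    }

    public void Add(Document document)
    {
        lock (_sync)
        {
            _writer.AddDocument(document);
            _writer.Commit();
        }
    }

    public void Delete(Term term)
    {
        lock (_sync)
        {
            _writer.DeleteDocuments(term);
            _writer.Commit();
        }
    }

    public void Dispose()
    {
        _writer.Close();     // releases write.lock
        _directory.Close();
    }
}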

- Nick
