You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by jp...@apache.org on 2020/02/05 15:45:08 UTC
[lucene-solr] branch branch_8x updated: SOLR-14242:
HdfsDirectory#createTempOutput. (#1240)
This is an automated email from the ASF dual-hosted git repository.
jpountz pushed a commit to branch branch_8x
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git
The following commit(s) were added to refs/heads/branch_8x by this push:
new d007470 SOLR-14242: HdfsDirectory#createTempOutput. (#1240)
d007470 is described below
commit d007470bda2f70ba4e1c407ac624e21288947128
Author: Adrien Grand <jp...@gmail.com>
AuthorDate: Wed Feb 5 16:38:53 2020 +0100
SOLR-14242: HdfsDirectory#createTempOutput. (#1240)
---
solr/CHANGES.txt | 2 ++
.../org/apache/solr/store/hdfs/HdfsDirectory.java | 15 +++++++++--
.../apache/solr/store/hdfs/HdfsDirectoryTest.java | 29 +++++++++++++++++++---
3 files changed, 41 insertions(+), 5 deletions(-)
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index cf06872..3b2a64f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -87,6 +87,8 @@ New Features
* SOLR-12325: Introducing uniqueBlock({!v=type:parent}) aggregation (Anatolii Siuniaev via Mikhail Khludnev)
+ * SOLR-14242: HdfsDirectory now supports createTempOutput. (Adrien Grand)
+
Improvements
---------------------
* SOLR-14120: Define JavaScript methods 'includes' and 'startsWith' to ensure AdminUI can be displayed when using
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
index cc1133e..3c2890e 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
@@ -24,6 +24,7 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -55,7 +56,10 @@ public class HdfsDirectory extends BaseDirectory {
private final FileContext fileContext;
private final int bufferSize;
-
+
+ /** Used to generate temp file names in {@link #createTempOutput}. */
+ private final AtomicLong nextTempFileCounter = new AtomicLong();
+
public HdfsDirectory(Path hdfsDirPath, Configuration configuration) throws IOException {
this(hdfsDirPath, HdfsLockFactory.INSTANCE, configuration, DEFAULT_BUFFER_SIZE);
}
@@ -123,7 +127,14 @@ public class HdfsDirectory extends BaseDirectory {
@Override
public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) throws IOException {
- throw new UnsupportedOperationException();
+ while (true) {
+ try {
+ String name = getTempFileName(prefix, suffix, nextTempFileCounter.getAndIncrement());
+ return new HdfsFileWriter(getFileSystem(), new Path(hdfsDirPath, name), name);
+ } catch (FileAlreadyExistsException faee) {
+ // Retry with next incremented name
+ }
+ }
}
private String[] getNormalNames(List<String> files) {
diff --git a/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java b/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java
index 474983d..baa328e 100644
--- a/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java
+++ b/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java
@@ -53,6 +53,8 @@ public class HdfsDirectoryTest extends SolrTestCaseJ4 {
private static final int MAX_BUFFER_SIZE = 5000;
private static final int MAX_NUMBER_OF_READS = 10000;
private static MiniDFSCluster dfsCluster;
+ private Configuration directoryConf;
+ private Path directoryPath;
private HdfsDirectory directory;
private Random random;
@@ -74,10 +76,11 @@ public class HdfsDirectoryTest extends SolrTestCaseJ4 {
public void setUp() throws Exception {
super.setUp();
- Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster);
- conf.set("dfs.permissions.enabled", "false");
+ directoryConf = HdfsTestUtil.getClientConfiguration(dfsCluster);
+ directoryConf.set("dfs.permissions.enabled", "false");
- directory = new HdfsDirectory(new Path(dfsCluster.getURI().toString() + createTempDir().toFile().getAbsolutePath() + "/hdfs"), conf);
+ directoryPath = new Path(dfsCluster.getURI().toString() + createTempDir().toFile().getAbsolutePath() + "/hdfs");
+ directory = new HdfsDirectory(directoryPath, directoryConf);
random = random();
}
@@ -240,4 +243,24 @@ public class HdfsDirectoryTest extends SolrTestCaseJ4 {
() -> directory.createOutput("foo", IOContext.DEFAULT));
}
+ public void testCreateTempFiles() throws IOException {
+ String file1;
+ try (Directory dir = new HdfsDirectory(directoryPath, directoryConf);
+ IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) {
+ out.writeByte((byte) 42);
+ file1 = out.getName();
+ }
+ assertTrue(file1.startsWith("foo_bar"));
+ assertTrue(file1.endsWith(".tmp"));
+ // Create the directory again to force the counter to be reset
+ String file2;
+ try (Directory dir = new HdfsDirectory(directoryPath, directoryConf);
+ IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) {
+ out.writeByte((byte) 42);
+ file2 = out.getName();
+ }
+ assertTrue(file2.startsWith("foo_bar"));
+ assertTrue(file2.endsWith(".tmp"));
+ assertNotEquals(file1, file2);
+ }
}