You are viewing a plain text version of this content; the canonical hyperlink was lost in the plain-text conversion (it presumably pointed to this commit, 5fae522f127bd9e12c8b965ae152d3cea985104f, in the incubator-druid repository on gitbox.apache.org).
Posted to commits@druid.apache.org by fj...@apache.org on 2018/11/30 15:40:48 UTC
[incubator-druid] branch master updated: replace Files.map() with
FileUtils.map() (#6692)
This is an automated email from the ASF dual-hosted git repository.
fjy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-druid.git
The following commit(s) were added to refs/heads/master by this push:
new 5fae522 replace Files.map() with FileUtils.map() (#6692)
5fae522 is described below
commit 5fae522f127bd9e12c8b965ae152d3cea985104f
Author: Shimi Kiviti <sh...@gmail.com>
AuthorDate: Fri Nov 30 17:40:41 2018 +0200
replace Files.map() with FileUtils.map() (#6692)
---
.../druid/benchmark/FloatCompressionBenchmark.java | 15 +++++++--
.../druid/benchmark/LongCompressionBenchmark.java | 16 ++++++++--
.../druid/benchmark/VSizeSerdeBenchmark.java | 37 ++++++++++++----------
3 files changed, 47 insertions(+), 21 deletions(-)
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmark.java
index 602cf30..8ac9941 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmark.java
@@ -20,7 +20,8 @@
package org.apache.druid.benchmark;
import com.google.common.base.Supplier;
-import com.google.common.io.Files;
+import org.apache.druid.java.util.common.FileUtils;
+import org.apache.druid.java.util.common.MappedByteBufferHandler;
import org.apache.druid.segment.data.ColumnarFloats;
import org.apache.druid.segment.data.CompressedColumnarFloatsSupplier;
import org.openjdk.jmh.annotations.Benchmark;
@@ -33,6 +34,7 @@ import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
@@ -64,15 +66,24 @@ public class FloatCompressionBenchmark
private Supplier<ColumnarFloats> supplier;
+ private MappedByteBufferHandler bufferHandler;
+
@Setup
public void setup() throws Exception
{
File dir = new File(dirPath);
File compFile = new File(dir, file + "-" + strategy);
- ByteBuffer buffer = Files.map(compFile);
+ bufferHandler = FileUtils.map(compFile);
+ ByteBuffer buffer = bufferHandler.get();
supplier = CompressedColumnarFloatsSupplier.fromByteBuffer(buffer, ByteOrder.nativeOrder());
}
+ @TearDown
+ public void tearDown()
+ {
+ bufferHandler.close();
+ }
+
@Benchmark
public void readContinuous(Blackhole bh)
{
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmark.java
index a85a722..f8383bf 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmark.java
@@ -20,7 +20,8 @@
package org.apache.druid.benchmark;
import com.google.common.base.Supplier;
-import com.google.common.io.Files;
+import org.apache.druid.java.util.common.FileUtils;
+import org.apache.druid.java.util.common.MappedByteBufferHandler;
import org.apache.druid.segment.data.ColumnarLongs;
import org.apache.druid.segment.data.CompressedColumnarLongsSupplier;
import org.openjdk.jmh.annotations.Benchmark;
@@ -33,6 +34,7 @@ import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
@@ -67,15 +69,24 @@ public class LongCompressionBenchmark
private Supplier<ColumnarLongs> supplier;
+ private MappedByteBufferHandler bufferHandler;
+
@Setup
public void setup() throws Exception
{
File dir = new File(dirPath);
File compFile = new File(dir, file + "-" + strategy + "-" + format);
- ByteBuffer buffer = Files.map(compFile);
+ bufferHandler = FileUtils.map(compFile);
+ ByteBuffer buffer = bufferHandler.get();
supplier = CompressedColumnarLongsSupplier.fromByteBuffer(buffer, ByteOrder.nativeOrder());
}
+ @TearDown
+ public void tearDown()
+ {
+ bufferHandler.close();
+ }
+
@Benchmark
public void readContinuous(Blackhole bh)
{
@@ -99,4 +110,3 @@ public class LongCompressionBenchmark
}
}
-
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/VSizeSerdeBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/VSizeSerdeBenchmark.java
index 6a4c85b..8c049a3 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/VSizeSerdeBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/VSizeSerdeBenchmark.java
@@ -19,7 +19,8 @@
package org.apache.druid.benchmark;
-import com.google.common.io.Files;
+import org.apache.druid.java.util.common.FileUtils;
+import org.apache.druid.java.util.common.MappedByteBufferHandler;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.segment.data.VSizeLongSerde;
import org.openjdk.jmh.annotations.Benchmark;
@@ -41,6 +42,7 @@ import java.io.Writer;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
@@ -78,26 +80,29 @@ public class VSizeSerdeBenchmark
// to construct a heapByteBuffer since they have different performance
File base = new File(this.getClass().getClassLoader().getResource("").toURI());
dummy = new File(base, "dummy");
- try (Writer writer = java.nio.file.Files.newBufferedWriter(dummy.toPath(), StandardCharsets.UTF_8)) {
+ try (Writer writer = Files.newBufferedWriter(dummy.toPath(), StandardCharsets.UTF_8)) {
String EMPTY_STRING = " ";
for (int i = 0; i < values + 10; i++) {
writer.write(EMPTY_STRING);
}
}
- ByteBuffer buffer = Files.map(dummy);
- d1 = VSizeLongSerde.getDeserializer(1, buffer, 10);
- d2 = VSizeLongSerde.getDeserializer(2, buffer, 10);
- d4 = VSizeLongSerde.getDeserializer(4, buffer, 10);
- d8 = VSizeLongSerde.getDeserializer(8, buffer, 10);
- d12 = VSizeLongSerde.getDeserializer(12, buffer, 10);
- d16 = VSizeLongSerde.getDeserializer(16, buffer, 10);
- d20 = VSizeLongSerde.getDeserializer(20, buffer, 10);
- d24 = VSizeLongSerde.getDeserializer(24, buffer, 10);
- d32 = VSizeLongSerde.getDeserializer(32, buffer, 10);
- d40 = VSizeLongSerde.getDeserializer(40, buffer, 10);
- d48 = VSizeLongSerde.getDeserializer(48, buffer, 10);
- d56 = VSizeLongSerde.getDeserializer(56, buffer, 10);
- d64 = VSizeLongSerde.getDeserializer(64, buffer, 10);
+
+ try (MappedByteBufferHandler bufferHandler = FileUtils.map(dummy)) {
+ ByteBuffer buffer = bufferHandler.get();
+ d1 = VSizeLongSerde.getDeserializer(1, buffer, 10);
+ d2 = VSizeLongSerde.getDeserializer(2, buffer, 10);
+ d4 = VSizeLongSerde.getDeserializer(4, buffer, 10);
+ d8 = VSizeLongSerde.getDeserializer(8, buffer, 10);
+ d12 = VSizeLongSerde.getDeserializer(12, buffer, 10);
+ d16 = VSizeLongSerde.getDeserializer(16, buffer, 10);
+ d20 = VSizeLongSerde.getDeserializer(20, buffer, 10);
+ d24 = VSizeLongSerde.getDeserializer(24, buffer, 10);
+ d32 = VSizeLongSerde.getDeserializer(32, buffer, 10);
+ d40 = VSizeLongSerde.getDeserializer(40, buffer, 10);
+ d48 = VSizeLongSerde.getDeserializer(48, buffer, 10);
+ d56 = VSizeLongSerde.getDeserializer(56, buffer, 10);
+ d64 = VSizeLongSerde.getDeserializer(64, buffer, 10);
+ }
}
@TearDown
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@druid.apache.org
For additional commands, e-mail: commits-help@druid.apache.org