You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@orc.apache.org by do...@apache.org on 2022/11/03 20:50:29 UTC
[orc] branch main updated: ORC-1306: Fixed indented code style for Java modules
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/orc.git
The following commit(s) were added to refs/heads/main by this push:
new 9cf9d26a2 ORC-1306: Fixed indented code style for Java modules
9cf9d26a2 is described below
commit 9cf9d26a20ac4d43911d7967ecaef8ef26b60594
Author: Guiyanakuang <gu...@gmail.com>
AuthorDate: Thu Nov 3 13:50:18 2022 -0700
ORC-1306: Fixed indented code style for Java modules
### What changes were proposed in this pull request?
This PR is aimed at fixing all indentation style issues in the Java modules, and adds an indentation-checking rule to checkstyle.xml.
```xml
<module name="Indentation">
<property name="severity" value="error"/>
<property name="basicOffset" value="2"/>
<property name="braceAdjustment" value="0"/>
<property name="caseIndent" value="2"/>
<property name="throwsIndent" value="2"/>
<property name="lineWrappingIndentation" value="4"/>
<property name="arrayInitIndent" value="2"/>
</module>
```
### Why are the changes needed?
Automated checks save time and ensure nothing is missed.
### How was this patch tested?
Checked with the maven-checkstyle-plugin.
Closes #1303 from guiyanakuang/ORC-1306.
Authored-by: Guiyanakuang <gu...@gmail.com>
Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
.../apache/orc/bench/core/filter/FilterBench.java | 28 +-
.../orc/bench/core/filter/FilterBenchUtil.java | 18 +-
.../apache/orc/bench/core/impl/ChunkReadUtil.java | 2 +-
.../orc/bench/hive/ColumnProjectionBenchmark.java | 4 +-
java/checkstyle.xml | 9 +
java/core/src/java/org/apache/orc/OrcFile.java | 3 +-
java/core/src/java/org/apache/orc/OrcUtils.java | 213 ++--
java/core/src/java/org/apache/orc/Reader.java | 2 +-
.../org/apache/orc/impl/ColumnStatisticsImpl.java | 8 +-
.../apache/orc/impl/ConvertTreeReaderFactory.java | 1035 ++++++++++----------
.../org/apache/orc/impl/DataReaderProperties.java | 2 +-
.../java/org/apache/orc/impl/DynamicByteArray.java | 2 +-
.../core/src/java/org/apache/orc/impl/IOUtils.java | 2 +-
.../java/org/apache/orc/impl/PhysicalFsWriter.java | 4 +-
.../src/java/org/apache/orc/impl/ReaderImpl.java | 87 +-
.../java/org/apache/orc/impl/RecordReaderImpl.java | 34 +-
.../org/apache/orc/impl/RunLengthByteReader.java | 2 +-
.../org/apache/orc/impl/RunLengthByteWriter.java | 2 +-
.../apache/orc/impl/RunLengthIntegerReader.java | 2 +-
.../apache/orc/impl/RunLengthIntegerReaderV2.java | 28 +-
.../java/org/apache/orc/impl/SchemaEvolution.java | 22 +-
.../org/apache/orc/impl/SerializationUtils.java | 749 +++++++-------
.../org/apache/orc/impl/TreeReaderFactory.java | 16 +-
.../src/java/org/apache/orc/impl/TypeUtils.java | 114 +--
.../src/java/org/apache/orc/impl/WriterImpl.java | 6 +-
.../apache/orc/impl/filter/leaf/FloatFilters.java | 152 +--
.../org/apache/orc/impl/mask/ListIdentity.java | 12 +-
.../apache/orc/impl/mask/RedactMaskFactory.java | 36 +-
.../orc/impl/reader/ReaderEncryptionVariant.java | 42 +-
.../org/apache/orc/impl/reader/StripePlanner.java | 90 +-
.../org/apache/orc/impl/writer/TreeWriter.java | 84 +-
.../java/org/apache/orc/util/CuckooSetBytes.java | 54 +-
.../apache/orc/util/StreamWrapperFileSystem.java | 6 +-
.../threeten/extra/chrono/HybridChronology.java | 691 ++++++-------
.../java/org/threeten/extra/chrono/HybridDate.java | 958 +++++++++---------
.../java/org/apache/orc/examples/CoreWriter.java | 3 +-
.../java/org/apache/orc/mapred/OrcInputFormat.java | 9 +-
.../apache/orc/mapred/OrcMapredRecordWriter.java | 10 +-
.../org/apache/orc/mapreduce/OrcInputFormat.java | 9 +-
.../org/apache/orc/mapreduce/OrcOutputFormat.java | 6 +-
.../org/apache/orc/impl/HadoopShimsCurrent.java | 14 +-
.../org/apache/orc/impl/HadoopShimsPre2_6.java | 2 +-
.../org/apache/orc/impl/HadoopShimsPre2_7.java | 2 +-
.../java/org/apache/orc/impl/NullKeyProvider.java | 48 +-
.../orc/impl/SnappyDirectDecompressWrapper.java | 50 +-
.../orc/impl/ZlibDirectDecompressWrapper.java | 50 +-
.../src/java/org/apache/orc/tools/ColumnSizes.java | 4 +-
.../src/java/org/apache/orc/tools/FileDump.java | 2 +-
.../src/java/org/apache/orc/tools/PrintData.java | 2 +-
.../org/apache/orc/tools/convert/JsonReader.java | 2 +-
50 files changed, 2378 insertions(+), 2354 deletions(-)
diff --git a/java/bench/core/src/java/org/apache/orc/bench/core/filter/FilterBench.java b/java/bench/core/src/java/org/apache/orc/bench/core/filter/FilterBench.java
index e3d88b119..748d638b0 100644
--- a/java/bench/core/src/java/org/apache/orc/bench/core/filter/FilterBench.java
+++ b/java/bench/core/src/java/org/apache/orc/bench/core/filter/FilterBench.java
@@ -83,14 +83,14 @@ public class FilterBench implements OrcBenchmark {
public static CommandLine parseCommandLine(String[] args, boolean needsArgs) {
org.apache.commons.cli.Options options = new org.apache.commons.cli.Options()
- .addOption("h", HELP, false, "Provide help")
- .addOption("i", ITERATIONS, true, "Number of iterations")
- .addOption("I", WARMUP_ITERATIONS, true, "Number of warmup iterations")
- .addOption("f", FORK, true, "How many forks to use")
- .addOption("t", TIME, true, "How long each iteration is in seconds")
- .addOption("m", MIN_MEMORY, true, "The minimum size of each JVM")
- .addOption("M", MAX_MEMORY, true, "The maximum size of each JVM")
- .addOption("g", GC, false, "Should GC be profiled");
+ .addOption("h", HELP, false, "Provide help")
+ .addOption("i", ITERATIONS, true, "Number of iterations")
+ .addOption("I", WARMUP_ITERATIONS, true, "Number of warmup iterations")
+ .addOption("f", FORK, true, "How many forks to use")
+ .addOption("t", TIME, true, "How long each iteration is in seconds")
+ .addOption("m", MIN_MEMORY, true, "The minimum size of each JVM")
+ .addOption("M", MAX_MEMORY, true, "The maximum size of each JVM")
+ .addOption("g", GC, false, "Should GC be profiled");
CommandLine result;
try {
result = new DefaultParser().parse(options, args, true);
@@ -118,15 +118,15 @@ public class FilterBench implements OrcBenchmark {
}
if (options.hasOption(WARMUP_ITERATIONS)) {
builder.warmupIterations(Integer.parseInt(options.getOptionValue(
- WARMUP_ITERATIONS)));
+ WARMUP_ITERATIONS)));
}
if (options.hasOption(FORK)) {
builder.forks(Integer.parseInt(options.getOptionValue(
- FORK)));
+ FORK)));
}
if (options.hasOption(TIME)) {
TimeValue iterationTime = TimeValue.seconds(Long.parseLong(
- options.getOptionValue(TIME)));
+ options.getOptionValue(TIME)));
builder.measurementTime(iterationTime);
builder.warmupTime(iterationTime);
}
@@ -168,9 +168,9 @@ public class FilterBench implements OrcBenchmark {
normalize);
case "vector":
Reader.Options options = new Reader.Options(conf)
- .searchArgument(sArg, new String[0])
- .allowSARGToFilter(true)
- .useSelected(true);
+ .searchArgument(sArg, new String[0])
+ .allowSARGToFilter(true)
+ .useSelected(true);
return FilterFactory.createBatchFilter(options,
FilterBenchUtil.schema,
false,
diff --git a/java/bench/core/src/java/org/apache/orc/bench/core/filter/FilterBenchUtil.java b/java/bench/core/src/java/org/apache/orc/bench/core/filter/FilterBenchUtil.java
index d39bb08de..823c247b8 100644
--- a/java/bench/core/src/java/org/apache/orc/bench/core/filter/FilterBenchUtil.java
+++ b/java/bench/core/src/java/org/apache/orc/bench/core/filter/FilterBenchUtil.java
@@ -35,8 +35,8 @@ import java.util.Set;
class FilterBenchUtil {
static final TypeDescription schema = TypeDescription.createStruct()
- .addField("f1", TypeDescription.createLong())
- .addField("f2", TypeDescription.createLong());
+ .addField("f1", TypeDescription.createLong())
+ .addField("f2", TypeDescription.createLong());
static VectorizedRowBatch createBatch(Random rnd) {
VectorizedRowBatch b = schema.createRowBatch(1024);
@@ -71,11 +71,11 @@ class FilterBenchUtil {
}
SearchArgument sArg = SearchArgumentFactory.newBuilder()
- .startOr()
- .in("f1", PredicateLeaf.Type.LONG, f1Values)
- .in("f2", PredicateLeaf.Type.LONG, f2Values)
- .end()
- .build();
+ .startOr()
+ .in("f1", PredicateLeaf.Type.LONG, f1Values)
+ .in("f2", PredicateLeaf.Type.LONG, f2Values)
+ .end()
+ .build();
int[] s = sel.stream()
.mapToInt(Integer::intValue)
.toArray();
@@ -108,8 +108,8 @@ class FilterBenchUtil {
sel.add(selIdx);
}
builder
- .in("f1", PredicateLeaf.Type.LONG, f1Values)
- .in("f2", PredicateLeaf.Type.LONG, f2Values);
+ .in("f1", PredicateLeaf.Type.LONG, f1Values)
+ .in("f2", PredicateLeaf.Type.LONG, f2Values);
builder.end();
}
builder.end();
diff --git a/java/bench/core/src/java/org/apache/orc/bench/core/impl/ChunkReadUtil.java b/java/bench/core/src/java/org/apache/orc/bench/core/impl/ChunkReadUtil.java
index 6877c9e34..e92a25958 100644
--- a/java/bench/core/src/java/org/apache/orc/bench/core/impl/ChunkReadUtil.java
+++ b/java/bench/core/src/java/org/apache/orc/bench/core/impl/ChunkReadUtil.java
@@ -127,7 +127,7 @@ public class ChunkReadUtil {
private static TypeDescription createSchema(int colCount) {
TypeDescription schema = TypeDescription.createStruct()
- .addField("id", TypeDescription.createLong());
+ .addField("id", TypeDescription.createLong());
for (int i = 1; i <= colCount; i++) {
TypeDescription fieldType;
switch (i % 3) {
diff --git a/java/bench/hive/src/java/org/apache/orc/bench/hive/ColumnProjectionBenchmark.java b/java/bench/hive/src/java/org/apache/orc/bench/hive/ColumnProjectionBenchmark.java
index ce0d1b705..9c1b7fd21 100644
--- a/java/bench/hive/src/java/org/apache/orc/bench/hive/ColumnProjectionBenchmark.java
+++ b/java/bench/hive/src/java/org/apache/orc/bench/hive/ColumnProjectionBenchmark.java
@@ -118,8 +118,8 @@ public class ColumnProjectionBenchmark implements OrcBenchmark {
conf.set("columns", "vendor_id,pickup_time");
conf.set("columns.types", "int,timestamp");
} else if ("sales".equals(dataset)) {
- conf.set("columns", "sales_id,customer_id");
- conf.set("columns.types", "bigint,bigint");
+ conf.set("columns", "sales_id,customer_id");
+ conf.set("columns.types", "bigint,bigint");
} else if ("github".equals(dataset)) {
conf.set("columns", "actor,created_at");
conf.set("columns.types", "struct<avatar_url:string,gravatar_id:string," +
diff --git a/java/checkstyle.xml b/java/checkstyle.xml
index c69d25df8..d55293f8a 100644
--- a/java/checkstyle.xml
+++ b/java/checkstyle.xml
@@ -55,5 +55,14 @@
<property name="ignoreComments" value="true"/>
<property name="message" value="No starting LAND and LOR allowed."/>
</module>
+ <module name="Indentation">
+ <property name="severity" value="error"/>
+ <property name="basicOffset" value="2"/>
+ <property name="braceAdjustment" value="0"/>
+ <property name="caseIndent" value="2"/>
+ <property name="throwsIndent" value="2"/>
+ <property name="lineWrappingIndentation" value="4"/>
+ <property name="arrayInitIndent" value="2"/>
+ </module>
</module>
</module>
diff --git a/java/core/src/java/org/apache/orc/OrcFile.java b/java/core/src/java/org/apache/orc/OrcFile.java
index 406fda8cb..a1a00e92f 100644
--- a/java/core/src/java/org/apache/orc/OrcFile.java
+++ b/java/core/src/java/org/apache/orc/OrcFile.java
@@ -995,8 +995,7 @@ public class OrcFile {
private static MemoryManager memoryManager = null;
- private static synchronized
- MemoryManager getStaticMemoryManager(Configuration conf) {
+ private static synchronized MemoryManager getStaticMemoryManager(Configuration conf) {
if (memoryManager == null) {
memoryManager = new MemoryManagerImpl(conf);
}
diff --git a/java/core/src/java/org/apache/orc/OrcUtils.java b/java/core/src/java/org/apache/orc/OrcUtils.java
index 48eceb037..c1d366118 100644
--- a/java/core/src/java/org/apache/orc/OrcUtils.java
+++ b/java/core/src/java/org/apache/orc/OrcUtils.java
@@ -122,83 +122,83 @@ public class OrcUtils {
.build());
}
switch (typeDescr.getCategory()) {
- case BOOLEAN:
- type.setKind(OrcProto.Type.Kind.BOOLEAN);
- break;
- case BYTE:
- type.setKind(OrcProto.Type.Kind.BYTE);
- break;
- case SHORT:
- type.setKind(OrcProto.Type.Kind.SHORT);
- break;
- case INT:
- type.setKind(OrcProto.Type.Kind.INT);
- break;
- case LONG:
- type.setKind(OrcProto.Type.Kind.LONG);
- break;
- case FLOAT:
- type.setKind(OrcProto.Type.Kind.FLOAT);
- break;
- case DOUBLE:
- type.setKind(OrcProto.Type.Kind.DOUBLE);
- break;
- case STRING:
- type.setKind(OrcProto.Type.Kind.STRING);
- break;
- case CHAR:
- type.setKind(OrcProto.Type.Kind.CHAR);
- type.setMaximumLength(typeDescr.getMaxLength());
- break;
- case VARCHAR:
- type.setKind(OrcProto.Type.Kind.VARCHAR);
- type.setMaximumLength(typeDescr.getMaxLength());
- break;
- case BINARY:
- type.setKind(OrcProto.Type.Kind.BINARY);
- break;
- case TIMESTAMP:
- type.setKind(OrcProto.Type.Kind.TIMESTAMP);
- break;
- case TIMESTAMP_INSTANT:
+ case BOOLEAN:
+ type.setKind(OrcProto.Type.Kind.BOOLEAN);
+ break;
+ case BYTE:
+ type.setKind(OrcProto.Type.Kind.BYTE);
+ break;
+ case SHORT:
+ type.setKind(OrcProto.Type.Kind.SHORT);
+ break;
+ case INT:
+ type.setKind(OrcProto.Type.Kind.INT);
+ break;
+ case LONG:
+ type.setKind(OrcProto.Type.Kind.LONG);
+ break;
+ case FLOAT:
+ type.setKind(OrcProto.Type.Kind.FLOAT);
+ break;
+ case DOUBLE:
+ type.setKind(OrcProto.Type.Kind.DOUBLE);
+ break;
+ case STRING:
+ type.setKind(OrcProto.Type.Kind.STRING);
+ break;
+ case CHAR:
+ type.setKind(OrcProto.Type.Kind.CHAR);
+ type.setMaximumLength(typeDescr.getMaxLength());
+ break;
+ case VARCHAR:
+ type.setKind(OrcProto.Type.Kind.VARCHAR);
+ type.setMaximumLength(typeDescr.getMaxLength());
+ break;
+ case BINARY:
+ type.setKind(OrcProto.Type.Kind.BINARY);
+ break;
+ case TIMESTAMP:
+ type.setKind(OrcProto.Type.Kind.TIMESTAMP);
+ break;
+ case TIMESTAMP_INSTANT:
type.setKind(OrcProto.Type.Kind.TIMESTAMP_INSTANT);
break;
- case DATE:
- type.setKind(OrcProto.Type.Kind.DATE);
- break;
- case DECIMAL:
- type.setKind(OrcProto.Type.Kind.DECIMAL);
- type.setPrecision(typeDescr.getPrecision());
- type.setScale(typeDescr.getScale());
- break;
- case LIST:
- type.setKind(OrcProto.Type.Kind.LIST);
- type.addSubtypes(children.get(0).getId());
- break;
- case MAP:
- type.setKind(OrcProto.Type.Kind.MAP);
- for(TypeDescription t: children) {
- type.addSubtypes(t.getId());
- }
- break;
- case STRUCT:
- type.setKind(OrcProto.Type.Kind.STRUCT);
- for(TypeDescription t: children) {
- type.addSubtypes(t.getId());
- }
- for(String field: typeDescr.getFieldNames()) {
- type.addFieldNames(field);
- }
- break;
- case UNION:
- type.setKind(OrcProto.Type.Kind.UNION);
- for(TypeDescription t: children) {
- type.addSubtypes(t.getId());
- }
- break;
- default:
- throw new IllegalArgumentException("Unknown category: " +
- typeDescr.getCategory());
+ case DATE:
+ type.setKind(OrcProto.Type.Kind.DATE);
+ break;
+ case DECIMAL:
+ type.setKind(OrcProto.Type.Kind.DECIMAL);
+ type.setPrecision(typeDescr.getPrecision());
+ type.setScale(typeDescr.getScale());
+ break;
+ case LIST:
+ type.setKind(OrcProto.Type.Kind.LIST);
+ type.addSubtypes(children.get(0).getId());
+ break;
+ case MAP:
+ type.setKind(OrcProto.Type.Kind.MAP);
+ for(TypeDescription t: children) {
+ type.addSubtypes(t.getId());
+ }
+ break;
+ case STRUCT:
+ type.setKind(OrcProto.Type.Kind.STRUCT);
+ for(TypeDescription t: children) {
+ type.addSubtypes(t.getId());
+ }
+ for(String field: typeDescr.getFieldNames()) {
+ type.addFieldNames(field);
+ }
+ break;
+ case UNION:
+ type.setKind(OrcProto.Type.Kind.UNION);
+ for(TypeDescription t: children) {
+ type.addSubtypes(t.getId());
+ }
+ break;
+ default:
+ throw new IllegalArgumentException("Unknown category: " +
+ typeDescr.getCategory());
}
result.add(type.build());
if (children != null) {
@@ -297,12 +297,11 @@ public class OrcUtils {
result = TypeDescription.createString();
break;
case CHAR:
- case VARCHAR: {
- result = type.getKind() == OrcProto.Type.Kind.CHAR ?
- TypeDescription.createChar() : TypeDescription.createVarchar();
- if (type.hasMaximumLength()) {
- result.withMaxLength(type.getMaximumLength());
- }
+ case VARCHAR:
+ result = type.getKind() == OrcProto.Type.Kind.CHAR ?
+ TypeDescription.createChar() : TypeDescription.createVarchar();
+ if (type.hasMaximumLength()) {
+ result.withMaxLength(type.getMaximumLength());
}
break;
case BINARY:
@@ -318,15 +317,15 @@ public class OrcUtils {
result = TypeDescription.createDate();
break;
case DECIMAL: {
- result = TypeDescription.createDecimal();
- if (type.hasScale()) {
- result.withScale(type.getScale());
- }
- if (type.hasPrecision()) {
- result.withPrecision(type.getPrecision());
- }
+ result = TypeDescription.createDecimal();
+ if (type.hasScale()) {
+ result.withScale(type.getScale());
}
- break;
+ if (type.hasPrecision()) {
+ result.withPrecision(type.getPrecision());
+ }
+ }
+ break;
case LIST:
if (type.getSubtypesCount() != 1) {
throw new FileFormatException("LIST type should contain exactly " +
@@ -345,27 +344,27 @@ public class OrcUtils {
convertTypeFromProtobuf(types, type.getSubtypes(1)));
break;
case STRUCT: {
- result = TypeDescription.createStruct();
- for(int f=0; f < type.getSubtypesCount(); ++f) {
- String name = type.getFieldNames(f);
- name = name.startsWith("`") ? name : "`" + name + "`";
- String fieldName = ParserUtils.parseName(new ParserUtils.StringPosition(name));
- result.addField(fieldName, convertTypeFromProtobuf(types, type.getSubtypes(f)));
- }
+ result = TypeDescription.createStruct();
+ for(int f=0; f < type.getSubtypesCount(); ++f) {
+ String name = type.getFieldNames(f);
+ name = name.startsWith("`") ? name : "`" + name + "`";
+ String fieldName = ParserUtils.parseName(new ParserUtils.StringPosition(name));
+ result.addField(fieldName, convertTypeFromProtobuf(types, type.getSubtypes(f)));
}
- break;
+ }
+ break;
case UNION: {
- if (type.getSubtypesCount() == 0) {
- throw new FileFormatException("UNION type should contain at least" +
- " one subtype but has none");
- }
- result = TypeDescription.createUnion();
- for(int f=0; f < type.getSubtypesCount(); ++f) {
- result.addUnionChild(
- convertTypeFromProtobuf(types, type.getSubtypes(f)));
- }
+ if (type.getSubtypesCount() == 0) {
+ throw new FileFormatException("UNION type should contain at least" +
+ " one subtype but has none");
}
- break;
+ result = TypeDescription.createUnion();
+ for(int f=0; f < type.getSubtypesCount(); ++f) {
+ result.addUnionChild(
+ convertTypeFromProtobuf(types, type.getSubtypes(f)));
+ }
+ }
+ break;
default:
throw new IllegalArgumentException("Unknown ORC type " + type.getKind());
}
diff --git a/java/core/src/java/org/apache/orc/Reader.java b/java/core/src/java/org/apache/orc/Reader.java
index 9ea9eeabf..b22bdc50e 100644
--- a/java/core/src/java/org/apache/orc/Reader.java
+++ b/java/core/src/java/org/apache/orc/Reader.java
@@ -236,7 +236,7 @@ public interface Reader extends Closeable {
private boolean allowPluginFilters = false;
private int minSeekSize = (int) OrcConf.ORC_MIN_DISK_SEEK_SIZE.getDefaultValue();
private double minSeekSizeTolerance = (double) OrcConf.ORC_MIN_DISK_SEEK_SIZE_TOLERANCE
- .getDefaultValue();
+ .getDefaultValue();
private int rowBatchSize = (int) OrcConf.ROW_BATCH_SIZE.getDefaultValue();
/**
diff --git a/java/core/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java b/java/core/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java
index 5573d4bef..680e9f14e 100644
--- a/java/core/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java
+++ b/java/core/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java
@@ -117,7 +117,7 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
public OrcProto.ColumnStatistics.Builder serialize() {
OrcProto.ColumnStatistics.Builder builder = super.serialize();
OrcProto.BucketStatistics.Builder bucket =
- OrcProto.BucketStatistics.newBuilder();
+ OrcProto.BucketStatistics.newBuilder();
bucket.addCount(trueCount);
builder.setBucketStatistics(bucket);
return builder;
@@ -584,7 +584,7 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
public OrcProto.ColumnStatistics.Builder serialize() {
OrcProto.ColumnStatistics.Builder builder = super.serialize();
OrcProto.DoubleStatistics.Builder dbl =
- OrcProto.DoubleStatistics.newBuilder();
+ OrcProto.DoubleStatistics.newBuilder();
if (hasMinimum) {
dbl.setMinimum(minimum);
dbl.setMaximum(maximum);
@@ -790,7 +790,7 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
public OrcProto.ColumnStatistics.Builder serialize() {
OrcProto.ColumnStatistics.Builder result = super.serialize();
OrcProto.StringStatistics.Builder str =
- OrcProto.StringStatistics.newBuilder();
+ OrcProto.StringStatistics.newBuilder();
if (getNumberOfValues() != 0) {
if (isLowerBoundSet) {
str.setLowerBound(minimum.toString());
@@ -2011,7 +2011,7 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
public OrcProto.ColumnStatistics.Builder serialize() {
OrcProto.ColumnStatistics.Builder builder =
- OrcProto.ColumnStatistics.newBuilder();
+ OrcProto.ColumnStatistics.newBuilder();
builder.setNumberOfValues(count);
builder.setHasNull(hasNull);
if (bytesOnDisk != 0) {
diff --git a/java/core/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java b/java/core/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
index 9cc1bc89b..058e5b060 100644
--- a/java/core/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
+++ b/java/core/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
@@ -94,21 +94,24 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
}
static TreeReader getStringGroupTreeReader(int columnId,
- TypeDescription fileType, Context context) throws IOException {
+ TypeDescription fileType,
+ Context context) throws IOException {
switch (fileType.getCategory()) {
- case STRING:
- return new StringTreeReader(columnId, context);
- case CHAR:
- return new CharTreeReader(columnId, fileType.getMaxLength(), context);
- case VARCHAR:
- return new VarcharTreeReader(columnId, fileType.getMaxLength(), context);
- default:
- throw new RuntimeException("Unexpected type kind " + fileType.getCategory().name());
+ case STRING:
+ return new StringTreeReader(columnId, context);
+ case CHAR:
+ return new CharTreeReader(columnId, fileType.getMaxLength(), context);
+ case VARCHAR:
+ return new VarcharTreeReader(columnId, fileType.getMaxLength(), context);
+ default:
+ throw new RuntimeException("Unexpected type kind " + fileType.getCategory().name());
}
}
protected void assignStringGroupVectorEntry(BytesColumnVector bytesColVector,
- int elementNum, TypeDescription readerType, byte[] bytes) {
+ int elementNum,
+ TypeDescription readerType,
+ byte[] bytes) {
assignStringGroupVectorEntry(bytesColVector,
elementNum, readerType, bytes, 0, bytes.length);
}
@@ -118,62 +121,62 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
* length for the string group which can be (STRING, CHAR, VARCHAR).
*/
protected void assignStringGroupVectorEntry(BytesColumnVector bytesColVector,
- int elementNum, TypeDescription readerType, byte[] bytes, int start, int length) {
+ int elementNum,
+ TypeDescription readerType,
+ byte[] bytes,
+ int start,
+ int length) {
switch (readerType.getCategory()) {
- case STRING:
- bytesColVector.setVal(elementNum, bytes, start, length);
- break;
- case CHAR:
- {
+ case STRING:
+ bytesColVector.setVal(elementNum, bytes, start, length);
+ break;
+ case CHAR: {
int adjustedDownLen =
StringExpr.rightTrimAndTruncate(bytes, start, length, readerType.getMaxLength());
bytesColVector.setVal(elementNum, bytes, start, adjustedDownLen);
}
break;
- case VARCHAR:
- {
+ case VARCHAR: {
int adjustedDownLen =
StringExpr.truncate(bytes, start, length, readerType.getMaxLength());
bytesColVector.setVal(elementNum, bytes, start, adjustedDownLen);
}
break;
- default:
- throw new RuntimeException("Unexpected type kind " + readerType.getCategory().name());
+ default:
+ throw new RuntimeException("Unexpected type kind " + readerType.getCategory().name());
}
}
protected void convertStringGroupVectorElement(BytesColumnVector bytesColVector,
- int elementNum, TypeDescription readerType) {
+ int elementNum, TypeDescription readerType) {
switch (readerType.getCategory()) {
- case STRING:
- // No conversion needed.
- break;
- case CHAR:
- {
+ case STRING:
+ // No conversion needed.
+ break;
+ case CHAR: {
int length = bytesColVector.length[elementNum];
int adjustedDownLen = StringExpr
- .rightTrimAndTruncate(bytesColVector.vector[elementNum],
- bytesColVector.start[elementNum], length,
- readerType.getMaxLength());
+ .rightTrimAndTruncate(bytesColVector.vector[elementNum],
+ bytesColVector.start[elementNum], length,
+ readerType.getMaxLength());
if (adjustedDownLen < length) {
bytesColVector.length[elementNum] = adjustedDownLen;
}
}
break;
- case VARCHAR:
- {
+ case VARCHAR: {
int length = bytesColVector.length[elementNum];
int adjustedDownLen = StringExpr
- .truncate(bytesColVector.vector[elementNum],
- bytesColVector.start[elementNum], length,
- readerType.getMaxLength());
+ .truncate(bytesColVector.vector[elementNum],
+ bytesColVector.start[elementNum], length,
+ readerType.getMaxLength());
if (adjustedDownLen < length) {
bytesColVector.length[elementNum] = adjustedDownLen;
}
}
break;
- default:
- throw new RuntimeException("Unexpected type kind " + readerType.getCategory().name());
+ default:
+ throw new RuntimeException("Unexpected type kind " + readerType.getCategory().name());
}
}
@@ -248,7 +251,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// don't want to catch an exception.
return ((MIN_LONG_AS_DOUBLE - doubleValue < 1.0) &&
- (doubleValue < MAX_LONG_AS_DOUBLE_PLUS_ONE));
+ (doubleValue < MAX_LONG_AS_DOUBLE_PLUS_ONE));
}
@Override
@@ -295,7 +298,8 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// Common code used by the conversion.
public void convertVector(ColumnVector fromColVector,
- ColumnVector resultColVector, final int batchSize) throws IOException {
+ ColumnVector resultColVector,
+ final int batchSize) throws IOException {
resultColVector.reset();
if (fromColVector.isRepeating) {
@@ -306,7 +310,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
resultColVector.noNulls = false;
resultColVector.isNull[0] = true;
}
- } else if (fromColVector.noNulls){
+ } else if (fromColVector.noNulls) {
for (int i = 0; i < batchSize; i++) {
setConvertVectorElement(i);
}
@@ -323,35 +327,35 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
}
public void downCastAnyInteger(LongColumnVector longColVector, int elementNum,
- TypeDescription readerType) {
+ TypeDescription readerType) {
downCastAnyInteger(longColVector, elementNum, longColVector.vector[elementNum], readerType);
}
public void downCastAnyInteger(LongColumnVector longColVector, int elementNum, long inputLong,
- TypeDescription readerType) {
+ TypeDescription readerType) {
long[] vector = longColVector.vector;
long outputLong;
Category readerCategory = readerType.getCategory();
switch (readerCategory) {
- case BOOLEAN:
- // No data loss for boolean.
- vector[elementNum] = inputLong == 0 ? 0 : 1;
- return;
- case BYTE:
- outputLong = (byte) inputLong;
- break;
- case SHORT:
- outputLong = (short) inputLong;
- break;
- case INT:
- outputLong = (int) inputLong;
- break;
- case LONG:
- // No data loss for long.
- vector[elementNum] = inputLong;
- return;
- default:
- throw new RuntimeException("Unexpected type kind " + readerCategory.name());
+ case BOOLEAN:
+ // No data loss for boolean.
+ vector[elementNum] = inputLong == 0 ? 0 : 1;
+ return;
+ case BYTE:
+ outputLong = (byte) inputLong;
+ break;
+ case SHORT:
+ outputLong = (short) inputLong;
+ break;
+ case INT:
+ outputLong = (int) inputLong;
+ break;
+ case LONG:
+ // No data loss for long.
+ vector[elementNum] = inputLong;
+ return;
+ default:
+ throw new RuntimeException("Unexpected type kind " + readerCategory.name());
}
if (outputLong != inputLong) {
@@ -374,18 +378,18 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
TypeDescription fileType,
Context context) throws IOException {
switch (fileType.getCategory()) {
- case BOOLEAN:
- return new BooleanTreeReader(columnId, context);
- case BYTE:
- return new ByteTreeReader(columnId, context);
- case SHORT:
- return new ShortTreeReader(columnId, context);
- case INT:
- return new IntTreeReader(columnId, context);
- case LONG:
- return new LongTreeReader(columnId, context);
- default:
- throw new RuntimeException("Unexpected type kind " + fileType);
+ case BOOLEAN:
+ return new BooleanTreeReader(columnId, context);
+ case BYTE:
+ return new ByteTreeReader(columnId, context);
+ case SHORT:
+ return new ShortTreeReader(columnId, context);
+ case INT:
+ return new IntTreeReader(columnId, context);
+ case LONG:
+ return new LongTreeReader(columnId, context);
+ default:
+ throw new RuntimeException("Unexpected type kind " + fileType);
}
}
@@ -395,7 +399,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
AnyIntegerFromAnyIntegerTreeReader(
int columnId, TypeDescription fileType, TypeDescription readerType,
- Context context) throws IOException {
+ Context context) throws IOException {
super(columnId, createFromInteger(columnId, fileType, context), context);
this.readerType = readerType;
downCastNeeded = integerDownCastNeeded(fileType, readerType);
@@ -438,8 +442,8 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
TypeDescription readerType, Context context)
throws IOException {
super(columnId, fileType.getCategory() == Category.DOUBLE ?
- new DoubleTreeReader(columnId, context) :
- new FloatTreeReader(columnId, context), context);
+ new DoubleTreeReader(columnId, context) :
+ new FloatTreeReader(columnId, context), context);
this.readerType = readerType;
}
@@ -481,8 +485,10 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private DecimalColumnVector decimalColVector;
private LongColumnVector longColVector;
- AnyIntegerFromDecimalTreeReader(int columnId, TypeDescription fileType,
- TypeDescription readerType, Context context) throws IOException {
+ AnyIntegerFromDecimalTreeReader(int columnId,
+ TypeDescription fileType,
+ TypeDescription readerType,
+ Context context) throws IOException {
super(columnId, new DecimalTreeReader(columnId, fileType.getPrecision(),
fileType.getScale(), context), context);
this.precision = fileType.getPrecision();
@@ -567,8 +573,10 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private BytesColumnVector bytesColVector;
private LongColumnVector longColVector;
- AnyIntegerFromStringGroupTreeReader(int columnId, TypeDescription fileType,
- TypeDescription readerType, Context context) throws IOException {
+ AnyIntegerFromStringGroupTreeReader(int columnId,
+ TypeDescription fileType,
+ TypeDescription readerType,
+ Context context) throws IOException {
super(columnId, getStringGroupTreeReader(columnId, fileType, context), context);
this.readerType = readerType;
}
@@ -613,7 +621,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
AnyIntegerFromTimestampTreeReader(int columnId, TypeDescription readerType,
Context context,
boolean instantType) throws IOException {
- super(columnId, new TimestampTreeReader(columnId, context, instantType), context);
+ super(columnId, new TimestampTreeReader(columnId, context, instantType), context);
this.readerType = readerType;
}
@@ -649,7 +657,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private DoubleColumnVector doubleColVector;
DoubleFromAnyIntegerTreeReader(int columnId, TypeDescription fileType,
- Context context) throws IOException {
+ Context context) throws IOException {
super(columnId, createFromInteger(columnId, fileType, context), context);
}
@@ -884,8 +892,8 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
Context context)
throws IOException {
super(columnId, fileType.getCategory() == Category.DOUBLE ?
- new DoubleTreeReader(columnId, context) :
- new FloatTreeReader(columnId, context), context);
+ new DoubleTreeReader(columnId, context) :
+ new FloatTreeReader(columnId, context), context);
}
@Override
@@ -928,8 +936,10 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private BytesColumnVector bytesColVector;
private ColumnVector decimalColVector;
- DecimalFromStringGroupTreeReader(int columnId, TypeDescription fileType,
- TypeDescription readerType, Context context) throws IOException {
+ DecimalFromStringGroupTreeReader(int columnId,
+ TypeDescription fileType,
+ TypeDescription readerType,
+ Context context) throws IOException {
super(columnId, getStringGroupTreeReader(columnId, fileType, context), context);
}
@@ -976,7 +986,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
DecimalFromTimestampTreeReader(int columnId, Context context,
boolean instantType) throws IOException {
- super(columnId, new TimestampTreeReader(columnId, context, instantType), context);
+ super(columnId, new TimestampTreeReader(columnId, context, instantType), context);
}
@Override
@@ -1074,8 +1084,10 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
protected LongColumnVector longColVector;
protected BytesColumnVector bytesColVector;
- StringGroupFromAnyIntegerTreeReader(int columnId, TypeDescription fileType,
- TypeDescription readerType, Context context) throws IOException {
+ StringGroupFromAnyIntegerTreeReader(int columnId,
+ TypeDescription fileType,
+ TypeDescription readerType,
+ Context context) throws IOException {
super(columnId, createFromInteger(columnId, fileType, context), context);
this.readerType = readerType;
}
@@ -1083,7 +1095,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
@Override
public void setConvertVectorElement(int elementNum) {
byte[] bytes = Long.toString(longColVector.vector[elementNum])
- .getBytes(StandardCharsets.UTF_8);
+ .getBytes(StandardCharsets.UTF_8);
assignStringGroupVectorEntry(bytesColVector, elementNum, readerType, bytes);
}
@@ -1131,10 +1143,10 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
StringGroupFromDoubleTreeReader(int columnId, TypeDescription fileType,
TypeDescription readerType,
- Context context) throws IOException {
+ Context context) throws IOException {
super(columnId, fileType.getCategory() == Category.DOUBLE ?
- new DoubleTreeReader(columnId, context) :
- new FloatTreeReader(columnId, context), context);
+ new DoubleTreeReader(columnId, context) :
+ new FloatTreeReader(columnId, context), context);
this.readerType = readerType;
}
@@ -1172,7 +1184,6 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
}
-
public static class StringGroupFromDecimalTreeReader extends ConvertTreeReader {
private int precision;
private int scale;
@@ -1181,8 +1192,10 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private BytesColumnVector bytesColVector;
private byte[] scratchBuffer;
- StringGroupFromDecimalTreeReader(int columnId, TypeDescription fileType,
- TypeDescription readerType, Context context) throws IOException {
+ StringGroupFromDecimalTreeReader(int columnId,
+ TypeDescription fileType,
+ TypeDescription readerType,
+ Context context) throws IOException {
super(columnId, new DecimalTreeReader(columnId, fileType.getPrecision(),
fileType.getScale(), context), context);
this.precision = fileType.getPrecision();
@@ -1241,18 +1254,18 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
* Eg. "2019-07-09 13:11:00"
*/
static final DateTimeFormatter TIMESTAMP_FORMAT =
- new DateTimeFormatterBuilder()
- .append(DATE_FORMAT)
- .appendLiteral(' ')
- .appendValue(ChronoField.HOUR_OF_DAY, 2)
- .appendLiteral(':')
- .appendValue(ChronoField.MINUTE_OF_HOUR, 2)
- .optionalStart()
- .appendLiteral(':')
- .appendValue(ChronoField.SECOND_OF_MINUTE, 2)
- .optionalStart()
- .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true)
- .toFormatter();
+ new DateTimeFormatterBuilder()
+ .append(DATE_FORMAT)
+ .appendLiteral(' ')
+ .appendValue(ChronoField.HOUR_OF_DAY, 2)
+ .appendLiteral(':')
+ .appendValue(ChronoField.MINUTE_OF_HOUR, 2)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(ChronoField.SECOND_OF_MINUTE, 2)
+ .optionalStart()
+ .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true)
+ .toFormatter();
/**
* The format for converting from/to string/timestamp with local time zone.
@@ -1277,7 +1290,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
*/
static Instant timestampToInstant(TimestampColumnVector vector, int element) {
return Instant.ofEpochSecond(Math.floorDiv(vector.time[element], 1000),
- vector.nanos[element]);
+ vector.nanos[element]);
}
/**
@@ -1288,7 +1301,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
* @return the timestamp instant
*/
static Instant decimalToInstant(DecimalColumnVector vector, int element,
- HiveDecimalWritable value) {
+ HiveDecimalWritable value) {
final HiveDecimalWritable writable = vector.vector[element];
final long seconds = writable.longValue();
@@ -1317,7 +1330,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
super(columnId, new TimestampTreeReader(columnId, context, instantType), context);
this.readerType = readerType;
local = context.getUseUTCTimestamp() ? ZoneId.of("UTC")
- : ZoneId.systemDefault();
+ : ZoneId.systemDefault();
Chronology chronology = context.useProlepticGregorian()
? IsoChronology.INSTANCE : HybridChronology.INSTANCE;
formatter = (instantType ? INSTANT_TIMESTAMP_FORMAT : TIMESTAMP_FORMAT)
@@ -1327,7 +1340,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
@Override
public void setConvertVectorElement(int elementNum) throws IOException {
String string = timestampToInstant(timestampColVector, elementNum).atZone(local)
- .format(formatter);
+ .format(formatter);
byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
assignStringGroupVectorEntry(bytesColVector, elementNum, readerType, bytes);
}
@@ -1359,7 +1372,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private final boolean useProlepticGregorian;
StringGroupFromDateTreeReader(int columnId, TypeDescription readerType,
- Context context) throws IOException {
+ Context context) throws IOException {
super(columnId, new DateTreeReader(columnId, context), context);
this.readerType = readerType;
useProlepticGregorian = context.useProlepticGregorian();
@@ -1396,8 +1409,10 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
public static class StringGroupFromStringGroupTreeReader extends ConvertTreeReader {
private final TypeDescription readerType;
- StringGroupFromStringGroupTreeReader(int columnId, TypeDescription fileType,
- TypeDescription readerType, Context context) throws IOException {
+ StringGroupFromStringGroupTreeReader(int columnId,
+ TypeDescription fileType,
+ TypeDescription readerType,
+ Context context) throws IOException {
super(columnId, getStringGroupTreeReader(columnId, fileType, context), context);
this.readerType = readerType;
}
@@ -1440,7 +1455,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private BytesColumnVector outBytesColVector;
StringGroupFromBinaryTreeReader(int columnId, TypeDescription readerType,
- Context context) throws IOException {
+ Context context) throws IOException {
super(columnId, new BinaryTreeReader(columnId, context), context);
this.readerType = readerType;
}
@@ -1543,12 +1558,12 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private final boolean fileUsedProlepticGregorian;
TimestampFromDoubleTreeReader(int columnId, TypeDescription fileType,
- TypeDescription readerType, Context context) throws IOException {
+ TypeDescription readerType, Context context) throws IOException {
super(columnId, fileType.getCategory() == Category.DOUBLE ?
- new DoubleTreeReader(columnId, context) :
- new FloatTreeReader(columnId, context), context);
+ new DoubleTreeReader(columnId, context) :
+ new FloatTreeReader(columnId, context), context);
useUtc = readerType.getCategory() == Category.TIMESTAMP_INSTANT ||
- context.getUseUTCTimestamp();
+ context.getUseUTCTimestamp();
local = TimeZone.getDefault();
useProlepticGregorian = context.useProlepticGregorian();
fileUsedProlepticGregorian = context.fileUsedProlepticGregorian();
@@ -1564,7 +1579,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
double doubleMillis = seconds * 1000;
long millis = Math.round(doubleMillis);
if (doubleMillis > Long.MAX_VALUE || doubleMillis < Long.MIN_VALUE ||
- ((millis >= 0) != (doubleMillis >= 0))) {
+ ((millis >= 0) != (doubleMillis >= 0))) {
timestampColVector.time[elementNum] = 0L;
timestampColVector.nanos[elementNum] = 0;
timestampColVector.isNull[elementNum] = true;
@@ -1674,16 +1689,16 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
super(columnId, getStringGroupTreeReader(columnId, fileType, context), context);
useProlepticGregorian = context.useProlepticGregorian();
Chronology chronology = useProlepticGregorian
- ? IsoChronology.INSTANCE
- : HybridChronology.INSTANCE;
+ ? IsoChronology.INSTANCE
+ : HybridChronology.INSTANCE;
if (isInstant) {
formatter = INSTANT_TIMESTAMP_FORMAT.withChronology(chronology);
} else {
formatter = TIMESTAMP_FORMAT
- .withZone(context.getUseUTCTimestamp() ?
- ZoneId.of("UTC") :
- ZoneId.systemDefault())
- .withChronology(chronology);
+ .withZone(context.getUseUTCTimestamp() ?
+ ZoneId.of("UTC") :
+ ZoneId.systemDefault())
+ .withChronology(chronology);
}
}
@@ -1730,10 +1745,10 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
private final boolean useProlepticGregorian;
TimestampFromDateTreeReader(int columnId, TypeDescription readerType,
- Context context) throws IOException {
+ Context context) throws IOException {
super(columnId, new DateTreeReader(columnId, context), context);
useUtc = readerType.getCategory() == Category.TIMESTAMP_INSTANT ||
- context.getUseUTCTimestamp();
+ context.getUseUTCTimestamp();
useProlepticGregorian = context.useProlepticGregorian();
}
@@ -1742,8 +1757,8 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
long days = longColVector.vector[elementNum];
long millis = days * 24 * 60 * 60 * 1000;
timestampColVector.time[elementNum] = useUtc ?
- millis :
- SerializationUtils.convertFromUtc(local, millis);
+ millis :
+ SerializationUtils.convertFromUtc(local, millis);
timestampColVector.nanos[elementNum] = 0;
}
@@ -1809,7 +1824,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
dateColumnVector = null;
if (useProlepticGregorian) {
throw new IllegalArgumentException("Can't use LongColumnVector with" +
- " proleptic Gregorian dates.");
+ " proleptic Gregorian dates.");
}
}
} else {
@@ -1843,7 +1858,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
public void setConvertVectorElement(int elementNum) throws IOException {
LocalDate day = LocalDate.from(
Instant.ofEpochSecond(timestampColVector.time[elementNum] / 1000,
- timestampColVector.nanos[elementNum])
+ timestampColVector.nanos[elementNum])
.atZone(local));
longColVector.vector[elementNum] = day.toEpochDay();
}
@@ -1860,7 +1875,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
longColVector = (LongColumnVector) previousVector;
if (useProlepticGregorian && !(longColVector instanceof DateColumnVector)) {
throw new IllegalArgumentException("Can't use LongColumnVector with" +
- " proleptic Gregorian dates.");
+ " proleptic Gregorian dates.");
}
} else {
timestampColVector.ensureSize(batchSize, false);
@@ -1885,46 +1900,46 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
//
switch (readerType.getCategory()) {
- case BOOLEAN:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- if (fileType.getCategory() == readerType.getCategory()) {
- throw new IllegalArgumentException("No conversion of type " +
- readerType.getCategory() + " to self needed");
- }
- return new AnyIntegerFromAnyIntegerTreeReader(columnId, fileType, readerType,
- context);
-
- case FLOAT:
- case DOUBLE:
- return new DoubleFromAnyIntegerTreeReader(columnId, fileType, context);
-
- case DECIMAL:
- return new DecimalFromAnyIntegerTreeReader(columnId, fileType, context);
-
- case STRING:
- case CHAR:
- case VARCHAR:
- return new StringGroupFromBooleanTreeReader(columnId, fileType, readerType,
- context);
-
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return new TimestampFromAnyIntegerTreeReader(columnId, fileType, context,
- readerType.getCategory() == Category.TIMESTAMP_INSTANT);
-
- // Not currently supported conversion(s):
- case BINARY:
- case DATE:
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- readerType.getCategory());
+ case BOOLEAN:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ if (fileType.getCategory() == readerType.getCategory()) {
+ throw new IllegalArgumentException("No conversion of type " +
+ readerType.getCategory() + " to self needed");
+ }
+ return new AnyIntegerFromAnyIntegerTreeReader(columnId, fileType, readerType,
+ context);
+
+ case FLOAT:
+ case DOUBLE:
+ return new DoubleFromAnyIntegerTreeReader(columnId, fileType, context);
+
+ case DECIMAL:
+ return new DecimalFromAnyIntegerTreeReader(columnId, fileType, context);
+
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return new StringGroupFromBooleanTreeReader(columnId, fileType, readerType,
+ context);
+
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return new TimestampFromAnyIntegerTreeReader(columnId, fileType, context,
+ readerType.getCategory() == Category.TIMESTAMP_INSTANT);
+
+ // Not currently supported conversion(s):
+ case BINARY:
+ case DATE:
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ readerType.getCategory());
}
}
@@ -1937,48 +1952,48 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
//
switch (readerType.getCategory()) {
- case BOOLEAN:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- if (fileType.getCategory() == readerType.getCategory()) {
- throw new IllegalArgumentException("No conversion of type " +
- readerType.getCategory() + " to self needed");
- }
- return new AnyIntegerFromAnyIntegerTreeReader(columnId, fileType, readerType,
- context);
-
- case FLOAT:
- case DOUBLE:
- return new DoubleFromAnyIntegerTreeReader(columnId, fileType,
- context);
-
- case DECIMAL:
- return new DecimalFromAnyIntegerTreeReader(columnId, fileType, context);
-
- case STRING:
- case CHAR:
- case VARCHAR:
- return new StringGroupFromAnyIntegerTreeReader(columnId, fileType, readerType,
- context);
-
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return new TimestampFromAnyIntegerTreeReader(columnId, fileType, context,
- readerType.getCategory() == Category.TIMESTAMP_INSTANT);
-
- // Not currently supported conversion(s):
- case BINARY:
- case DATE:
-
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- readerType.getCategory());
+ case BOOLEAN:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ if (fileType.getCategory() == readerType.getCategory()) {
+ throw new IllegalArgumentException("No conversion of type " +
+ readerType.getCategory() + " to self needed");
+ }
+ return new AnyIntegerFromAnyIntegerTreeReader(columnId, fileType, readerType,
+ context);
+
+ case FLOAT:
+ case DOUBLE:
+ return new DoubleFromAnyIntegerTreeReader(columnId, fileType,
+ context);
+
+ case DECIMAL:
+ return new DecimalFromAnyIntegerTreeReader(columnId, fileType, context);
+
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return new StringGroupFromAnyIntegerTreeReader(columnId, fileType, readerType,
+ context);
+
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return new TimestampFromAnyIntegerTreeReader(columnId, fileType, context,
+ readerType.getCategory() == Category.TIMESTAMP_INSTANT);
+
+ // Not currently supported conversion(s):
+ case BINARY:
+ case DATE:
+
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ readerType.getCategory());
}
}
@@ -1990,42 +2005,42 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// CONVERT from DOUBLE to schema type.
switch (readerType.getCategory()) {
- case BOOLEAN:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return new AnyIntegerFromDoubleTreeReader(columnId, fileType, readerType, context);
+ case BOOLEAN:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return new AnyIntegerFromDoubleTreeReader(columnId, fileType, readerType, context);
- case FLOAT:
- return new FloatFromDoubleTreeReader(columnId, context);
+ case FLOAT:
+ return new FloatFromDoubleTreeReader(columnId, context);
- case DOUBLE:
- return new FloatTreeReader(columnId, context);
+ case DOUBLE:
+ return new FloatTreeReader(columnId, context);
- case DECIMAL:
- return new DecimalFromDoubleTreeReader(columnId, fileType, readerType, context);
+ case DECIMAL:
+ return new DecimalFromDoubleTreeReader(columnId, fileType, readerType, context);
- case STRING:
- case CHAR:
- case VARCHAR:
- return new StringGroupFromDoubleTreeReader(columnId, fileType, readerType, context);
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return new StringGroupFromDoubleTreeReader(columnId, fileType, readerType, context);
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return new TimestampFromDoubleTreeReader(columnId, fileType, readerType, context);
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return new TimestampFromDoubleTreeReader(columnId, fileType, readerType, context);
- // Not currently supported conversion(s):
- case BINARY:
- case DATE:
+ // Not currently supported conversion(s):
+ case BINARY:
+ case DATE:
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- readerType.getCategory());
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ readerType.getCategory());
}
}
@@ -2037,41 +2052,41 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// CONVERT from DECIMAL to schema type.
switch (readerType.getCategory()) {
- case BOOLEAN:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return new AnyIntegerFromDecimalTreeReader(columnId, fileType, readerType, context);
-
- case FLOAT:
- case DOUBLE:
- return new DoubleFromDecimalTreeReader(columnId, fileType, context);
-
- case STRING:
- case CHAR:
- case VARCHAR:
- return new StringGroupFromDecimalTreeReader(columnId, fileType, readerType, context);
-
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return new TimestampFromDecimalTreeReader(columnId, fileType, context,
- readerType.getCategory() == Category.TIMESTAMP_INSTANT);
-
- case DECIMAL:
- return new DecimalFromDecimalTreeReader(columnId, fileType, readerType, context);
-
- // Not currently supported conversion(s):
- case BINARY:
- case DATE:
-
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- readerType.getCategory());
+ case BOOLEAN:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return new AnyIntegerFromDecimalTreeReader(columnId, fileType, readerType, context);
+
+ case FLOAT:
+ case DOUBLE:
+ return new DoubleFromDecimalTreeReader(columnId, fileType, context);
+
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return new StringGroupFromDecimalTreeReader(columnId, fileType, readerType, context);
+
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return new TimestampFromDecimalTreeReader(columnId, fileType, context,
+ readerType.getCategory() == Category.TIMESTAMP_INSTANT);
+
+ case DECIMAL:
+ return new DecimalFromDecimalTreeReader(columnId, fileType, readerType, context);
+
+ // Not currently supported conversion(s):
+ case BINARY:
+ case DATE:
+
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ readerType.getCategory());
}
}
@@ -2083,45 +2098,45 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// CONVERT from STRING to schema type.
switch (readerType.getCategory()) {
- case BOOLEAN:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return new AnyIntegerFromStringGroupTreeReader(columnId, fileType, readerType, context);
+ case BOOLEAN:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return new AnyIntegerFromStringGroupTreeReader(columnId, fileType, readerType, context);
- case FLOAT:
- case DOUBLE:
- return new DoubleFromStringGroupTreeReader(columnId, fileType, context);
+ case FLOAT:
+ case DOUBLE:
+ return new DoubleFromStringGroupTreeReader(columnId, fileType, context);
- case DECIMAL:
- return new DecimalFromStringGroupTreeReader(columnId, fileType, readerType, context);
+ case DECIMAL:
+ return new DecimalFromStringGroupTreeReader(columnId, fileType, readerType, context);
- case CHAR:
- case VARCHAR:
- case STRING:
- return new StringGroupFromStringGroupTreeReader(columnId, fileType, readerType, context);
+ case CHAR:
+ case VARCHAR:
+ case STRING:
+ return new StringGroupFromStringGroupTreeReader(columnId, fileType, readerType, context);
- case BINARY:
- return new BinaryTreeReader(columnId, context);
+ case BINARY:
+ return new BinaryTreeReader(columnId, context);
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return new TimestampFromStringGroupTreeReader(columnId, fileType, context,
- readerType.getCategory() == Category.TIMESTAMP_INSTANT);
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return new TimestampFromStringGroupTreeReader(columnId, fileType, context,
+ readerType.getCategory() == Category.TIMESTAMP_INSTANT);
- case DATE:
- return new DateFromStringGroupTreeReader(columnId, fileType, context);
+ case DATE:
+ return new DateFromStringGroupTreeReader(columnId, fileType, context);
- // Not currently supported conversion(s):
+ // Not currently supported conversion(s):
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- readerType.getCategory());
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ readerType.getCategory());
}
}
@@ -2133,44 +2148,44 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// CONVERT from TIMESTAMP to schema type.
switch (readerType.getCategory()) {
- case BOOLEAN:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return new AnyIntegerFromTimestampTreeReader(columnId, readerType,
- context, isInstant);
-
- case FLOAT:
- case DOUBLE:
- return new DoubleFromTimestampTreeReader(columnId, context, isInstant);
-
- case DECIMAL:
- return new DecimalFromTimestampTreeReader(columnId, context, isInstant);
-
- case STRING:
- case CHAR:
- case VARCHAR:
- return new StringGroupFromTimestampTreeReader(columnId, readerType,
- context, isInstant);
-
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return new TimestampTreeReader(columnId, context, isInstant);
-
- case DATE:
- return new DateFromTimestampTreeReader(columnId, context, isInstant);
-
- // Not currently supported conversion(s):
- case BINARY:
-
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- readerType.getCategory());
+ case BOOLEAN:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return new AnyIntegerFromTimestampTreeReader(columnId, readerType,
+ context, isInstant);
+
+ case FLOAT:
+ case DOUBLE:
+ return new DoubleFromTimestampTreeReader(columnId, context, isInstant);
+
+ case DECIMAL:
+ return new DecimalFromTimestampTreeReader(columnId, context, isInstant);
+
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return new StringGroupFromTimestampTreeReader(columnId, readerType,
+ context, isInstant);
+
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return new TimestampTreeReader(columnId, context, isInstant);
+
+ case DATE:
+ return new DateFromTimestampTreeReader(columnId, context, isInstant);
+
+ // Not currently supported conversion(s):
+ case BINARY:
+
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ readerType.getCategory());
}
}
@@ -2181,37 +2196,37 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// CONVERT from DATE to schema type.
switch (readerType.getCategory()) {
- case STRING:
- case CHAR:
- case VARCHAR:
- return new StringGroupFromDateTreeReader(columnId, readerType, context);
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return new StringGroupFromDateTreeReader(columnId, readerType, context);
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return new TimestampFromDateTreeReader(columnId, readerType, context);
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return new TimestampFromDateTreeReader(columnId, readerType, context);
- case DATE:
- throw new IllegalArgumentException("No conversion of type " +
- readerType.getCategory() + " to self needed");
+ case DATE:
+ throw new IllegalArgumentException("No conversion of type " +
+ readerType.getCategory() + " to self needed");
- // Not currently supported conversion(s):
- case BOOLEAN:
- case BYTE:
- case FLOAT:
- case SHORT:
- case INT:
- case LONG:
- case DOUBLE:
- case BINARY:
- case DECIMAL:
-
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- readerType.getCategory());
+ // Not currently supported conversion(s):
+ case BOOLEAN:
+ case BYTE:
+ case FLOAT:
+ case SHORT:
+ case INT:
+ case LONG:
+ case DOUBLE:
+ case BINARY:
+ case DECIMAL:
+
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ readerType.getCategory());
}
}
@@ -2222,33 +2237,33 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// CONVERT from BINARY to schema type.
switch (readerType.getCategory()) {
- case STRING:
- case CHAR:
- case VARCHAR:
- return new StringGroupFromBinaryTreeReader(columnId, readerType, context);
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return new StringGroupFromBinaryTreeReader(columnId, readerType, context);
- case BINARY:
- throw new IllegalArgumentException("No conversion of type " +
- readerType.getCategory() + " to self needed");
+ case BINARY:
+ throw new IllegalArgumentException("No conversion of type " +
+ readerType.getCategory() + " to self needed");
- // Not currently supported conversion(s):
- case BOOLEAN:
- case BYTE:
- case FLOAT:
- case SHORT:
- case INT:
- case LONG:
- case DOUBLE:
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- case DECIMAL:
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- readerType.getCategory());
+ // Not currently supported conversion(s):
+ case BOOLEAN:
+ case BYTE:
+ case FLOAT:
+ case SHORT:
+ case INT:
+ case LONG:
+ case DOUBLE:
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ case DECIMAL:
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ readerType.getCategory());
}
}
@@ -2389,45 +2404,45 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
switch (fileType.getCategory()) {
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return createAnyIntegerConvertTreeReader(columnId, fileType, readerType, context);
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return createAnyIntegerConvertTreeReader(columnId, fileType, readerType, context);
- case BOOLEAN:
- return createBooleanConvertTreeReader(columnId, fileType, readerType, context);
+ case BOOLEAN:
+ return createBooleanConvertTreeReader(columnId, fileType, readerType, context);
- case FLOAT:
- case DOUBLE:
- return createDoubleConvertTreeReader(columnId, fileType, readerType, context);
+ case FLOAT:
+ case DOUBLE:
+ return createDoubleConvertTreeReader(columnId, fileType, readerType, context);
- case DECIMAL:
- return createDecimalConvertTreeReader(columnId, fileType, readerType, context);
+ case DECIMAL:
+ return createDecimalConvertTreeReader(columnId, fileType, readerType, context);
- case STRING:
- case CHAR:
- case VARCHAR:
- return createStringConvertTreeReader(columnId, fileType, readerType, context);
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return createStringConvertTreeReader(columnId, fileType, readerType, context);
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return createTimestampConvertTreeReader(columnId, fileType, readerType, context);
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return createTimestampConvertTreeReader(columnId, fileType, readerType, context);
- case DATE:
- return createDateConvertTreeReader(columnId, readerType, context);
+ case DATE:
+ return createDateConvertTreeReader(columnId, readerType, context);
- case BINARY:
- return createBinaryConvertTreeReader(columnId, readerType, context);
+ case BINARY:
+ return createBinaryConvertTreeReader(columnId, readerType, context);
- // UNDONE: Complex conversions...
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- default:
- throw new IllegalArgumentException("Unsupported type " +
- fileType.getCategory());
+ // UNDONE: Complex conversions...
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ fileType.getCategory());
}
}
@@ -2437,102 +2452,102 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
// We don't convert from any to complex.
switch (readerTypeCategory) {
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- return false;
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ return false;
- default:
- // Fall through.
+ default:
+ // Fall through.
}
// Now look for the few cases we don't convert from
switch (fileType.getCategory()) {
- case BOOLEAN:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- case FLOAT:
- case DOUBLE:
- case DECIMAL:
- switch (readerType.getCategory()) {
- // Not currently supported conversion(s):
- case BINARY:
- case DATE:
- return false;
- default:
- return true;
- }
-
-
- case STRING:
- case CHAR:
- case VARCHAR:
- switch (readerType.getCategory()) {
- // Not currently supported conversion(s):
- // (None)
- default:
- return true;
- }
-
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- switch (readerType.getCategory()) {
- // Not currently supported conversion(s):
- case BINARY:
- return false;
- default:
- return true;
- }
-
- case DATE:
- switch (readerType.getCategory()) {
- // Not currently supported conversion(s):
case BOOLEAN:
case BYTE:
- case FLOAT:
case SHORT:
case INT:
case LONG:
+ case FLOAT:
case DOUBLE:
- case BINARY:
case DECIMAL:
- return false;
- default:
- return true;
- }
+ switch (readerType.getCategory()) {
+ // Not currently supported conversion(s):
+ case BINARY:
+ case DATE:
+ return false;
+ default:
+ return true;
+ }
+
+
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ switch (readerType.getCategory()) {
+ // Not currently supported conversion(s):
+ // (None)
+ default:
+ return true;
+ }
- case BINARY:
- switch (readerType.getCategory()) {
- // Not currently supported conversion(s):
- case BOOLEAN:
- case BYTE:
- case FLOAT:
- case SHORT:
- case INT:
- case LONG:
- case DOUBLE:
case TIMESTAMP:
case TIMESTAMP_INSTANT:
- case DECIMAL:
- return false;
- default:
- return true;
- }
+ switch (readerType.getCategory()) {
+ // Not currently supported conversion(s):
+ case BINARY:
+ return false;
+ default:
+ return true;
+ }
+
+ case DATE:
+ switch (readerType.getCategory()) {
+ // Not currently supported conversion(s):
+ case BOOLEAN:
+ case BYTE:
+ case FLOAT:
+ case SHORT:
+ case INT:
+ case LONG:
+ case DOUBLE:
+ case BINARY:
+ case DECIMAL:
+ return false;
+ default:
+ return true;
+ }
- // We don't convert from complex to any.
- case STRUCT:
- case LIST:
- case MAP:
- case UNION:
- return false;
+ case BINARY:
+ switch (readerType.getCategory()) {
+ // Not currently supported conversion(s):
+ case BOOLEAN:
+ case BYTE:
+ case FLOAT:
+ case SHORT:
+ case INT:
+ case LONG:
+ case DOUBLE:
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ case DECIMAL:
+ return false;
+ default:
+ return true;
+ }
- default:
- throw new IllegalArgumentException("Unsupported type " +
- fileType.getCategory());
+ // We don't convert from complex to any.
+ case STRUCT:
+ case LIST:
+ case MAP:
+ case UNION:
+ return false;
+
+ default:
+ throw new IllegalArgumentException("Unsupported type " +
+ fileType.getCategory());
}
}
}
diff --git a/java/core/src/java/org/apache/orc/impl/DataReaderProperties.java b/java/core/src/java/org/apache/orc/impl/DataReaderProperties.java
index 19a537c6b..dfd0d64ab 100644
--- a/java/core/src/java/org/apache/orc/impl/DataReaderProperties.java
+++ b/java/core/src/java/org/apache/orc/impl/DataReaderProperties.java
@@ -93,7 +93,7 @@ public final class DataReaderProperties {
(int) OrcConf.ORC_MAX_DISK_RANGE_CHUNK_LIMIT.getDefaultValue();
private int minSeekSize = (int) OrcConf.ORC_MIN_DISK_SEEK_SIZE.getDefaultValue();
private double minSeekSizeTolerance = (double) OrcConf.ORC_MIN_DISK_SEEK_SIZE_TOLERANCE
- .getDefaultValue();
+ .getDefaultValue();
private Builder() {
diff --git a/java/core/src/java/org/apache/orc/impl/DynamicByteArray.java b/java/core/src/java/org/apache/orc/impl/DynamicByteArray.java
index a3b5b3c7d..851f0f144 100644
--- a/java/core/src/java/org/apache/orc/impl/DynamicByteArray.java
+++ b/java/core/src/java/org/apache/orc/impl/DynamicByteArray.java
@@ -132,7 +132,7 @@ public final class DynamicByteArray {
int currentOffset = length % chunkSize;
grow(currentChunk);
int currentLength = in.read(data[currentChunk], currentOffset,
- chunkSize - currentOffset);
+ chunkSize - currentOffset);
while (currentLength > 0) {
length += currentLength;
currentOffset = length % chunkSize;
diff --git a/java/core/src/java/org/apache/orc/impl/IOUtils.java b/java/core/src/java/org/apache/orc/impl/IOUtils.java
index 2dbe7352f..ff13bdaec 100644
--- a/java/core/src/java/org/apache/orc/impl/IOUtils.java
+++ b/java/core/src/java/org/apache/orc/impl/IOUtils.java
@@ -134,7 +134,7 @@ public final class IOUtils {
final byte[] byteArray = getByteArray();
final long n = input.read(byteArray, 0, (int) Math.min(remain, byteArray.length));
if (n < 0) { // EOF
- break;
+ break;
}
remain -= n;
}
diff --git a/java/core/src/java/org/apache/orc/impl/PhysicalFsWriter.java b/java/core/src/java/org/apache/orc/impl/PhysicalFsWriter.java
index ad6c19878..38a8edd25 100644
--- a/java/core/src/java/org/apache/orc/impl/PhysicalFsWriter.java
+++ b/java/core/src/java/org/apache/orc/impl/PhysicalFsWriter.java
@@ -193,7 +193,7 @@ public class PhysicalFsWriter implements PhysicalWriter {
builder.setColumn(name.getColumn())
.setKind(name.getKind())
.setLength(size);
- result.add(builder.build());
+ result.add(builder.build());
}
}
return result;
@@ -556,7 +556,7 @@ public class PhysicalFsWriter implements PhysicalWriter {
if (!isSuppressed) {
for (ByteBuffer buffer: output) {
raw.write(buffer.array(), buffer.arrayOffset() + buffer.position(),
- buffer.remaining());
+ buffer.remaining());
}
output.clear();
return true;
diff --git a/java/core/src/java/org/apache/orc/impl/ReaderImpl.java b/java/core/src/java/org/apache/orc/impl/ReaderImpl.java
index db6463c5b..5c4ced6b3 100644
--- a/java/core/src/java/org/apache/orc/impl/ReaderImpl.java
+++ b/java/core/src/java/org/apache/orc/impl/ReaderImpl.java
@@ -940,44 +940,44 @@ public class ReaderImpl implements Reader {
long numVals = colStat.getNumberOfValues();
switch (column.getCategory()) {
- case BINARY:
- // old orc format doesn't support binary statistics. checking for binary
- // statistics is not required as protocol buffers takes care of it.
- return colStat.getBinaryStatistics().getSum();
- case STRING:
- case CHAR:
- case VARCHAR:
- // old orc format doesn't support sum for string statistics. checking for
- // existence is not required as protocol buffers takes care of it.
-
- // ORC strings are deserialized to java strings. so use java data model's
- // string size
- numVals = numVals == 0 ? 1 : numVals;
- int avgStrLen = (int) (colStat.getStringStatistics().getSum() / numVals);
- return numVals * JavaDataModel.get().lengthForStringOfLength(avgStrLen);
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return numVals * JavaDataModel.get().lengthOfTimestamp();
- case DATE:
- return numVals * JavaDataModel.get().lengthOfDate();
- case DECIMAL:
- return numVals * JavaDataModel.get().lengthOfDecimal();
- case DOUBLE:
- case LONG:
- return numVals * JavaDataModel.get().primitive2();
- case FLOAT:
- case INT:
- case SHORT:
- case BOOLEAN:
- case BYTE:
- case STRUCT:
- case UNION:
- case MAP:
- case LIST:
- return numVals * JavaDataModel.get().primitive1();
- default:
- LOG.debug("Unknown primitive category: {}", column.getCategory());
- break;
+ case BINARY:
+ // old orc format doesn't support binary statistics. checking for binary
+ // statistics is not required as protocol buffers takes care of it.
+ return colStat.getBinaryStatistics().getSum();
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ // old orc format doesn't support sum for string statistics. checking for
+ // existence is not required as protocol buffers takes care of it.
+
+ // ORC strings are deserialized to java strings. so use java data model's
+ // string size
+ numVals = numVals == 0 ? 1 : numVals;
+ int avgStrLen = (int) (colStat.getStringStatistics().getSum() / numVals);
+ return numVals * JavaDataModel.get().lengthForStringOfLength(avgStrLen);
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return numVals * JavaDataModel.get().lengthOfTimestamp();
+ case DATE:
+ return numVals * JavaDataModel.get().lengthOfDate();
+ case DECIMAL:
+ return numVals * JavaDataModel.get().lengthOfDecimal();
+ case DOUBLE:
+ case LONG:
+ return numVals * JavaDataModel.get().primitive2();
+ case FLOAT:
+ case INT:
+ case SHORT:
+ case BOOLEAN:
+ case BYTE:
+ case STRUCT:
+ case UNION:
+ case MAP:
+ case LIST:
+ return numVals * JavaDataModel.get().primitive1();
+ default:
+ LOG.debug("Unknown primitive category: {}", column.getCategory());
+ break;
}
return 0;
@@ -1017,12 +1017,11 @@ public class ReaderImpl implements Reader {
return fileStats;
}
- private static
- List<OrcProto.StripeStatistics> deserializeStripeStats(BufferChunk tailBuffer,
- long offset,
- int length,
- InStream.StreamOptions options
- ) throws IOException {
+ private static List<OrcProto.StripeStatistics> deserializeStripeStats(
+ BufferChunk tailBuffer,
+ long offset,
+ int length,
+ InStream.StreamOptions options) throws IOException {
InStream stream = InStream.create("stripe stats", tailBuffer, offset,
length, options);
OrcProto.Metadata meta = OrcProto.Metadata.parseFrom(
diff --git a/java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java b/java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java
index 6b6fd0259..a4f17a60e 100644
--- a/java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java
+++ b/java/core/src/java/org/apache/orc/impl/RecordReaderImpl.java
@@ -138,7 +138,7 @@ public class RecordReaderImpl implements RecordReader {
static TypeDescription findColumnType(SchemaEvolution evolution, String columnName) {
try {
TypeDescription readerColumn = evolution.getReaderBaseSchema().findSubtype(
- columnName, evolution.isSchemaEvolutionCaseAware);
+ columnName, evolution.isSchemaEvolutionCaseAware);
return evolution.getFileType(readerColumn);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Filter could not find column with name: " +
@@ -160,7 +160,7 @@ public class RecordReaderImpl implements RecordReader {
static TypeDescription findMostCommonColumn(SchemaEvolution evolution, String columnName) {
try {
TypeDescription readerColumn = evolution.getReaderBaseSchema().findSubtype(
- columnName, evolution.isSchemaEvolutionCaseAware);
+ columnName, evolution.isSchemaEvolutionCaseAware);
TypeDescription fileColumn;
do {
fileColumn = evolution.getFileType(readerColumn);
@@ -301,7 +301,7 @@ public class RecordReaderImpl implements RecordReader {
String[] filterCols = null;
Consumer<OrcFilterContext> filterCallBack = null;
String filePath = options.allowPluginFilters() ?
- fileReader.getFileSystem().makeQualified(fileReader.path).toString() : null;
+ fileReader.getFileSystem().makeQualified(fileReader.path).toString() : null;
BatchFilter filter = FilterFactory.createBatchFilter(options,
evolution.getReaderBaseSchema(),
evolution.isSchemaEvolutionCaseAware(),
@@ -546,14 +546,14 @@ public class RecordReaderImpl implements RecordReader {
TruthValue addNull(TruthValue value) {
if (hasNulls) {
switch (value) {
- case YES:
- return TruthValue.YES_NULL;
- case NO:
- return TruthValue.NO_NULL;
- case YES_NO:
- return TruthValue.YES_NO_NULL;
- default:
- return value;
+ case YES:
+ return TruthValue.YES_NULL;
+ case NO:
+ return TruthValue.NO_NULL;
+ case YES_NO:
+ return TruthValue.YES_NO_NULL;
+ default:
+ return value;
}
} else {
return value;
@@ -947,13 +947,13 @@ public class RecordReaderImpl implements RecordReader {
result = TruthValue.YES_NO_NULL;
}
} else {
- // if the predicate object is null and if hasNull says there are no nulls then return NO
- if (predObj == null && !hasNull) {
- result = TruthValue.NO;
- } else {
- result = TruthValue.YES_NO_NULL;
- }
+ // if the predicate object is null and if hasNull says there are no nulls then return NO
+ if (predObj == null && !hasNull) {
+ result = TruthValue.NO;
+ } else {
+ result = TruthValue.YES_NO_NULL;
}
+ }
if (result == TruthValue.YES_NO_NULL && !hasNull) {
result = TruthValue.YES_NO;
diff --git a/java/core/src/java/org/apache/orc/impl/RunLengthByteReader.java b/java/core/src/java/org/apache/orc/impl/RunLengthByteReader.java
index 7cab5dda0..b564db613 100644
--- a/java/core/src/java/org/apache/orc/impl/RunLengthByteReader.java
+++ b/java/core/src/java/org/apache/orc/impl/RunLengthByteReader.java
@@ -30,7 +30,7 @@ import java.io.IOException;
public class RunLengthByteReader {
private InStream input;
private final byte[] literals =
- new byte[RunLengthByteWriter.MAX_LITERAL_SIZE];
+ new byte[RunLengthByteWriter.MAX_LITERAL_SIZE];
private int numLiterals = 0;
private int used = 0;
private boolean repeat = false;
diff --git a/java/core/src/java/org/apache/orc/impl/RunLengthByteWriter.java b/java/core/src/java/org/apache/orc/impl/RunLengthByteWriter.java
index f5fc1fe9a..9c025fc7b 100644
--- a/java/core/src/java/org/apache/orc/impl/RunLengthByteWriter.java
+++ b/java/core/src/java/org/apache/orc/impl/RunLengthByteWriter.java
@@ -44,7 +44,7 @@ public class RunLengthByteWriter {
if (repeat) {
output.write(numLiterals - MIN_REPEAT_SIZE);
output.write(literals, 0, 1);
- } else {
+ } else {
output.write(-numLiterals);
output.write(literals, 0, numLiterals);
}
diff --git a/java/core/src/java/org/apache/orc/impl/RunLengthIntegerReader.java b/java/core/src/java/org/apache/orc/impl/RunLengthIntegerReader.java
index 804b0e605..a17fb9ee9 100644
--- a/java/core/src/java/org/apache/orc/impl/RunLengthIntegerReader.java
+++ b/java/core/src/java/org/apache/orc/impl/RunLengthIntegerReader.java
@@ -29,7 +29,7 @@ public class RunLengthIntegerReader implements IntegerReader {
private InStream input;
private final boolean signed;
private final long[] literals =
- new long[RunLengthIntegerWriter.MAX_LITERAL_SIZE];
+ new long[RunLengthIntegerWriter.MAX_LITERAL_SIZE];
private int numLiterals = 0;
private int delta = 0;
private int used = 0;
diff --git a/java/core/src/java/org/apache/orc/impl/RunLengthIntegerReaderV2.java b/java/core/src/java/org/apache/orc/impl/RunLengthIntegerReaderV2.java
index ebf67d85a..9f5faff6a 100644
--- a/java/core/src/java/org/apache/orc/impl/RunLengthIntegerReaderV2.java
+++ b/java/core/src/java/org/apache/orc/impl/RunLengthIntegerReaderV2.java
@@ -64,20 +64,20 @@ public class RunLengthIntegerReaderV2 implements IntegerReader {
}
currentEncoding = encodings[(firstByte >>> 6) & 0x03];
switch (currentEncoding) {
- case SHORT_REPEAT:
- readShortRepeatValues(firstByte);
- break;
- case DIRECT:
- readDirectValues(firstByte);
- break;
- case PATCHED_BASE:
- readPatchedBaseValues(firstByte);
- break;
- case DELTA:
- readDeltaValues(firstByte);
- break;
- default:
- throw new IOException("Unknown encoding " + currentEncoding);
+ case SHORT_REPEAT:
+ readShortRepeatValues(firstByte);
+ break;
+ case DIRECT:
+ readDirectValues(firstByte);
+ break;
+ case PATCHED_BASE:
+ readPatchedBaseValues(firstByte);
+ break;
+ case DELTA:
+ readDeltaValues(firstByte);
+ break;
+ default:
+ throw new IOException("Unknown encoding " + currentEncoding);
}
}
diff --git a/java/core/src/java/org/apache/orc/impl/SchemaEvolution.java b/java/core/src/java/org/apache/orc/impl/SchemaEvolution.java
index 60aeba536..09b4b2ae6 100644
--- a/java/core/src/java/org/apache/orc/impl/SchemaEvolution.java
+++ b/java/core/src/java/org/apache/orc/impl/SchemaEvolution.java
@@ -63,9 +63,9 @@ public class SchemaEvolution {
private final boolean positionalColumns;
private static final Logger LOG =
- LoggerFactory.getLogger(SchemaEvolution.class);
+ LoggerFactory.getLogger(SchemaEvolution.class);
private static final Pattern missingMetadataPattern =
- Pattern.compile("_col\\d+");
+ Pattern.compile("_col\\d+");
public static class IllegalEvolutionException extends RuntimeException {
@@ -105,7 +105,7 @@ public class SchemaEvolution {
+ readerIncluded.length);
}
this.readerFileTypes =
- new TypeDescription[this.readerSchema.getMaximumId() + 1];
+ new TypeDescription[this.readerSchema.getMaximumId() + 1];
int positionalLevels = 0;
if (options.getForcePositionalEvolution()) {
positionalLevels = isAcid ? 2 : options.getPositionalEvolutionLevel();
@@ -255,31 +255,31 @@ public class SchemaEvolution {
private boolean typesAreImplicitConversion(final TypeDescription fileType,
final TypeDescription readerType) {
switch (fileType.getCategory()) {
- case BYTE:
+ case BYTE:
if (readerType.getCategory().equals(TypeDescription.Category.SHORT) ||
readerType.getCategory().equals(TypeDescription.Category.INT) ||
readerType.getCategory().equals(TypeDescription.Category.LONG)) {
return true;
}
break;
- case SHORT:
+ case SHORT:
if (readerType.getCategory().equals(TypeDescription.Category.INT) ||
readerType.getCategory().equals(TypeDescription.Category.LONG)) {
return true;
}
break;
- case INT:
+ case INT:
if (readerType.getCategory().equals(TypeDescription.Category.LONG)) {
return true;
}
break;
- case FLOAT:
+ case FLOAT:
if (readerType.getCategory().equals(TypeDescription.Category.DOUBLE)) {
return true;
}
break;
- case CHAR:
- case VARCHAR:
+ case CHAR:
+ case VARCHAR:
if (readerType.getCategory().equals(TypeDescription.Category.STRING)) {
return true;
}
@@ -288,7 +288,7 @@ public class SchemaEvolution {
return (fileType.getMaxLength() <= readerType.getMaxLength());
}
break;
- default:
+ default:
break;
}
return false;
@@ -612,7 +612,7 @@ public class SchemaEvolution {
}
private static final List<String> acidEventFieldNames=
- new ArrayList<String>();
+ new ArrayList<String>();
static {
acidEventFieldNames.add("operation");
diff --git a/java/core/src/java/org/apache/orc/impl/SerializationUtils.java b/java/core/src/java/org/apache/orc/impl/SerializationUtils.java
index 8b1802cd3..1105a42cc 100644
--- a/java/core/src/java/org/apache/orc/impl/SerializationUtils.java
+++ b/java/core/src/java/org/apache/orc/impl/SerializationUtils.java
@@ -497,41 +497,41 @@ public final class SerializationUtils {
}
switch (bitSize) {
- case 1:
- unrolledBitPack1(input, offset, len, output);
- return;
- case 2:
- unrolledBitPack2(input, offset, len, output);
- return;
- case 4:
- unrolledBitPack4(input, offset, len, output);
- return;
- case 8:
- unrolledBitPack8(input, offset, len, output);
- return;
- case 16:
- unrolledBitPack16(input, offset, len, output);
- return;
- case 24:
- unrolledBitPack24(input, offset, len, output);
- return;
- case 32:
- unrolledBitPack32(input, offset, len, output);
- return;
- case 40:
- unrolledBitPack40(input, offset, len, output);
- return;
- case 48:
- unrolledBitPack48(input, offset, len, output);
- return;
- case 56:
- unrolledBitPack56(input, offset, len, output);
- return;
- case 64:
- unrolledBitPack64(input, offset, len, output);
- return;
- default:
- break;
+ case 1:
+ unrolledBitPack1(input, offset, len, output);
+ return;
+ case 2:
+ unrolledBitPack2(input, offset, len, output);
+ return;
+ case 4:
+ unrolledBitPack4(input, offset, len, output);
+ return;
+ case 8:
+ unrolledBitPack8(input, offset, len, output);
+ return;
+ case 16:
+ unrolledBitPack16(input, offset, len, output);
+ return;
+ case 24:
+ unrolledBitPack24(input, offset, len, output);
+ return;
+ case 32:
+ unrolledBitPack32(input, offset, len, output);
+ return;
+ case 40:
+ unrolledBitPack40(input, offset, len, output);
+ return;
+ case 48:
+ unrolledBitPack48(input, offset, len, output);
+ return;
+ case 56:
+ unrolledBitPack56(input, offset, len, output);
+ return;
+ case 64:
+ unrolledBitPack64(input, offset, len, output);
+ return;
+ default:
+ break;
}
int bitsLeft = 8;
@@ -708,64 +708,64 @@ public final class SerializationUtils {
int idx = 0;
switch (numBytes) {
- case 1:
- while (remainder > 0) {
- writeBuffer[idx] = (byte) (input[offset + idx] & 255);
- remainder--;
- idx++;
- }
- break;
- case 2:
- while (remainder > 0) {
- writeLongBE2(output, input[offset + idx], idx * 2);
- remainder--;
- idx++;
- }
- break;
- case 3:
- while (remainder > 0) {
- writeLongBE3(output, input[offset + idx], idx * 3);
- remainder--;
- idx++;
- }
- break;
- case 4:
- while (remainder > 0) {
- writeLongBE4(output, input[offset + idx], idx * 4);
- remainder--;
- idx++;
- }
- break;
- case 5:
- while (remainder > 0) {
- writeLongBE5(output, input[offset + idx], idx * 5);
- remainder--;
- idx++;
- }
- break;
- case 6:
- while (remainder > 0) {
- writeLongBE6(output, input[offset + idx], idx * 6);
- remainder--;
- idx++;
- }
- break;
- case 7:
- while (remainder > 0) {
- writeLongBE7(output, input[offset + idx], idx * 7);
- remainder--;
- idx++;
- }
- break;
- case 8:
- while (remainder > 0) {
- writeLongBE8(output, input[offset + idx], idx * 8);
- remainder--;
- idx++;
- }
- break;
- default:
- break;
+ case 1:
+ while (remainder > 0) {
+ writeBuffer[idx] = (byte) (input[offset + idx] & 255);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 2:
+ while (remainder > 0) {
+ writeLongBE2(output, input[offset + idx], idx * 2);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 3:
+ while (remainder > 0) {
+ writeLongBE3(output, input[offset + idx], idx * 3);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 4:
+ while (remainder > 0) {
+ writeLongBE4(output, input[offset + idx], idx * 4);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 5:
+ while (remainder > 0) {
+ writeLongBE5(output, input[offset + idx], idx * 5);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 6:
+ while (remainder > 0) {
+ writeLongBE6(output, input[offset + idx], idx * 6);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 7:
+ while (remainder > 0) {
+ writeLongBE7(output, input[offset + idx], idx * 7);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 8:
+ while (remainder > 0) {
+ writeLongBE8(output, input[offset + idx], idx * 8);
+ remainder--;
+ idx++;
+ }
+ break;
+ default:
+ break;
}
final int toWrite = numHops * numBytes;
@@ -776,86 +776,86 @@ public final class SerializationUtils {
int numHops, int numBytes) throws IOException {
switch (numBytes) {
- case 1:
- writeBuffer[0] = (byte) (input[offset + 0] & 255);
- writeBuffer[1] = (byte) (input[offset + 1] & 255);
- writeBuffer[2] = (byte) (input[offset + 2] & 255);
- writeBuffer[3] = (byte) (input[offset + 3] & 255);
- writeBuffer[4] = (byte) (input[offset + 4] & 255);
- writeBuffer[5] = (byte) (input[offset + 5] & 255);
- writeBuffer[6] = (byte) (input[offset + 6] & 255);
- writeBuffer[7] = (byte) (input[offset + 7] & 255);
- break;
- case 2:
- writeLongBE2(output, input[offset + 0], 0);
- writeLongBE2(output, input[offset + 1], 2);
- writeLongBE2(output, input[offset + 2], 4);
- writeLongBE2(output, input[offset + 3], 6);
- writeLongBE2(output, input[offset + 4], 8);
- writeLongBE2(output, input[offset + 5], 10);
- writeLongBE2(output, input[offset + 6], 12);
- writeLongBE2(output, input[offset + 7], 14);
- break;
- case 3:
- writeLongBE3(output, input[offset + 0], 0);
- writeLongBE3(output, input[offset + 1], 3);
- writeLongBE3(output, input[offset + 2], 6);
- writeLongBE3(output, input[offset + 3], 9);
- writeLongBE3(output, input[offset + 4], 12);
- writeLongBE3(output, input[offset + 5], 15);
- writeLongBE3(output, input[offset + 6], 18);
- writeLongBE3(output, input[offset + 7], 21);
- break;
- case 4:
- writeLongBE4(output, input[offset + 0], 0);
- writeLongBE4(output, input[offset + 1], 4);
- writeLongBE4(output, input[offset + 2], 8);
- writeLongBE4(output, input[offset + 3], 12);
- writeLongBE4(output, input[offset + 4], 16);
- writeLongBE4(output, input[offset + 5], 20);
- writeLongBE4(output, input[offset + 6], 24);
- writeLongBE4(output, input[offset + 7], 28);
- break;
- case 5:
- writeLongBE5(output, input[offset + 0], 0);
- writeLongBE5(output, input[offset + 1], 5);
- writeLongBE5(output, input[offset + 2], 10);
- writeLongBE5(output, input[offset + 3], 15);
- writeLongBE5(output, input[offset + 4], 20);
- writeLongBE5(output, input[offset + 5], 25);
- writeLongBE5(output, input[offset + 6], 30);
- writeLongBE5(output, input[offset + 7], 35);
- break;
- case 6:
- writeLongBE6(output, input[offset + 0], 0);
- writeLongBE6(output, input[offset + 1], 6);
- writeLongBE6(output, input[offset + 2], 12);
- writeLongBE6(output, input[offset + 3], 18);
- writeLongBE6(output, input[offset + 4], 24);
- writeLongBE6(output, input[offset + 5], 30);
- writeLongBE6(output, input[offset + 6], 36);
- writeLongBE6(output, input[offset + 7], 42);
- break;
- case 7:
- writeLongBE7(output, input[offset + 0], 0);
- writeLongBE7(output, input[offset + 1], 7);
- writeLongBE7(output, input[offset + 2], 14);
- writeLongBE7(output, input[offset + 3], 21);
- writeLongBE7(output, input[offset + 4], 28);
- writeLongBE7(output, input[offset + 5], 35);
- writeLongBE7(output, input[offset + 6], 42);
- writeLongBE7(output, input[offset + 7], 49);
- break;
- case 8:
- writeLongBE8(output, input[offset + 0], 0);
- writeLongBE8(output, input[offset + 1], 8);
- writeLongBE8(output, input[offset + 2], 16);
- writeLongBE8(output, input[offset + 3], 24);
- writeLongBE8(output, input[offset + 4], 32);
- writeLongBE8(output, input[offset + 5], 40);
- writeLongBE8(output, input[offset + 6], 48);
- writeLongBE8(output, input[offset + 7], 56);
- break;
+ case 1:
+ writeBuffer[0] = (byte) (input[offset + 0] & 255);
+ writeBuffer[1] = (byte) (input[offset + 1] & 255);
+ writeBuffer[2] = (byte) (input[offset + 2] & 255);
+ writeBuffer[3] = (byte) (input[offset + 3] & 255);
+ writeBuffer[4] = (byte) (input[offset + 4] & 255);
+ writeBuffer[5] = (byte) (input[offset + 5] & 255);
+ writeBuffer[6] = (byte) (input[offset + 6] & 255);
+ writeBuffer[7] = (byte) (input[offset + 7] & 255);
+ break;
+ case 2:
+ writeLongBE2(output, input[offset + 0], 0);
+ writeLongBE2(output, input[offset + 1], 2);
+ writeLongBE2(output, input[offset + 2], 4);
+ writeLongBE2(output, input[offset + 3], 6);
+ writeLongBE2(output, input[offset + 4], 8);
+ writeLongBE2(output, input[offset + 5], 10);
+ writeLongBE2(output, input[offset + 6], 12);
+ writeLongBE2(output, input[offset + 7], 14);
+ break;
+ case 3:
+ writeLongBE3(output, input[offset + 0], 0);
+ writeLongBE3(output, input[offset + 1], 3);
+ writeLongBE3(output, input[offset + 2], 6);
+ writeLongBE3(output, input[offset + 3], 9);
+ writeLongBE3(output, input[offset + 4], 12);
+ writeLongBE3(output, input[offset + 5], 15);
+ writeLongBE3(output, input[offset + 6], 18);
+ writeLongBE3(output, input[offset + 7], 21);
+ break;
+ case 4:
+ writeLongBE4(output, input[offset + 0], 0);
+ writeLongBE4(output, input[offset + 1], 4);
+ writeLongBE4(output, input[offset + 2], 8);
+ writeLongBE4(output, input[offset + 3], 12);
+ writeLongBE4(output, input[offset + 4], 16);
+ writeLongBE4(output, input[offset + 5], 20);
+ writeLongBE4(output, input[offset + 6], 24);
+ writeLongBE4(output, input[offset + 7], 28);
+ break;
+ case 5:
+ writeLongBE5(output, input[offset + 0], 0);
+ writeLongBE5(output, input[offset + 1], 5);
+ writeLongBE5(output, input[offset + 2], 10);
+ writeLongBE5(output, input[offset + 3], 15);
+ writeLongBE5(output, input[offset + 4], 20);
+ writeLongBE5(output, input[offset + 5], 25);
+ writeLongBE5(output, input[offset + 6], 30);
+ writeLongBE5(output, input[offset + 7], 35);
+ break;
+ case 6:
+ writeLongBE6(output, input[offset + 0], 0);
+ writeLongBE6(output, input[offset + 1], 6);
+ writeLongBE6(output, input[offset + 2], 12);
+ writeLongBE6(output, input[offset + 3], 18);
+ writeLongBE6(output, input[offset + 4], 24);
+ writeLongBE6(output, input[offset + 5], 30);
+ writeLongBE6(output, input[offset + 6], 36);
+ writeLongBE6(output, input[offset + 7], 42);
+ break;
+ case 7:
+ writeLongBE7(output, input[offset + 0], 0);
+ writeLongBE7(output, input[offset + 1], 7);
+ writeLongBE7(output, input[offset + 2], 14);
+ writeLongBE7(output, input[offset + 3], 21);
+ writeLongBE7(output, input[offset + 4], 28);
+ writeLongBE7(output, input[offset + 5], 35);
+ writeLongBE7(output, input[offset + 6], 42);
+ writeLongBE7(output, input[offset + 7], 49);
+ break;
+ case 8:
+ writeLongBE8(output, input[offset + 0], 0);
+ writeLongBE8(output, input[offset + 1], 8);
+ writeLongBE8(output, input[offset + 2], 16);
+ writeLongBE8(output, input[offset + 3], 24);
+ writeLongBE8(output, input[offset + 4], 32);
+ writeLongBE8(output, input[offset + 5], 40);
+ writeLongBE8(output, input[offset + 6], 48);
+ writeLongBE8(output, input[offset + 7], 56);
+ break;
default:
break;
}
@@ -935,41 +935,41 @@ public final class SerializationUtils {
int current = 0;
switch (bitSize) {
- case 1:
- unrolledUnPack1(buffer, offset, len, input);
- return;
- case 2:
- unrolledUnPack2(buffer, offset, len, input);
- return;
- case 4:
- unrolledUnPack4(buffer, offset, len, input);
- return;
- case 8:
- unrolledUnPack8(buffer, offset, len, input);
- return;
- case 16:
- unrolledUnPack16(buffer, offset, len, input);
- return;
- case 24:
- unrolledUnPack24(buffer, offset, len, input);
- return;
- case 32:
- unrolledUnPack32(buffer, offset, len, input);
- return;
- case 40:
- unrolledUnPack40(buffer, offset, len, input);
- return;
- case 48:
- unrolledUnPack48(buffer, offset, len, input);
- return;
- case 56:
- unrolledUnPack56(buffer, offset, len, input);
- return;
- case 64:
- unrolledUnPack64(buffer, offset, len, input);
- return;
- default:
- break;
+ case 1:
+ unrolledUnPack1(buffer, offset, len, input);
+ return;
+ case 2:
+ unrolledUnPack2(buffer, offset, len, input);
+ return;
+ case 4:
+ unrolledUnPack4(buffer, offset, len, input);
+ return;
+ case 8:
+ unrolledUnPack8(buffer, offset, len, input);
+ return;
+ case 16:
+ unrolledUnPack16(buffer, offset, len, input);
+ return;
+ case 24:
+ unrolledUnPack24(buffer, offset, len, input);
+ return;
+ case 32:
+ unrolledUnPack32(buffer, offset, len, input);
+ return;
+ case 40:
+ unrolledUnPack40(buffer, offset, len, input);
+ return;
+ case 48:
+ unrolledUnPack48(buffer, offset, len, input);
+ return;
+ case 56:
+ unrolledUnPack56(buffer, offset, len, input);
+ return;
+ case 64:
+ unrolledUnPack64(buffer, offset, len, input);
+ return;
+ default:
+ break;
}
for(int i = offset; i < (offset + len); i++) {
@@ -1138,64 +1138,64 @@ public final class SerializationUtils {
int idx = 0;
switch (numBytes) {
- case 1:
- while (remainder > 0) {
- buffer[offset++] = readBuffer[idx] & 255;
- remainder--;
- idx++;
- }
- break;
- case 2:
- while (remainder > 0) {
- buffer[offset++] = readLongBE2(input, idx * 2);
- remainder--;
- idx++;
- }
- break;
- case 3:
- while (remainder > 0) {
- buffer[offset++] = readLongBE3(input, idx * 3);
- remainder--;
- idx++;
- }
- break;
- case 4:
- while (remainder > 0) {
- buffer[offset++] = readLongBE4(input, idx * 4);
- remainder--;
- idx++;
- }
- break;
- case 5:
- while (remainder > 0) {
- buffer[offset++] = readLongBE5(input, idx * 5);
- remainder--;
- idx++;
- }
- break;
- case 6:
- while (remainder > 0) {
- buffer[offset++] = readLongBE6(input, idx * 6);
- remainder--;
- idx++;
- }
- break;
- case 7:
- while (remainder > 0) {
- buffer[offset++] = readLongBE7(input, idx * 7);
- remainder--;
- idx++;
- }
- break;
- case 8:
- while (remainder > 0) {
- buffer[offset++] = readLongBE8(input, idx * 8);
- remainder--;
- idx++;
- }
- break;
- default:
- break;
+ case 1:
+ while (remainder > 0) {
+ buffer[offset++] = readBuffer[idx] & 255;
+ remainder--;
+ idx++;
+ }
+ break;
+ case 2:
+ while (remainder > 0) {
+ buffer[offset++] = readLongBE2(input, idx * 2);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 3:
+ while (remainder > 0) {
+ buffer[offset++] = readLongBE3(input, idx * 3);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 4:
+ while (remainder > 0) {
+ buffer[offset++] = readLongBE4(input, idx * 4);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 5:
+ while (remainder > 0) {
+ buffer[offset++] = readLongBE5(input, idx * 5);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 6:
+ while (remainder > 0) {
+ buffer[offset++] = readLongBE6(input, idx * 6);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 7:
+ while (remainder > 0) {
+ buffer[offset++] = readLongBE7(input, idx * 7);
+ remainder--;
+ idx++;
+ }
+ break;
+ case 8:
+ while (remainder > 0) {
+ buffer[offset++] = readLongBE8(input, idx * 8);
+ remainder--;
+ idx++;
+ }
+ break;
+ default:
+ break;
}
}
@@ -1209,88 +1209,88 @@ public final class SerializationUtils {
}
switch (numBytes) {
- case 1:
- buffer[start + 0] = readBuffer[0] & 255;
- buffer[start + 1] = readBuffer[1] & 255;
- buffer[start + 2] = readBuffer[2] & 255;
- buffer[start + 3] = readBuffer[3] & 255;
- buffer[start + 4] = readBuffer[4] & 255;
- buffer[start + 5] = readBuffer[5] & 255;
- buffer[start + 6] = readBuffer[6] & 255;
- buffer[start + 7] = readBuffer[7] & 255;
- break;
- case 2:
- buffer[start + 0] = readLongBE2(in, 0);
- buffer[start + 1] = readLongBE2(in, 2);
- buffer[start + 2] = readLongBE2(in, 4);
- buffer[start + 3] = readLongBE2(in, 6);
- buffer[start + 4] = readLongBE2(in, 8);
- buffer[start + 5] = readLongBE2(in, 10);
- buffer[start + 6] = readLongBE2(in, 12);
- buffer[start + 7] = readLongBE2(in, 14);
- break;
- case 3:
- buffer[start + 0] = readLongBE3(in, 0);
- buffer[start + 1] = readLongBE3(in, 3);
- buffer[start + 2] = readLongBE3(in, 6);
- buffer[start + 3] = readLongBE3(in, 9);
- buffer[start + 4] = readLongBE3(in, 12);
- buffer[start + 5] = readLongBE3(in, 15);
- buffer[start + 6] = readLongBE3(in, 18);
- buffer[start + 7] = readLongBE3(in, 21);
- break;
- case 4:
- buffer[start + 0] = readLongBE4(in, 0);
- buffer[start + 1] = readLongBE4(in, 4);
- buffer[start + 2] = readLongBE4(in, 8);
- buffer[start + 3] = readLongBE4(in, 12);
- buffer[start + 4] = readLongBE4(in, 16);
- buffer[start + 5] = readLongBE4(in, 20);
- buffer[start + 6] = readLongBE4(in, 24);
- buffer[start + 7] = readLongBE4(in, 28);
- break;
- case 5:
- buffer[start + 0] = readLongBE5(in, 0);
- buffer[start + 1] = readLongBE5(in, 5);
- buffer[start + 2] = readLongBE5(in, 10);
- buffer[start + 3] = readLongBE5(in, 15);
- buffer[start + 4] = readLongBE5(in, 20);
- buffer[start + 5] = readLongBE5(in, 25);
- buffer[start + 6] = readLongBE5(in, 30);
- buffer[start + 7] = readLongBE5(in, 35);
- break;
- case 6:
- buffer[start + 0] = readLongBE6(in, 0);
- buffer[start + 1] = readLongBE6(in, 6);
- buffer[start + 2] = readLongBE6(in, 12);
- buffer[start + 3] = readLongBE6(in, 18);
- buffer[start + 4] = readLongBE6(in, 24);
- buffer[start + 5] = readLongBE6(in, 30);
- buffer[start + 6] = readLongBE6(in, 36);
- buffer[start + 7] = readLongBE6(in, 42);
- break;
- case 7:
- buffer[start + 0] = readLongBE7(in, 0);
- buffer[start + 1] = readLongBE7(in, 7);
- buffer[start + 2] = readLongBE7(in, 14);
- buffer[start + 3] = readLongBE7(in, 21);
- buffer[start + 4] = readLongBE7(in, 28);
- buffer[start + 5] = readLongBE7(in, 35);
- buffer[start + 6] = readLongBE7(in, 42);
- buffer[start + 7] = readLongBE7(in, 49);
- break;
- case 8:
- buffer[start + 0] = readLongBE8(in, 0);
- buffer[start + 1] = readLongBE8(in, 8);
- buffer[start + 2] = readLongBE8(in, 16);
- buffer[start + 3] = readLongBE8(in, 24);
- buffer[start + 4] = readLongBE8(in, 32);
- buffer[start + 5] = readLongBE8(in, 40);
- buffer[start + 6] = readLongBE8(in, 48);
- buffer[start + 7] = readLongBE8(in, 56);
- break;
- default:
- break;
+ case 1:
+ buffer[start + 0] = readBuffer[0] & 255;
+ buffer[start + 1] = readBuffer[1] & 255;
+ buffer[start + 2] = readBuffer[2] & 255;
+ buffer[start + 3] = readBuffer[3] & 255;
+ buffer[start + 4] = readBuffer[4] & 255;
+ buffer[start + 5] = readBuffer[5] & 255;
+ buffer[start + 6] = readBuffer[6] & 255;
+ buffer[start + 7] = readBuffer[7] & 255;
+ break;
+ case 2:
+ buffer[start + 0] = readLongBE2(in, 0);
+ buffer[start + 1] = readLongBE2(in, 2);
+ buffer[start + 2] = readLongBE2(in, 4);
+ buffer[start + 3] = readLongBE2(in, 6);
+ buffer[start + 4] = readLongBE2(in, 8);
+ buffer[start + 5] = readLongBE2(in, 10);
+ buffer[start + 6] = readLongBE2(in, 12);
+ buffer[start + 7] = readLongBE2(in, 14);
+ break;
+ case 3:
+ buffer[start + 0] = readLongBE3(in, 0);
+ buffer[start + 1] = readLongBE3(in, 3);
+ buffer[start + 2] = readLongBE3(in, 6);
+ buffer[start + 3] = readLongBE3(in, 9);
+ buffer[start + 4] = readLongBE3(in, 12);
+ buffer[start + 5] = readLongBE3(in, 15);
+ buffer[start + 6] = readLongBE3(in, 18);
+ buffer[start + 7] = readLongBE3(in, 21);
+ break;
+ case 4:
+ buffer[start + 0] = readLongBE4(in, 0);
+ buffer[start + 1] = readLongBE4(in, 4);
+ buffer[start + 2] = readLongBE4(in, 8);
+ buffer[start + 3] = readLongBE4(in, 12);
+ buffer[start + 4] = readLongBE4(in, 16);
+ buffer[start + 5] = readLongBE4(in, 20);
+ buffer[start + 6] = readLongBE4(in, 24);
+ buffer[start + 7] = readLongBE4(in, 28);
+ break;
+ case 5:
+ buffer[start + 0] = readLongBE5(in, 0);
+ buffer[start + 1] = readLongBE5(in, 5);
+ buffer[start + 2] = readLongBE5(in, 10);
+ buffer[start + 3] = readLongBE5(in, 15);
+ buffer[start + 4] = readLongBE5(in, 20);
+ buffer[start + 5] = readLongBE5(in, 25);
+ buffer[start + 6] = readLongBE5(in, 30);
+ buffer[start + 7] = readLongBE5(in, 35);
+ break;
+ case 6:
+ buffer[start + 0] = readLongBE6(in, 0);
+ buffer[start + 1] = readLongBE6(in, 6);
+ buffer[start + 2] = readLongBE6(in, 12);
+ buffer[start + 3] = readLongBE6(in, 18);
+ buffer[start + 4] = readLongBE6(in, 24);
+ buffer[start + 5] = readLongBE6(in, 30);
+ buffer[start + 6] = readLongBE6(in, 36);
+ buffer[start + 7] = readLongBE6(in, 42);
+ break;
+ case 7:
+ buffer[start + 0] = readLongBE7(in, 0);
+ buffer[start + 1] = readLongBE7(in, 7);
+ buffer[start + 2] = readLongBE7(in, 14);
+ buffer[start + 3] = readLongBE7(in, 21);
+ buffer[start + 4] = readLongBE7(in, 28);
+ buffer[start + 5] = readLongBE7(in, 35);
+ buffer[start + 6] = readLongBE7(in, 42);
+ buffer[start + 7] = readLongBE7(in, 49);
+ break;
+ case 8:
+ buffer[start + 0] = readLongBE8(in, 0);
+ buffer[start + 1] = readLongBE8(in, 8);
+ buffer[start + 2] = readLongBE8(in, 16);
+ buffer[start + 3] = readLongBE8(in, 24);
+ buffer[start + 4] = readLongBE8(in, 32);
+ buffer[start + 5] = readLongBE8(in, 40);
+ buffer[start + 6] = readLongBE8(in, 48);
+ buffer[start + 7] = readLongBE8(in, 56);
+ break;
+ default:
+ break;
}
}
@@ -1385,36 +1385,35 @@ public final class SerializationUtils {
* @param kind the stream kind
* @return the tuned options or the original if it is the same
*/
- public static
- StreamOptions getCustomizedCodec(StreamOptions base,
- OrcFile.CompressionStrategy strategy,
- OrcProto.Stream.Kind kind) {
+ public static StreamOptions getCustomizedCodec(StreamOptions base,
+ OrcFile.CompressionStrategy strategy,
+ OrcProto.Stream.Kind kind) {
if (base.getCodec() != null) {
CompressionCodec.Options options = base.getCodecOptions();
switch (kind) {
- case BLOOM_FILTER:
- case DATA:
- case DICTIONARY_DATA:
- case BLOOM_FILTER_UTF8:
- options = options.copy().setData(CompressionCodec.DataKind.TEXT);
- if (strategy == OrcFile.CompressionStrategy.SPEED) {
- options.setSpeed(CompressionCodec.SpeedModifier.FAST);
- } else {
- options.setSpeed(CompressionCodec.SpeedModifier.DEFAULT);
- }
- break;
- case LENGTH:
- case DICTIONARY_COUNT:
- case PRESENT:
- case ROW_INDEX:
- case SECONDARY:
- options = options.copy()
- .setSpeed(CompressionCodec.SpeedModifier.FASTEST)
- .setData(CompressionCodec.DataKind.BINARY);
- break;
- default:
- LOG.info("Missing ORC compression modifiers for " + kind);
- break;
+ case BLOOM_FILTER:
+ case DATA:
+ case DICTIONARY_DATA:
+ case BLOOM_FILTER_UTF8:
+ options = options.copy().setData(CompressionCodec.DataKind.TEXT);
+ if (strategy == OrcFile.CompressionStrategy.SPEED) {
+ options.setSpeed(CompressionCodec.SpeedModifier.FAST);
+ } else {
+ options.setSpeed(CompressionCodec.SpeedModifier.DEFAULT);
+ }
+ break;
+ case LENGTH:
+ case DICTIONARY_COUNT:
+ case PRESENT:
+ case ROW_INDEX:
+ case SECONDARY:
+ options = options.copy()
+ .setSpeed(CompressionCodec.SpeedModifier.FASTEST)
+ .setData(CompressionCodec.DataKind.BINARY);
+ break;
+ default:
+ LOG.info("Missing ORC compression modifiers for " + kind);
+ break;
}
if (!base.getCodecOptions().equals(options)) {
StreamOptions result = new StreamOptions(base)
diff --git a/java/core/src/java/org/apache/orc/impl/TreeReaderFactory.java b/java/core/src/java/org/apache/orc/impl/TreeReaderFactory.java
index a93da9a6d..ecc02fb8d 100644
--- a/java/core/src/java/org/apache/orc/impl/TreeReaderFactory.java
+++ b/java/core/src/java/org/apache/orc/impl/TreeReaderFactory.java
@@ -203,8 +203,8 @@ public class TreeReaderFactory {
// FILTER_PARENT. This is used during the reposition for non-filter read. Only Struct and
// Union Readers are supported currently
TypeDescription col = columnId == -1 ? null : getSchemaEvolution()
- .getFileSchema()
- .findSubtype(columnId);
+ .getFileSchema()
+ .findSubtype(columnId);
if (col == null || col.getChildren() == null || col.getChildren().isEmpty()) {
result = TypeReader.ReaderCategory.FILTER_CHILD;
} else {
@@ -1747,11 +1747,11 @@ public class TreeReaderFactory {
// Read present/isNull stream
super.nextVector(result, isNull, batchSize, filterContext, readPhase);
if (result instanceof Decimal64ColumnVector) {
- if (filterContext.isSelectedInUse()) {
- nextVector((Decimal64ColumnVector) result, isNull, filterContext, batchSize);
- } else {
- nextVector((Decimal64ColumnVector) result, isNull, batchSize);
- }
+ if (filterContext.isSelectedInUse()) {
+ nextVector((Decimal64ColumnVector) result, isNull, filterContext, batchSize);
+ } else {
+ nextVector((Decimal64ColumnVector) result, isNull, batchSize);
+ }
} else {
if (filterContext.isSelectedInUse()) {
nextVector((DecimalColumnVector) result, isNull, filterContext, batchSize);
@@ -3047,7 +3047,7 @@ public class TreeReaderFactory {
public static boolean isDecimalAsLong(OrcFile.Version version, int precision) {
return version == OrcFile.Version.UNSTABLE_PRE_2_0 &&
- precision <= TypeDescription.MAX_DECIMAL64_PRECISION;
+ precision <= TypeDescription.MAX_DECIMAL64_PRECISION;
}
public static BatchReader createRootReader(TypeDescription readerType, Context context)
diff --git a/java/core/src/java/org/apache/orc/impl/TypeUtils.java b/java/core/src/java/org/apache/orc/impl/TypeUtils.java
index 0a1e9c344..a5daa8957 100644
--- a/java/core/src/java/org/apache/orc/impl/TypeUtils.java
+++ b/java/core/src/java/org/apache/orc/impl/TypeUtils.java
@@ -41,66 +41,66 @@ public class TypeUtils {
TypeDescription.RowBatchVersion version,
int maxSize) {
switch (schema.getCategory()) {
- case BOOLEAN:
- case BYTE:
- case SHORT:
- case INT:
- case LONG:
- return new LongColumnVector(maxSize);
- case DATE:
- return new DateColumnVector(maxSize);
- case TIMESTAMP:
- case TIMESTAMP_INSTANT:
- return new TimestampColumnVector(maxSize);
- case FLOAT:
- case DOUBLE:
- return new DoubleColumnVector(maxSize);
- case DECIMAL: {
- int precision = schema.getPrecision();
- int scale = schema.getScale();
- if (version == TypeDescription.RowBatchVersion.ORIGINAL ||
- precision > TypeDescription.MAX_DECIMAL64_PRECISION) {
- return new DecimalColumnVector(maxSize, precision, scale);
- } else {
- return new Decimal64ColumnVector(maxSize, precision, scale);
+ case BOOLEAN:
+ case BYTE:
+ case SHORT:
+ case INT:
+ case LONG:
+ return new LongColumnVector(maxSize);
+ case DATE:
+ return new DateColumnVector(maxSize);
+ case TIMESTAMP:
+ case TIMESTAMP_INSTANT:
+ return new TimestampColumnVector(maxSize);
+ case FLOAT:
+ case DOUBLE:
+ return new DoubleColumnVector(maxSize);
+ case DECIMAL: {
+ int precision = schema.getPrecision();
+ int scale = schema.getScale();
+ if (version == TypeDescription.RowBatchVersion.ORIGINAL ||
+ precision > TypeDescription.MAX_DECIMAL64_PRECISION) {
+ return new DecimalColumnVector(maxSize, precision, scale);
+ } else {
+ return new Decimal64ColumnVector(maxSize, precision, scale);
+ }
}
- }
- case STRING:
- case BINARY:
- case CHAR:
- case VARCHAR:
- return new BytesColumnVector(maxSize);
- case STRUCT: {
- List<TypeDescription> children = schema.getChildren();
- ColumnVector[] fieldVector = new ColumnVector[children.size()];
- for(int i=0; i < fieldVector.length; ++i) {
- fieldVector[i] = createColumn(children.get(i), version, maxSize);
+ case STRING:
+ case BINARY:
+ case CHAR:
+ case VARCHAR:
+ return new BytesColumnVector(maxSize);
+ case STRUCT: {
+ List<TypeDescription> children = schema.getChildren();
+ ColumnVector[] fieldVector = new ColumnVector[children.size()];
+ for(int i=0; i < fieldVector.length; ++i) {
+ fieldVector[i] = createColumn(children.get(i), version, maxSize);
+ }
+ return new StructColumnVector(maxSize,
+ fieldVector);
}
- return new StructColumnVector(maxSize,
- fieldVector);
- }
- case UNION: {
- List<TypeDescription> children = schema.getChildren();
- ColumnVector[] fieldVector = new ColumnVector[children.size()];
- for(int i=0; i < fieldVector.length; ++i) {
- fieldVector[i] = createColumn(children.get(i), version, maxSize);
+ case UNION: {
+ List<TypeDescription> children = schema.getChildren();
+ ColumnVector[] fieldVector = new ColumnVector[children.size()];
+ for(int i=0; i < fieldVector.length; ++i) {
+ fieldVector[i] = createColumn(children.get(i), version, maxSize);
+ }
+ return new UnionColumnVector(maxSize,
+ fieldVector);
}
- return new UnionColumnVector(maxSize,
- fieldVector);
- }
- case LIST: {
- List<TypeDescription> children = schema.getChildren();
- return new ListColumnVector(maxSize,
- createColumn(children.get(0), version, maxSize));
- }
- case MAP: {
- List<TypeDescription> children = schema.getChildren();
- return new MapColumnVector(maxSize,
- createColumn(children.get(0), version, maxSize),
- createColumn(children.get(1), version, maxSize));
- }
- default:
- throw new IllegalArgumentException("Unknown type " + schema.getCategory());
+ case LIST: {
+ List<TypeDescription> children = schema.getChildren();
+ return new ListColumnVector(maxSize,
+ createColumn(children.get(0), version, maxSize));
+ }
+ case MAP: {
+ List<TypeDescription> children = schema.getChildren();
+ return new MapColumnVector(maxSize,
+ createColumn(children.get(0), version, maxSize),
+ createColumn(children.get(1), version, maxSize));
+ }
+ default:
+ throw new IllegalArgumentException("Unknown type " + schema.getCategory());
}
}
diff --git a/java/core/src/java/org/apache/orc/impl/WriterImpl.java b/java/core/src/java/org/apache/orc/impl/WriterImpl.java
index 2e191be6c..714b17076 100644
--- a/java/core/src/java/org/apache/orc/impl/WriterImpl.java
+++ b/java/core/src/java/org/apache/orc/impl/WriterImpl.java
@@ -102,9 +102,9 @@ public class WriterImpl implements WriterInternal, MemoryManager.Callback {
private long lastFlushOffset = 0;
private int stripesAtLastFlush = -1;
private final List<OrcProto.StripeInformation> stripes =
- new ArrayList<>();
+ new ArrayList<>();
private final Map<String, ByteString> userMetadata =
- new TreeMap<>();
+ new TreeMap<>();
private final TreeWriter treeWriter;
private final boolean buildIndex;
private final MemoryManager memoryManager;
@@ -665,7 +665,7 @@ public class WriterImpl implements WriterInternal, MemoryManager.Callback {
// add all of the user metadata
for(Map.Entry<String, ByteString> entry: userMetadata.entrySet()) {
builder.addMetadata(OrcProto.UserMetadataItem.newBuilder()
- .setName(entry.getKey()).setValue(entry.getValue()));
+ .setName(entry.getKey()).setValue(entry.getValue()));
}
if (encryption.length > 0) {
builder.setEncryption(writeEncryptionFooter());
diff --git a/java/core/src/java/org/apache/orc/impl/filter/leaf/FloatFilters.java b/java/core/src/java/org/apache/orc/impl/filter/leaf/FloatFilters.java
index d1cda4cee..3b18a5d28 100644
--- a/java/core/src/java/org/apache/orc/impl/filter/leaf/FloatFilters.java
+++ b/java/core/src/java/org/apache/orc/impl/filter/leaf/FloatFilters.java
@@ -29,80 +29,80 @@ class FloatFilters {
private FloatFilters() {
}
- static class FloatBetween extends LeafFilter {
- private final double low;
- private final double high;
-
- FloatBetween(String colName, Object low, Object high, boolean negated) {
- super(colName, negated);
- this.low = (double) low;
- this.high = (double) high;
- }
-
- @Override
- protected boolean allow(ColumnVector v, int rowIdx) {
- return ((DoubleColumnVector) v).vector[rowIdx] >= low &&
- ((DoubleColumnVector) v).vector[rowIdx] <= high;
- }
- }
-
- static class FloatEquals extends LeafFilter {
- private final double aValue;
-
- FloatEquals(String colName, Object aValue, boolean negated) {
- super(colName, negated);
- this.aValue = (double) aValue;
- }
-
- @Override
- protected boolean allow(ColumnVector v, int rowIdx) {
- return ((DoubleColumnVector) v).vector[rowIdx] == aValue;
- }
- }
-
- static class FloatIn extends LeafFilter {
- private final double[] inValues;
-
- FloatIn(String colName, List<Object> values, boolean negated) {
- super(colName, negated);
- inValues = new double[values.size()];
- for (int i = 0; i < values.size(); i++) {
- inValues[i] = (double) values.get(i);
- }
- Arrays.sort(inValues);
- }
-
- @Override
- protected boolean allow(ColumnVector v, int rowIdx) {
- return Arrays.binarySearch(inValues, ((DoubleColumnVector) v).vector[rowIdx]) >= 0;
- }
- }
-
- static class FloatLessThan extends LeafFilter {
- private final double aValue;
-
- FloatLessThan(String colName, Object aValue, boolean negated) {
- super(colName, negated);
- this.aValue = (double) aValue;
- }
-
- @Override
- protected boolean allow(ColumnVector v, int rowIdx) {
- return ((DoubleColumnVector) v).vector[rowIdx] < aValue;
- }
- }
-
- static class FloatLessThanEquals extends LeafFilter {
- private final double aValue;
-
- FloatLessThanEquals(String colName, Object aValue, boolean negated) {
- super(colName, negated);
- this.aValue = (double) aValue;
- }
-
- @Override
- protected boolean allow(ColumnVector v, int rowIdx) {
- return ((DoubleColumnVector) v).vector[rowIdx] <= aValue;
- }
- }
+ static class FloatBetween extends LeafFilter {
+ private final double low;
+ private final double high;
+
+ FloatBetween(String colName, Object low, Object high, boolean negated) {
+ super(colName, negated);
+ this.low = (double) low;
+ this.high = (double) high;
+ }
+
+ @Override
+ protected boolean allow(ColumnVector v, int rowIdx) {
+ return ((DoubleColumnVector) v).vector[rowIdx] >= low &&
+ ((DoubleColumnVector) v).vector[rowIdx] <= high;
+ }
+ }
+
+ static class FloatEquals extends LeafFilter {
+ private final double aValue;
+
+ FloatEquals(String colName, Object aValue, boolean negated) {
+ super(colName, negated);
+ this.aValue = (double) aValue;
+ }
+
+ @Override
+ protected boolean allow(ColumnVector v, int rowIdx) {
+ return ((DoubleColumnVector) v).vector[rowIdx] == aValue;
+ }
+ }
+
+ static class FloatIn extends LeafFilter {
+ private final double[] inValues;
+
+ FloatIn(String colName, List<Object> values, boolean negated) {
+ super(colName, negated);
+ inValues = new double[values.size()];
+ for (int i = 0; i < values.size(); i++) {
+ inValues[i] = (double) values.get(i);
+ }
+ Arrays.sort(inValues);
+ }
+
+ @Override
+ protected boolean allow(ColumnVector v, int rowIdx) {
+ return Arrays.binarySearch(inValues, ((DoubleColumnVector) v).vector[rowIdx]) >= 0;
+ }
+ }
+
+ static class FloatLessThan extends LeafFilter {
+ private final double aValue;
+
+ FloatLessThan(String colName, Object aValue, boolean negated) {
+ super(colName, negated);
+ this.aValue = (double) aValue;
+ }
+
+ @Override
+ protected boolean allow(ColumnVector v, int rowIdx) {
+ return ((DoubleColumnVector) v).vector[rowIdx] < aValue;
+ }
+ }
+
+ static class FloatLessThanEquals extends LeafFilter {
+ private final double aValue;
+
+ FloatLessThanEquals(String colName, Object aValue, boolean negated) {
+ super(colName, negated);
+ this.aValue = (double) aValue;
+ }
+
+ @Override
+ protected boolean allow(ColumnVector v, int rowIdx) {
+ return ((DoubleColumnVector) v).vector[rowIdx] <= aValue;
+ }
+ }
}
diff --git a/java/core/src/java/org/apache/orc/impl/mask/ListIdentity.java b/java/core/src/java/org/apache/orc/impl/mask/ListIdentity.java
index 759280250..18e5622d8 100644
--- a/java/core/src/java/org/apache/orc/impl/mask/ListIdentity.java
+++ b/java/core/src/java/org/apache/orc/impl/mask/ListIdentity.java
@@ -47,12 +47,12 @@ public class ListIdentity implements DataMask {
(int) source.lengths[0]);
}
} else if (source.noNulls) {
- for(int r=start; r < start+length; ++r) {
- target.offsets[r] = source.offsets[r];
- target.lengths[r] = source.lengths[r];
- child.maskData(source.child, target.child, (int) target.offsets[r],
- (int) target.lengths[r]);
- }
+ for(int r=start; r < start+length; ++r) {
+ target.offsets[r] = source.offsets[r];
+ target.lengths[r] = source.lengths[r];
+ child.maskData(source.child, target.child, (int) target.offsets[r],
+ (int) target.lengths[r]);
+ }
} else {
for(int r=start; r < start+length; ++r) {
target.isNull[r] = source.isNull[r];
diff --git a/java/core/src/java/org/apache/orc/impl/mask/RedactMaskFactory.java b/java/core/src/java/org/apache/orc/impl/mask/RedactMaskFactory.java
index c6d314738..c6b65c3e8 100644
--- a/java/core/src/java/org/apache/orc/impl/mask/RedactMaskFactory.java
+++ b/java/core/src/java/org/apache/orc/impl/mask/RedactMaskFactory.java
@@ -945,24 +945,24 @@ public class RedactMaskFactory extends MaskFactory {
private boolean isIndexInUnmaskRange(final int index, final int length) {
for(final Map.Entry<Integer, Integer> pair : unmaskIndexRanges.entrySet()) {
- int start;
- int end;
-
- if(pair.getKey() >= 0) {
- // for positive indexes
- start = pair.getKey();
- } else {
- // for negative indexes
- start = length + pair.getKey();
- }
-
- if(pair.getValue() >= 0) {
- // for positive indexes
- end = pair.getValue();
- } else {
- // for negative indexes
- end = length + pair.getValue();
- }
+ int start;
+ int end;
+
+ if(pair.getKey() >= 0) {
+ // for positive indexes
+ start = pair.getKey();
+ } else {
+ // for negative indexes
+ start = length + pair.getKey();
+ }
+
+ if(pair.getValue() >= 0) {
+ // for positive indexes
+ end = pair.getValue();
+ } else {
+ // for negative indexes
+ end = length + pair.getValue();
+ }
// if the given index is in range
if(index >= start && index <= end ) {
diff --git a/java/core/src/java/org/apache/orc/impl/reader/ReaderEncryptionVariant.java b/java/core/src/java/org/apache/orc/impl/reader/ReaderEncryptionVariant.java
index 5d7f0122b..19bf9e5a4 100644
--- a/java/core/src/java/org/apache/orc/impl/reader/ReaderEncryptionVariant.java
+++ b/java/core/src/java/org/apache/orc/impl/reader/ReaderEncryptionVariant.java
@@ -145,30 +145,30 @@ public class ReaderEncryptionVariant implements EncryptionVariant {
Key result = localKey.getDecryptedKey();
if (result == null) {
switch (this.key.getState()) {
- case UNTRIED:
- try {
+ case UNTRIED:
+ try {
+ result = provider.decryptLocalKey(key.getMetadata(),
+ localKey.getEncryptedKey());
+ } catch (IOException ioe) {
+ LOG.info("Can't decrypt using key {}", key);
+ }
+ if (result != null) {
+ localKey.setDecryptedKey(result);
+ key.setSuccess();
+ } else {
+ key.setFailure();
+ }
+ break;
+ case SUCCESS:
result = provider.decryptLocalKey(key.getMetadata(),
localKey.getEncryptedKey());
- } catch (IOException ioe) {
- LOG.info("Can't decrypt using key {}", key);
- }
- if (result != null) {
+ if (result == null) {
+ throw new IOException("Can't decrypt local key " + key);
+ }
localKey.setDecryptedKey(result);
- key.setSuccess();
- } else {
- key.setFailure();
- }
- break;
- case SUCCESS:
- result = provider.decryptLocalKey(key.getMetadata(),
- localKey.getEncryptedKey());
- if (result == null) {
- throw new IOException("Can't decrypt local key " + key);
- }
- localKey.setDecryptedKey(result);
- break;
- case FAILURE:
- return null;
+ break;
+ case FAILURE:
+ return null;
}
}
return result;
diff --git a/java/core/src/java/org/apache/orc/impl/reader/StripePlanner.java b/java/core/src/java/org/apache/orc/impl/reader/StripePlanner.java
index dd88d362b..d5aad0834 100644
--- a/java/core/src/java/org/apache/orc/impl/reader/StripePlanner.java
+++ b/java/core/src/java/org/apache/orc/impl/reader/StripePlanner.java
@@ -184,8 +184,8 @@ public class StripePlanner {
boolean forceDirect)
throws IOException {
BufferChunkList chunks = (index == null || rowGroupInclude == null)
- ? planDataReading(TypeReader.ReadPhase.FOLLOWERS)
- : planPartialDataReading(index, rowGroupInclude, rgIdx, TypeReader.ReadPhase.FOLLOWERS);
+ ? planDataReading(TypeReader.ReadPhase.FOLLOWERS)
+ : planPartialDataReading(index, rowGroupInclude, rgIdx, TypeReader.ReadPhase.FOLLOWERS);
dataReader.readFileData(chunks, forceDirect);
return chunks;
}
@@ -309,13 +309,13 @@ public class StripePlanner {
StreamInformation info =
new StreamInformation(kind, column, offset, stream.getLength());
switch (StreamName.getArea(kind)) {
- case DATA:
- dataStreams.add(info);
- break;
- case INDEX:
- indexStreams.add(info);
- break;
- default:
+ case DATA:
+ dataStreams.add(info);
+ break;
+ case INDEX:
+ indexStreams.add(info);
+ break;
+ default:
}
streams.put(new StreamName(column, kind), info);
}
@@ -400,17 +400,17 @@ public class StripePlanner {
"index", stream.firstChunk, stream.offset,
stream.length, getStreamOptions(column, stream.kind)));
switch (stream.kind) {
- case ROW_INDEX:
- indexes[column] = OrcProto.RowIndex.parseFrom(data);
- break;
- case BLOOM_FILTER:
- case BLOOM_FILTER_UTF8:
- if (sargColumns != null && sargColumns[column]) {
- blooms[column] = OrcProto.BloomFilterIndex.parseFrom(data);
- }
- break;
- default:
- break;
+ case ROW_INDEX:
+ indexes[column] = OrcProto.RowIndex.parseFrom(data);
+ break;
+ case BLOOM_FILTER:
+ case BLOOM_FILTER_UTF8:
+ if (sargColumns != null && sargColumns[column]) {
+ blooms[column] = OrcProto.BloomFilterIndex.parseFrom(data);
+ }
+ break;
+ default:
+ break;
}
}
}
@@ -442,19 +442,19 @@ public class StripePlanner {
BufferChunkList result = new BufferChunkList();
for(StreamInformation stream: indexStreams) {
switch (stream.kind) {
- case ROW_INDEX:
- addChunk(result, stream, stream.offset, stream.length);
- break;
- case BLOOM_FILTER:
- case BLOOM_FILTER_UTF8:
- if (bloomFilterColumns[stream.column] &&
- bloomFilterKinds[stream.column] == stream.kind) {
+ case ROW_INDEX:
addChunk(result, stream, stream.offset, stream.length);
- }
- break;
- default:
- // PASS
- break;
+ break;
+ case BLOOM_FILTER:
+ case BLOOM_FILTER_UTF8:
+ if (bloomFilterColumns[stream.column] &&
+ bloomFilterKinds[stream.column] == stream.kind) {
+ addChunk(result, stream, stream.offset, stream.length);
+ }
+ break;
+ default:
+ // PASS
+ break;
}
}
return result;
@@ -486,17 +486,17 @@ public class StripePlanner {
static boolean hadBadBloomFilters(TypeDescription.Category category,
OrcFile.WriterVersion version) {
switch(category) {
- case STRING:
- case CHAR:
- case VARCHAR:
- return !version.includes(OrcFile.WriterVersion.HIVE_12055);
- case DECIMAL:
- // fixed by ORC-101, but ORC-101 changed stream kind to BLOOM_FILTER_UTF8
- return true;
- case TIMESTAMP:
- return !version.includes(OrcFile.WriterVersion.ORC_135);
- default:
- return false;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ return !version.includes(OrcFile.WriterVersion.HIVE_12055);
+ case DECIMAL:
+ // fixed by ORC-101, but ORC-101 changed stream kind to BLOOM_FILTER_UTF8
+ return true;
+ case TIMESTAMP:
+ return !version.includes(OrcFile.WriterVersion.ORC_135);
+ default:
+ return false;
}
}
@@ -582,8 +582,8 @@ public class StripePlanner {
endGroup += 1;
}
int posn = RecordReaderUtils.getIndexPosition(
- encodings[stream.column].getKind(), kind, stream.kind,
- isCompressed, hasNull[column]);
+ encodings[stream.column].getKind(), kind, stream.kind,
+ isCompressed, hasNull[column]);
long start = Math.max(alreadyRead,
stream.offset + (group == 0 ? 0 : ri.getEntry(group).getPositions(posn)));
long end = stream.offset;
diff --git a/java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java b/java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java
index a43ee061f..71eb3a564 100644
--- a/java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java
+++ b/java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java
@@ -145,48 +145,48 @@ public interface TreeWriter {
WriterContext streamFactory) throws IOException {
OrcFile.Version version = streamFactory.getVersion();
switch (schema.getCategory()) {
- case BOOLEAN:
- return new BooleanTreeWriter(schema, encryption, streamFactory);
- case BYTE:
- return new ByteTreeWriter(schema, encryption, streamFactory);
- case SHORT:
- case INT:
- case LONG:
- return new IntegerTreeWriter(schema, encryption, streamFactory);
- case FLOAT:
- return new FloatTreeWriter(schema, encryption, streamFactory);
- case DOUBLE:
- return new DoubleTreeWriter(schema, encryption, streamFactory);
- case STRING:
- return new StringTreeWriter(schema, encryption, streamFactory);
- case CHAR:
- return new CharTreeWriter(schema, encryption, streamFactory);
- case VARCHAR:
- return new VarcharTreeWriter(schema, encryption, streamFactory);
- case BINARY:
- return new BinaryTreeWriter(schema, encryption, streamFactory);
- case TIMESTAMP:
- return new TimestampTreeWriter(schema, encryption, streamFactory, false);
- case TIMESTAMP_INSTANT:
- return new TimestampTreeWriter(schema, encryption, streamFactory, true);
- case DATE:
- return new DateTreeWriter(schema, encryption, streamFactory);
- case DECIMAL:
- if (version == OrcFile.Version.UNSTABLE_PRE_2_0 &&
- schema.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION) {
- return new Decimal64TreeWriter(schema, encryption, streamFactory);
- }
- return new DecimalTreeWriter(schema, encryption, streamFactory);
- case STRUCT:
- return new StructTreeWriter(schema, encryption, streamFactory);
- case MAP:
- return new MapTreeWriter(schema, encryption, streamFactory);
- case LIST:
- return new ListTreeWriter(schema, encryption, streamFactory);
- case UNION:
- return new UnionTreeWriter(schema, encryption, streamFactory);
- default:
- throw new IllegalArgumentException("Bad category: " +
+ case BOOLEAN:
+ return new BooleanTreeWriter(schema, encryption, streamFactory);
+ case BYTE:
+ return new ByteTreeWriter(schema, encryption, streamFactory);
+ case SHORT:
+ case INT:
+ case LONG:
+ return new IntegerTreeWriter(schema, encryption, streamFactory);
+ case FLOAT:
+ return new FloatTreeWriter(schema, encryption, streamFactory);
+ case DOUBLE:
+ return new DoubleTreeWriter(schema, encryption, streamFactory);
+ case STRING:
+ return new StringTreeWriter(schema, encryption, streamFactory);
+ case CHAR:
+ return new CharTreeWriter(schema, encryption, streamFactory);
+ case VARCHAR:
+ return new VarcharTreeWriter(schema, encryption, streamFactory);
+ case BINARY:
+ return new BinaryTreeWriter(schema, encryption, streamFactory);
+ case TIMESTAMP:
+ return new TimestampTreeWriter(schema, encryption, streamFactory, false);
+ case TIMESTAMP_INSTANT:
+ return new TimestampTreeWriter(schema, encryption, streamFactory, true);
+ case DATE:
+ return new DateTreeWriter(schema, encryption, streamFactory);
+ case DECIMAL:
+ if (version == OrcFile.Version.UNSTABLE_PRE_2_0 &&
+ schema.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION) {
+ return new Decimal64TreeWriter(schema, encryption, streamFactory);
+ }
+ return new DecimalTreeWriter(schema, encryption, streamFactory);
+ case STRUCT:
+ return new StructTreeWriter(schema, encryption, streamFactory);
+ case MAP:
+ return new MapTreeWriter(schema, encryption, streamFactory);
+ case LIST:
+ return new ListTreeWriter(schema, encryption, streamFactory);
+ case UNION:
+ return new UnionTreeWriter(schema, encryption, streamFactory);
+ default:
+ throw new IllegalArgumentException("Bad category: " +
schema.getCategory());
}
}
diff --git a/java/core/src/java/org/apache/orc/util/CuckooSetBytes.java b/java/core/src/java/org/apache/orc/util/CuckooSetBytes.java
index efe2d36a6..cabe3289c 100644
--- a/java/core/src/java/org/apache/orc/util/CuckooSetBytes.java
+++ b/java/core/src/java/org/apache/orc/util/CuckooSetBytes.java
@@ -372,33 +372,33 @@ public class CuckooSetBytes {
//-------------------------------- last block: affect all 32 bits of (c)
switch (length) { // all the case statements fall through
- case 12:
- c = (c + (((key[offset + 11] & BYTE_MASK) << 24) & INT_MASK)) & INT_MASK;
- case 11:
- c = (c + (((key[offset + 10] & BYTE_MASK) << 16) & INT_MASK)) & INT_MASK;
- case 10:
- c = (c + (((key[offset + 9] & BYTE_MASK) << 8) & INT_MASK)) & INT_MASK;
- case 9:
- c = (c + (key[offset + 8] & BYTE_MASK)) & INT_MASK;
- case 8:
- b = (b + (((key[offset + 7] & BYTE_MASK) << 24) & INT_MASK)) & INT_MASK;
- case 7:
- b = (b + (((key[offset + 6] & BYTE_MASK) << 16) & INT_MASK)) & INT_MASK;
- case 6:
- b = (b + (((key[offset + 5] & BYTE_MASK) << 8) & INT_MASK)) & INT_MASK;
- case 5:
- b = (b + (key[offset + 4] & BYTE_MASK)) & INT_MASK;
- case 4:
- a = (a + (((key[offset + 3] & BYTE_MASK) << 24) & INT_MASK)) & INT_MASK;
- case 3:
- a = (a + (((key[offset + 2] & BYTE_MASK) << 16) & INT_MASK)) & INT_MASK;
- case 2:
- a = (a + (((key[offset + 1] & BYTE_MASK) << 8) & INT_MASK)) & INT_MASK;
- case 1:
- a = (a + (key[offset] & BYTE_MASK)) & INT_MASK;
- break;
- case 0:
- return (int)(c & INT_MASK);
+ case 12:
+ c = (c + (((key[offset + 11] & BYTE_MASK) << 24) & INT_MASK)) & INT_MASK;
+ case 11:
+ c = (c + (((key[offset + 10] & BYTE_MASK) << 16) & INT_MASK)) & INT_MASK;
+ case 10:
+ c = (c + (((key[offset + 9] & BYTE_MASK) << 8) & INT_MASK)) & INT_MASK;
+ case 9:
+ c = (c + (key[offset + 8] & BYTE_MASK)) & INT_MASK;
+ case 8:
+ b = (b + (((key[offset + 7] & BYTE_MASK) << 24) & INT_MASK)) & INT_MASK;
+ case 7:
+ b = (b + (((key[offset + 6] & BYTE_MASK) << 16) & INT_MASK)) & INT_MASK;
+ case 6:
+ b = (b + (((key[offset + 5] & BYTE_MASK) << 8) & INT_MASK)) & INT_MASK;
+ case 5:
+ b = (b + (key[offset + 4] & BYTE_MASK)) & INT_MASK;
+ case 4:
+ a = (a + (((key[offset + 3] & BYTE_MASK) << 24) & INT_MASK)) & INT_MASK;
+ case 3:
+ a = (a + (((key[offset + 2] & BYTE_MASK) << 16) & INT_MASK)) & INT_MASK;
+ case 2:
+ a = (a + (((key[offset + 1] & BYTE_MASK) << 8) & INT_MASK)) & INT_MASK;
+ case 1:
+ a = (a + (key[offset] & BYTE_MASK)) & INT_MASK;
+ break;
+ case 0:
+ return (int)(c & INT_MASK);
}
/*
* final -- final mixing of 3 32-bit values (a,b,c) into c
diff --git a/java/core/src/java/org/apache/orc/util/StreamWrapperFileSystem.java b/java/core/src/java/org/apache/orc/util/StreamWrapperFileSystem.java
index def194fed..d14dbd3a3 100644
--- a/java/core/src/java/org/apache/orc/util/StreamWrapperFileSystem.java
+++ b/java/core/src/java/org/apache/orc/util/StreamWrapperFileSystem.java
@@ -66,9 +66,9 @@ public class StreamWrapperFileSystem extends FileSystem {
Path path,
long fileSize,
Configuration conf) {
- this(stream,
- new FileStatus(fileSize, false, 1, OrcConf.BLOCK_SIZE.getInt(conf), 0, path),
- conf);
+ this(stream,
+ new FileStatus(fileSize, false, 1, OrcConf.BLOCK_SIZE.getInt(conf), 0, path),
+ conf);
}
@Override
diff --git a/java/core/src/java/org/threeten/extra/chrono/HybridChronology.java b/java/core/src/java/org/threeten/extra/chrono/HybridChronology.java
index 8dc775d83..682c34402 100644
--- a/java/core/src/java/org/threeten/extra/chrono/HybridChronology.java
+++ b/java/core/src/java/org/threeten/extra/chrono/HybridChronology.java
@@ -86,372 +86,379 @@ import java.util.Map;
* This class is immutable and thread-safe.
*/
public final class HybridChronology
- extends AbstractChronology
- implements Serializable {
-
- /**
- * Singleton instance for the Coptic chronology.
- */
- public static final HybridChronology INSTANCE = new HybridChronology();
- /**
- * The cutover date, October 15, 1582.
- */
- public static final LocalDate CUTOVER = LocalDate.of(1582, 10, 15);
- /**
- * The number of cutover days.
- */
- static final int CUTOVER_DAYS = 10;
- /**
- * The cutover year.
- */
- static final int CUTOVER_YEAR = 1582;
-
- /**
- * Serialization version.
- */
- private static final long serialVersionUID = 87235724675472658L;
- /**
- * Range of day-of-year.
- */
- static final ValueRange DOY_RANGE = ValueRange.of(1, 355, 366);
- /**
- * Range of aligned-week-of-month.
- */
- static final ValueRange ALIGNED_WOM_RANGE = ValueRange.of(1, 3, 5);
- /**
- * Range of aligned-week-of-year.
- */
- static final ValueRange ALIGNED_WOY_RANGE = ValueRange.of(1, 51, 53);
- /**
- * Range of proleptic-year.
- */
- static final ValueRange YEAR_RANGE = ValueRange.of(-999_998, 999_999);
- /**
- * Range of year.
- */
- static final ValueRange YOE_RANGE = ValueRange.of(1, 999_999);
- /**
- * Range of proleptic month.
- */
- static final ValueRange PROLEPTIC_MONTH_RANGE =
- ValueRange.of(-999_998 * 12L, 999_999 * 12L + 11);
-
- /**
- * Private constructor, that is public to satisfy the {@code ServiceLoader}.
- * @deprecated Use the singleton {@link #INSTANCE} instead.
- */
- @Deprecated
- public HybridChronology() {
- }
+ extends AbstractChronology
+ implements Serializable {
- /**
- * Resolve singleton.
- *
- * @return the singleton instance, not null
- */
- private Object readResolve() {
- return INSTANCE;
- }
+ /**
+ * Singleton instance for the Coptic chronology.
+ */
+ public static final HybridChronology INSTANCE = new HybridChronology();
+ /**
+ * The cutover date, October 15, 1582.
+ */
+ public static final LocalDate CUTOVER = LocalDate.of(1582, 10, 15);
+ /**
+ * The number of cutover days.
+ */
+ static final int CUTOVER_DAYS = 10;
+ /**
+ * The cutover year.
+ */
+ static final int CUTOVER_YEAR = 1582;
- //-------------------------------------------------------------------------
- /**
- * Gets the cutover date between the Julian and Gregorian calendar.
- * <p>
- * The date returned is the first date that the Gregorian (ISO) calendar applies,
- * which is Thursday 14th September 1752.
- *
- * @return the first date after the cutover, not null
- */
- public LocalDate getCutover() {
- return CUTOVER;
- }
+ /**
+ * Serialization version.
+ */
+ private static final long serialVersionUID = 87235724675472658L;
+ /**
+ * Range of day-of-year.
+ */
+ static final ValueRange DOY_RANGE = ValueRange.of(1, 355, 366);
+ /**
+ * Range of aligned-week-of-month.
+ */
+ static final ValueRange ALIGNED_WOM_RANGE = ValueRange.of(1, 3, 5);
+ /**
+ * Range of aligned-week-of-year.
+ */
+ static final ValueRange ALIGNED_WOY_RANGE = ValueRange.of(1, 51, 53);
+ /**
+ * Range of proleptic-year.
+ */
+ static final ValueRange YEAR_RANGE = ValueRange.of(-999_998, 999_999);
+ /**
+ * Range of year.
+ */
+ static final ValueRange YOE_RANGE = ValueRange.of(1, 999_999);
+ /**
+ * Range of proleptic month.
+ */
+ static final ValueRange PROLEPTIC_MONTH_RANGE =
+ ValueRange.of(-999_998 * 12L, 999_999 * 12L + 11);
- //-----------------------------------------------------------------------
- /**
- * Gets the ID of the chronology - 'Hybrid'.
- * <p>
- * The ID uniquely identifies the {@code Chronology}.
- * It can be used to lookup the {@code Chronology} using {@link Chronology#of(String)}.
- *
- * @return the chronology ID - 'Hybrid'
- * @see #getCalendarType()
- */
- @Override
- public String getId() {
- return "Hybrid";
- }
+ /**
+ * Private constructor, that is public to satisfy the {@code ServiceLoader}.
+ *
+ * @deprecated Use the singleton {@link #INSTANCE} instead.
+ */
+ @Deprecated
+ public HybridChronology() {
+ }
- /**
- * Gets the calendar type of the underlying calendar system, which returns null.
- * <p>
- * The <em>Unicode Locale Data Markup Language (LDML)</em> specification
- * does not define an identifier for this calendar system, thus null is returned.
- *
- * @return the calendar system type, null
- * @see #getId()
- */
- @Override
- public String getCalendarType() {
- return null;
- }
+ /**
+ * Resolve singleton.
+ *
+ * @return the singleton instance, not null
+ */
+ private Object readResolve() {
+ return INSTANCE;
+ }
- //-----------------------------------------------------------------------
- /**
- * Obtains a local date in British Cutover calendar system from the
- * era, year-of-era, month-of-year and day-of-month fields.
- * <p>
- * Dates in the middle of the cutover gap, such as the 10th September 1752,
- * will not throw an exception. Instead, the date will be treated as a Julian date
- * and converted to an ISO date, with the day of month shifted by 11 days.
- *
- * @param era the British Cutover era, not null
- * @param yearOfEra the year-of-era
- * @param month the month-of-year
- * @param dayOfMonth the day-of-month
- * @return the British Cutover local date, not null
- * @throws DateTimeException if unable to create the date
- * @throws ClassCastException if the {@code era} is not a {@code JulianEra}
- */
- @Override
- public HybridDate date(Era era, int yearOfEra, int month, int dayOfMonth) {
- return date(prolepticYear(era, yearOfEra), month, dayOfMonth);
- }
+ //-------------------------------------------------------------------------
- /**
- * Obtains a local date in British Cutover calendar system from the
- * proleptic-year, month-of-year and day-of-month fields.
- * <p>
- * Dates in the middle of the cutover gap, such as the 10th September 1752,
- * will not throw an exception. Instead, the date will be treated as a Julian date
- * and converted to an ISO date, with the day of month shifted by 11 days.
- *
- * @param prolepticYear the proleptic-year
- * @param month the month-of-year
- * @param dayOfMonth the day-of-month
- * @return the British Cutover local date, not null
- * @throws DateTimeException if unable to create the date
- */
- @Override
- public HybridDate date(int prolepticYear, int month, int dayOfMonth) {
- return HybridDate.of(prolepticYear, month, dayOfMonth);
- }
+ /**
+ * Gets the cutover date between the Julian and Gregorian calendar.
+ * <p>
+ * The date returned is the first date that the Gregorian (ISO) calendar applies,
+ * which is Thursday 14th September 1752.
+ *
+ * @return the first date after the cutover, not null
+ */
+ public LocalDate getCutover() {
+ return CUTOVER;
+ }
- /**
- * Obtains a local date in British Cutover calendar system from the
- * era, year-of-era and day-of-year fields.
- * <p>
- * The day-of-year takes into account the cutover, thus there are only 355 days in 1752.
- *
- * @param era the British Cutover era, not null
- * @param yearOfEra the year-of-era
- * @param dayOfYear the day-of-year
- * @return the British Cutover local date, not null
- * @throws DateTimeException if unable to create the date
- * @throws ClassCastException if the {@code era} is not a {@code JulianEra}
- */
- @Override
- public HybridDate dateYearDay(Era era, int yearOfEra, int dayOfYear) {
- return dateYearDay(prolepticYear(era, yearOfEra), dayOfYear);
- }
+ //-----------------------------------------------------------------------
- /**
- * Obtains a local date in British Cutover calendar system from the
- * proleptic-year and day-of-year fields.
- * <p>
- * The day-of-year takes into account the cutover, thus there are only 355 days in 1752.
- *
- * @param prolepticYear the proleptic-year
- * @param dayOfYear the day-of-year
- * @return the British Cutover local date, not null
- * @throws DateTimeException if unable to create the date
- */
- @Override
- public HybridDate dateYearDay(int prolepticYear, int dayOfYear) {
- return HybridDate.ofYearDay(prolepticYear, dayOfYear);
- }
+ /**
+ * Gets the ID of the chronology - 'Hybrid'.
+ * <p>
+ * The ID uniquely identifies the {@code Chronology}.
+ * It can be used to lookup the {@code Chronology} using {@link Chronology#of(String)}.
+ *
+ * @return the chronology ID - 'Hybrid'
+ * @see #getCalendarType()
+ */
+ @Override
+ public String getId() {
+ return "Hybrid";
+ }
- /**
- * Obtains a local date in the British Cutover calendar system from the epoch-day.
- *
- * @param epochDay the epoch day
- * @return the British Cutover local date, not null
- * @throws DateTimeException if unable to create the date
- */
- @Override // override with covariant return type
- public HybridDate dateEpochDay(long epochDay) {
- return HybridDate.ofEpochDay(epochDay);
- }
+ /**
+ * Gets the calendar type of the underlying calendar system, which returns null.
+ * <p>
+ * The <em>Unicode Locale Data Markup Language (LDML)</em> specification
+ * does not define an identifier for this calendar system, thus null is returned.
+ *
+ * @return the calendar system type, null
+ * @see #getId()
+ */
+ @Override
+ public String getCalendarType() {
+ return null;
+ }
- //-------------------------------------------------------------------------
- /**
- * Obtains the current British Cutover local date from the system clock in the default time-zone.
- * <p>
- * This will query the {@link Clock#systemDefaultZone() system clock} in the default
- * time-zone to obtain the current date.
- * <p>
- * Using this method will prevent the ability to use an alternate clock for testing
- * because the clock is hard-coded.
- *
- * @return the current British Cutover local date using the system clock and default time-zone, not null
- * @throws DateTimeException if unable to create the date
- */
- @Override // override with covariant return type
- public HybridDate dateNow() {
- return HybridDate.now();
- }
+ //-----------------------------------------------------------------------
- /**
- * Obtains the current British Cutover local date from the system clock in the specified time-zone.
- * <p>
- * This will query the {@link Clock#system(ZoneId) system clock} to obtain the current date.
- * Specifying the time-zone avoids dependence on the default time-zone.
- * <p>
- * Using this method will prevent the ability to use an alternate clock for testing
- * because the clock is hard-coded.
- *
- * @param zone the zone ID to use, not null
- * @return the current British Cutover local date using the system clock, not null
- * @throws DateTimeException if unable to create the date
- */
- @Override // override with covariant return type
- public HybridDate dateNow(ZoneId zone) {
- return HybridDate.now(zone);
- }
+ /**
+ * Obtains a local date in British Cutover calendar system from the
+ * era, year-of-era, month-of-year and day-of-month fields.
+ * <p>
+ * Dates in the middle of the cutover gap, such as the 10th September 1752,
+ * will not throw an exception. Instead, the date will be treated as a Julian date
+ * and converted to an ISO date, with the day of month shifted by 11 days.
+ *
+ * @param era the British Cutover era, not null
+ * @param yearOfEra the year-of-era
+ * @param month the month-of-year
+ * @param dayOfMonth the day-of-month
+ * @return the British Cutover local date, not null
+ * @throws DateTimeException if unable to create the date
+ * @throws ClassCastException if the {@code era} is not a {@code JulianEra}
+ */
+ @Override
+ public HybridDate date(Era era, int yearOfEra, int month, int dayOfMonth) {
+ return date(prolepticYear(era, yearOfEra), month, dayOfMonth);
+ }
- /**
- * Obtains the current British Cutover local date from the specified clock.
- * <p>
- * This will query the specified clock to obtain the current date - today.
- * Using this method allows the use of an alternate clock for testing.
- * The alternate clock may be introduced using {@link Clock dependency injection}.
- *
- * @param clock the clock to use, not null
- * @return the current British Cutover local date, not null
- * @throws DateTimeException if unable to create the date
- */
- @Override // override with covariant return type
- public HybridDate dateNow(Clock clock) {
- return HybridDate.now(clock);
- }
+ /**
+ * Obtains a local date in British Cutover calendar system from the
+ * proleptic-year, month-of-year and day-of-month fields.
+ * <p>
+ * Dates in the middle of the cutover gap, such as the 10th September 1752,
+ * will not throw an exception. Instead, the date will be treated as a Julian date
+ * and converted to an ISO date, with the day of month shifted by 11 days.
+ *
+ * @param prolepticYear the proleptic-year
+ * @param month the month-of-year
+ * @param dayOfMonth the day-of-month
+ * @return the British Cutover local date, not null
+ * @throws DateTimeException if unable to create the date
+ */
+ @Override
+ public HybridDate date(int prolepticYear, int month, int dayOfMonth) {
+ return HybridDate.of(prolepticYear, month, dayOfMonth);
+ }
- //-------------------------------------------------------------------------
- /**
- * Obtains a British Cutover local date from another date-time object.
- *
- * @param temporal the date-time object to convert, not null
- * @return the British Cutover local date, not null
- * @throws DateTimeException if unable to create the date
- */
- @Override
- public HybridDate date(TemporalAccessor temporal) {
- return HybridDate.from(temporal);
- }
+ /**
+ * Obtains a local date in British Cutover calendar system from the
+ * era, year-of-era and day-of-year fields.
+ * <p>
+ * The day-of-year takes into account the cutover, thus there are only 355 days in 1752.
+ *
+ * @param era the British Cutover era, not null
+ * @param yearOfEra the year-of-era
+ * @param dayOfYear the day-of-year
+ * @return the British Cutover local date, not null
+ * @throws DateTimeException if unable to create the date
+ * @throws ClassCastException if the {@code era} is not a {@code JulianEra}
+ */
+ @Override
+ public HybridDate dateYearDay(Era era, int yearOfEra, int dayOfYear) {
+ return dateYearDay(prolepticYear(era, yearOfEra), dayOfYear);
+ }
- /**
- * Obtains a British Cutover local date-time from another date-time object.
- *
- * @param temporal the date-time object to convert, not null
- * @return the British Cutover local date-time, not null
- * @throws DateTimeException if unable to create the date-time
- */
- @Override
- @SuppressWarnings("unchecked")
- public ChronoLocalDateTime<HybridDate> localDateTime(TemporalAccessor temporal) {
- return (ChronoLocalDateTime<HybridDate>) super.localDateTime(temporal);
- }
+ /**
+ * Obtains a local date in British Cutover calendar system from the
+ * proleptic-year and day-of-year fields.
+ * <p>
+ * The day-of-year takes into account the cutover, thus there are only 355 days in 1752.
+ *
+ * @param prolepticYear the proleptic-year
+ * @param dayOfYear the day-of-year
+ * @return the British Cutover local date, not null
+ * @throws DateTimeException if unable to create the date
+ */
+ @Override
+ public HybridDate dateYearDay(int prolepticYear, int dayOfYear) {
+ return HybridDate.ofYearDay(prolepticYear, dayOfYear);
+ }
- /**
- * Obtains a British Cutover zoned date-time from another date-time object.
- *
- * @param temporal the date-time object to convert, not null
- * @return the British Cutover zoned date-time, not null
- * @throws DateTimeException if unable to create the date-time
- */
- @Override
- @SuppressWarnings("unchecked")
- public ChronoZonedDateTime<HybridDate> zonedDateTime(TemporalAccessor temporal) {
- return (ChronoZonedDateTime<HybridDate>) super.zonedDateTime(temporal);
- }
+ /**
+ * Obtains a local date in the British Cutover calendar system from the epoch-day.
+ *
+ * @param epochDay the epoch day
+ * @return the British Cutover local date, not null
+ * @throws DateTimeException if unable to create the date
+ */
+ @Override // override with covariant return type
+ public HybridDate dateEpochDay(long epochDay) {
+ return HybridDate.ofEpochDay(epochDay);
+ }
- /**
- * Obtains a British Cutover zoned date-time in this chronology from an {@code Instant}.
- *
- * @param instant the instant to create the date-time from, not null
- * @param zone the time-zone, not null
- * @return the British Cutover zoned date-time, not null
- * @throws DateTimeException if the result exceeds the supported range
- */
- @Override
- @SuppressWarnings("unchecked")
- public ChronoZonedDateTime<HybridDate> zonedDateTime(Instant instant, ZoneId zone) {
- return (ChronoZonedDateTime<HybridDate>) super.zonedDateTime(instant, zone);
- }
+ //-------------------------------------------------------------------------
- //-----------------------------------------------------------------------
- /**
- * Checks if the specified year is a leap year.
- * <p>
- * The result will return the same as {@link JulianChronology#isLeapYear(long)} for
- * year 1752 and earlier, and {@link IsoChronology#isLeapYear(long)} otherwise.
- * This method does not validate the year passed in, and only has a
- * well-defined result for years in the supported range.
- *
- * @param prolepticYear the proleptic-year to check, not validated for range
- * @return true if the year is a leap year
- */
- @Override
- public boolean isLeapYear(long prolepticYear) {
- if (prolepticYear <= CUTOVER_YEAR) {
- return JulianChronology.INSTANCE.isLeapYear(prolepticYear);
- }
- return IsoChronology.INSTANCE.isLeapYear(prolepticYear);
- }
+ /**
+ * Obtains the current British Cutover local date from the system clock in the default time-zone.
+ * <p>
+ * This will query the {@link Clock#systemDefaultZone() system clock} in the default
+ * time-zone to obtain the current date.
+ * <p>
+ * Using this method will prevent the ability to use an alternate clock for testing
+ * because the clock is hard-coded.
+ *
+ * @return the current British Cutover local date using the system clock and default time-zone, not null
+ * @throws DateTimeException if unable to create the date
+ */
+ @Override // override with covariant return type
+ public HybridDate dateNow() {
+ return HybridDate.now();
+ }
- @Override
- public int prolepticYear(Era era, int yearOfEra) {
- if (era instanceof JulianEra == false) {
- throw new ClassCastException("Era must be JulianEra");
- }
- return (era == JulianEra.AD ? yearOfEra : 1 - yearOfEra);
- }
+ /**
+ * Obtains the current British Cutover local date from the system clock in the specified time-zone.
+ * <p>
+ * This will query the {@link Clock#system(ZoneId) system clock} to obtain the current date.
+ * Specifying the time-zone avoids dependence on the default time-zone.
+ * <p>
+ * Using this method will prevent the ability to use an alternate clock for testing
+ * because the clock is hard-coded.
+ *
+ * @param zone the zone ID to use, not null
+ * @return the current British Cutover local date using the system clock, not null
+ * @throws DateTimeException if unable to create the date
+ */
+ @Override // override with covariant return type
+ public HybridDate dateNow(ZoneId zone) {
+ return HybridDate.now(zone);
+ }
- @Override
- public JulianEra eraOf(int eraValue) {
- return JulianEra.of(eraValue);
- }
+ /**
+ * Obtains the current British Cutover local date from the specified clock.
+ * <p>
+ * This will query the specified clock to obtain the current date - today.
+ * Using this method allows the use of an alternate clock for testing.
+ * The alternate clock may be introduced using {@link Clock dependency injection}.
+ *
+ * @param clock the clock to use, not null
+ * @return the current British Cutover local date, not null
+ * @throws DateTimeException if unable to create the date
+ */
+ @Override // override with covariant return type
+ public HybridDate dateNow(Clock clock) {
+ return HybridDate.now(clock);
+ }
+
+ //-------------------------------------------------------------------------
+
+ /**
+ * Obtains a British Cutover local date from another date-time object.
+ *
+ * @param temporal the date-time object to convert, not null
+ * @return the British Cutover local date, not null
+ * @throws DateTimeException if unable to create the date
+ */
+ @Override
+ public HybridDate date(TemporalAccessor temporal) {
+ return HybridDate.from(temporal);
+ }
- @Override
- public List<Era> eras() {
- return Arrays.<Era>asList(JulianEra.values());
+ /**
+ * Obtains a British Cutover local date-time from another date-time object.
+ *
+ * @param temporal the date-time object to convert, not null
+ * @return the British Cutover local date-time, not null
+ * @throws DateTimeException if unable to create the date-time
+ */
+ @Override
+ @SuppressWarnings("unchecked")
+ public ChronoLocalDateTime<HybridDate> localDateTime(TemporalAccessor temporal) {
+ return (ChronoLocalDateTime<HybridDate>) super.localDateTime(temporal);
+ }
+
+ /**
+ * Obtains a British Cutover zoned date-time from another date-time object.
+ *
+ * @param temporal the date-time object to convert, not null
+ * @return the British Cutover zoned date-time, not null
+ * @throws DateTimeException if unable to create the date-time
+ */
+ @Override
+ @SuppressWarnings("unchecked")
+ public ChronoZonedDateTime<HybridDate> zonedDateTime(TemporalAccessor temporal) {
+ return (ChronoZonedDateTime<HybridDate>) super.zonedDateTime(temporal);
+ }
+
+ /**
+ * Obtains a British Cutover zoned date-time in this chronology from an {@code Instant}.
+ *
+ * @param instant the instant to create the date-time from, not null
+ * @param zone the time-zone, not null
+ * @return the British Cutover zoned date-time, not null
+ * @throws DateTimeException if the result exceeds the supported range
+ */
+ @Override
+ @SuppressWarnings("unchecked")
+ public ChronoZonedDateTime<HybridDate> zonedDateTime(Instant instant, ZoneId zone) {
+ return (ChronoZonedDateTime<HybridDate>) super.zonedDateTime(instant, zone);
+ }
+
+ //-----------------------------------------------------------------------
+
+ /**
+ * Checks if the specified year is a leap year.
+ * <p>
+ * The result will return the same as {@link JulianChronology#isLeapYear(long)} for
+ * year 1752 and earlier, and {@link IsoChronology#isLeapYear(long)} otherwise.
+ * This method does not validate the year passed in, and only has a
+ * well-defined result for years in the supported range.
+ *
+ * @param prolepticYear the proleptic-year to check, not validated for range
+ * @return true if the year is a leap year
+ */
+ @Override
+ public boolean isLeapYear(long prolepticYear) {
+ if (prolepticYear <= CUTOVER_YEAR) {
+ return JulianChronology.INSTANCE.isLeapYear(prolepticYear);
}
+ return IsoChronology.INSTANCE.isLeapYear(prolepticYear);
+ }
- //-----------------------------------------------------------------------
- @Override
- public ValueRange range(ChronoField field) {
- switch (field) {
- case DAY_OF_YEAR:
- return DOY_RANGE;
- case ALIGNED_WEEK_OF_MONTH:
- return ALIGNED_WOM_RANGE;
- case ALIGNED_WEEK_OF_YEAR:
- return ALIGNED_WOY_RANGE;
- case PROLEPTIC_MONTH:
- return PROLEPTIC_MONTH_RANGE;
- case YEAR_OF_ERA:
- return YOE_RANGE;
- case YEAR:
- return YEAR_RANGE;
- default:
- break;
- }
- return field.range();
+ @Override
+ public int prolepticYear(Era era, int yearOfEra) {
+ if (era instanceof JulianEra == false) {
+ throw new ClassCastException("Era must be JulianEra");
}
+ return (era == JulianEra.AD ? yearOfEra : 1 - yearOfEra);
+ }
- //-----------------------------------------------------------------------
- @Override // override for return type
- public HybridDate resolveDate(
- Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
- return (HybridDate) super.resolveDate(fieldValues, resolverStyle);
+ @Override
+ public JulianEra eraOf(int eraValue) {
+ return JulianEra.of(eraValue);
+ }
+
+ @Override
+ public List<Era> eras() {
+ return Arrays.<Era>asList(JulianEra.values());
+ }
+
+ //-----------------------------------------------------------------------
+ @Override
+ public ValueRange range(ChronoField field) {
+ switch (field) {
+ case DAY_OF_YEAR:
+ return DOY_RANGE;
+ case ALIGNED_WEEK_OF_MONTH:
+ return ALIGNED_WOM_RANGE;
+ case ALIGNED_WEEK_OF_YEAR:
+ return ALIGNED_WOY_RANGE;
+ case PROLEPTIC_MONTH:
+ return PROLEPTIC_MONTH_RANGE;
+ case YEAR_OF_ERA:
+ return YOE_RANGE;
+ case YEAR:
+ return YEAR_RANGE;
+ default:
+ break;
}
+ return field.range();
+ }
+
+ //-----------------------------------------------------------------------
+ @Override // override for return type
+ public HybridDate resolveDate(
+ Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
+ return (HybridDate) super.resolveDate(fieldValues, resolverStyle);
+ }
}
diff --git a/java/core/src/java/org/threeten/extra/chrono/HybridDate.java b/java/core/src/java/org/threeten/extra/chrono/HybridDate.java
index d64207eed..9e94f5178 100644
--- a/java/core/src/java/org/threeten/extra/chrono/HybridDate.java
+++ b/java/core/src/java/org/threeten/extra/chrono/HybridDate.java
@@ -54,486 +54,490 @@ import static org.threeten.extra.chrono.HybridChronology.CUTOVER_YEAR;
* identity hash code or use the distinction between equals() and ==.
*/
public final class HybridDate
- extends AbstractDate
- implements ChronoLocalDate, Serializable {
- /**
- * Serialization version.
- */
- private static final long serialVersionUID = -9626278512674L;
- /**
- * The underlying date.
- */
- private final LocalDate isoDate;
- /**
- * The underlying Julian date if before the cutover.
- */
- private final transient JulianDate julianDate;
-
- //-----------------------------------------------------------------------
- /**
- * Obtains the current {@code HybridDate} from the system clock in the default time-zone.
- * <p>
- * This will query the {@link Clock#systemDefaultZone() system clock} in the default
- * time-zone to obtain the current date.
- * <p>
- * Using this method will prevent the ability to use an alternate clock for testing
- * because the clock is hard-coded.
- *
- * @return the current date using the system clock and default time-zone, not null
- */
- public static HybridDate now() {
- return now(Clock.systemDefaultZone());
- }
-
- /**
- * Obtains the current {@code HybridDate} from the system clock in the specified time-zone.
- * <p>
- * This will query the {@link Clock#system(ZoneId) system clock} to obtain the current date.
- * Specifying the time-zone avoids dependence on the default time-zone.
- * <p>
- * Using this method will prevent the ability to use an alternate clock for testing
- * because the clock is hard-coded.
- *
- * @param zone the zone ID to use, not null
- * @return the current date using the system clock, not null
- */
- public static HybridDate now(ZoneId zone) {
- return now(Clock.system(zone));
- }
-
- /**
- * Obtains the current {@code HybridDate} from the specified clock.
- * <p>
- * This will query the specified clock to obtain the current date - today.
- * Using this method allows the use of an alternate clock for testing.
- * The alternate clock may be introduced using {@linkplain Clock dependency injection}.
- *
- * @param clock the clock to use, not null
- * @return the current date, not null
- * @throws DateTimeException if the current date cannot be obtained
- */
- public static HybridDate now(Clock clock) {
- return new HybridDate(LocalDate.now(clock));
- }
-
- /**
- * Obtains a {@code HybridDate} representing a date in the British Cutover calendar
- * system from the proleptic-year, month-of-year and day-of-month fields.
- * <p>
- * This returns a {@code HybridDate} with the specified fields.
- * <p>
- * Dates in the middle of the cutover gap, such as the 10th September 1752,
- * will not throw an exception. Instead, the date will be treated as a Julian date
- * and converted to an ISO date, with the day of month shifted by 11 days.
- * <p>
- * Invalid dates, such as September 31st will throw an exception.
- *
- * @param prolepticYear the British Cutover proleptic-year
- * @param month the British Cutover month-of-year, from 1 to 12
- * @param dayOfMonth the British Cutover day-of-month, from 1 to 31
- * @return the date in British Cutover calendar system, not null
- * @throws DateTimeException if the value of any field is out of range,
- * or if the day-of-month is invalid for the month-year
- */
- public static HybridDate of(int prolepticYear, int month, int dayOfMonth) {
- return HybridDate.create(prolepticYear, month, dayOfMonth);
- }
-
- /**
- * Obtains a {@code HybridDate} from a temporal object.
- * <p>
- * This obtains a date in the British Cutover calendar system based on the specified temporal.
- * A {@code TemporalAccessor} represents an arbitrary set of date and time information,
- * which this factory converts to an instance of {@code HybridDate}.
- * <p>
- * The conversion uses the {@link ChronoField#EPOCH_DAY EPOCH_DAY}
- * field, which is standardized across calendar systems.
- * <p>
- * This method matches the signature of the functional interface {@link TemporalQuery}
- * allowing it to be used as a query via method reference, {@code HybridDate::from}.
- *
- * @param temporal the temporal object to convert, not null
- * @return the date in British Cutover calendar system, not null
- * @throws DateTimeException if unable to convert to a {@code HybridDate}
- */
- public static HybridDate from(TemporalAccessor temporal) {
- if (temporal instanceof HybridDate) {
- return (HybridDate) temporal;
- }
- return new HybridDate(LocalDate.from(temporal));
- }
-
- //-----------------------------------------------------------------------
- /**
- * Obtains a {@code HybridDate} representing a date in the British Cutover calendar
- * system from the proleptic-year and day-of-year fields.
- * <p>
- * This returns a {@code HybridDate} with the specified fields.
- * The day must be valid for the year, otherwise an exception will be thrown.
- *
- * @param prolepticYear the British Cutover proleptic-year
- * @param dayOfYear the British Cutover day-of-year, from 1 to 366
- * @return the date in British Cutover calendar system, not null
- * @throws DateTimeException if the value of any field is out of range,
- * or if the day-of-year is invalid for the year
- */
- static HybridDate ofYearDay(int prolepticYear, int dayOfYear) {
- if (prolepticYear < CUTOVER_YEAR || (prolepticYear == CUTOVER_YEAR && dayOfYear <= 246)) {
- JulianDate julian = JulianDate.ofYearDay(prolepticYear, dayOfYear);
- return new HybridDate(julian);
- } else if (prolepticYear == CUTOVER_YEAR) {
- LocalDate iso = LocalDate.ofYearDay(prolepticYear, dayOfYear + CUTOVER_DAYS);
- return new HybridDate(iso);
- } else {
- LocalDate iso = LocalDate.ofYearDay(prolepticYear, dayOfYear);
- return new HybridDate(iso);
- }
- }
-
- /**
- * Obtains a {@code HybridDate} representing a date in the British Cutover calendar
- * system from the epoch-day.
- *
- * @param epochDay the epoch day to convert based on 1970-01-01 (ISO)
- * @return the date in British Cutover calendar system, not null
- * @throws DateTimeException if the epoch-day is out of range
- */
- static HybridDate ofEpochDay(final long epochDay) {
- return new HybridDate(LocalDate.ofEpochDay(epochDay));
- }
-
- /**
- * Creates a {@code HybridDate} validating the input.
- *
- * @param prolepticYear the British Cutover proleptic-year
- * @param month the British Cutover month-of-year, from 1 to 12
- * @param dayOfMonth the British Cutover day-of-month, from 1 to 31
- * @return the date in British Cutover calendar system, not null
- * @throws DateTimeException if the value of any field is out of range,
- * or if the day-of-month is invalid for the month-year
- */
- static HybridDate create(int prolepticYear, int month, int dayOfMonth) {
- if (prolepticYear < CUTOVER_YEAR) {
- JulianDate julian = JulianDate.of(prolepticYear, month, dayOfMonth);
- return new HybridDate(julian);
- } else {
- LocalDate iso = LocalDate.of(prolepticYear, month, dayOfMonth);
- if (iso.isBefore(CUTOVER)) {
- JulianDate julian = JulianDate.of(prolepticYear, month, dayOfMonth);
- return new HybridDate(julian);
- }
- return new HybridDate(iso);
- }
- }
-
- //-----------------------------------------------------------------------
- /**
- * Creates an instance from an ISO date.
- *
- * @param isoDate the standard local date, not null
- */
- HybridDate(LocalDate isoDate) {
- Objects.requireNonNull(isoDate, "isoDate");
- this.isoDate = isoDate;
- this.julianDate = (isoDate.isBefore(CUTOVER) ? JulianDate.from(isoDate) : null);
- }
-
- /**
- * Creates an instance from a Julian date.
- *
- * @param julianDate the Julian date before the cutover, not null
- */
- HybridDate(JulianDate julianDate) {
- Objects.requireNonNull(julianDate, "julianDate");
- this.isoDate = LocalDate.from(julianDate);
- this.julianDate = (isoDate.isBefore(CUTOVER) ? julianDate : null);
- }
-
- /**
- * Validates the object.
- *
- * @return the resolved date, not null
- */
- private Object readResolve() {
- return new HybridDate(isoDate);
- }
-
- //-----------------------------------------------------------------------
- private boolean isCutoverYear() {
- return isoDate.getYear() == CUTOVER_YEAR && isoDate.getDayOfYear() > CUTOVER_DAYS;
- }
-
- private boolean isCutoverMonth() {
- return isoDate.getYear() == CUTOVER_YEAR &&
- isoDate.getMonthValue() == 9 && isoDate.getDayOfMonth() > CUTOVER_DAYS;
- }
-
- //-------------------------------------------------------------------------
- @Override
- int getAlignedDayOfWeekInMonth() {
- if (isCutoverMonth() && julianDate == null) {
- return ((getDayOfMonth() - 1 - CUTOVER_DAYS) % lengthOfWeek()) + 1;
- }
- return super.getAlignedDayOfWeekInMonth();
- }
-
- @Override
- int getAlignedWeekOfMonth() {
- if (isCutoverMonth() && julianDate == null) {
- return ((getDayOfMonth() - 1 - CUTOVER_DAYS) / lengthOfWeek()) + 1;
- }
- return super.getAlignedWeekOfMonth();
- }
-
- @Override
- int getProlepticYear() {
- return (julianDate != null ? julianDate.getProlepticYear() : isoDate.getYear());
- }
-
- @Override
- int getMonth() {
- return (julianDate != null ? julianDate.getMonth() : isoDate.getMonthValue());
- }
-
- @Override
- int getDayOfMonth() {
- return (julianDate != null ? julianDate.getDayOfMonth() : isoDate.getDayOfMonth());
- }
-
- @Override
- int getDayOfYear() {
- if (julianDate != null) {
- return julianDate.getDayOfYear();
- }
- if (isoDate.getYear() == CUTOVER_YEAR) {
- return isoDate.getDayOfYear() - CUTOVER_DAYS;
- }
- return isoDate.getDayOfYear();
- }
-
- @Override
- public ValueRange rangeChrono(ChronoField field) {
- switch (field) {
- case DAY_OF_MONTH:
- // short length, but value range still 1 to 30
- if (isCutoverMonth()) {
- return ValueRange.of(1, 30);
- }
- return ValueRange.of(1, lengthOfMonth());
- case DAY_OF_YEAR:
- // 1 to 355 in cutover year, otherwise 1 to 365/366
- return ValueRange.of(1, lengthOfYear());
- case ALIGNED_WEEK_OF_MONTH:
- // 1 to 3 in cutover month, otherwise 1 to 4/5
- return rangeAlignedWeekOfMonth();
- case ALIGNED_WEEK_OF_YEAR:
- // 1 to 51 in cutover year, otherwise 1 to 53
- if (isCutoverYear()) {
- return ValueRange.of(1, 51);
- }
- return ChronoField.ALIGNED_WEEK_OF_YEAR.range();
- default:
- return getChronology().range(field);
- }
- }
-
- @Override
- ValueRange rangeAlignedWeekOfMonth() {
+ extends AbstractDate
+ implements ChronoLocalDate, Serializable {
+ /**
+ * Serialization version.
+ */
+ private static final long serialVersionUID = -9626278512674L;
+ /**
+ * The underlying date.
+ */
+ private final LocalDate isoDate;
+ /**
+ * The underlying Julian date if before the cutover.
+ */
+ private final transient JulianDate julianDate;
+
+ //-----------------------------------------------------------------------
+
+ /**
+ * Obtains the current {@code HybridDate} from the system clock in the default time-zone.
+ * <p>
+ * This will query the {@link Clock#systemDefaultZone() system clock} in the default
+ * time-zone to obtain the current date.
+ * <p>
+ * Using this method will prevent the ability to use an alternate clock for testing
+ * because the clock is hard-coded.
+ *
+ * @return the current date using the system clock and default time-zone, not null
+ */
+ public static HybridDate now() {
+ return now(Clock.systemDefaultZone());
+ }
+
+ /**
+ * Obtains the current {@code HybridDate} from the system clock in the specified time-zone.
+ * <p>
+ * This will query the {@link Clock#system(ZoneId) system clock} to obtain the current date.
+ * Specifying the time-zone avoids dependence on the default time-zone.
+ * <p>
+ * Using this method will prevent the ability to use an alternate clock for testing
+ * because the clock is hard-coded.
+ *
+ * @param zone the zone ID to use, not null
+ * @return the current date using the system clock, not null
+ */
+ public static HybridDate now(ZoneId zone) {
+ return now(Clock.system(zone));
+ }
+
+ /**
+ * Obtains the current {@code HybridDate} from the specified clock.
+ * <p>
+ * This will query the specified clock to obtain the current date - today.
+ * Using this method allows the use of an alternate clock for testing.
+ * The alternate clock may be introduced using {@linkplain Clock dependency injection}.
+ *
+ * @param clock the clock to use, not null
+ * @return the current date, not null
+ * @throws DateTimeException if the current date cannot be obtained
+ */
+ public static HybridDate now(Clock clock) {
+ return new HybridDate(LocalDate.now(clock));
+ }
+
+ /**
+ * Obtains a {@code HybridDate} representing a date in the British Cutover calendar
+ * system from the proleptic-year, month-of-year and day-of-month fields.
+ * <p>
+ * This returns a {@code HybridDate} with the specified fields.
+ * <p>
+ * Dates in the middle of the cutover gap, such as the 10th September 1752,
+ * will not throw an exception. Instead, the date will be treated as a Julian date
+ * and converted to an ISO date, with the day of month shifted by 11 days.
+ * <p>
+ * Invalid dates, such as September 31st will throw an exception.
+ *
+ * @param prolepticYear the British Cutover proleptic-year
+ * @param month the British Cutover month-of-year, from 1 to 12
+ * @param dayOfMonth the British Cutover day-of-month, from 1 to 31
+ * @return the date in British Cutover calendar system, not null
+ * @throws DateTimeException if the value of any field is out of range,
+ * or if the day-of-month is invalid for the month-year
+ */
+ public static HybridDate of(int prolepticYear, int month, int dayOfMonth) {
+ return HybridDate.create(prolepticYear, month, dayOfMonth);
+ }
+
+ /**
+ * Obtains a {@code HybridDate} from a temporal object.
+ * <p>
+ * This obtains a date in the British Cutover calendar system based on the specified temporal.
+ * A {@code TemporalAccessor} represents an arbitrary set of date and time information,
+ * which this factory converts to an instance of {@code HybridDate}.
+ * <p>
+ * The conversion uses the {@link ChronoField#EPOCH_DAY EPOCH_DAY}
+ * field, which is standardized across calendar systems.
+ * <p>
+ * This method matches the signature of the functional interface {@link TemporalQuery}
+ * allowing it to be used as a query via method reference, {@code HybridDate::from}.
+ *
+ * @param temporal the temporal object to convert, not null
+ * @return the date in British Cutover calendar system, not null
+ * @throws DateTimeException if unable to convert to a {@code HybridDate}
+ */
+ public static HybridDate from(TemporalAccessor temporal) {
+ if (temporal instanceof HybridDate) {
+ return (HybridDate) temporal;
+ }
+ return new HybridDate(LocalDate.from(temporal));
+ }
+
+ //-----------------------------------------------------------------------
+
+ /**
+ * Obtains a {@code HybridDate} representing a date in the British Cutover calendar
+ * system from the proleptic-year and day-of-year fields.
+ * <p>
+ * This returns a {@code HybridDate} with the specified fields.
+ * The day must be valid for the year, otherwise an exception will be thrown.
+ *
+ * @param prolepticYear the British Cutover proleptic-year
+ * @param dayOfYear the British Cutover day-of-year, from 1 to 366
+ * @return the date in British Cutover calendar system, not null
+ * @throws DateTimeException if the value of any field is out of range,
+ * or if the day-of-year is invalid for the year
+ */
+ static HybridDate ofYearDay(int prolepticYear, int dayOfYear) {
+ if (prolepticYear < CUTOVER_YEAR || (prolepticYear == CUTOVER_YEAR && dayOfYear <= 246)) {
+ JulianDate julian = JulianDate.ofYearDay(prolepticYear, dayOfYear);
+ return new HybridDate(julian);
+ } else if (prolepticYear == CUTOVER_YEAR) {
+ LocalDate iso = LocalDate.ofYearDay(prolepticYear, dayOfYear + CUTOVER_DAYS);
+ return new HybridDate(iso);
+ } else {
+ LocalDate iso = LocalDate.ofYearDay(prolepticYear, dayOfYear);
+ return new HybridDate(iso);
+ }
+ }
+
+ /**
+ * Obtains a {@code HybridDate} representing a date in the British Cutover calendar
+ * system from the epoch-day.
+ *
+ * @param epochDay the epoch day to convert based on 1970-01-01 (ISO)
+ * @return the date in British Cutover calendar system, not null
+ * @throws DateTimeException if the epoch-day is out of range
+ */
+ static HybridDate ofEpochDay(final long epochDay) {
+ return new HybridDate(LocalDate.ofEpochDay(epochDay));
+ }
+
+ /**
+ * Creates a {@code HybridDate} validating the input.
+ *
+ * @param prolepticYear the British Cutover proleptic-year
+ * @param month the British Cutover month-of-year, from 1 to 12
+ * @param dayOfMonth the British Cutover day-of-month, from 1 to 31
+ * @return the date in British Cutover calendar system, not null
+ * @throws DateTimeException if the value of any field is out of range,
+ * or if the day-of-month is invalid for the month-year
+ */
+ static HybridDate create(int prolepticYear, int month, int dayOfMonth) {
+ if (prolepticYear < CUTOVER_YEAR) {
+ JulianDate julian = JulianDate.of(prolepticYear, month, dayOfMonth);
+ return new HybridDate(julian);
+ } else {
+ LocalDate iso = LocalDate.of(prolepticYear, month, dayOfMonth);
+ if (iso.isBefore(CUTOVER)) {
+ JulianDate julian = JulianDate.of(prolepticYear, month, dayOfMonth);
+ return new HybridDate(julian);
+ }
+ return new HybridDate(iso);
+ }
+ }
+
+ //-----------------------------------------------------------------------
+
+ /**
+ * Creates an instance from an ISO date.
+ *
+ * @param isoDate the standard local date, not null
+ */
+ HybridDate(LocalDate isoDate) {
+ Objects.requireNonNull(isoDate, "isoDate");
+ this.isoDate = isoDate;
+ this.julianDate = (isoDate.isBefore(CUTOVER) ? JulianDate.from(isoDate) : null);
+ }
+
+ /**
+ * Creates an instance from a Julian date.
+ *
+ * @param julianDate the Julian date before the cutover, not null
+ */
+ HybridDate(JulianDate julianDate) {
+ Objects.requireNonNull(julianDate, "julianDate");
+ this.isoDate = LocalDate.from(julianDate);
+ this.julianDate = (isoDate.isBefore(CUTOVER) ? julianDate : null);
+ }
+
+ /**
+ * Validates the object.
+ *
+ * @return the resolved date, not null
+ */
+ private Object readResolve() {
+ return new HybridDate(isoDate);
+ }
+
+ //-----------------------------------------------------------------------
+ private boolean isCutoverYear() {
+ return isoDate.getYear() == CUTOVER_YEAR && isoDate.getDayOfYear() > CUTOVER_DAYS;
+ }
+
+ private boolean isCutoverMonth() {
+ return isoDate.getYear() == CUTOVER_YEAR &&
+ isoDate.getMonthValue() == 9 && isoDate.getDayOfMonth() > CUTOVER_DAYS;
+ }
+
+ //-------------------------------------------------------------------------
+ @Override
+ int getAlignedDayOfWeekInMonth() {
+ if (isCutoverMonth() && julianDate == null) {
+ return ((getDayOfMonth() - 1 - CUTOVER_DAYS) % lengthOfWeek()) + 1;
+ }
+ return super.getAlignedDayOfWeekInMonth();
+ }
+
+ @Override
+ int getAlignedWeekOfMonth() {
+ if (isCutoverMonth() && julianDate == null) {
+ return ((getDayOfMonth() - 1 - CUTOVER_DAYS) / lengthOfWeek()) + 1;
+ }
+ return super.getAlignedWeekOfMonth();
+ }
+
+ @Override
+ int getProlepticYear() {
+ return (julianDate != null ? julianDate.getProlepticYear() : isoDate.getYear());
+ }
+
+ @Override
+ int getMonth() {
+ return (julianDate != null ? julianDate.getMonth() : isoDate.getMonthValue());
+ }
+
+ @Override
+ int getDayOfMonth() {
+ return (julianDate != null ? julianDate.getDayOfMonth() : isoDate.getDayOfMonth());
+ }
+
+ @Override
+ int getDayOfYear() {
+ if (julianDate != null) {
+ return julianDate.getDayOfYear();
+ }
+ if (isoDate.getYear() == CUTOVER_YEAR) {
+ return isoDate.getDayOfYear() - CUTOVER_DAYS;
+ }
+ return isoDate.getDayOfYear();
+ }
+
+ @Override
+ public ValueRange rangeChrono(ChronoField field) {
+ switch (field) {
+ case DAY_OF_MONTH:
+ // short length, but value range still 1 to 30
if (isCutoverMonth()) {
- return ValueRange.of(1, 3);
- }
- return ValueRange.of(1, getMonth() == 2 && isLeapYear() == false ? 4 : 5);
- }
-
- @Override
- HybridDate resolvePrevious(int year, int month, int dayOfMonth) {
- switch (month) {
- case 2:
- dayOfMonth = Math.min(dayOfMonth, getChronology().isLeapYear(year) ? 29 : 28);
- break;
- case 4:
- case 6:
- case 9:
- case 11:
- dayOfMonth = Math.min(dayOfMonth, 30);
- break;
- default:
- break;
+ return ValueRange.of(1, 30);
}
- return create(year, month, dayOfMonth);
- }
-
- //-----------------------------------------------------------------------
- /**
- * Gets the chronology of this date, which is the British Cutover calendar system.
- * <p>
- * The {@code Chronology} represents the calendar system in use.
- * The era and other fields in {@link ChronoField} are defined by the chronology.
- *
- * @return the British Cutover chronology, not null
- */
- @Override
- public HybridChronology getChronology() {
- return HybridChronology.INSTANCE;
- }
-
- /**
- * Gets the era applicable at this date.
- * <p>
- * The British Cutover calendar system has two eras, 'AD' and 'BC',
- * defined by {@link JulianEra}.
- *
- * @return the era applicable at this date, not null
- */
- @Override
- public JulianEra getEra() {
- return (getProlepticYear() >= 1 ? JulianEra.AD : JulianEra.BC);
- }
-
- /**
- * Returns the length of the month represented by this date.
- * <p>
- * This returns the length of the month in days.
- * This takes into account the cutover, returning 19 in September 1752.
- *
- * @return the length of the month in days, from 19 to 31
- */
- @Override
- public int lengthOfMonth() {
- if (isCutoverMonth()) {
- return 19;
- }
- return (julianDate != null ? julianDate.lengthOfMonth() : isoDate.lengthOfMonth());
- }
-
- /**
- * Returns the length of the year represented by this date.
- * <p>
- * This returns the length of the year in days.
- * This takes into account the cutover, returning 355 in 1752.
- *
- * @return the length of the month in days, from 19 to 31
- */
- @Override
- public int lengthOfYear() {
+ return ValueRange.of(1, lengthOfMonth());
+ case DAY_OF_YEAR:
+ // 1 to 355 in cutover year, otherwise 1 to 365/366
+ return ValueRange.of(1, lengthOfYear());
+ case ALIGNED_WEEK_OF_MONTH:
+ // 1 to 3 in cutover month, otherwise 1 to 4/5
+ return rangeAlignedWeekOfMonth();
+ case ALIGNED_WEEK_OF_YEAR:
+ // 1 to 51 in cutover year, otherwise 1 to 53
if (isCutoverYear()) {
- return 355;
- }
- return (julianDate != null ? julianDate.lengthOfYear() : isoDate.lengthOfYear());
- }
-
- //-------------------------------------------------------------------------
- @Override
- public HybridDate with(TemporalAdjuster adjuster) {
- return (HybridDate) adjuster.adjustInto(this);
- }
-
- @Override
- public HybridDate with(TemporalField field, long newValue) {
- return (HybridDate) super.with(field, newValue);
- }
-
- //-----------------------------------------------------------------------
- @Override
- public HybridDate plus(TemporalAmount amount) {
- return (HybridDate) amount.addTo(this);
- }
-
- @Override
- public HybridDate plus(long amountToAdd, TemporalUnit unit) {
- return (HybridDate) super.plus(amountToAdd, unit);
- }
-
- @Override
- public HybridDate minus(TemporalAmount amount) {
- return (HybridDate) amount.subtractFrom(this);
- }
-
- @Override
- public HybridDate minus(long amountToSubtract, TemporalUnit unit) {
- return (amountToSubtract == Long.MIN_VALUE ?
- plus(Long.MAX_VALUE, unit).plus(1, unit) : plus(-amountToSubtract, unit));
- }
-
- //-------------------------------------------------------------------------
- @Override // for covariant return type
- @SuppressWarnings("unchecked")
- public ChronoLocalDateTime<HybridDate> atTime(LocalTime localTime) {
- return (ChronoLocalDateTime<HybridDate>) super.atTime(localTime);
- }
-
- @Override
- public long until(Temporal endExclusive, TemporalUnit unit) {
- return super.until(HybridDate.from(endExclusive), unit);
- }
-
- @Override
- public ChronoPeriod until(ChronoLocalDate endDateExclusive) {
- HybridDate end = HybridDate.from(endDateExclusive);
- long totalMonths = end.getProlepticMonth() - this.getProlepticMonth(); // safe
- int days = end.getDayOfMonth() - this.getDayOfMonth();
- if (totalMonths == 0 && isCutoverMonth()) {
- if (julianDate != null && end.julianDate == null) {
- days -= CUTOVER_DAYS;
- } else if (julianDate == null && end.julianDate != null) {
- days += CUTOVER_DAYS;
- }
- } else if (totalMonths > 0) {
- if (julianDate != null && end.julianDate == null) {
- AbstractDate calcDate = this.plusMonths(totalMonths);
- days = (int) (end.toEpochDay() - calcDate.toEpochDay()); // safe
- }
- if (days < 0) {
- totalMonths--;
- AbstractDate calcDate = this.plusMonths(totalMonths);
- days = (int) (end.toEpochDay() - calcDate.toEpochDay()); // safe
- }
- } else if (totalMonths < 0 && days > 0) {
- totalMonths++;
- AbstractDate calcDate = this.plusMonths(totalMonths);
- days = (int) (end.toEpochDay() - calcDate.toEpochDay()); // safe
+ return ValueRange.of(1, 51);
}
- int years = Math.toIntExact(totalMonths / lengthOfYearInMonths()); // safe
- int months = (int) (totalMonths % lengthOfYearInMonths()); // safe
- return getChronology().period(years, months, days);
- }
-
- //-----------------------------------------------------------------------
- @Override
- public long toEpochDay() {
- return isoDate.toEpochDay();
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public <R> R query(TemporalQuery<R> query) {
- if (query == TemporalQueries.localDate()) {
- return (R) isoDate;
- }
- return super.query(query);
- }
-
- //-------------------------------------------------------------------------
- @Override
- public boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (obj instanceof HybridDate) {
- HybridDate otherDate = (HybridDate) obj;
- return this.isoDate.equals(otherDate.isoDate);
- }
- return false;
- }
-
- /**
- * A hash code for this date.
- *
- * @return a suitable hash code based only on the Chronology and the date
- */
- @Override
- public int hashCode() {
- return getChronology().getId().hashCode() ^ isoDate.hashCode();
- }
+ return ChronoField.ALIGNED_WEEK_OF_YEAR.range();
+ default:
+ return getChronology().range(field);
+ }
+ }
+
+ @Override
+ ValueRange rangeAlignedWeekOfMonth() {
+ if (isCutoverMonth()) {
+ return ValueRange.of(1, 3);
+ }
+ return ValueRange.of(1, getMonth() == 2 && isLeapYear() == false ? 4 : 5);
+ }
+
+ @Override
+ HybridDate resolvePrevious(int year, int month, int dayOfMonth) {
+ switch (month) {
+ case 2:
+ dayOfMonth = Math.min(dayOfMonth, getChronology().isLeapYear(year) ? 29 : 28);
+ break;
+ case 4:
+ case 6:
+ case 9:
+ case 11:
+ dayOfMonth = Math.min(dayOfMonth, 30);
+ break;
+ default:
+ break;
+ }
+ return create(year, month, dayOfMonth);
+ }
+
+ //-----------------------------------------------------------------------
+
+ /**
+ * Gets the chronology of this date, which is the British Cutover calendar system.
+ * <p>
+ * The {@code Chronology} represents the calendar system in use.
+ * The era and other fields in {@link ChronoField} are defined by the chronology.
+ *
+ * @return the British Cutover chronology, not null
+ */
+ @Override
+ public HybridChronology getChronology() {
+ return HybridChronology.INSTANCE;
+ }
+
+ /**
+ * Gets the era applicable at this date.
+ * <p>
+ * The British Cutover calendar system has two eras, 'AD' and 'BC',
+ * defined by {@link JulianEra}.
+ *
+ * @return the era applicable at this date, not null
+ */
+ @Override
+ public JulianEra getEra() {
+ return (getProlepticYear() >= 1 ? JulianEra.AD : JulianEra.BC);
+ }
+
+ /**
+ * Returns the length of the month represented by this date.
+ * <p>
+ * This returns the length of the month in days.
+ * This takes into account the cutover, returning 19 in September 1752.
+ *
+ * @return the length of the month in days, from 19 to 31
+ */
+ @Override
+ public int lengthOfMonth() {
+ if (isCutoverMonth()) {
+ return 19;
+ }
+ return (julianDate != null ? julianDate.lengthOfMonth() : isoDate.lengthOfMonth());
+ }
+
+ /**
+ * Returns the length of the year represented by this date.
+ * <p>
+ * This returns the length of the year in days.
+ * This takes into account the cutover, returning 355 in 1752.
+ *
+ * @return the length of the month in days, from 19 to 31
+ */
+ @Override
+ public int lengthOfYear() {
+ if (isCutoverYear()) {
+ return 355;
+ }
+ return (julianDate != null ? julianDate.lengthOfYear() : isoDate.lengthOfYear());
+ }
+
+ //-------------------------------------------------------------------------
+ @Override
+ public HybridDate with(TemporalAdjuster adjuster) {
+ return (HybridDate) adjuster.adjustInto(this);
+ }
+
+ @Override
+ public HybridDate with(TemporalField field, long newValue) {
+ return (HybridDate) super.with(field, newValue);
+ }
+
+ //-----------------------------------------------------------------------
+ @Override
+ public HybridDate plus(TemporalAmount amount) {
+ return (HybridDate) amount.addTo(this);
+ }
+
+ @Override
+ public HybridDate plus(long amountToAdd, TemporalUnit unit) {
+ return (HybridDate) super.plus(amountToAdd, unit);
+ }
+
+ @Override
+ public HybridDate minus(TemporalAmount amount) {
+ return (HybridDate) amount.subtractFrom(this);
+ }
+
+ @Override
+ public HybridDate minus(long amountToSubtract, TemporalUnit unit) {
+ return (amountToSubtract == Long.MIN_VALUE ?
+ plus(Long.MAX_VALUE, unit).plus(1, unit) : plus(-amountToSubtract, unit));
+ }
+
+ //-------------------------------------------------------------------------
+ @Override // for covariant return type
+ @SuppressWarnings("unchecked")
+ public ChronoLocalDateTime<HybridDate> atTime(LocalTime localTime) {
+ return (ChronoLocalDateTime<HybridDate>) super.atTime(localTime);
+ }
+
+ @Override
+ public long until(Temporal endExclusive, TemporalUnit unit) {
+ return super.until(HybridDate.from(endExclusive), unit);
+ }
+
+ @Override
+ public ChronoPeriod until(ChronoLocalDate endDateExclusive) {
+ HybridDate end = HybridDate.from(endDateExclusive);
+ long totalMonths = end.getProlepticMonth() - this.getProlepticMonth(); // safe
+ int days = end.getDayOfMonth() - this.getDayOfMonth();
+ if (totalMonths == 0 && isCutoverMonth()) {
+ if (julianDate != null && end.julianDate == null) {
+ days -= CUTOVER_DAYS;
+ } else if (julianDate == null && end.julianDate != null) {
+ days += CUTOVER_DAYS;
+ }
+ } else if (totalMonths > 0) {
+ if (julianDate != null && end.julianDate == null) {
+ AbstractDate calcDate = this.plusMonths(totalMonths);
+ days = (int) (end.toEpochDay() - calcDate.toEpochDay()); // safe
+ }
+ if (days < 0) {
+ totalMonths--;
+ AbstractDate calcDate = this.plusMonths(totalMonths);
+ days = (int) (end.toEpochDay() - calcDate.toEpochDay()); // safe
+ }
+ } else if (totalMonths < 0 && days > 0) {
+ totalMonths++;
+ AbstractDate calcDate = this.plusMonths(totalMonths);
+ days = (int) (end.toEpochDay() - calcDate.toEpochDay()); // safe
+ }
+ int years = Math.toIntExact(totalMonths / lengthOfYearInMonths()); // safe
+ int months = (int) (totalMonths % lengthOfYearInMonths()); // safe
+ return getChronology().period(years, months, days);
+ }
+
+ //-----------------------------------------------------------------------
+ @Override
+ public long toEpochDay() {
+ return isoDate.toEpochDay();
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public <R> R query(TemporalQuery<R> query) {
+ if (query == TemporalQueries.localDate()) {
+ return (R) isoDate;
+ }
+ return super.query(query);
+ }
+
+ //-------------------------------------------------------------------------
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof HybridDate) {
+ HybridDate otherDate = (HybridDate) obj;
+ return this.isoDate.equals(otherDate.isoDate);
+ }
+ return false;
+ }
+
+ /**
+ * A hash code for this date.
+ *
+ * @return a suitable hash code based only on the Chronology and the date
+ */
+ @Override
+ public int hashCode() {
+ return getChronology().getId().hashCode() ^ isoDate.hashCode();
+ }
}
diff --git a/java/examples/src/java/org/apache/orc/examples/CoreWriter.java b/java/examples/src/java/org/apache/orc/examples/CoreWriter.java
index 845c87a4c..3a5cf553b 100644
--- a/java/examples/src/java/org/apache/orc/examples/CoreWriter.java
+++ b/java/examples/src/java/org/apache/orc/examples/CoreWriter.java
@@ -32,8 +32,7 @@ import java.nio.charset.StandardCharsets;
public class CoreWriter {
public static void main(Configuration conf, String[] args) throws IOException {
- TypeDescription schema =
- TypeDescription.fromString("struct<x:int,y:string>");
+ TypeDescription schema = TypeDescription.fromString("struct<x:int,y:string>");
Writer writer = OrcFile.createWriter(new Path("my-file.orc"),
OrcFile.writerOptions(conf)
.setSchema(schema));
diff --git a/java/mapreduce/src/java/org/apache/orc/mapred/OrcInputFormat.java b/java/mapreduce/src/java/org/apache/orc/mapred/OrcInputFormat.java
index e8781a791..ddb6a6ac2 100644
--- a/java/mapreduce/src/java/org/apache/orc/mapred/OrcInputFormat.java
+++ b/java/mapreduce/src/java/org/apache/orc/mapred/OrcInputFormat.java
@@ -144,17 +144,16 @@ public class OrcInputFormat<V extends WritableComparable>
}
@Override
- public RecordReader<NullWritable, V>
- getRecordReader(InputSplit inputSplit,
- JobConf conf,
- Reporter reporter) throws IOException {
+ public RecordReader<NullWritable, V> getRecordReader(InputSplit inputSplit,
+ JobConf conf,
+ Reporter reporter) throws IOException {
FileSplit split = (FileSplit) inputSplit;
Reader file = OrcFile.createReader(split.getPath(),
OrcFile.readerOptions(conf)
.maxLength(OrcConf.MAX_FILE_LENGTH.getLong(conf)));
//Mapreduce supports selected vector
Reader.Options options = buildOptions(conf, file, split.getStart(), split.getLength())
- .useSelected(true);
+ .useSelected(true);
return new OrcMapredRecordReader<>(file, options);
}
diff --git a/java/mapreduce/src/java/org/apache/orc/mapred/OrcMapredRecordWriter.java b/java/mapreduce/src/java/org/apache/orc/mapred/OrcMapredRecordWriter.java
index 6cf7539f1..3f654e7e6 100644
--- a/java/mapreduce/src/java/org/apache/orc/mapred/OrcMapredRecordWriter.java
+++ b/java/mapreduce/src/java/org/apache/orc/mapred/OrcMapredRecordWriter.java
@@ -94,9 +94,8 @@ public class OrcMapredRecordWriter<V extends Writable>
* @param result the list to be appended to
* @param vector the column vector to scan
*/
- private static
- void addVariableLengthColumns(List<MultiValuedColumnVector> result,
- ColumnVector vector) {
+ private static void addVariableLengthColumns(List<MultiValuedColumnVector> result,
+ ColumnVector vector) {
switch (vector.type) {
case LIST: {
ListColumnVector cv = (ListColumnVector) vector;
@@ -133,9 +132,8 @@ public class OrcMapredRecordWriter<V extends Writable>
* @param result the list to be appended to
* @param batch the batch to scan
*/
- public static
- void addVariableLengthColumns(List<MultiValuedColumnVector> result,
- VectorizedRowBatch batch) {
+ public static void addVariableLengthColumns(List<MultiValuedColumnVector> result,
+ VectorizedRowBatch batch) {
for(ColumnVector cv: batch.cols) {
addVariableLengthColumns(result, cv);
}
diff --git a/java/mapreduce/src/java/org/apache/orc/mapreduce/OrcInputFormat.java b/java/mapreduce/src/java/org/apache/orc/mapreduce/OrcInputFormat.java
index 72b183a10..5e0627324 100644
--- a/java/mapreduce/src/java/org/apache/orc/mapreduce/OrcInputFormat.java
+++ b/java/mapreduce/src/java/org/apache/orc/mapreduce/OrcInputFormat.java
@@ -67,12 +67,9 @@ public class OrcInputFormat<V extends WritableComparable>
OrcFile.readerOptions(conf)
.maxLength(OrcConf.MAX_FILE_LENGTH.getLong(conf)));
//Mapreduce supports selected vector
- Reader.Options options =
- org.apache.orc.mapred.OrcInputFormat.buildOptions(conf,
- file,
- split.getStart(),
- split.getLength())
- .useSelected(true);
+ Reader.Options options = org.apache.orc.mapred.OrcInputFormat.buildOptions(
+ conf, file, split.getStart(), split.getLength())
+ .useSelected(true);
return new OrcMapreduceRecordReader<>(file, options);
}
diff --git a/java/mapreduce/src/java/org/apache/orc/mapreduce/OrcOutputFormat.java b/java/mapreduce/src/java/org/apache/orc/mapreduce/OrcOutputFormat.java
index 87dd9a83a..a37f07ea6 100644
--- a/java/mapreduce/src/java/org/apache/orc/mapreduce/OrcOutputFormat.java
+++ b/java/mapreduce/src/java/org/apache/orc/mapreduce/OrcOutputFormat.java
@@ -50,9 +50,9 @@ public class OrcOutputFormat<V extends Writable>
Path filename = getDefaultWorkFile(taskAttemptContext, EXTENSION);
Writer writer = OrcFile.createWriter(filename,
org.apache.orc.mapred.OrcOutputFormat.buildOptions(conf));
- return new OrcMapreduceRecordWriter<V>(writer,
- OrcConf.ROW_BATCH_SIZE.getInt(conf),
- OrcConf.ROW_BATCH_CHILD_LIMIT.getInt(conf));
+ return new OrcMapreduceRecordWriter<V>(writer,
+ OrcConf.ROW_BATCH_SIZE.getInt(conf),
+ OrcConf.ROW_BATCH_CHILD_LIMIT.getInt(conf));
}
@Override
diff --git a/java/shims/src/java/org/apache/orc/impl/HadoopShimsCurrent.java b/java/shims/src/java/org/apache/orc/impl/HadoopShimsCurrent.java
index 1eed63b8e..b05fae739 100644
--- a/java/shims/src/java/org/apache/orc/impl/HadoopShimsCurrent.java
+++ b/java/shims/src/java/org/apache/orc/impl/HadoopShimsCurrent.java
@@ -49,15 +49,15 @@ public class HadoopShimsCurrent implements HadoopShims {
static DirectDecompressor getDecompressor(DirectCompressionType codec) {
switch (codec) {
case ZLIB:
- return new ZlibDirectDecompressWrapper
- (new ZlibDecompressor.ZlibDirectDecompressor());
+ return new ZlibDirectDecompressWrapper(
+ new ZlibDecompressor.ZlibDirectDecompressor());
case ZLIB_NOHEADER:
- return new ZlibDirectDecompressWrapper
- (new ZlibDecompressor.ZlibDirectDecompressor
- (ZlibDecompressor.CompressionHeader.NO_HEADER, 0));
+ return new ZlibDirectDecompressWrapper(
+ new ZlibDecompressor.ZlibDirectDecompressor(
+ ZlibDecompressor.CompressionHeader.NO_HEADER, 0));
case SNAPPY:
- return new SnappyDirectDecompressWrapper
- (new SnappyDecompressor.SnappyDirectDecompressor());
+ return new SnappyDirectDecompressWrapper(
+ new SnappyDecompressor.SnappyDirectDecompressor());
default:
return null;
}
diff --git a/java/shims/src/java/org/apache/orc/impl/HadoopShimsPre2_6.java b/java/shims/src/java/org/apache/orc/impl/HadoopShimsPre2_6.java
index ae740f99c..b59171d50 100644
--- a/java/shims/src/java/org/apache/orc/impl/HadoopShimsPre2_6.java
+++ b/java/shims/src/java/org/apache/orc/impl/HadoopShimsPre2_6.java
@@ -39,7 +39,7 @@ public class HadoopShimsPre2_6 implements HadoopShims {
@Override
public DirectDecompressor getDirectDecompressor(DirectCompressionType codec) {
return HadoopShimsCurrent.getDecompressor(codec);
- }
+ }
@Override
public ZeroCopyReaderShim getZeroCopyReader(FSDataInputStream in,
diff --git a/java/shims/src/java/org/apache/orc/impl/HadoopShimsPre2_7.java b/java/shims/src/java/org/apache/orc/impl/HadoopShimsPre2_7.java
index 47f1066f7..f8ad0ee06 100644
--- a/java/shims/src/java/org/apache/orc/impl/HadoopShimsPre2_7.java
+++ b/java/shims/src/java/org/apache/orc/impl/HadoopShimsPre2_7.java
@@ -44,7 +44,7 @@ public class HadoopShimsPre2_7 implements HadoopShims {
@Override
public DirectDecompressor getDirectDecompressor(DirectCompressionType codec) {
return HadoopShimsCurrent.getDecompressor(codec);
- }
+ }
@Override
public ZeroCopyReaderShim getZeroCopyReader(FSDataInputStream in,
diff --git a/java/shims/src/java/org/apache/orc/impl/NullKeyProvider.java b/java/shims/src/java/org/apache/orc/impl/NullKeyProvider.java
index a9c8c3dfc..a7285b9ae 100644
--- a/java/shims/src/java/org/apache/orc/impl/NullKeyProvider.java
+++ b/java/shims/src/java/org/apache/orc/impl/NullKeyProvider.java
@@ -24,28 +24,28 @@ import java.util.List;
class NullKeyProvider implements KeyProvider {
- @Override
- public List<String> getKeyNames() {
- return new ArrayList<>();
- }
-
- @Override
- public HadoopShims.KeyMetadata getCurrentKeyVersion(String keyName) {
- throw new IllegalArgumentException("Unknown key " + keyName);
- }
-
- @Override
- public LocalKey createLocalKey(HadoopShims.KeyMetadata key) {
- throw new IllegalArgumentException("Unknown key " + key);
- }
-
- @Override
- public Key decryptLocalKey(HadoopShims.KeyMetadata key, byte[] encryptedKey) {
- return null;
- }
-
- @Override
- public HadoopShims.KeyProviderKind getKind() {
- return null;
- }
+ @Override
+ public List<String> getKeyNames() {
+ return new ArrayList<>();
+ }
+
+ @Override
+ public HadoopShims.KeyMetadata getCurrentKeyVersion(String keyName) {
+ throw new IllegalArgumentException("Unknown key " + keyName);
+ }
+
+ @Override
+ public LocalKey createLocalKey(HadoopShims.KeyMetadata key) {
+ throw new IllegalArgumentException("Unknown key " + key);
+ }
+
+ @Override
+ public Key decryptLocalKey(HadoopShims.KeyMetadata key, byte[] encryptedKey) {
+ return null;
+ }
+
+ @Override
+ public HadoopShims.KeyProviderKind getKind() {
+ return null;
+ }
}
diff --git a/java/shims/src/java/org/apache/orc/impl/SnappyDirectDecompressWrapper.java b/java/shims/src/java/org/apache/orc/impl/SnappyDirectDecompressWrapper.java
index 8e4d32f34..3bd1cfcd0 100644
--- a/java/shims/src/java/org/apache/orc/impl/SnappyDirectDecompressWrapper.java
+++ b/java/shims/src/java/org/apache/orc/impl/SnappyDirectDecompressWrapper.java
@@ -24,30 +24,30 @@ import java.io.IOException;
import java.nio.ByteBuffer;
class SnappyDirectDecompressWrapper implements HadoopShims.DirectDecompressor {
- private final SnappyDecompressor.SnappyDirectDecompressor root;
- private boolean isFirstCall = true;
-
- SnappyDirectDecompressWrapper(SnappyDecompressor.SnappyDirectDecompressor root) {
- this.root = root;
- }
-
- @Override
- public void decompress(ByteBuffer input, ByteBuffer output) throws IOException {
- if (!isFirstCall) {
- root.reset();
- } else {
- isFirstCall = false;
- }
- root.decompress(input, output);
- }
-
- @Override
- public void reset() {
- root.reset();
- }
-
- @Override
- public void end() {
- root.end();
+ private final SnappyDecompressor.SnappyDirectDecompressor root;
+ private boolean isFirstCall = true;
+
+ SnappyDirectDecompressWrapper(SnappyDecompressor.SnappyDirectDecompressor root) {
+ this.root = root;
+ }
+
+ @Override
+ public void decompress(ByteBuffer input, ByteBuffer output) throws IOException {
+ if (!isFirstCall) {
+ root.reset();
+ } else {
+ isFirstCall = false;
}
+ root.decompress(input, output);
+ }
+
+ @Override
+ public void reset() {
+ root.reset();
+ }
+
+ @Override
+ public void end() {
+ root.end();
+ }
}
diff --git a/java/shims/src/java/org/apache/orc/impl/ZlibDirectDecompressWrapper.java b/java/shims/src/java/org/apache/orc/impl/ZlibDirectDecompressWrapper.java
index 01fe34aae..b9be08ab9 100644
--- a/java/shims/src/java/org/apache/orc/impl/ZlibDirectDecompressWrapper.java
+++ b/java/shims/src/java/org/apache/orc/impl/ZlibDirectDecompressWrapper.java
@@ -24,30 +24,30 @@ import java.io.IOException;
import java.nio.ByteBuffer;
class ZlibDirectDecompressWrapper implements HadoopShims.DirectDecompressor {
- private final ZlibDecompressor.ZlibDirectDecompressor root;
- private boolean isFirstCall = true;
-
- ZlibDirectDecompressWrapper(ZlibDecompressor.ZlibDirectDecompressor root) {
- this.root = root;
- }
-
- @Override
- public void decompress(ByteBuffer input, ByteBuffer output) throws IOException {
- if (!isFirstCall) {
- root.reset();
- } else {
- isFirstCall = false;
- }
- root.decompress(input, output);
- }
-
- @Override
- public void reset() {
- root.reset();
- }
-
- @Override
- public void end() {
- root.end();
+ private final ZlibDecompressor.ZlibDirectDecompressor root;
+ private boolean isFirstCall = true;
+
+ ZlibDirectDecompressWrapper(ZlibDecompressor.ZlibDirectDecompressor root) {
+ this.root = root;
+ }
+
+ @Override
+ public void decompress(ByteBuffer input, ByteBuffer output) throws IOException {
+ if (!isFirstCall) {
+ root.reset();
+ } else {
+ isFirstCall = false;
}
+ root.decompress(input, output);
+ }
+
+ @Override
+ public void reset() {
+ root.reset();
+ }
+
+ @Override
+ public void end() {
+ root.end();
+ }
}
diff --git a/java/tools/src/java/org/apache/orc/tools/ColumnSizes.java b/java/tools/src/java/org/apache/orc/tools/ColumnSizes.java
index 49dbc2be9..79b24304d 100644
--- a/java/tools/src/java/org/apache/orc/tools/ColumnSizes.java
+++ b/java/tools/src/java/org/apache/orc/tools/ColumnSizes.java
@@ -150,8 +150,8 @@ public class ColumnSizes {
Long.compare(y.size, x.size) : x.name.compareTo(y.name));
out.println("Percent Bytes/Row Name");
for (StringLongPair item: sizes) {
- out.println(String.format(" %-5.2f %-9.2f %s",
- 100.0 * item.size / totalSize, (double) item.size / rows, item.name));
+ out.println(String.format(" %-5.2f %-9.2f %s",
+ 100.0 * item.size / totalSize, (double) item.size / rows, item.name));
}
}
diff --git a/java/tools/src/java/org/apache/orc/tools/FileDump.java b/java/tools/src/java/org/apache/orc/tools/FileDump.java
index d4d00d2fd..3c5bfd77a 100644
--- a/java/tools/src/java/org/apache/orc/tools/FileDump.java
+++ b/java/tools/src/java/org/apache/orc/tools/FileDump.java
@@ -452,7 +452,7 @@ public final class FileDump {
}
ByteBuffer byteBuffer = reader.getMetadataValue(keys.get(i));
System.out.println(" " + keys.get(i) + "="
- + StandardCharsets.UTF_8.decode(byteBuffer));
+ + StandardCharsets.UTF_8.decode(byteBuffer));
}
rows.close();
}
diff --git a/java/tools/src/java/org/apache/orc/tools/PrintData.java b/java/tools/src/java/org/apache/orc/tools/PrintData.java
index 2846166ca..11075dcba 100644
--- a/java/tools/src/java/org/apache/orc/tools/PrintData.java
+++ b/java/tools/src/java/org/apache/orc/tools/PrintData.java
@@ -207,7 +207,7 @@ public class PrintData {
Integer counter = 0;
while (rows.nextBatch(batch)) {
if (numberOfRows.isPresent() && counter >= numberOfRows.get()){
- break;
+ break;
}
for (int r=0; r < batch.size; ++r) {
JsonWriter writer = new JsonWriter(out);
diff --git a/java/tools/src/java/org/apache/orc/tools/convert/JsonReader.java b/java/tools/src/java/org/apache/orc/tools/convert/JsonReader.java
index 8055310ee..8faf51901 100644
--- a/java/tools/src/java/org/apache/orc/tools/convert/JsonReader.java
+++ b/java/tools/src/java/org/apache/orc/tools/convert/JsonReader.java
@@ -170,7 +170,7 @@ public class JsonReader implements RecordReader {
} else {
TimestampColumnVector vector = (TimestampColumnVector) vect;
TemporalAccessor temporalAccessor = dateTimeFormatter.parseBest(value.getAsString(),
- ZonedDateTime::from, OffsetDateTime::from, LocalDateTime::from);
+ ZonedDateTime::from, OffsetDateTime::from, LocalDateTime::from);
if (temporalAccessor instanceof ZonedDateTime) {
ZonedDateTime zonedDateTime = ((ZonedDateTime) temporalAccessor);
Timestamp timestamp = Timestamp.from(zonedDateTime.toInstant());