You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@orc.apache.org by do...@apache.org on 2021/09/02 17:42:00 UTC

[orc] branch main updated: MINOR: Fix typos in project (#892)

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/orc.git


The following commit(s) were added to refs/heads/main by this push:
     new 9d90781  MINOR: Fix typos in project (#892)
9d90781 is described below

commit 9d907815fa988e8de00c833e20c448d624aa8f7a
Author: guiyanakaung <gu...@gmail.com>
AuthorDate: Fri Sep 3 01:41:52 2021 +0800

    MINOR: Fix typos in project (#892)
    
    ### What changes were proposed in this pull request?
    
    This PR aims to fix typos in the project.
    
    ### Why are the changes needed?
    
    The changes make the code and documentation easier to read.
    
    ### How was this patch tested?
    
    Pass the CIs.
---
 .../src/java/org/apache/orc/bench/core/CompressionKind.java    |  4 ++--
 .../hive/src/java/org/apache/orc/bench/hive/DecimalBench.java  |  2 +-
 java/core/src/java/org/apache/orc/DataMask.java                |  2 +-
 java/core/src/java/org/apache/orc/OrcFile.java                 |  4 ++--
 java/core/src/java/org/apache/orc/PhysicalWriter.java          |  4 ++--
 java/core/src/java/org/apache/orc/Reader.java                  |  2 +-
 java/core/src/java/org/apache/orc/RecordReader.java            |  2 +-
 .../src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java |  4 ++--
 java/core/src/java/org/apache/orc/impl/RecordReaderUtils.java  |  2 +-
 java/core/src/java/org/apache/orc/impl/RedBlackTree.java       |  4 ++--
 java/core/src/java/org/apache/orc/impl/SchemaEvolution.java    |  6 +++---
 .../src/java/org/apache/orc/impl/mask/RedactMaskFactory.java   |  6 +++---
 java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java  |  2 +-
 java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java  |  2 +-
 java/core/src/test/org/apache/orc/TestRowFilteringSkip.java    |  2 +-
 .../src/test/org/apache/orc/mapred/TestOrcOutputFormat.java    |  2 +-
 java/tools/src/java/org/apache/orc/tools/json/HiveType.java    |  2 +-
 java/tools/src/test/org/apache/orc/impl/FakeKeyProvider.java   | 10 +++++-----
 18 files changed, 31 insertions(+), 31 deletions(-)

diff --git a/java/bench/core/src/java/org/apache/orc/bench/core/CompressionKind.java b/java/bench/core/src/java/org/apache/orc/bench/core/CompressionKind.java
index 4f5b028..6eda6ad 100644
--- a/java/bench/core/src/java/org/apache/orc/bench/core/CompressionKind.java
+++ b/java/bench/core/src/java/org/apache/orc/bench/core/CompressionKind.java
@@ -36,8 +36,8 @@ public enum CompressionKind {
   SNAPPY("snappy"),
   ZSTD("zstd");
 
-  CompressionKind(String extendsion) {
-    this.extension = extendsion;
+  CompressionKind(String extension) {
+    this.extension = extension;
   }
 
   private final String extension;
diff --git a/java/bench/hive/src/java/org/apache/orc/bench/hive/DecimalBench.java b/java/bench/hive/src/java/org/apache/orc/bench/hive/DecimalBench.java
index ac09441..7fae5b1 100644
--- a/java/bench/hive/src/java/org/apache/orc/bench/hive/DecimalBench.java
+++ b/java/bench/hive/src/java/org/apache/orc/bench/hive/DecimalBench.java
@@ -80,7 +80,7 @@ public class DecimalBench implements OrcBenchmark {
     /**
      * Load the data from the values array into the ColumnVector.
      * @param vector the output
-     * @param values the intput
+     * @param values the input
      * @param offset the first input value
      * @param length the number of values to copy
      */
diff --git a/java/core/src/java/org/apache/orc/DataMask.java b/java/core/src/java/org/apache/orc/DataMask.java
index a6b9416..c92a0fa 100644
--- a/java/core/src/java/org/apache/orc/DataMask.java
+++ b/java/core/src/java/org/apache/orc/DataMask.java
@@ -58,7 +58,7 @@ public interface DataMask {
 
     /**
      * Build a DataMaskDescription given the name and a set of parameters.
-     * @param params the paramters
+     * @param params the parameters
      * @return a MaskDescription with the given parameters
      */
     public DataMaskDescription getDescription(String... params) {
diff --git a/java/core/src/java/org/apache/orc/OrcFile.java b/java/core/src/java/org/apache/orc/OrcFile.java
index c6cb0f2..e7a051c 100644
--- a/java/core/src/java/org/apache/orc/OrcFile.java
+++ b/java/core/src/java/org/apache/orc/OrcFile.java
@@ -238,7 +238,7 @@ public class OrcFile {
         return FUTURE;
       }
       if (writer != WriterImplementation.ORC_JAVA && val < 6) {
-        throw new IllegalArgumentException("ORC File with illegval version " +
+        throw new IllegalArgumentException("ORC File with illegal version " +
             val + " for writer " + writer);
       }
       WriterVersion[] versions = values[writer.id];
@@ -281,7 +281,7 @@ public class OrcFile {
     private long maxLength = Long.MAX_VALUE;
     private OrcTail orcTail;
     private KeyProvider keyProvider;
-    // TODO: We can generalize FileMetada interface. Make OrcTail implement FileMetadata interface
+    // TODO: We can generalize FileMetadata interface. Make OrcTail implement FileMetadata interface
     // and remove this class altogether. Both footer caching and llap caching just needs OrcTail.
     // For now keeping this around to avoid complex surgery
     private FileMetadata fileMetadata;
diff --git a/java/core/src/java/org/apache/orc/PhysicalWriter.java b/java/core/src/java/org/apache/orc/PhysicalWriter.java
index bb0dbf9..0c6fe68 100644
--- a/java/core/src/java/org/apache/orc/PhysicalWriter.java
+++ b/java/core/src/java/org/apache/orc/PhysicalWriter.java
@@ -120,7 +120,7 @@ public interface PhysicalWriter {
   void close() throws IOException;
 
   /**
-   * Flushes the writer so that readers can see the preceeding postscripts.
+   * Flushes the writer so that readers can see the preceding postscripts.
    */
   void flush() throws IOException;
 
@@ -135,7 +135,7 @@ public interface PhysicalWriter {
                        ) throws IOException;
 
   /**
-   * Get the number of bytes for a file in a givem column.
+   * Get the number of bytes for a file in a given column.
    * @param column column from which to get file size
    * @param variant the encryption variant to check
    * @return number of bytes for the given column
diff --git a/java/core/src/java/org/apache/orc/Reader.java b/java/core/src/java/org/apache/orc/Reader.java
index d4b3cab..d976586 100644
--- a/java/core/src/java/org/apache/orc/Reader.java
+++ b/java/core/src/java/org/apache/orc/Reader.java
@@ -414,7 +414,7 @@ public interface Reader extends Closeable {
 
 
     /**
-     * Set boolean flag to determine if the comparision of field names in schema
+     * Set boolean flag to determine if the comparison of field names in schema
      * evolution is case sensitive
      * @param value the flag for schema evolution is case sensitive or not.
      * @return this
diff --git a/java/core/src/java/org/apache/orc/RecordReader.java b/java/core/src/java/org/apache/orc/RecordReader.java
index dbd0c84..f80ac88 100644
--- a/java/core/src/java/org/apache/orc/RecordReader.java
+++ b/java/core/src/java/org/apache/orc/RecordReader.java
@@ -30,7 +30,7 @@ public interface RecordReader extends Closeable {
   /**
    * Read the next row batch. The size of the batch to read cannot be
    * controlled by the callers. Caller need to look at
-   * VectorizedRowBatch.size of the retunred object to know the batch
+   * VectorizedRowBatch.size of the returned object to know the batch
    * size read.
    * @param batch a row batch object to read into
    * @return were more rows available to read?
diff --git a/java/core/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java b/java/core/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
index d78aa43..bdff28d 100644
--- a/java/core/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
+++ b/java/core/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
@@ -290,7 +290,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
      */
     // Override this to use convertVector.
     public void setConvertVectorElement(int elementNum) throws IOException {
-      throw new RuntimeException("Expected this method to be overriden");
+      throw new RuntimeException("Expected this method to be overridden");
     }
 
     // Common code used by the conversion.
@@ -2281,7 +2281,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
    *   TimestampFromAnyIntegerTreeReader (written)
    *   TimestampFromFloatTreeReader (written)
    *   TimestampFromDoubleTreeReader (written)
-   *   TimestampFromDecimalTreeeReader (written)
+   *   TimestampFromDecimalTreeReader (written)
    *   TimestampFromStringGroupTreeReader (written)
    *   TimestampFromDateTreeReader
    *
diff --git a/java/core/src/java/org/apache/orc/impl/RecordReaderUtils.java b/java/core/src/java/org/apache/orc/impl/RecordReaderUtils.java
index f8f0d9d..39f9118 100644
--- a/java/core/src/java/org/apache/orc/impl/RecordReaderUtils.java
+++ b/java/core/src/java/org/apache/orc/impl/RecordReaderUtils.java
@@ -310,7 +310,7 @@ public class RecordReaderUtils {
   }
 
   /**
-   * Zero-copy tead the data from the file based on a list of ranges in a
+   * Zero-copy read the data from the file based on a list of ranges in a
    * single read.
    *
    * As a side note, the HDFS zero copy API really sucks from a user's point of
diff --git a/java/core/src/java/org/apache/orc/impl/RedBlackTree.java b/java/core/src/java/org/apache/orc/impl/RedBlackTree.java
index 16c8acb..283321f 100644
--- a/java/core/src/java/org/apache/orc/impl/RedBlackTree.java
+++ b/java/core/src/java/org/apache/orc/impl/RedBlackTree.java
@@ -137,8 +137,8 @@ abstract class RedBlackTree {
    * the parents, this routine passing down the context.
    *
    * The fix is broken down into 6 cases (1.{1,2,3} and 2.{1,2,3} that are
-   * left-right mirror images of each other). See Algorighms by Cormen,
-   * Leiserson, and Rivest for the explaination of the subcases.
+   * left-right mirror images of each other). See Algorithms by Cormen,
+   * Leiserson, and Rivest for the explanation of the subcases.
    *
    * @param node The node that we are fixing right now.
    * @param fromLeft Did we come down from the left?
diff --git a/java/core/src/java/org/apache/orc/impl/SchemaEvolution.java b/java/core/src/java/org/apache/orc/impl/SchemaEvolution.java
index 654fc29..eb5beac 100644
--- a/java/core/src/java/org/apache/orc/impl/SchemaEvolution.java
+++ b/java/core/src/java/org/apache/orc/impl/SchemaEvolution.java
@@ -313,7 +313,7 @@ public class SchemaEvolution {
     boolean[] result = new boolean[readerSchema.getMaximumId() + 1];
     boolean safePpd = validatePPDConversion(fileSchema, readerSchema);
     result[readerSchema.getId()] = safePpd;
-    return populatePpdSafeConversionForChildern(result,
+    return populatePpdSafeConversionForChildren(result,
         readerSchema.getChildren());
   }
 
@@ -325,7 +325,7 @@ public class SchemaEvolution {
    *
    * @return boolean array to represent list of column safe or not.
    */
-  private boolean[] populatePpdSafeConversionForChildern(
+  private boolean[] populatePpdSafeConversionForChildren(
                         boolean[] ppdSafeConversion,
                         List<TypeDescription> children) {
     boolean safePpd;
@@ -334,7 +334,7 @@ public class SchemaEvolution {
         TypeDescription fileType = getFileType(child.getId());
         safePpd = validatePPDConversion(fileType, child);
         ppdSafeConversion[child.getId()] = safePpd;
-        populatePpdSafeConversionForChildern(ppdSafeConversion,
+        populatePpdSafeConversionForChildren(ppdSafeConversion,
             child.getChildren());
       }
     }
diff --git a/java/core/src/java/org/apache/orc/impl/mask/RedactMaskFactory.java b/java/core/src/java/org/apache/orc/impl/mask/RedactMaskFactory.java
index f9c7cdc..e929d6d 100644
--- a/java/core/src/java/org/apache/orc/impl/mask/RedactMaskFactory.java
+++ b/java/core/src/java/org/apache/orc/impl/mask/RedactMaskFactory.java
@@ -93,7 +93,7 @@ public class RedactMaskFactory extends MaskFactory {
   // The replacement codepoint for each character category. We use codepoints
   // here so that we don't have to worry about handling long UTF characters
   // as special cases.
-  private final int UPPPER_REPLACEMENT;
+  private final int UPPER_REPLACEMENT;
   private final int LOWER_REPLACEMENT;
   private final int OTHER_LETTER_REPLACEMENT;
   private final int MARK_REPLACEMENT;
@@ -123,7 +123,7 @@ public class RedactMaskFactory extends MaskFactory {
   public RedactMaskFactory(String... params) {
     ByteBuffer param = params.length < 1 ? ByteBuffer.allocate(0) :
         ByteBuffer.wrap(params[0].getBytes(StandardCharsets.UTF_8));
-    UPPPER_REPLACEMENT = getNextCodepoint(param, DEFAULT_LETTER_UPPER);
+    UPPER_REPLACEMENT = getNextCodepoint(param, DEFAULT_LETTER_UPPER);
     LOWER_REPLACEMENT = getNextCodepoint(param, DEFAULT_LETTER_LOWER);
     DIGIT_CP_REPLACEMENT = getNextCodepoint(param, DEFAULT_NUMBER_DIGIT_CP);
     DIGIT_REPLACEMENT = getReplacementDigit(DIGIT_CP_REPLACEMENT);
@@ -733,7 +733,7 @@ public class RedactMaskFactory extends MaskFactory {
   int getReplacement(int codepoint) {
     switch (Character.getType(codepoint)) {
       case Character.UPPERCASE_LETTER:
-        return UPPPER_REPLACEMENT;
+        return UPPER_REPLACEMENT;
       case Character.LOWERCASE_LETTER:
         return LOWER_REPLACEMENT;
       case Character.TITLECASE_LETTER:
diff --git a/java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java b/java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java
index 7141d00..a43ee06 100644
--- a/java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java
+++ b/java/core/src/java/org/apache/orc/impl/writer/TreeWriter.java
@@ -53,7 +53,7 @@ public interface TreeWriter {
   void prepareStripe(int stripeId);
 
   /**
-   * Write a VectorizedRowBath to the file. This is called by the WriterImplV2
+   * Write a VectorizedRowBatch to the file. This is called by the WriterImplV2
    * at the top level.
    * @param batch the list of all of the columns
    * @param offset the first row from the batch to write
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java b/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
index 2ab97a2..1249c09 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
@@ -40,7 +40,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 /**
  * Types that are not skipped at row-level include: Long, Short, Int, Date, Binary
  * As it turns out it is more expensive to skip non-selected rows rather that just decode all and propagate the
- * selected array. Skipping for these type breaks instruction pipelining and introduces more branch misspredictions.
+ * selected array. Skipping for these type breaks instruction pipelining and introduces more branch mispredictions.
  */
 public class TestRowFilteringNoSkip {
 
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java b/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
index af120b7..b97d884 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
@@ -1539,7 +1539,7 @@ public class TestRowFilteringSkip {
   }
 
   @Test
-  public void testcustomFileTimestampRoundRobbinRowFilterCallback() throws Exception {
+  public void testCustomFileTimestampRoundRobbinRowFilterCallback() throws Exception {
     testFilePath = new Path(getClass().getClassLoader().
         getSystemResource("orc_split_elim.orc").getPath());
 
diff --git a/java/mapreduce/src/test/org/apache/orc/mapred/TestOrcOutputFormat.java b/java/mapreduce/src/test/org/apache/orc/mapred/TestOrcOutputFormat.java
index 2eeb1d9..00040ad 100644
--- a/java/mapreduce/src/test/org/apache/orc/mapred/TestOrcOutputFormat.java
+++ b/java/mapreduce/src/test/org/apache/orc/mapred/TestOrcOutputFormat.java
@@ -343,7 +343,7 @@ public class TestOrcOutputFormat {
       Path path = getTaskOutputPath(conf, name);
       Writer writer = OrcFile.createWriter(path,
           buildOptions(conf).fileSystem(fileSystem));
-      //Ensure that orc.row.batch.size confing is set in the JobConf
+      //Ensure that orc.row.batch.size config is set in the JobConf
       int rowBatchSize = Integer.parseInt(conf.get(ROW_BATCH_SIZE));
       return new OrcMapredRecordWriter<>(writer, rowBatchSize);
     }
diff --git a/java/tools/src/java/org/apache/orc/tools/json/HiveType.java b/java/tools/src/java/org/apache/orc/tools/json/HiveType.java
index b467898..609e3b2 100644
--- a/java/tools/src/java/org/apache/orc/tools/json/HiveType.java
+++ b/java/tools/src/java/org/apache/orc/tools/json/HiveType.java
@@ -76,7 +76,7 @@ abstract class HiveType {
   public abstract boolean subsumes(HiveType other);
 
   /**
-   * Merge the other type into this one. It assumes that subsubes(other) is
+   * Merge the other type into this one. It assumes that subsumes(other) is
    * true.
    * @param other
    */
diff --git a/java/tools/src/test/org/apache/orc/impl/FakeKeyProvider.java b/java/tools/src/test/org/apache/orc/impl/FakeKeyProvider.java
index 1c3f6c5..95139dd 100644
--- a/java/tools/src/test/org/apache/orc/impl/FakeKeyProvider.java
+++ b/java/tools/src/test/org/apache/orc/impl/FakeKeyProvider.java
@@ -39,7 +39,7 @@ import java.util.Map;
  */
 public class FakeKeyProvider extends KeyProvider {
   // map from key name to metadata
-  private final Map<String, TestMetadata> keyMetdata = new HashMap<>();
+  private final Map<String, TestMetadata> keyMetadata = new HashMap<>();
   // map from key version name to material
   private final Map<String, KeyVersion> keyVersions = new HashMap<>();
 
@@ -54,7 +54,7 @@ public class FakeKeyProvider extends KeyProvider {
 
   @Override
   public List<String> getKeys() {
-    return new ArrayList<>(keyMetdata.keySet());
+    return new ArrayList<>(keyMetadata.keySet());
   }
 
   @Override
@@ -73,13 +73,13 @@ public class FakeKeyProvider extends KeyProvider {
 
   @Override
   public Metadata getMetadata(String name)  {
-    return keyMetdata.get(name);
+    return keyMetadata.get(name);
   }
 
   @Override
   public KeyVersion createKey(String name, byte[] bytes, Options options) {
     String versionName = buildVersionName(name, 0);
-    keyMetdata.put(name, new TestMetadata(options.getCipher(),
+    keyMetadata.put(name, new TestMetadata(options.getCipher(),
         options.getBitLength(), 1));
     KeyVersion result = new KMSClientProvider.KMSKeyVersion(name, versionName, bytes);
     keyVersions.put(versionName, result);
@@ -93,7 +93,7 @@ public class FakeKeyProvider extends KeyProvider {
 
   @Override
   public KeyVersion rollNewVersion(String name, byte[] bytes) {
-    TestMetadata key = keyMetdata.get(name);
+    TestMetadata key = keyMetadata.get(name);
     String versionName = buildVersionName(name, key.addVersion());
     KeyVersion result = new KMSClientProvider.KMSKeyVersion(name, versionName,
         bytes);