You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by xu...@apache.org on 2018/10/26 11:21:28 UTC

carbondata git commit: [CARBONDATA_3025] Make CLI compilable with Java 1.7

Repository: carbondata
Updated Branches:
  refs/heads/master e6d15da74 -> b62b0fd9c


[CARBONDATA_3025] Make CLI compilable with Java 1.7

This commit replaces some JDK 1.8-style code (lambda expressions) with JDK 1.7-compatible equivalents (anonymous inner classes and Collections.sort) so the CLI module compiles with JDK 1.7.

This closes #2853


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/b62b0fd9
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/b62b0fd9
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/b62b0fd9

Branch: refs/heads/master
Commit: b62b0fd9ce0122735a24b1359e601af1e5ccb6b9
Parents: e6d15da
Author: akashrn5 <ak...@gmail.com>
Authored: Wed Oct 24 17:35:00 2018 +0530
Committer: xuchuanyin <xu...@hust.edu.cn>
Committed: Fri Oct 26 19:19:06 2018 +0800

----------------------------------------------------------------------
 .../apache/carbondata/tool/FileCollector.java   | 20 +++---
 .../apache/carbondata/tool/ScanBenchmark.java   | 64 ++++++++++++--------
 2 files changed, 48 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/b62b0fd9/tools/cli/src/main/java/org/apache/carbondata/tool/FileCollector.java
----------------------------------------------------------------------
diff --git a/tools/cli/src/main/java/org/apache/carbondata/tool/FileCollector.java b/tools/cli/src/main/java/org/apache/carbondata/tool/FileCollector.java
index aa48b93..b2ff061 100644
--- a/tools/cli/src/main/java/org/apache/carbondata/tool/FileCollector.java
+++ b/tools/cli/src/main/java/org/apache/carbondata/tool/FileCollector.java
@@ -18,11 +18,7 @@
 package org.apache.carbondata.tool;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
 
 import org.apache.carbondata.common.Strings;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
@@ -77,13 +73,17 @@ class FileCollector {
 
       }
     }
-    unsortedFiles.sort((o1, o2) -> {
-      if (o1.getShardName().equalsIgnoreCase(o2.getShardName())) {
-        return Integer.parseInt(o1.getPartNo()) - Integer.parseInt(o2.getPartNo());
-      } else {
-        return o1.getShardName().compareTo(o2.getShardName());
+
+    Collections.sort(unsortedFiles, new Comparator<DataFile>() {
+      @Override public int compare(DataFile o1, DataFile o2) {
+        if (o1.getShardName().equalsIgnoreCase(o2.getShardName())) {
+          return Integer.parseInt(o1.getPartNo()) - Integer.parseInt(o2.getPartNo());
+        } else {
+          return o1.getShardName().compareTo(o2.getShardName());
+        }
       }
     });
+
     for (DataFile collectedFile : unsortedFiles) {
       this.dataFiles.put(collectedFile.getFilePath(), collectedFile);
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b62b0fd9/tools/cli/src/main/java/org/apache/carbondata/tool/ScanBenchmark.java
----------------------------------------------------------------------
diff --git a/tools/cli/src/main/java/org/apache/carbondata/tool/ScanBenchmark.java b/tools/cli/src/main/java/org/apache/carbondata/tool/ScanBenchmark.java
index af5bdb3..ddb9652 100644
--- a/tools/cli/src/main/java/org/apache/carbondata/tool/ScanBenchmark.java
+++ b/tools/cli/src/main/java/org/apache/carbondata/tool/ScanBenchmark.java
@@ -72,57 +72,69 @@ class ScanBenchmark implements Command {
     }
 
     outPuts.add("\n## Benchmark");
-    AtomicReference<FileHeader> fileHeaderRef = new AtomicReference<>();
-    AtomicReference<FileFooter3> fileFoorterRef = new AtomicReference<>();
-    AtomicReference<DataFileFooter> convertedFooterRef = new AtomicReference<>();
+    final AtomicReference<FileHeader> fileHeaderRef = new AtomicReference<>();
+    final AtomicReference<FileFooter3> fileFoorterRef = new AtomicReference<>();
+    final AtomicReference<DataFileFooter> convertedFooterRef = new AtomicReference<>();
 
     // benchmark read header and footer time
-    benchmarkOperation("ReadHeaderAndFooter", () -> {
-      fileHeaderRef.set(file.readHeader());
-      fileFoorterRef.set(file.readFooter());
+    benchmarkOperation("ReadHeaderAndFooter", new Operation() {
+      @Override public void run() throws IOException, MemoryException {
+        fileHeaderRef.set(file.readHeader());
+        fileFoorterRef.set(file.readFooter());
+      }
     });
-    FileHeader fileHeader = fileHeaderRef.get();
-    FileFooter3 fileFooter = fileFoorterRef.get();
+    final FileHeader fileHeader = fileHeaderRef.get();
+    final FileFooter3 fileFooter = fileFoorterRef.get();
 
     // benchmark convert footer
-    benchmarkOperation("ConvertFooter", () -> {
-      convertFooter(fileHeader, fileFooter);
+    benchmarkOperation("ConvertFooter", new Operation() {
+      @Override public void run() throws IOException, MemoryException {
+        convertFooter(fileHeader, fileFooter);
+      }
     });
 
     // benchmark read all meta and convert footer
-    benchmarkOperation("ReadAllMetaAndConvertFooter", () -> {
-      DataFileFooter footer = readAndConvertFooter(file);
-      convertedFooterRef.set(footer);
+    benchmarkOperation("ReadAllMetaAndConvertFooter", new Operation() {
+      @Override public void run() throws IOException, MemoryException {
+        DataFileFooter footer = readAndConvertFooter(file);
+        convertedFooterRef.set(footer);
+      }
     });
 
     if (line.hasOption("c")) {
       String columnName = line.getOptionValue("c");
       outPuts.add("\nScan column '" + columnName + "'");
 
-      DataFileFooter footer = convertedFooterRef.get();
-      AtomicReference<AbstractRawColumnChunk> columnChunk = new AtomicReference<>();
-      int columnIndex = file.getColumnIndex(columnName);
-      boolean dimension = file.getColumn(columnName).isDimensionColumn();
+      final DataFileFooter footer = convertedFooterRef.get();
+      final AtomicReference<AbstractRawColumnChunk> columnChunk = new AtomicReference<>();
+      final int columnIndex = file.getColumnIndex(columnName);
+      final boolean dimension = file.getColumn(columnName).isDimensionColumn();
       for (int i = 0; i < footer.getBlockletList().size(); i++) {
-        int blockletId = i;
+        final int blockletId = i;
         outPuts.add(String.format("Blocklet#%d: total size %s, %,d pages, %,d rows",
             blockletId,
             Strings.formatSize(file.getColumnDataSizeInBytes(blockletId, columnIndex)),
             footer.getBlockletList().get(blockletId).getNumberOfPages(),
             footer.getBlockletList().get(blockletId).getNumberOfRows()));
-        benchmarkOperation("\tColumnChunk IO", () -> {
-          columnChunk.set(readBlockletColumnChunkIO(footer, blockletId, columnIndex, dimension));
+        benchmarkOperation("\tColumnChunk IO", new Operation() {
+          @Override public void run() throws IOException, MemoryException {
+            columnChunk.set(readBlockletColumnChunkIO(footer, blockletId, columnIndex, dimension));
+          }
         });
 
         if (dimensionColumnChunkReader != null) {
-          benchmarkOperation("\tDecompress Pages", () -> {
-            decompressDimensionPages(columnChunk.get(),
-                footer.getBlockletList().get(blockletId).getNumberOfPages());
+          benchmarkOperation("\tDecompress Pages", new Operation() {
+            @Override public void run() throws IOException, MemoryException {
+              decompressDimensionPages(columnChunk.get(),
+                  footer.getBlockletList().get(blockletId).getNumberOfPages());
+            }
           });
         } else {
-          benchmarkOperation("\tDecompress Pages", () -> {
-            decompressMeasurePages(columnChunk.get(),
-                footer.getBlockletList().get(blockletId).getNumberOfPages());
+          benchmarkOperation("\tDecompress Pages", new Operation() {
+            @Override public void run() throws IOException, MemoryException {
+              decompressMeasurePages(columnChunk.get(),
+                  footer.getBlockletList().get(blockletId).getNumberOfPages());
+            }
           });
         }
       }