Posted to commits@iotdb.apache.org by qi...@apache.org on 2020/07/14 05:49:23 UTC

[incubator-iotdb] branch rel/0.10 updated (c587352 -> 561b393)

This is an automated email from the ASF dual-hosted git repository.

qiaojialin pushed a change to branch rel/0.10
in repository https://gitbox.apache.org/repos/asf/incubator-iotdb.git.


    from c587352  [IOTDB-794][To rel/0.10] fix upgrade system.properties (#1492)
     new 05b9099  solve hive-connector bug
     new c11f9a7  reformat code
     new 561b393  fix sonar

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../apache/iotdb/hadoop/tsfile/TSFInputFormat.java | 23 +++++++++++-------
 .../iotdb/hadoop/tsfile/TSFRecordReader.java       | 27 +++++++++++++---------
 .../iotdb/hadoop/tsfile/TSFRecordWriter.java       |  3 ++-
 .../org/apache/iotdb/hive/TSFHiveInputFormat.java  | 11 +++++----
 .../java/org/apache/iotdb/hive/TsFileSerDe.java    | 18 +++++++++++----
 5 files changed, 54 insertions(+), 28 deletions(-)


[incubator-iotdb] 03/03: fix sonar

Posted by qi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

qiaojialin pushed a commit to branch rel/0.10
in repository https://gitbox.apache.org/repos/asf/incubator-iotdb.git

commit 561b39317576f961265b27e3fc35fcd4f0a1b1a6
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Tue Jul 14 08:40:33 2020 +0800

    fix sonar
---
 hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java b/hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java
index 35d94e9..2237fc4 100644
--- a/hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java
+++ b/hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java
@@ -85,8 +85,10 @@ public class TsFileSerDe extends AbstractSerDe {
       throw new TsFileSerDeException("len(columnNames) != len(columnTypes)");
     }
 
-    conf.set(READ_DELTAOBJECTS, deviceId);
-    conf.set(READ_MEASUREMENTID, columnNames.get(1));
+    if (conf != null) {
+      conf.set(READ_DELTAOBJECTS, deviceId);
+      conf.set(READ_MEASUREMENTID, columnNames.get(1));
+    }
 
     oi = createObjectInspector();
   }
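
For context on this change: Sonar flags the previously unguarded conf.set(...) calls
because Hive can hand the SerDe a null Configuration (note the javax.annotation.Nullable
import added in commit 01/03). The following is a minimal sketch, not part of the commit,
of the call pattern the new guard has to survive; the table-property values, column names,
and device id are placeholders chosen to mirror the connector's documented table layout.

  import java.util.Properties;
  import org.apache.hadoop.hive.serde.serdeConstants;
  import org.apache.hadoop.hive.serde2.SerDeException;
  import org.apache.iotdb.hive.TsFileSerDe;

  public class NullConfInitSketch {
    public static void main(String[] args) throws SerDeException {
      // Table properties Hive passes to the SerDe; the concrete values are placeholders.
      Properties tbl = new Properties();
      tbl.setProperty(serdeConstants.LIST_COLUMNS, "time_stamp,sensor_1");
      tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "timestamp:bigint");
      tbl.setProperty("device_id", "root.sg.d1");

      // Hive may initialize a SerDe with a null Configuration (e.g. during query
      // planning). With the guard added in this commit, the call below no longer
      // risks a NullPointerException on conf.set(...).
      new TsFileSerDe().initialize(null, tbl);
    }
  }
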


[incubator-iotdb] 01/03: solve hive-connector bug

Posted by qi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

qiaojialin pushed a commit to branch rel/0.10
in repository https://gitbox.apache.org/repos/asf/incubator-iotdb.git

commit 05b909967b53df33f636988459ce978fa23b52b9
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Mon Jul 13 12:38:03 2020 +0800

    solve hive-connector bug
---
 .../apache/iotdb/hadoop/tsfile/TSFInputFormat.java | 23 ++++++++++++++--------
 .../org/apache/iotdb/hive/TSFHiveInputFormat.java  | 11 +++++++----
 .../java/org/apache/iotdb/hive/TsFileSerDe.java    | 16 +++++++++++----
 3 files changed, 34 insertions(+), 16 deletions(-)

diff --git a/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFInputFormat.java b/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFInputFormat.java
index 4808af2..6df9a0e 100644
--- a/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFInputFormat.java
+++ b/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFInputFormat.java
@@ -18,6 +18,8 @@
  */
 package org.apache.iotdb.hadoop.tsfile;
 
+import static org.apache.iotdb.tsfile.common.constant.TsFileConstant.TSFILE_SUFFIX;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;
@@ -73,7 +75,7 @@ TSFInputFormat extends FileInputFormat<NullWritable, MapWritable> {
   /**
    * Set the deltaObjectIds which want to be read
    *
-   * @param job hadoop job
+   * @param job   hadoop job
    * @param value the deltaObjectIds will be read
    * @throws TSFHadoopException
    */
@@ -95,8 +97,8 @@ TSFInputFormat extends FileInputFormat<NullWritable, MapWritable> {
    * Get the deltaObjectIds which want to be read
    *
    * @param configuration
-   * @return List of device, if configuration has been set the deviceIds.
-   * 		   null, if configuration has not been set the deviceIds.
+   * @return List of device, if configuration has been set the deviceIds. null, if configuration has
+   * not been set the deviceIds.
    */
   public static List<String> getReadDeviceIds(Configuration configuration) {
     String deviceIds = configuration.get(READ_DELTAOBJECTS);
@@ -111,7 +113,7 @@ TSFInputFormat extends FileInputFormat<NullWritable, MapWritable> {
   /**
    * Set the measurementIds which want to be read
    *
-   * @param job hadoop job
+   * @param job   hadoop job
    * @param value the measurementIds will be read
    * @throws TSFHadoopException
    */
@@ -236,11 +238,13 @@ TSFInputFormat extends FileInputFormat<NullWritable, MapWritable> {
 
   @Override
   public List<InputSplit> getSplits(JobContext job) throws IOException {
+    job.getConfiguration().setBoolean(INPUT_DIR_RECURSIVE, true);
     List<FileStatus> listFileStatus = super.listStatus(job);
     return new ArrayList<>(getTSFInputSplit(job.getConfiguration(), listFileStatus, logger));
   }
 
-  public static List<TSFInputSplit> getTSFInputSplit(Configuration configuration, List<FileStatus> listFileStatus, Logger logger) throws IOException {
+  public static List<TSFInputSplit> getTSFInputSplit(Configuration configuration,
+      List<FileStatus> listFileStatus, Logger logger) throws IOException {
     BlockLocation[] blockLocations;
     List<TSFInputSplit> splits = new ArrayList<>();
     // get the all file in the directory
@@ -250,6 +254,9 @@ TSFInputFormat extends FileInputFormat<NullWritable, MapWritable> {
       logger.info("The file path is {}", fileStatus.getPath());
       // Get the file path
       Path path = fileStatus.getPath();
+      if (!path.toString().endsWith(TSFILE_SUFFIX)) {
+        continue;
+      }
       // Get the file length
       long length = fileStatus.getLen();
       // Check the file length. if the length is less than 0, return the
@@ -273,8 +280,7 @@ TSFInputFormat extends FileInputFormat<NullWritable, MapWritable> {
   }
 
   /**
-   * get the TSFInputSplit from tsfMetaData and hdfs block location
-   * information with the filter
+   * get the TSFInputSplit from tsfMetaData and hdfs block location information with the filter
    *
    * @throws IOException
    */
@@ -282,7 +288,8 @@ TSFInputFormat extends FileInputFormat<NullWritable, MapWritable> {
       throws IOException {
     List<TSFInputSplit> splits = new ArrayList<>();
     for (BlockLocation blockLocation : blockLocations) {
-      splits.add(new TSFInputSplit(path, blockLocation.getHosts(), blockLocation.getOffset(), blockLocation.getLength()));
+      splits.add(new TSFInputSplit(path, blockLocation.getHosts(), blockLocation.getOffset(),
+          blockLocation.getLength()));
     }
     return splits;
   }
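
The practical effect of the getSplits changes above is that a job can now point
TSFInputFormat at a data directory containing nested folders and non-TsFile sidecar
files: listing is forced to be recursive, and any path that does not end with the
.tsfile suffix is skipped. The driver sketch below is not part of the commit; it only
illustrates how such a job might be configured. The input path, device id, and
measurement name are placeholders; READ_DELTAOBJECTS and READ_MEASUREMENTID are the
public TSFInputFormat keys visible in this patch.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.MapWritable;
  import org.apache.hadoop.io.NullWritable;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
  import org.apache.iotdb.hadoop.tsfile.TSFInputFormat;

  public class TsFileScanDriver {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // Restrict the scan to one device and one measurement via the public keys;
      // the values are placeholders.
      conf.set(TSFInputFormat.READ_DELTAOBJECTS, "root.sg.d1");
      conf.set(TSFInputFormat.READ_MEASUREMENTID, "sensor_1");

      Job job = Job.getInstance(conf, "tsfile-scan");
      job.setInputFormatClass(TSFInputFormat.class);
      job.setOutputKeyClass(NullWritable.class);
      job.setOutputValueClass(MapWritable.class);

      // With this commit, getSplits() turns on recursive listing itself and skips
      // entries without the ".tsfile" suffix, so the directory may also contain
      // .resource or other sidecar files.
      FileInputFormat.addInputPath(job, new Path("/data/iotdb/sequence"));

      // Mapper/reducer wiring omitted; this sketch only covers the input side.
    }
  }
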
diff --git a/hive-connector/src/main/java/org/apache/iotdb/hive/TSFHiveInputFormat.java b/hive-connector/src/main/java/org/apache/iotdb/hive/TSFHiveInputFormat.java
index bb2d5ca..cf7b3e0 100644
--- a/hive-connector/src/main/java/org/apache/iotdb/hive/TSFHiveInputFormat.java
+++ b/hive-connector/src/main/java/org/apache/iotdb/hive/TSFHiveInputFormat.java
@@ -29,8 +29,8 @@ import java.io.IOException;
 import java.util.Arrays;
 
 /**
- * The class implement is same as {@link org.apache.iotdb.hadoop.tsfile.TSFInputFormat}
- * and is customized for Hive to implements JobConfigurable interface.
+ * The class implement is same as {@link org.apache.iotdb.hadoop.tsfile.TSFInputFormat} and is
+ * customized for Hive to implements JobConfigurable interface.
  */
 public class TSFHiveInputFormat extends FileInputFormat<NullWritable, MapWritable> {
 
@@ -39,13 +39,16 @@ public class TSFHiveInputFormat extends FileInputFormat<NullWritable, MapWritabl
 
 
   @Override
-  public RecordReader<NullWritable, MapWritable> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
+  public RecordReader<NullWritable, MapWritable> getRecordReader(InputSplit split, JobConf job,
+      Reporter reporter) throws IOException {
     return new TSFHiveRecordReader(split, job);
   }
 
   @Override
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
-    return TSFInputFormat.getTSFInputSplit(job, Arrays.asList(super.listStatus(job)), logger).toArray(new InputSplit[0]);
+    job.setBoolean(INPUT_DIR_RECURSIVE, true);
+    return TSFInputFormat.getTSFInputSplit(job, Arrays.asList(super.listStatus(job)), logger)
+        .toArray(new InputSplit[0]);
   }
 
 }
diff --git a/hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java b/hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java
index 9263f2a..35d94e9 100644
--- a/hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java
+++ b/hive-connector/src/main/java/org/apache/iotdb/hive/TsFileSerDe.java
@@ -18,6 +18,15 @@
  */
 package org.apache.iotdb.hive;
 
+import static org.apache.iotdb.hadoop.tsfile.TSFInputFormat.READ_DELTAOBJECTS;
+import static org.apache.iotdb.hadoop.tsfile.TSFInputFormat.READ_MEASUREMENTID;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+import javax.annotation.Nullable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.StringInternUtils;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -36,9 +45,6 @@ import org.apache.iotdb.hadoop.tsfile.record.HDFSTSRecord;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
-import java.util.*;
-
 public class TsFileSerDe extends AbstractSerDe {
 
   private static final Logger logger = LoggerFactory.getLogger(TsFileSerDe.class);
@@ -63,7 +69,6 @@ public class TsFileSerDe extends AbstractSerDe {
 
     deviceId = tbl.getProperty(DEVICE_ID);
 
-
     if (columnNameProperty == null || columnNameProperty.isEmpty()
     || columnTypeProperty == null || columnTypeProperty.isEmpty()) {
       columnNames = Collections.emptyList();
@@ -80,6 +85,9 @@ public class TsFileSerDe extends AbstractSerDe {
       throw new TsFileSerDeException("len(columnNames) != len(columnTypes)");
     }
 
+    conf.set(READ_DELTAOBJECTS, deviceId);
+    conf.set(READ_MEASUREMENTID, columnNames.get(1));
+
     oi = createObjectInspector();
   }
 


[incubator-iotdb] 02/03: reformat code

Posted by qi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

qiaojialin pushed a commit to branch rel/0.10
in repository https://gitbox.apache.org/repos/asf/incubator-iotdb.git

commit c11f9a756b64ac55fb7d84284dcf031adf3907a7
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Mon Jul 13 14:54:04 2020 +0800

    reformat code
---
 .../iotdb/hadoop/tsfile/TSFRecordReader.java       | 27 +++++++++++++---------
 .../iotdb/hadoop/tsfile/TSFRecordWriter.java       |  3 ++-
 2 files changed, 18 insertions(+), 12 deletions(-)

diff --git a/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFRecordReader.java b/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFRecordReader.java
index a0e3983..3d042cc 100644
--- a/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFRecordReader.java
+++ b/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFRecordReader.java
@@ -18,8 +18,20 @@
  */
 package org.apache.iotdb.hadoop.tsfile;
 
+import static java.util.stream.Collectors.toList;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
@@ -35,12 +47,6 @@ import org.apache.iotdb.tsfile.read.query.dataset.QueryDataSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import static java.util.stream.Collectors.toList;
-
 public class TSFRecordReader extends RecordReader<NullWritable, MapWritable> implements IReaderSet {
 
   private static final Logger logger = LoggerFactory.getLogger(TSFRecordReader.class);
@@ -100,12 +106,11 @@ public class TSFRecordReader extends RecordReader<NullWritable, MapWritable> imp
     ReadOnlyTsFile queryEngine = new ReadOnlyTsFile(reader);
     for (String deviceId : deviceIds) {
       List<Path> paths = measurementIds.stream()
-              .map(
-                      measurementId -> new Path(deviceId + TsFileConstant.PATH_SEPARATOR + measurementId))
-              .collect(toList());
+          .map(measurementId -> new Path(deviceId + TsFileConstant.PATH_SEPARATOR + measurementId))
+          .collect(toList());
       QueryExpression queryExpression = QueryExpression.create(paths, null);
       QueryDataSet dataSet = queryEngine.query(queryExpression,
-              split.getStart(), split.getStart() + split.getLength());
+          split.getStart(), split.getStart() + split.getLength());
       dataSetList.add(dataSet);
       deviceIdList.add(deviceId);
     }
diff --git a/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFRecordWriter.java b/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFRecordWriter.java
index 7e064d6..ca468f3 100644
--- a/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFRecordWriter.java
+++ b/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/TSFRecordWriter.java
@@ -45,7 +45,8 @@ public class TSFRecordWriter extends RecordWriter<NullWritable, HDFSTSRecord> {
   }
 
   @Override
-  public synchronized void write(NullWritable key, HDFSTSRecord value) throws IOException, InterruptedException {
+  public synchronized void write(NullWritable key, HDFSTSRecord value)
+      throws IOException, InterruptedException {
     try {
       writer.write(value.convertToTSRecord());
     } catch (WriteProcessException e) {