Posted to commits@iotdb.apache.org by qi...@apache.org on 2020/06/19 07:13:07 UTC

[incubator-iotdb] branch rel/0.9 updated: fix bug (#1388)

This is an automated email from the ASF dual-hosted git repository.

qiaojialin pushed a commit to branch rel/0.9
in repository https://gitbox.apache.org/repos/asf/incubator-iotdb.git


The following commit(s) were added to refs/heads/rel/0.9 by this push:
     new ed0edcd  fix bug (#1388)
ed0edcd is described below

commit ed0edcd4d0bc4364e2d2ab28864213bfa2c1f177
Author: SilverNarcissus <15...@smail.nju.edu.cn>
AuthorDate: Fri Jun 19 15:12:55 2020 +0800

    fix bug (#1388)
---
 .../scala/org/apache/iotdb/spark/tsfile/DefaultSource.scala | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/spark-tsfile/src/main/scala/org/apache/iotdb/spark/tsfile/DefaultSource.scala b/spark-tsfile/src/main/scala/org/apache/iotdb/spark/tsfile/DefaultSource.scala
index 76329c0..d6bbd8e 100755
--- a/spark-tsfile/src/main/scala/org/apache/iotdb/spark/tsfile/DefaultSource.scala
+++ b/spark-tsfile/src/main/scala/org/apache/iotdb/spark/tsfile/DefaultSource.scala
@@ -41,6 +41,9 @@ import org.apache.spark.sql.execution.datasources.{FileFormat, OutputWriterFacto
 import org.apache.spark.sql.sources.{DataSourceRegister, Filter}
 import org.apache.spark.sql.types._
 import org.slf4j.LoggerFactory
+import scala.collection.JavaConversions._
+import scala.collection.mutable
+import scala.collection.mutable.ListBuffer
 
 private[tsfile] class DefaultSource extends FileFormat with DataSourceRegister {
 
@@ -116,7 +119,15 @@ private[tsfile] class DefaultSource extends FileFormat with DataSourceRegister {
 
       if (options.getOrElse(DefaultSource.isNarrowForm, "").equals("narrow_form")) {
         val deviceNames = tsFileMetaData.getDeviceMap.keySet()
-        val measurementNames = tsFileMetaData.getMeasurementSchema.keySet()
+
+        val measurementNames = new java.util.HashSet[String]()
+
+        requiredSchema.foreach((field: StructField) => {
+          if (field.name != QueryConstant.RESERVED_TIME
+            && field.name != NarrowConverter.DEVICE_NAME) {
+            measurementNames += field.name
+          }
+        })
 
         // construct queryExpression based on queriedSchema and filters
         val queryExpressions = NarrowConverter.toQueryExpression(dataSchema, deviceNames,
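
The change above narrows the set of queried measurements: instead of taking every
measurement name from the TsFile metadata (getMeasurementSchema.keySet()), the set
is now built from the columns Spark actually requested (requiredSchema), skipping
the reserved time and device_name columns. Below is a minimal, self-contained sketch
of that pattern, not the committed code; the literal column names "time" and
"device_name" are assumptions standing in for QueryConstant.RESERVED_TIME and
NarrowConverter.DEVICE_NAME.

    import org.apache.spark.sql.types.{FloatType, LongType, StringType, StructField, StructType}

    object MeasurementNameSketch {
      // Collect measurement names only from the requested columns,
      // skipping the reserved time and device columns.
      def measurementNames(requiredSchema: StructType): java.util.Set[String] = {
        val names = new java.util.HashSet[String]()
        requiredSchema.foreach { field =>
          if (field.name != "time" && field.name != "device_name") {
            names.add(field.name)
          }
        }
        names
      }

      def main(args: Array[String]): Unit = {
        val schema = StructType(Seq(
          StructField("time", LongType),
          StructField("device_name", StringType),
          StructField("temperature", FloatType)))
        // Prints [temperature]: only the requested measurement column remains.
        println(measurementNames(schema))
      }
    }

Restricting the measurement set to requiredSchema keeps the query expression aligned
with the columns the Spark job projects, rather than every measurement present in the file.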