You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by sh...@apache.org on 2018/12/10 13:54:25 UTC
[kylin] 01/02: KYLIN-3597 fix sonar issues
This is an automated email from the ASF dual-hosted git repository.
shaofengshi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git
commit 4e171c8b0fbcd7a6ffaa24e62a12dd7a070ccd49
Author: whuwb <sk...@gmail.com>
AuthorDate: Mon Dec 10 16:20:05 2018 +0800
KYLIN-3597 fix sonar issues
---
.../kylin/common/BackwardCompatibilityConfig.java | 11 ++---------
.../java/org/apache/kylin/common/KylinConfig.java | 22 ++++++++--------------
.../src/main/resources/kylin-defaults.properties | 2 +-
3 files changed, 11 insertions(+), 24 deletions(-)
diff --git a/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java b/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
index aeb7636..751d2f7 100644
--- a/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
@@ -144,26 +144,21 @@ public class BackwardCompatibilityConfig {
BackwardCompatibilityConfig bcc = new BackwardCompatibilityConfig();
File repoDir = new File(kylinRepoPath).getCanonicalFile();
File outputDir = new File(outputPath).getCanonicalFile();
- PrintWriter out = null;
// generate sed file
File sedFile = new File(outputDir, "upgrade-old-config.sed");
- try {
- out = new PrintWriter(sedFile, "UTF-8");
+ try (PrintWriter out = new PrintWriter(sedFile, "UTF-8")) {
for (Entry<String, String> e : bcc.old2new.entrySet()) {
out.println("s/" + quote(e.getKey()) + "/" + e.getValue() + "/g");
}
for (Entry<String, String> e : bcc.old2newPrefix.entrySet()) {
out.println("s/" + quote(e.getKey()) + "/" + e.getValue() + "/g");
}
- } finally {
- IOUtils.closeQuietly(out);
}
// generate sh file
File shFile = new File(outputDir, "upgrade-old-config.sh");
- try {
- out = new PrintWriter(shFile, "UTF-8");
+ try (PrintWriter out = new PrintWriter(shFile, "UTF-8")) {
out.println("#!/bin/bash");
Stack<File> stack = new Stack<>();
stack.push(repoDir);
@@ -179,8 +174,6 @@ public class BackwardCompatibilityConfig {
out.println("sed -i -f upgrade-old-config.sed " + f.getAbsolutePath());
}
}
- } finally {
- IOUtils.closeQuietly(out);
}
System.out.println("Files generated:");
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 4a86b76..4db1748 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -50,6 +50,7 @@ import com.google.common.base.Preconditions;
public class KylinConfig extends KylinConfigBase {
private static final long serialVersionUID = 1L;
private static final Logger logger = LoggerFactory.getLogger(KylinConfig.class);
+ private static final String METADATA_URI_PREFIX = "Metadata uri : ";
/**
* Kylin properties file name
@@ -192,20 +193,20 @@ public class KylinConfig extends KylinConfigBase {
return UriType.PROPERTIES_FILE;
} else {
throw new IllegalStateException(
- "Metadata uri : " + metaUri + " is a local file but not kylin.properties");
+ METADATA_URI_PREFIX + metaUri + " is a local file but not kylin.properties");
}
} else {
throw new IllegalStateException(
- "Metadata uri : " + metaUri + " looks like a file but it's neither a file nor a directory");
+ METADATA_URI_PREFIX + metaUri + " looks like a file but it's neither a file nor a directory");
}
} else {
if (RestClient.matchFullRestPattern(metaUri))
return UriType.REST_ADDR;
else
- throw new IllegalStateException("Metadata uri : " + metaUri + " is not a valid REST URI address");
+ throw new IllegalStateException(METADATA_URI_PREFIX + metaUri + " is not a valid REST URI address");
}
} catch (Exception e) {
- throw new IllegalStateException("Metadata uri : " + metaUri + " is not recognized", e);
+ throw new IllegalStateException(METADATA_URI_PREFIX + metaUri + " is not recognized", e);
}
}
@@ -393,9 +394,8 @@ public class KylinConfig extends KylinConfigBase {
*/
private static void loadPropertiesFromInputStream(InputStream inputStream, OrderedProperties properties) {
Preconditions.checkNotNull(properties);
- BufferedReader confReader = null;
- try {
- confReader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
+
+ try (BufferedReader confReader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"))) {
OrderedProperties temp = new OrderedProperties();
temp.load(confReader);
temp = BCC.check(temp);
@@ -403,8 +403,6 @@ public class KylinConfig extends KylinConfigBase {
properties.putAll(temp);
} catch (Exception e) {
throw new RuntimeException(e);
- } finally {
- IOUtils.closeQuietly(confReader);
}
}
@@ -521,12 +519,8 @@ public class KylinConfig extends KylinConfigBase {
}
public void exportToFile(File file) throws IOException {
- FileOutputStream fos = null;
- try {
- fos = new FileOutputStream(file);
+ try (FileOutputStream fos = new FileOutputStream(file)) {
getAllProperties().store(fos, file.getAbsolutePath());
- } finally {
- IOUtils.closeQuietly(fos);
}
}
diff --git a/core-common/src/main/resources/kylin-defaults.properties b/core-common/src/main/resources/kylin-defaults.properties
index 6f2db9a..ee824bf 100644
--- a/core-common/src/main/resources/kylin-defaults.properties
+++ b/core-common/src/main/resources/kylin-defaults.properties
@@ -91,7 +91,7 @@ kylin.source.hive.beeline-shell=beeline
# While hive client uses above settings to read hive table metadata,
# table operations can go through a separate SparkSQL command line, given SparkSQL connects to the same Hive metastore.
kylin.source.hive.enable-sparksql-for-table-ops=false
-#kylin.source.hive.sparksql-beeline-shell=/path/to/spark-client/bin/beeline
+#kylin.source.hive.sparksql-beeline-shell=/path/to/spark-client/bin/bee
#kylin.source.hive.sparksql-beeline-params=-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u jdbc:hive2://localhost:10000
kylin.source.hive.keep-flat-table=false