You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by an...@apache.org on 2020/05/15 14:59:45 UTC
[hive] branch master updated: HIVE-23432: Add Ranger Replication
Metrics (Aasha Medhi, reviewed by Pravin Kumar Sinha)
This is an automated email from the ASF dual-hosted git repository.
anishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new a2b3b36 HIVE-23432: Add Ranger Replication Metrics (Aasha Medhi, reviewed by Pravin Kumar Sinha)
a2b3b36 is described below
commit a2b3b36da3548b3e2cf265144971d01a7683cc8d
Author: Aasha Medhi <aa...@gmail.com>
AuthorDate: Fri May 15 20:29:31 2020 +0530
HIVE-23432: Add Ranger Replication Metrics (Aasha Medhi, reviewed by Pravin Kumar Sinha)
---
.../TestReplicationScenariosAcrossInstances.java | 4 +-
ql/pom.xml | 6 ++-
.../hadoop/hive/ql/exec/repl/RangerDumpTask.java | 9 +++-
.../hadoop/hive/ql/exec/repl/RangerLoadTask.java | 11 +++-
.../ql/exec/repl/ranger/NoOpRangerRestClient.java | 2 +-
.../hadoop/hive/ql/parse/repl/ReplLogger.java | 10 +++-
.../hadoop/hive/ql/parse/repl/ReplState.java | 9 ++++
.../parse/repl/dump/log/BootstrapDumpLogger.java | 7 ++-
.../parse/repl/dump/log/IncrementalDumpLogger.java | 7 ++-
.../log/RangerDumpLogger.java} | 33 +++++-------
.../log/state/RangerDumpBegin.java} | 32 +++++++-----
.../parse/repl/dump/log/state/RangerDumpEnd.java | 54 +++++++++++++++++++
.../parse/repl/load/log/BootstrapLoadLogger.java | 7 ++-
.../parse/repl/load/log/IncrementalLoadLogger.java | 7 ++-
...mentalLoadLogger.java => RangerLoadLogger.java} | 39 +++++++-------
.../parse/repl/load/log/state/RangerLoadBegin.java | 52 +++++++++++++++++++
.../parse/repl/load/log/state/RangerLoadEnd.java | 60 ++++++++++++++++++++++
.../hive/ql/exec/repl/TestRangerDumpTask.java | 44 ++++++++++++++--
.../hive/ql/exec/repl/TestRangerLoadTask.java | 47 ++++++++++++++++-
19 files changed, 368 insertions(+), 72 deletions(-)
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 901a4ed..eeb81da 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -1542,8 +1542,8 @@ public class TestReplicationScenariosAcrossInstances extends BaseReplicationAcro
}
/*
-Can't test complete replication as mini ranger is not supported
-Testing just the configs and no impact on existing replication
+ Can't test complete replication as mini ranger is not supported
+ Testing just the configs and no impact on existing replication
*/
@Test
public void testFailureUnsupportedAuthorizerReplication() throws Throwable {
diff --git a/ql/pom.xml b/ql/pom.xml
index d6dc7ce..9bf7b90 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -821,8 +821,12 @@
<artifactId>jersey-multipart</artifactId>
<version>${jersey.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.google.code.findbugs</groupId>
+ <artifactId>findbugs-annotations</artifactId>
+ <version>3.0.1</version>
+ </dependency>
</dependencies>
-
<profiles>
<profile>
<id>sources</id>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
index f9d3de7..e078371 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerDumpTask.java
@@ -30,6 +30,8 @@ import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerPolicy;
import org.apache.hadoop.hive.ql.exec.repl.ranger.NoOpRangerRestClient;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerRestClientImpl;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.RangerDumpLogger;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -52,6 +54,8 @@ public class RangerDumpTask extends Task<RangerDumpWork> implements Serializable
private transient RangerRestClient rangerRestClient;
+ private transient ReplLogger replLogger;
+
public RangerDumpTask() {
super();
}
@@ -71,7 +75,7 @@ public class RangerDumpTask extends Task<RangerDumpWork> implements Serializable
@Override
public int execute() {
try {
- int exportCount = 0;
+ long exportCount = 0;
Path filePath = null;
LOG.info("Exporting Ranger Metadata");
if (rangerRestClient == null) {
@@ -83,6 +87,8 @@ public class RangerDumpTask extends Task<RangerDumpWork> implements Serializable
+ "Please pass a valid config hive.repl.authorization.provider.service.endpoint");
}
String rangerHiveServiceName = conf.getVar(REPL_RANGER_SERVICE_NAME);
+ replLogger = new RangerDumpLogger(work.getDbName(), work.getCurrentDumpPath().toString());
+ replLogger.startLog();
RangerExportPolicyList rangerExportPolicyList = rangerRestClient.exportRangerPolicies(rangerEndpoint,
work.getDbName(), rangerHiveServiceName);
List<RangerPolicy> rangerPolicies = rangerExportPolicyList.getPolicies();
@@ -101,6 +107,7 @@ public class RangerDumpTask extends Task<RangerDumpWork> implements Serializable
exportCount = rangerExportPolicyList.getListSize();
}
}
+ replLogger.endLog(exportCount);
LOG.debug("Ranger policy export filePath:" + filePath);
LOG.info("Number of ranger policies exported {}", exportCount);
return 0;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
index 5497d28..4d62a51 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/RangerLoadTask.java
@@ -30,6 +30,8 @@ import org.apache.hadoop.hive.ql.exec.repl.ranger.NoOpRangerRestClient;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerPolicy;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerExportPolicyList;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
+import org.apache.hadoop.hive.ql.parse.repl.load.log.RangerLoadLogger;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -53,6 +55,8 @@ public class RangerLoadTask extends Task<RangerLoadWork> implements Serializable
private transient RangerRestClient rangerRestClient;
+ private transient ReplLogger replLogger;
+
public RangerLoadTask() {
super();
}
@@ -87,6 +91,10 @@ public class RangerLoadTask extends Task<RangerLoadWork> implements Serializable
LOG.info("Importing Ranger Metadata from {} ", work.getCurrentDumpPath());
rangerExportPolicyList = rangerRestClient.readRangerPoliciesFromJsonFile(new Path(work.getCurrentDumpPath(),
ReplUtils.HIVE_RANGER_POLICIES_FILE_NAME), conf);
+ int expectedPolicyCount = rangerExportPolicyList == null ? 0 : rangerExportPolicyList.getListSize();
+ replLogger = new RangerLoadLogger(work.getSourceDbName(), work.getTargetDbName(),
+ work.getCurrentDumpPath().toString(), expectedPolicyCount);
+ replLogger.startLog();
if (rangerExportPolicyList != null && !CollectionUtils.isEmpty(rangerExportPolicyList.getPolicies())) {
rangerPolicies = rangerExportPolicyList.getPolicies();
}
@@ -98,7 +106,7 @@ public class RangerLoadTask extends Task<RangerLoadWork> implements Serializable
}
List<RangerPolicy> updatedRangerPolicies = rangerRestClient.changeDataSet(rangerPolicies, work.getSourceDbName(),
work.getTargetDbName());
- int importCount = 0;
+ long importCount = 0;
if (!CollectionUtils.isEmpty(updatedRangerPolicies)) {
if (rangerExportPolicyList == null) {
rangerExportPolicyList = new RangerExportPolicyList();
@@ -108,6 +116,7 @@ public class RangerLoadTask extends Task<RangerLoadWork> implements Serializable
conf.getVar(REPL_RANGER_SERVICE_NAME));
LOG.info("Number of ranger policies imported {}", rangerExportPolicyList.getListSize());
importCount = rangerExportPolicyList.getListSize();
+ replLogger.endLog(importCount);
LOG.info("Ranger policy import finished {} ", importCount);
}
return 0;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/NoOpRangerRestClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/NoOpRangerRestClient.java
index 4e3fa61..b0fdff4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/NoOpRangerRestClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/NoOpRangerRestClient.java
@@ -61,7 +61,7 @@ public class NoOpRangerRestClient implements RangerRestClient {
@Override
public RangerExportPolicyList readRangerPoliciesFromJsonFile(Path filePath, HiveConf conf) throws SemanticException {
- return null;
+ return new RangerExportPolicyList();
}
@Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java
index 645b2c6..1afcf17 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java
@@ -19,13 +19,19 @@ package org.apache.hadoop.hive.ql.parse.repl;
import org.apache.hadoop.hive.metastore.TableType;
-public abstract class ReplLogger {
+/**
+ * ReplLogger.
+ *
+ * Logger class for Repl Events.
+ **/
+public abstract class ReplLogger<T> {
public ReplLogger() {
}
public abstract void startLog();
- public abstract void endLog(String lastReplId);
+
+ public abstract void endLog(T logVal);
public void tableLog(String tableName, TableType tableType) {
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java
index 6429866..e441153 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java
@@ -23,6 +23,11 @@ import org.codehaus.jackson.map.SerializationConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+/**
+ * ReplState.
+ *
+ * Logger class for Repl Events.
+ **/
public abstract class ReplState {
@JsonIgnoreProperties
private static final Logger REPL_LOG = LoggerFactory.getLogger("ReplState");
@@ -41,9 +46,13 @@ public abstract class ReplState {
TABLE_DUMP,
FUNCTION_DUMP,
EVENT_DUMP,
+ RANGER_DUMP_START,
+ RANGER_DUMP_END,
TABLE_LOAD,
FUNCTION_LOAD,
EVENT_LOAD,
+ RANGER_LOAD_START,
+ RANGER_LOAD_END,
END
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/BootstrapDumpLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/BootstrapDumpLogger.java
index 198b94f..0872134 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/BootstrapDumpLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/BootstrapDumpLogger.java
@@ -25,7 +25,12 @@ import org.apache.hadoop.hive.ql.parse.repl.dump.log.state.BootstrapDumpTable;
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
-public class BootstrapDumpLogger extends ReplLogger {
+/**
+ * BootstrapDumpLogger.
+ *
+ * Repllogger for bootstrap dump.
+ **/
+public class BootstrapDumpLogger extends ReplLogger<String> {
private String dbName;
private String dumpDir;
private long estimatedNumTables;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/IncrementalDumpLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/IncrementalDumpLogger.java
index f5c0837..4f24c0c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/IncrementalDumpLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/IncrementalDumpLogger.java
@@ -23,7 +23,12 @@ import org.apache.hadoop.hive.ql.parse.repl.dump.log.state.IncrementalDumpEvent;
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
-public class IncrementalDumpLogger extends ReplLogger {
+/**
+ * IncrementalDumpLogger.
+ *
+ * Repllogger for incremental dump.
+ **/
+public class IncrementalDumpLogger extends ReplLogger<String> {
private String dbName;
private String dumpDir;
private long estimatedNumEvents;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/RangerDumpLogger.java
similarity index 53%
copy from ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
copy to ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/RangerDumpLogger.java
index 77db6ed..1f48645 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/RangerDumpLogger.java
@@ -15,41 +15,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.parse.repl.load.log;
+package org.apache.hadoop.hive.ql.parse.repl.dump.log;
-import org.apache.hadoop.hive.ql.parse.repl.load.log.state.IncrementalLoadBegin;
-import org.apache.hadoop.hive.ql.parse.repl.load.log.state.IncrementalLoadEnd;
-import org.apache.hadoop.hive.ql.parse.repl.load.log.state.IncrementalLoadEvent;
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.state.RangerDumpBegin;
+import org.apache.hadoop.hive.ql.parse.repl.dump.log.state.RangerDumpEnd;
-public class IncrementalLoadLogger extends ReplLogger {
+/**
+ * RangerDumpLogger.
+ *
+ * Repllogger for Ranger Dump.
+ **/
+public class RangerDumpLogger extends ReplLogger<Long> {
private String dbName;
private String dumpDir;
- private long numEvents;
- private long eventSeqNo;
- public IncrementalLoadLogger(String dbName, String dumpDir, int numEvents) {
+ public RangerDumpLogger(String dbName, String dumpDir) {
this.dbName = dbName;
this.dumpDir = dumpDir;
- this.numEvents = numEvents;
- this.eventSeqNo = 0;
}
@Override
public void startLog() {
- (new IncrementalLoadBegin(dbName, dumpDir, numEvents)).log(LogTag.START);
- }
-
- @Override
- public void eventLog(String eventId, String eventType) {
- eventSeqNo++;
- (new IncrementalLoadEvent(dbName, eventId, eventType, eventSeqNo, numEvents))
- .log(LogTag.EVENT_LOAD);
+ new RangerDumpBegin(dbName).log(LogTag.RANGER_DUMP_START);
}
@Override
- public void endLog(String lastReplId) {
- (new IncrementalLoadEnd(dbName, numEvents, dumpDir, lastReplId)).log(LogTag.END);
+ public void endLog(Long count) {
+ new RangerDumpEnd(dbName, count, dumpDir).log(LogTag.RANGER_DUMP_END);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java
copy to ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java
index 645b2c6..114de86 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java
@@ -15,22 +15,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.parse.repl;
+package org.apache.hadoop.hive.ql.parse.repl.dump.log.state;
-import org.apache.hadoop.hive.metastore.TableType;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.codehaus.jackson.annotate.JsonProperty;
-public abstract class ReplLogger {
-
- public ReplLogger() {
- }
+/**
+ * RangerDumpBegin.
+ *
+ * ReplState to define Ranger Dump Start.
+ **/
+public class RangerDumpBegin extends ReplState {
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String dbName;
- public abstract void startLog();
- public abstract void endLog(String lastReplId);
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long dumpStartTime;
- public void tableLog(String tableName, TableType tableType) {
- }
- public void functionLog(String funcName){
- }
- public void eventLog(String eventId, String eventType) {
+ public RangerDumpBegin(String dbName) {
+ this.dbName = dbName;
+ this.dumpStartTime = System.currentTimeMillis() / 1000;
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java
new file mode 100644
index 0000000..8e4dbc7
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.dump.log.state;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * RangerDumpEnd.
+ *
+ * ReplState to define Ranger Dump End.
+ **/
+public class RangerDumpEnd extends ReplState {
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String dbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long actualNumPolicies;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long dumpEndTime;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String dumpDir;
+
+ public RangerDumpEnd(String dbName,
+ long actualNumPolicies,
+ String dumpDir) {
+ this.dbName = dbName;
+ this.actualNumPolicies = actualNumPolicies;
+ this.dumpEndTime = System.currentTimeMillis() / 1000;
+ this.dumpDir = dumpDir;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/BootstrapLoadLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/BootstrapLoadLogger.java
index cf35826..b804cd2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/BootstrapLoadLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/BootstrapLoadLogger.java
@@ -22,7 +22,12 @@ import org.apache.hadoop.hive.ql.parse.repl.load.log.state.*;
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
-public class BootstrapLoadLogger extends ReplLogger {
+/**
+ * BootstrapLoadLogger.
+ *
+ * Repllogger for bootstrap Load.
+ **/
+public class BootstrapLoadLogger extends ReplLogger<String> {
private String dbName;
private String dumpDir;
private long numTables;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
index 77db6ed..9e3e2d9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
@@ -23,7 +23,12 @@ import org.apache.hadoop.hive.ql.parse.repl.load.log.state.IncrementalLoadEvent;
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
-public class IncrementalLoadLogger extends ReplLogger {
+/**
+ * IncrementalLoadLogger.
+ *
+ * Repllogger for Incremental Load.
+ **/
+public class IncrementalLoadLogger extends ReplLogger<String> {
private String dbName;
private String dumpDir;
private long numEvents;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/RangerLoadLogger.java
similarity index 53%
copy from ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
copy to ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/RangerLoadLogger.java
index 77db6ed..1eee3af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/IncrementalLoadLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/RangerLoadLogger.java
@@ -17,39 +17,36 @@
*/
package org.apache.hadoop.hive.ql.parse.repl.load.log;
-import org.apache.hadoop.hive.ql.parse.repl.load.log.state.IncrementalLoadBegin;
-import org.apache.hadoop.hive.ql.parse.repl.load.log.state.IncrementalLoadEnd;
-import org.apache.hadoop.hive.ql.parse.repl.load.log.state.IncrementalLoadEvent;
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.ReplState.LogTag;
+import org.apache.hadoop.hive.ql.parse.repl.load.log.state.RangerLoadBegin;
+import org.apache.hadoop.hive.ql.parse.repl.load.log.state.RangerLoadEnd;
-public class IncrementalLoadLogger extends ReplLogger {
- private String dbName;
+/**
+ * RangerLoadLogger.
+ *
+ * Repllogger for Ranger Load.
+ **/
+public class RangerLoadLogger extends ReplLogger<Long> {
+ private String sourceDbName;
+ private String targetDbName;
private String dumpDir;
- private long numEvents;
- private long eventSeqNo;
+ private long estimatedNumPolicies;
- public IncrementalLoadLogger(String dbName, String dumpDir, int numEvents) {
- this.dbName = dbName;
+ public RangerLoadLogger(String sourceDbName, String targetDbName, String dumpDir, long estimatedNumPolicies) {
+ this.sourceDbName = sourceDbName;
+ this.targetDbName = targetDbName;
+ this.estimatedNumPolicies = estimatedNumPolicies;
this.dumpDir = dumpDir;
- this.numEvents = numEvents;
- this.eventSeqNo = 0;
}
@Override
public void startLog() {
- (new IncrementalLoadBegin(dbName, dumpDir, numEvents)).log(LogTag.START);
- }
-
- @Override
- public void eventLog(String eventId, String eventType) {
- eventSeqNo++;
- (new IncrementalLoadEvent(dbName, eventId, eventType, eventSeqNo, numEvents))
- .log(LogTag.EVENT_LOAD);
+ new RangerLoadBegin(sourceDbName, targetDbName, estimatedNumPolicies).log(LogTag.RANGER_LOAD_START);
}
@Override
- public void endLog(String lastReplId) {
- (new IncrementalLoadEnd(dbName, numEvents, dumpDir, lastReplId)).log(LogTag.END);
+ public void endLog(Long count) {
+ new RangerLoadEnd(sourceDbName, targetDbName, count, dumpDir).log(LogTag.RANGER_LOAD_END);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java
new file mode 100644
index 0000000..16f6d96
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.load.log.state;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * RangerLoadBegin.
+ *
+ * ReplState to define Ranger Load Begin.
+ **/
+public class RangerLoadBegin extends ReplState {
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String sourceDbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String targetDbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long estimatedNumPolicies;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long loadStartTime;
+
+ public RangerLoadBegin(String sourceDbName, String targetDbName, long estimatedNumPolicies) {
+ this.sourceDbName = sourceDbName;
+ this.targetDbName = targetDbName;
+ this.estimatedNumPolicies = estimatedNumPolicies;
+ this.loadStartTime = System.currentTimeMillis() / 1000;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java
new file mode 100644
index 0000000..3317f08
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.load.log.state;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * RangerLoadEnd.
+ *
+ * ReplState to define Ranger Load End.
+ **/
+public class RangerLoadEnd extends ReplState {
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String sourceDbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String targetDbName;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long actualNumPolicies;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private Long loadEndTime;
+
+ @SuppressFBWarnings("URF_UNREAD_FIELD")
+ @JsonProperty
+ private String dumpDir;
+
+ public RangerLoadEnd(String sourceDbName,
+ String targetDbName,
+ long actualNumPolicies,
+ String dumpDir) {
+ this.sourceDbName = sourceDbName;
+ this.targetDbName = targetDbName;
+ this.actualNumPolicies = actualNumPolicies;
+ this.loadEndTime = System.currentTimeMillis() / 1000;
+ this.dumpDir = dumpDir;
+ }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
index 89cec53..8ef0987 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerDumpTask.java
@@ -25,13 +25,17 @@ import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerExportPolicyList;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerRestClientImpl;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerPolicy;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+import org.powermock.reflect.Whitebox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -43,10 +47,10 @@ import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.REPL_RANGER_SERVICE_
/**
* Unit test class for testing Ranger Dump.
*/
-@RunWith(MockitoJUnitRunner.class)
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({LoggerFactory.class})
public class TestRangerDumpTask {
- protected static final Logger LOG = LoggerFactory.getLogger(TestRangerDumpTask.class);
private RangerDumpTask task;
@Mock
@@ -79,8 +83,9 @@ public class TestRangerDumpTask {
Mockito.when(mockClient.exportRangerPolicies(Mockito.anyString(), Mockito.anyString(), Mockito.anyString()))
.thenReturn(rangerPolicyList);
Mockito.when(conf.getVar(REPL_AUTHORIZATION_PROVIDER_SERVICE_ENDPOINT)).thenReturn("rangerEndpoint");
- Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("cm_hive");
+ Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("hive");
Mockito.when(work.getDbName()).thenReturn("testdb");
+ Mockito.when(work.getCurrentDumpPath()).thenReturn(new Path("/tmp"));
int status = task.execute();
Assert.assertEquals(0, status);
}
@@ -102,7 +107,7 @@ public class TestRangerDumpTask {
Mockito.when(mockClient.exportRangerPolicies(Mockito.anyString(), Mockito.anyString(), Mockito.anyString()))
.thenReturn(rangerPolicyList);
Mockito.when(conf.getVar(REPL_AUTHORIZATION_PROVIDER_SERVICE_ENDPOINT)).thenReturn("rangerEndpoint");
- Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("cm_hive");
+ Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("hive");
Mockito.when(work.getDbName()).thenReturn("testdb");
Path rangerDumpPath = new Path("/tmp");
Mockito.when(work.getCurrentDumpPath()).thenReturn(rangerDumpPath);
@@ -112,4 +117,33 @@ public class TestRangerDumpTask {
int status = task.execute();
Assert.assertEquals(0, status);
}
+
+ @Test
+ public void testSuccessRangerDumpMetrics() throws Exception {
+ Logger logger = Mockito.mock(Logger.class);
+ Whitebox.setInternalState(ReplState.class, logger);
+ RangerExportPolicyList rangerPolicyList = new RangerExportPolicyList();
+ rangerPolicyList.setPolicies(new ArrayList<RangerPolicy>());
+ Mockito.when(mockClient.exportRangerPolicies(Mockito.anyString(), Mockito.anyString(), Mockito.anyString()))
+ .thenReturn(rangerPolicyList);
+ Mockito.when(conf.getVar(REPL_AUTHORIZATION_PROVIDER_SERVICE_ENDPOINT)).thenReturn("rangerEndpoint");
+ Mockito.when(conf.getVar(REPL_RANGER_SERVICE_NAME)).thenReturn("hive");
+ Mockito.when(work.getDbName()).thenReturn("testdb");
+ Mockito.when(work.getCurrentDumpPath()).thenReturn(new Path("/tmp"));
+ int status = task.execute();
+ Assert.assertEquals(0, status);
+ ArgumentCaptor<String> replStateCaptor = ArgumentCaptor.forClass(String.class);
+ ArgumentCaptor<Object> eventCaptor = ArgumentCaptor.forClass(Object.class);
+ ArgumentCaptor<Object> eventDetailsCaptor = ArgumentCaptor.forClass(Object.class);
+ Mockito.verify(logger,
+ Mockito.times(2)).info(replStateCaptor.capture(),
+ eventCaptor.capture(), eventDetailsCaptor.capture());
+ Assert.assertEquals("REPL::{}: {}", replStateCaptor.getAllValues().get(0));
+ Assert.assertEquals("RANGER_DUMP_START", eventCaptor.getAllValues().get(0));
+ Assert.assertEquals("RANGER_DUMP_END", eventCaptor.getAllValues().get(1));
+ Assert.assertTrue(eventDetailsCaptor.getAllValues().get(0)
+ .toString().contains("{\"dbName\":\"testdb\",\"dumpStartTime"));
+ Assert.assertTrue(eventDetailsCaptor
+ .getAllValues().get(1).toString().contains("{\"dbName\":\"testdb\",\"actualNumPolicies\":0,\"dumpEndTime\""));
+ }
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerLoadTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerLoadTask.java
index 10c1afd..296bd38 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerLoadTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestRangerLoadTask.java
@@ -23,17 +23,19 @@ import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerExportPolicyList;
import org.apache.hadoop.hive.ql.exec.repl.ranger.RangerRestClientImpl;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.repl.ReplState;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
+import org.powermock.reflect.Whitebox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.REPL_AUTHORIZATION_PROVIDER_SERVICE_ENDPOINT;
/**
@@ -103,4 +105,47 @@ public class TestRangerLoadTask {
int status = task.execute();
Assert.assertEquals(0, status);
}
+
+ @Test
+ public void testSuccessRangerLoadMetrics() throws Exception {
+ // Verify that a successful Ranger policy load emits exactly two REPL state
+ // log events (RANGER_LOAD_START and RANGER_LOAD_END) with the expected payloads.
+ // NOTE: renamed from testSuccessRangerDumpMetrics — this test exercises the
+ // LOAD path, and the old name duplicated the dump-side test's name.
+ // Inject a mock SLF4J logger into ReplState's static logger field so the
+ // log calls can be captured and verified.
+ Logger logger = Mockito.mock(Logger.class);
+ Whitebox.setInternalState(ReplState.class, logger);
+ // A realistic Ranger export response containing a single hive policy; the
+ // load task should report estimated/actual policy counts of 1 from it.
+ String rangerResponse = "{\"metaDataInfo\":{\"Host name\":\"ranger.apache.org\","
+ + "\"Exported by\":\"hive\",\"Export time\":\"May 5, 2020, 8:55:03 AM\",\"Ranger apache version\""
+ + ":\"2.0.0.7.2.0.0-61\"},\"policies\":[{\"service\":\"cm_hive\",\"name\":\"db-level\",\"policyType\":0,"
+ + "\"description\":\"\",\"isAuditEnabled\":true,\"resources\":{\"database\":{\"values\":[\"aa\"],"
+ + "\"isExcludes\":false,\"isRecursive\":false},\"column\":{\"values\":[\"id\"],\"isExcludes\":false,"
+ + "\"isRecursive\":false},\"table\":{\"values\":[\"*\"],\"isExcludes\":false,\"isRecursive\":false}},"
+ + "\"policyItems\":[{\"accesses\":[{\"type\":\"select\",\"isAllowed\":true},{\"type\":\"update\","
+ + "\"isAllowed\":true}],\"users\":[\"admin\"],\"groups\":[\"public\"],\"conditions\":[],"
+ + "\"delegateAdmin\":false}],\"denyPolicyItems\":[],\"allowExceptions\":[],\"denyExceptions\":[],"
+ + "\"dataMaskPolicyItems\":[],\"rowFilterPolicyItems\":[],\"id\":40,\"guid\":"
+ + "\"4e2b3406-7b9a-4004-8cdf-7a239c8e2cae\",\"isEnabled\":true,\"version\":1}]}";
+ RangerExportPolicyList rangerPolicyList = new Gson().fromJson(rangerResponse, RangerExportPolicyList.class);
+ // Stub the conf and work objects with the values the load task reads.
+ Mockito.when(conf.getVar(REPL_AUTHORIZATION_PROVIDER_SERVICE_ENDPOINT)).thenReturn("rangerEndpoint");
+ Mockito.when(work.getSourceDbName()).thenReturn("srcdb");
+ Mockito.when(work.getTargetDbName()).thenReturn("tgtdb");
+ Path rangerDumpPath = new Path("/tmp");
+ Mockito.when(work.getCurrentDumpPath()).thenReturn(rangerDumpPath);
+ // NOTE(review): mockClient is a Mockito mock, so this call is a no-op; the
+ // policies actually reach the task via the readRangerPoliciesFromJsonFile
+ // stub below — confirm whether this line can be dropped.
+ mockClient.saveRangerPoliciesToFile(rangerPolicyList,
+ rangerDumpPath, ReplUtils.HIVE_RANGER_POLICIES_FILE_NAME, new HiveConf());
+ Mockito.when(mockClient.readRangerPoliciesFromJsonFile(Mockito.any(), Mockito.any())).thenReturn(rangerPolicyList);
+ int status = task.execute();
+ // 0 indicates the task completed successfully.
+ Assert.assertEquals(0, status);
+ // Capture the (format, event, eventDetails) arguments of every info() call.
+ ArgumentCaptor<String> replStateCaptor = ArgumentCaptor.forClass(String.class);
+ ArgumentCaptor<Object> eventCaptor = ArgumentCaptor.forClass(Object.class);
+ ArgumentCaptor<Object> eventDetailsCaptor = ArgumentCaptor.forClass(Object.class);
+ // Exactly two events must be logged: the load begin and the load end.
+ Mockito.verify(logger,
+ Mockito.times(2)).info(replStateCaptor.capture(),
+ eventCaptor.capture(), eventDetailsCaptor.capture());
+ Assert.assertEquals("REPL::{}: {}", replStateCaptor.getAllValues().get(0));
+ Assert.assertEquals("RANGER_LOAD_START", eventCaptor.getAllValues().get(0));
+ Assert.assertEquals("RANGER_LOAD_END", eventCaptor.getAllValues().get(1));
+ // The begin event carries source/target db names, the estimated policy count
+ // and the start time; the end event carries the actual count and end time.
+ Assert.assertTrue(eventDetailsCaptor.getAllValues().get(0)
+ .toString().contains("{\"sourceDbName\":\"srcdb\",\"targetDbName\":\"tgtdb\""
+ + ",\"estimatedNumPolicies\":1,\"loadStartTime\":"));
+ Assert.assertTrue(eventDetailsCaptor
+ .getAllValues().get(1).toString().contains("{\"sourceDbName\":\"srcdb\",\"targetDbName\""
+ + ":\"tgtdb\",\"actualNumPolicies\":1,\"loadEndTime\""));
+ }
}