Posted to commits@ranger.apache.org by co...@apache.org on 2017/09/18 12:25:15 UTC
[1/2] ranger git commit: RANGER-1785 - Remove ranger-hive-utils module
Repository: ranger
Updated Branches:
refs/heads/master 98351d4c1 -> dbc84c18a
RANGER-1785 - Remove ranger-hive-utils module
Signed-off-by: Colm O hEigeartaigh <co...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/ranger/commit/e7d8dea8
Tree: http://git-wip-us.apache.org/repos/asf/ranger/tree/e7d8dea8
Diff: http://git-wip-us.apache.org/repos/asf/ranger/diff/e7d8dea8
Branch: refs/heads/master
Commit: e7d8dea885da7a5a740852fc0f86fb5435e5a704
Parents: 98351d4
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Sep 15 12:21:46 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Mon Sep 18 12:59:33 2017 +0100
----------------------------------------------------------------------
pom.xml | 1 -
ranger-hive-utils/.gitignore | 1 -
ranger-hive-utils/pom.xml | 50 --
.../authorization/hive/udf/RangerBaseUdf.java | 474 -------------------
.../authorization/hive/udf/RangerUdfMask.java | 334 -------------
.../hive/udf/RangerUdfMaskFirstN.java | 229 ---------
.../hive/udf/RangerUdfMaskHash.java | 77 ---
.../hive/udf/RangerUdfMaskLastN.java | 193 --------
.../hive/udf/RangerUdfMaskShowFirstN.java | 248 ----------
.../hive/udf/RangerUdfMaskShowLastN.java | 198 --------
10 files changed, 1805 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 0ebadee..f7d690f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -105,7 +105,6 @@
<module>ranger-solr-plugin-shim</module>
<module>ranger-atlas-plugin-shim</module>
<module>ranger-kms-plugin-shim</module>
- <module>ranger-hive-utils</module>
<module>ranger-examples</module>
<module>ranger-tools</module>
<module>plugin-atlas</module>
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/.gitignore
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/.gitignore b/ranger-hive-utils/.gitignore
deleted file mode 100644
index b83d222..0000000
--- a/ranger-hive-utils/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/target/
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/pom.xml
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/pom.xml b/ranger-hive-utils/pom.xml
deleted file mode 100644
index 6653bab..0000000
--- a/ranger-hive-utils/pom.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <artifactId>ranger-hive-utils</artifactId>
- <name>Ranger Hive Utils</name>
- <description>Ranger Hive Utils</description>
- <packaging>jar</packaging>
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- </properties>
- <parent>
- <groupId>org.apache.ranger</groupId>
- <artifactId>ranger</artifactId>
- <version>1.0.0-SNAPSHOT</version>
- <relativePath>..</relativePath>
- </parent>
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-common</artifactId>
- <version>${hive.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-exec</artifactId>
- <version>${hive.version}</version>
- </dependency>
- </dependencies>
-</project>
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerBaseUdf.java
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerBaseUdf.java b/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerBaseUdf.java
deleted file mode 100644
index 8bc6b36..0000000
--- a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerBaseUdf.java
+++ /dev/null
@@ -1,474 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.authorization.hive.udf;
-
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.*;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-
-import java.sql.Date;
-
-
-public abstract class RangerBaseUdf extends GenericUDF {
- private static final Log LOG = LogFactory.getLog(RangerBaseUdf.class);
-
- final protected AbstractTransformer transformer;
- final protected String displayName;
- protected AbstractTransformerAdapter transformerAdapter = null;
-
- protected RangerBaseUdf(AbstractTransformer transformer, String displayName) {
- this.transformer = transformer;
- this.displayName = displayName;
- }
-
- public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
- LOG.debug("==> RangerBaseUdf.initialize()");
-
- checkArgPrimitive(arguments, 0); // first argument is the column to be transformed
-
- PrimitiveObjectInspector columnType = ((PrimitiveObjectInspector) arguments[0]);
-
- transformer.init(arguments, 1);
-
- transformerAdapter = AbstractTransformerAdapter.getTransformerAdapter(columnType, transformer);
-
- ObjectInspector ret = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(columnType.getPrimitiveCategory());
-
- LOG.debug("<== RangerBaseUdf.initialize()");
-
- return ret;
- }
-
- @Override
- public Object evaluate(DeferredObject[] arguments) throws HiveException {
- Object ret = transformerAdapter.getTransformedWritable(arguments[0]);
-
- return ret;
- }
-
- @Override
- public String getDisplayString(String[] children) {
- return getStandardDisplayString(displayName, children);
- }
-}
-
-
-/**
- * Interface to be implemented by transformers which transform a given value according to its specification.
- */
-abstract class AbstractTransformer {
- /**
- * Initialize the transformer object
- * @param arguments arguments given to GenericUDF.initialize()
- * @param startIdx index into array, from which the transformer should read values
- */
- abstract void init(ObjectInspector[] arguments, int startIdx);
-
- /**
- * Transform a String value
- * @param value value to transform
- * @return transformed value
- */
- abstract String transform(String value);
-
- /**
- * Transform a Byte value
- * @param value value to transform
- * @return transformed value
- */
- abstract Byte transform(Byte value);
-
- /**
- * Transform a Short value
- * @param value value to transform
- * @return transformed value
- */
- abstract Short transform(Short value);
-
- /**
- * Transform a Integer value
- * @param value value to transform
- * @return transformed value
- */
- abstract Integer transform(Integer value);
-
- /**
- * Transform a Long value
- * @param value value to transform
- * @return transformed value
- */
- abstract Long transform(Long value);
-
- /**
- * Transform a Date value
- * @param value value to transform
- * @return transformed value
- */
- abstract Date transform(Date value);
-}
-
-/**
- * Interface to be implemented by datatype specific adapters that handle necessary conversion of the transformed value
- * into appropriate Writable object, which GenericUDF.evaluate() is expected to return.
- */
-abstract class AbstractTransformerAdapter {
- final AbstractTransformer transformer;
-
- AbstractTransformerAdapter(AbstractTransformer transformer) {
- this.transformer = transformer;
- }
-
- abstract Object getTransformedWritable(DeferredObject value) throws HiveException;
-
- static AbstractTransformerAdapter getTransformerAdapter(PrimitiveObjectInspector columnType, AbstractTransformer transformer) {
- final AbstractTransformerAdapter ret;
-
- switch(columnType.getPrimitiveCategory()) {
- case STRING:
- ret = new StringTransformerAdapter((StringObjectInspector)columnType, transformer);
- break;
-
- case CHAR:
- ret = new HiveCharTransformerAdapter((HiveCharObjectInspector)columnType, transformer);
- break;
-
- case VARCHAR:
- ret = new HiveVarcharTransformerAdapter((HiveVarcharObjectInspector)columnType, transformer);
- break;
-
- case BYTE:
- ret = new ByteTransformerAdapter((ByteObjectInspector)columnType, transformer);
- break;
-
- case SHORT:
- ret = new ShortTransformerAdapter((ShortObjectInspector)columnType, transformer);
- break;
-
- case INT:
- ret = new IntegerTransformerAdapter((IntObjectInspector)columnType, transformer);
- break;
-
- case LONG:
- ret = new LongTransformerAdapter((LongObjectInspector)columnType, transformer);
- break;
-
- case DATE:
- ret = new DateTransformerAdapter((DateObjectInspector)columnType, transformer);
- break;
-
- default:
- ret = new UnsupportedDatatypeTransformAdapter(columnType, transformer);
- break;
- }
-
- return ret;
- }
-}
-
-class ByteTransformerAdapter extends AbstractTransformerAdapter {
- final ByteObjectInspector columnType;
- final ByteWritable writable;
-
- public ByteTransformerAdapter(ByteObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new ByteWritable());
- }
-
- public ByteTransformerAdapter(ByteObjectInspector columnType, AbstractTransformer transformer, ByteWritable writable) {
- super(transformer);
-
- this.columnType = columnType;
- this.writable = writable;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- Byte value = (Byte)columnType.getPrimitiveJavaObject(object.get());
-
- if(value != null) {
- Byte transformedValue = transformer.transform(value);
-
- if(transformedValue != null) {
- writable.set(transformedValue);
-
- return writable;
- }
- }
-
- return null;
- }
-}
-
-class DateTransformerAdapter extends AbstractTransformerAdapter {
- final DateObjectInspector columnType;
- final DateWritable writable;
-
- public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new DateWritable());
- }
-
- public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer, DateWritable writable) {
- super(transformer);
-
- this.columnType = columnType;
- this.writable = writable;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- Date value = columnType.getPrimitiveJavaObject(object.get());
-
- if(value != null) {
- Date transformedValue = transformer.transform(value);
-
- if(transformedValue != null) {
- writable.set(transformedValue);
-
- return writable;
- }
- }
-
- return null;
- }
-}
-
-class HiveCharTransformerAdapter extends AbstractTransformerAdapter {
- final HiveCharObjectInspector columnType;
- final HiveCharWritable writable;
-
- public HiveCharTransformerAdapter(HiveCharObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new HiveCharWritable());
- }
-
- public HiveCharTransformerAdapter(HiveCharObjectInspector columnType, AbstractTransformer transformer, HiveCharWritable writable) {
- super(transformer);
-
- this.columnType = columnType;
- this.writable = writable;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- HiveChar value = columnType.getPrimitiveJavaObject(object.get());
-
- if(value != null) {
- String transformedValue = transformer.transform(value.getValue());
-
- if(transformedValue != null) {
- writable.set(transformedValue);
-
- return writable;
- }
- }
-
- return null;
- }
-}
-
-class HiveVarcharTransformerAdapter extends AbstractTransformerAdapter {
- final HiveVarcharObjectInspector columnType;
- final HiveVarcharWritable writable;
-
- public HiveVarcharTransformerAdapter(HiveVarcharObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new HiveVarcharWritable());
- }
-
- public HiveVarcharTransformerAdapter(HiveVarcharObjectInspector columnType, AbstractTransformer transformer, HiveVarcharWritable writable) {
- super(transformer);
-
- this.columnType = columnType;
- this.writable = writable;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- HiveVarchar value = columnType.getPrimitiveJavaObject(object.get());
-
- if(value != null) {
- String transformedValue = transformer.transform(value.getValue());
-
- if(transformedValue != null) {
- writable.set(transformedValue);
-
- return writable;
- }
- }
-
- return null;
- }
-}
-
-class IntegerTransformerAdapter extends AbstractTransformerAdapter {
- final IntObjectInspector columnType;
- final IntWritable writable;
-
- public IntegerTransformerAdapter(IntObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new IntWritable());
- }
-
- public IntegerTransformerAdapter(IntObjectInspector columnType, AbstractTransformer transformer, IntWritable writable) {
- super(transformer);
-
- this.columnType = columnType;
- this.writable = writable;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- Integer value = (Integer)columnType.getPrimitiveJavaObject(object.get());
-
- if(value != null) {
- Integer transformedValue = transformer.transform(value);
-
- if(transformedValue != null) {
- writable.set(transformedValue);
-
- return writable;
- }
- }
-
- return null;
- }
-}
-
-class LongTransformerAdapter extends AbstractTransformerAdapter {
- final LongObjectInspector columnType;
- final LongWritable writable;
-
- public LongTransformerAdapter(LongObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new LongWritable());
- }
-
- public LongTransformerAdapter(LongObjectInspector columnType, AbstractTransformer transformer, LongWritable writable) {
- super(transformer);
-
- this.columnType = columnType;
- this.writable = writable;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- Long value = (Long)columnType.getPrimitiveJavaObject(object.get());
-
- if(value != null) {
- Long transformedValue = transformer.transform(value);
-
- if(transformedValue != null) {
- writable.set(transformedValue);
-
- return writable;
- }
- }
-
- return null;
- }
-}
-
-class ShortTransformerAdapter extends AbstractTransformerAdapter {
- final ShortObjectInspector columnType;
- final ShortWritable writable;
-
- public ShortTransformerAdapter(ShortObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new ShortWritable());
- }
-
- public ShortTransformerAdapter(ShortObjectInspector columnType, AbstractTransformer transformer, ShortWritable writable) {
- super(transformer);
-
- this.columnType = columnType;
- this.writable = writable;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- Short value = (Short)columnType.getPrimitiveJavaObject(object.get());
-
- if(value != null) {
- Short transformedValue = transformer.transform(value);
-
- if(transformedValue != null) {
- writable.set(transformedValue);
-
- return writable;
- }
- }
-
- return null;
- }
-}
-
-class StringTransformerAdapter extends AbstractTransformerAdapter {
- final StringObjectInspector columnType;
- final Text writable;
-
- public StringTransformerAdapter(StringObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new Text());
- }
-
- public StringTransformerAdapter(StringObjectInspector columnType, AbstractTransformer transformer, Text writable) {
- super(transformer);
-
- this.columnType = columnType;
- this.writable = writable;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- String value = columnType.getPrimitiveJavaObject(object.get());
-
- if(value != null) {
- String transformedValue = transformer.transform(value);
-
- if(transformedValue != null) {
- writable.set(transformedValue);
-
- return writable;
- }
- }
-
- return null;
- }
-}
-
-class UnsupportedDatatypeTransformAdapter extends AbstractTransformerAdapter {
- final PrimitiveObjectInspector columnType;
-
- public UnsupportedDatatypeTransformAdapter(PrimitiveObjectInspector columnType, AbstractTransformer transformer) {
- super(transformer);
-
- this.columnType = columnType;
- }
-
- @Override
- public Object getTransformedWritable(DeferredObject object) throws HiveException {
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMask.java
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMask.java b/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMask.java
deleted file mode 100644
index d85ee2b..0000000
--- a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMask.java
+++ /dev/null
@@ -1,334 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.authorization.hive.udf;
-
-
-import java.sql.Date;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-
-
-@Description(name = "mask",
- value = "masks the given value",
- extended = "Examples:\n "
- + " mask(ccn)\n "
- + " mask(ccn, 'X', 'x', '0')\n "
- + " mask(ccn, 'x', 'x', 'x')\n "
- + "Arguments:\n "
- + " mask(value, upperChar, lowerChar, digitChar, otherChar, numberChar, dayValue, monthValue, yearValue)\n "
- + " value - value to mask. Supported types: TINYINT, SMALLINT, INT, BIGINT, STRING, VARCHAR, CHAR, DATE\n "
- + " upperChar - character to replace upper-case characters with. Specify -1 to retain original character. Default value: 'X'\n "
- + " lowerChar - character to replace lower-case characters with. Specify -1 to retain original character. Default value: 'x'\n "
- + " digitChar - character to replace digit characters with. Specify -1 to retain original character. Default value: 'n'\n "
- + " otherChar - character to replace all other characters with. Specify -1 to retain original character. Default value: -1\n "
- + " numberChar - character to replace digits in a number with. Valid values: 0-9. Default value: '1'\n "
- + " dayValue - value to replace day field in a date with. Specify -1 to retain original value. Valid values: 1-31. Default value: 1\n "
- + " monthValue - value to replace month field in a date with. Specify -1 to retain original value. Valid values: 0-11. Default value: 0\n "
- + " yearValue - value to replace year field in a date with. Specify -1 to retain original value. Default value: 0\n "
- )
-public class RangerUdfMask extends RangerBaseUdf {
- public static final String UDF_NAME = "mask";
-
- public RangerUdfMask() {
- super(new MaskTransformer(), UDF_NAME);
- }
-}
-
-class MaskTransformer extends AbstractTransformer {
- final static int MASKED_UPPERCASE = 'X';
- final static int MASKED_LOWERCASE = 'x';
- final static int MASKED_DIGIT = 'n';
- final static int MASKED_OTHER_CHAR = -1;
- final static int MASKED_NUMBER = 1;
- final static int MASKED_DAY_COMPONENT_VAL = 1;
- final static int MASKED_MONTH_COMPONENT_VAL = 0;
- final static int MASKED_YEAR_COMPONENT_VAL = 0;
- final static int UNMASKED_VAL = -1;
-
- int maskedUpperChar = MASKED_UPPERCASE;
- int maskedLowerChar = MASKED_LOWERCASE;
- int maskedDigitChar = MASKED_DIGIT;
- int maskedOtherChar = MASKED_OTHER_CHAR;
- int maskedNumber = MASKED_NUMBER;
- int maskedDayValue = MASKED_DAY_COMPONENT_VAL;
- int maskedMonthValue = MASKED_MONTH_COMPONENT_VAL;
- int maskedYearValue = MASKED_YEAR_COMPONENT_VAL;
-
- public MaskTransformer() {
- }
-
- @Override
- public void init(ObjectInspector[] arguments, int startIdx) {
- int idx = startIdx;
-
- maskedUpperChar = getCharArg(arguments, idx++, MASKED_UPPERCASE);
- maskedLowerChar = getCharArg(arguments, idx++, MASKED_LOWERCASE);
- maskedDigitChar = getCharArg(arguments, idx++, MASKED_DIGIT);
- maskedOtherChar = getCharArg(arguments, idx++, MASKED_OTHER_CHAR);
- maskedNumber = getIntArg(arguments, idx++, MASKED_NUMBER);
- maskedDayValue = getIntArg(arguments, idx++, MASKED_DAY_COMPONENT_VAL);
- maskedMonthValue = getIntArg(arguments, idx++, MASKED_MONTH_COMPONENT_VAL);
- maskedYearValue = getIntArg(arguments, idx++, MASKED_YEAR_COMPONENT_VAL);
-
- if(maskedNumber < 0 || maskedNumber > 9) {
- maskedNumber = MASKED_NUMBER;
- }
-
- if(maskedDayValue < 1 || maskedDayValue > 31) {
- maskedDayValue = MASKED_DAY_COMPONENT_VAL;
- }
-
- if(maskedMonthValue < 0 || maskedMonthValue > 11) {
- maskedMonthValue = MASKED_MONTH_COMPONENT_VAL;
- }
- }
-
- @Override
- String transform(final String val) {
- StringBuilder ret = new StringBuilder(val.length());
-
- for(int i = 0; i < val.length(); i++) {
- ret.appendCodePoint(transformChar(val.charAt(i)));
- }
-
- return ret.toString();
- }
-
- @Override
- Byte transform(final Byte value) {
- byte val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- byte ret = 0;
- int pos = 1;
- while(val != 0) {
- ret += maskedNumber * pos;
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Short transform(final Short value) {
- short val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- short ret = 0;
- int pos = 1;
- while(val != 0) {
- ret += maskedNumber * pos;
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Integer transform(final Integer value) {
- int val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- int ret = 0;
- int pos = 1;
- while(val != 0) {
- ret += maskedNumber * pos;
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Long transform(final Long value) {
- long val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- long ret = 0;
- long pos = 1;
- while (val != 0) {
- ret += maskedNumber * pos;
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Date transform(final Date value) {
- int year = maskedYearValue == UNMASKED_VAL ? value.getYear() : maskedYearValue;
- int month = maskedMonthValue == UNMASKED_VAL ? value.getMonth() : maskedMonthValue;
- int day = maskedDayValue == UNMASKED_VAL ? value.getDate() : maskedDayValue;
-
- return new Date(year, month, day);
- }
-
- protected int transformChar(final int c) {
- switch(Character.getType(c)) {
- case Character.UPPERCASE_LETTER:
- if(maskedUpperChar != UNMASKED_VAL) {
- return maskedUpperChar;
- }
- break;
-
- case Character.LOWERCASE_LETTER:
- if(maskedLowerChar != UNMASKED_VAL) {
- return maskedLowerChar;
- }
- break;
-
- case Character.DECIMAL_DIGIT_NUMBER:
- if(maskedDigitChar != UNMASKED_VAL) {
- return maskedDigitChar;
- }
- break;
-
- default:
- if(maskedOtherChar != UNMASKED_VAL) {
- return maskedOtherChar;
- }
- break;
- }
-
- return c;
- }
-
- int getCharArg(ObjectInspector[] arguments, int index, int defaultValue) {
- int ret = defaultValue;
-
- ObjectInspector arg = (arguments != null && arguments.length > index) ? arguments[index] : null;
-
- if (arg != null) {
- if(arg instanceof WritableConstantIntObjectInspector) {
- IntWritable value = ((WritableConstantIntObjectInspector)arg).getWritableConstantValue();
-
- if(value != null) {
- ret = value.get();
- }
- } else if(arg instanceof WritableConstantLongObjectInspector) {
- LongWritable value = ((WritableConstantLongObjectInspector)arg).getWritableConstantValue();
-
- if(value != null) {
- ret = (int)value.get();
- }
- } else if(arg instanceof WritableConstantShortObjectInspector) {
- ShortWritable value = ((WritableConstantShortObjectInspector)arg).getWritableConstantValue();
-
- if(value != null) {
- ret = value.get();
- }
- } else if(arg instanceof ConstantObjectInspector) {
- Object value = ((ConstantObjectInspector) arg).getWritableConstantValue();
-
- if (value != null) {
- String strValue = value.toString();
-
- if (strValue != null && strValue.length() > 0) {
- ret = strValue.charAt(0);
- }
- }
- }
- }
-
- return ret;
- }
-
- int getIntArg(ObjectInspector[] arguments, int index, int defaultValue) {
- int ret = defaultValue;
-
- ObjectInspector arg = (arguments != null && arguments.length > index) ? arguments[index] : null;
-
- if (arg != null) {
- if (arg instanceof WritableConstantIntObjectInspector) {
- IntWritable value = ((WritableConstantIntObjectInspector) arg).getWritableConstantValue();
-
- if (value != null) {
- ret = value.get();
- }
- } else if (arg instanceof WritableConstantLongObjectInspector) {
- LongWritable value = ((WritableConstantLongObjectInspector) arg).getWritableConstantValue();
-
- if (value != null) {
- ret = (int) value.get();
- }
- } else if (arg instanceof WritableConstantShortObjectInspector) {
- ShortWritable value = ((WritableConstantShortObjectInspector) arg).getWritableConstantValue();
-
- if (value != null) {
- ret = value.get();
- }
- } else if (arg instanceof ConstantObjectInspector) {
- Object value = ((ConstantObjectInspector) arg).getWritableConstantValue();
-
- if (value != null) {
- String strValue = value.toString();
-
- if (strValue != null && strValue.length() > 0) {
- ret = Integer.parseInt(value.toString());
- }
- }
- }
- }
-
- return ret;
- }
-}
-
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskFirstN.java
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskFirstN.java b/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskFirstN.java
deleted file mode 100644
index 24c0988..0000000
--- a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskFirstN.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.authorization.hive.udf;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-
-
-@Description(name = "mask_first_n",
- value = "masks the first n characters of the value",
- extended = "Examples:\n "
- + " mask_first_n(ccn, 8)\n "
- + " mask_first_n(ccn, 8, 'x', 'x', 'x')\n "
- + "Arguments:\n "
- + " mask(value, charCount, upperChar, lowerChar, digitChar, otherChar, numberChar)\n "
- + " value - value to mask. Supported types: TINYINT, SMALLINT, INT, BIGINT, STRING, VARCHAR, CHAR\n "
- + " charCount - number of characters. Default value: 4\n "
- + " upperChar - character to replace upper-case characters with. Specify -1 to retain original character. Default value: 'X'\n "
- + " lowerChar - character to replace lower-case characters with. Specify -1 to retain original character. Default value: 'x'\n "
- + " digitChar - character to replace digit characters with. Specify -1 to retain original character. Default value: 'n'\n "
- + " otherChar - character to replace all other characters with. Specify -1 to retain original character. Default value: -1\n "
- + " numberChar - character to replace digits in a number with. Valid values: 0-9. Default value: '1'\n "
- )
-public class RangerUdfMaskFirstN extends RangerBaseUdf {
- public static final String UDF_NAME = "mask_first_n";
-
- public RangerUdfMaskFirstN() {
- super(new MaskFirstNTransformer(), UDF_NAME);
- }
-}
-
-class MaskFirstNTransformer extends MaskTransformer {
- int charCount = 4;
-
- public MaskFirstNTransformer() {
- super();
- }
-
- @Override
- public void init(ObjectInspector[] arguments, int argsStartIdx) {
- super.init(arguments, argsStartIdx + 1); // first argument is charCount, which is consumed in this method below
-
- charCount = getIntArg(arguments, argsStartIdx, 4);
-
- if(charCount < 0) {
- charCount = 0;
- }
- }
-
- @Override
- String transform(final String value) {
- final StringBuilder ret = new StringBuilder(value.length());
- final int endIdx = value.length() < charCount ? value.length() : charCount;
-
- for(int i = 0; i < endIdx; i++) {
- ret.appendCodePoint(transformChar(value.charAt(i)));
- }
-
- for(int i = endIdx; i < value.length(); i++) {
- ret.appendCodePoint(value.charAt(i));
- }
-
- return ret.toString();
- }
-
- @Override
- Byte transform(final Byte value) {
- byte val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- // count number of digits in the value
- int digitCount = 0;
- for(byte v = val; v != 0; v /= 10) {
- digitCount++;
- }
-
- // number of digits to retain from the end
- final int retainCount = digitCount < charCount ? 0 : (digitCount - charCount);
-
- byte ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i >= retainCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { //retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Short transform(final Short value) {
- short val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- // count number of digits in the value
- int digitCount = 0;
- for(short v = val; v != 0; v /= 10) {
- digitCount++;
- }
-
- // number of digits to retain from the end
- final int retainCount = digitCount < charCount ? 0 : (digitCount - charCount);
-
- short ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i >= retainCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Integer transform(final Integer value) {
- int val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- // count number of digits in the value
- int digitCount = 0;
- for(int v = val; v != 0; v /= 10) {
- digitCount++;
- }
-
- // number of digits to retain from the end
- final int retainCount = digitCount < charCount ? 0 : (digitCount - charCount);
-
- int ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i >= retainCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Long transform(final Long value) {
- long val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- // count number of digits in the value
- int digitCount = 0;
- for(long v = val; v != 0; v /= 10) {
- digitCount++;
- }
-
- // number of digits to retain from the end
- final int retainCount = digitCount < charCount ? 0 : (digitCount - charCount);
-
- long ret = 0;
- long pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i >= retainCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskHash.java
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskHash.java b/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskHash.java
deleted file mode 100644
index 02ead75..0000000
--- a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskHash.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.authorization.hive.udf;
-
-import java.sql.Date;
-
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-
-
-@Description(name = "mask_hash",
- value = "returns hash of the given value",
- extended = "Examples:\n "
- + " mask_hash(value)\n "
- + "Arguments:\n "
- + " value - value to mask. Supported types: STRING, VARCHAR, CHAR"
- )
-public class RangerUdfMaskHash extends RangerBaseUdf {
- public static final String UDF_NAME = "mask_hash";
-
- public RangerUdfMaskHash() {
- super(new MaskHashTransformer(), UDF_NAME);
- }
-}
-
-class MaskHashTransformer extends AbstractTransformer {
- @Override
- public void init(ObjectInspector[] arguments, int startIdx) {
- }
-
- @Override
- String transform(final String value) {
- return DigestUtils.sha256Hex(value);
- }
-
- @Override
- Byte transform(final Byte value) {
- return null;
- }
-
- @Override
- Short transform(final Short value) {
- return null;
- }
-
- @Override
- Integer transform(final Integer value) {
- return null;
- }
-
- @Override
- Long transform(final Long value) {
- return null;
- }
-
- @Override
- Date transform(final Date value) {
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskLastN.java
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskLastN.java b/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskLastN.java
deleted file mode 100644
index 554dc91..0000000
--- a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskLastN.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.authorization.hive.udf;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-
-
-@Description(name = "mask_last_n",
- value = "masks the last n characters of the value",
- extended = "Examples:\n "
- + " mask_last_n(ccn, 8)\n "
- + " mask_last_n(ccn, 8, 'x', 'x', 'x')\n "
- + "Arguments:\n "
- + " mask_last_n(value, charCount, upperChar, lowerChar, digitChar, otherChar, numberChar)\n "
- + " value - value to mask. Supported types: TINYINT, SMALLINT, INT, BIGINT, STRING, VARCHAR, CHAR\n "
- + " charCount - number of characters. Default value: 4\n "
- + " upperChar - character to replace upper-case characters with. Specify -1 to retain original character. Default value: 'X'\n "
- + " lowerChar - character to replace lower-case characters with. Specify -1 to retain original character. Default value: 'x'\n "
- + " digitChar - character to replace digit characters with. Specify -1 to retain original character. Default value: 'n'\n "
- + " otherChar - character to replace all other characters with. Specify -1 to retain original character. Default value: -1\n "
- + " numberChar - character to replace digits in a number with. Valid values: 0-9. Default value: '1'\n "
- )
-public class RangerUdfMaskLastN extends RangerBaseUdf {
- public static final String UDF_NAME = "mask_last_n";
-
- public RangerUdfMaskLastN() {
- super(new MaskLastNTransformer(), UDF_NAME);
- }
-}
-
-class MaskLastNTransformer extends MaskTransformer {
- int charCount = 4;
-
- public MaskLastNTransformer() {
- super();
- }
-
- @Override
- public void init(ObjectInspector[] arguments, int argsStartIdx) {
- super.init(arguments, argsStartIdx + 1); // first argument is charCount, which is consumed in this method below
-
- charCount = getIntArg(arguments, argsStartIdx, 4);
-
- if(charCount < 0) {
- charCount = 0;
- }
- }
-
- @Override
- String transform(final String value) {
- final StringBuilder ret = new StringBuilder(value.length());
- final int startIdx = value.length() <= charCount ? 0 : (value.length() - charCount);
-
- for(int i = 0; i < startIdx; i++) {
- ret.appendCodePoint(value.charAt(i));
- }
-
- for(int i = startIdx; i < value.length(); i++) {
- ret.appendCodePoint(transformChar(value.charAt(i)));
- }
-
- return ret.toString();
- }
-
- @Override
- Byte transform(final Byte value) {
- byte val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- byte ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i < charCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { //retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Short transform(final Short value) {
- short val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- short ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i < charCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Integer transform(final Integer value) {
- int val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- int ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i < charCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Long transform(final Long value) {
- long val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- long ret = 0;
- long pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i < charCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskShowFirstN.java
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskShowFirstN.java b/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskShowFirstN.java
deleted file mode 100644
index 24929b7..0000000
--- a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskShowFirstN.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.authorization.hive.udf;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-
-
-@Description(name = "mask_show_first_n",
- value = "masks all but first n characters of the value",
- extended = "Examples:\n "
- + " mask_show_first_n(ccn, 8)\n "
- + " mask_show_first_n(ccn, 8, 'x', 'x', 'x')\n "
- + "Arguments:\n "
- + " mask_show_first_n(value, charCount, upperChar, lowerChar, digitChar, otherChar, numberChar)\n "
- + " value - value to mask. Supported types: TINYINT, SMALLINT, INT, BIGINT, STRING, VARCHAR, CHAR\n "
- + " charCount - number of characters. Default value: 4\n "
- + " upperChar - character to replace upper-case characters with. Specify -1 to retain original character. Default value: 'X'\n "
- + " lowerChar - character to replace lower-case characters with. Specify -1 to retain original character. Default value: 'x'\n "
- + " digitChar - character to replace digit characters with. Specify -1 to retain original character. Default value: 'n'\n "
- + " otherChar - character to replace all other characters with. Specify -1 to retain original character. Default value: -1\n "
- + " numberChar - character to replace digits in a number with. Valid values: 0-9. Default value: '1'\n "
- )
-public class RangerUdfMaskShowFirstN extends RangerBaseUdf {
- public static final String UDF_NAME = "mask_show_first_n";
-
- public RangerUdfMaskShowFirstN() {
- super(new MaskShowFirstNTransformer(), UDF_NAME);
- }
-}
-
-class MaskShowFirstNTransformer extends MaskTransformer {
- int charCount = 4;
-
- public MaskShowFirstNTransformer() {
- super();
- }
-
- @Override
- public void init(ObjectInspector[] arguments, int argsStartIdx) {
- super.init(arguments, argsStartIdx + 1); // first argument is charCount, which is consumed here
-
- charCount = getIntArg(arguments, argsStartIdx, 4);
-
- if(charCount < 0) {
- charCount = 0;
- }
- }
-
- @Override
- String transform(final String value) {
- if(value.length() <= charCount) {
- return value;
- }
-
- final StringBuilder ret = new StringBuilder(value.length());
-
- for(int i = 0; i < charCount; i++) {
- ret.appendCodePoint(value.charAt(i));
- }
-
- for(int i = charCount; i < value.length(); i++) {
- ret.appendCodePoint(transformChar(value.charAt(i)));
- }
-
- return ret.toString();
- }
-
- @Override
- Byte transform(final Byte value) {
- byte val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- // count number of digits in the value
- int digitCount = 0;
- for(byte v = val; v != 0; v /= 10) {
- digitCount++;
- }
-
- // number of digits to mask from the end
- final int maskCount = digitCount - charCount;
-
- if(maskCount <= 0) {
- return value;
- }
-
- byte ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i < maskCount) { // mask this digit
- ret += (maskedNumber * pos);
- } else { //retain this digit
- ret += ((val % 10) * pos);
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Short transform(final Short value) {
- short val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- // count number of digits in the value
- int digitCount = 0;
- for(short v = val; v != 0; v /= 10) {
- digitCount++;
- }
-
- // number of digits to mask from the end
- final int maskCount = digitCount - charCount;
-
- if(maskCount <= 0) {
- return value;
- }
-
- short ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i < maskCount) { // mask this digit
- ret += (maskedNumber * pos);
- } else { // retain this digit
- ret += ((val % 10) * pos);
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Integer transform(final Integer value) {
- int val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- // count number of digits in the value
- int digitCount = 0;
- for(int v = val; v != 0; v /= 10) {
- digitCount++;
- }
-
- // number of digits to mask from the end
- final int maskCount = digitCount - charCount;
-
- if(maskCount <= 0) {
- return value;
- }
-
- int ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i < maskCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += ((val % 10) * pos);
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Long transform(final Long value) {
- long val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- // count number of digits in the value
- int digitCount = 0;
- for(long v = val; v != 0; v /= 10) {
- digitCount++;
- }
-
- // number of digits to mask from the end
- final int maskCount = digitCount - charCount;
-
- if(maskCount <= 0) {
- return value;
- }
-
- long ret = 0;
- long pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i < maskCount) { // mask this digit
- ret += (maskedNumber * pos);
- } else { // retain this digit
- ret += ((val % 10) * pos);
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/ranger/blob/e7d8dea8/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskShowLastN.java
----------------------------------------------------------------------
diff --git a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskShowLastN.java b/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskShowLastN.java
deleted file mode 100644
index d3db2af..0000000
--- a/ranger-hive-utils/src/main/java/org/apache/ranger/authorization/hive/udf/RangerUdfMaskShowLastN.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.authorization.hive.udf;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-
-
-@Description(name = "mask_show_last_n",
- value = "masks all but last n characters of the value",
- extended = "Examples:\n "
- + " mask_show_last_n(ccn, 8)\n "
- + " mask_show_last_n(ccn, 8, 'x', 'x', 'x')\n "
- + "Arguments:\n "
- + " mask_show_last_n(value, charCount, upperChar, lowerChar, digitChar, otherChar, numberChar)\n "
- + " value - value to mask. Supported types: TINYINT, SMALLINT, INT, BIGINT, STRING, VARCHAR, CHAR\n "
- + " charCount - number of characters. Default value: 4\n "
- + " upperChar - character to replace upper-case characters with. Specify -1 to retain original character. Default value: 'X'\n "
- + " lowerChar - character to replace lower-case characters with. Specify -1 to retain original character. Default value: 'x'\n "
- + " digitChar - character to replace digit characters with. Specify -1 to retain original character. Default value: 'n'\n "
- + " otherChar - character to replace all other characters with. Specify -1 to retain original character. Default value: -1\n "
- + " numberChar - character to replace digits in a number with. Valid values: 0-9. Default value: '1'\n "
- )
-public class RangerUdfMaskShowLastN extends RangerBaseUdf {
- public static final String UDF_NAME = "mask_show_last_n";
-
- public RangerUdfMaskShowLastN() {
- super(new MaskShowLastNTransformer(), UDF_NAME);
- }
-}
-
-class MaskShowLastNTransformer extends MaskTransformer {
- int charCount = 4;
-
- public MaskShowLastNTransformer() {
- super();
- }
-
- @Override
- public void init(ObjectInspector[] arguments, int argsStartIdx) {
- super.init(arguments, argsStartIdx + 1); // first argument is charCount, which is consumed in this method below
-
- charCount = getIntArg(arguments, argsStartIdx, 4);
-
- if(charCount < 0) {
- charCount = 0;
- }
- }
-
-
- @Override
- String transform(final String value) {
- if(value.length() <= charCount) {
- return value;
- }
-
- final StringBuilder ret = new StringBuilder(value.length());
- final int endIdx = value.length() - charCount;
-
- for(int i = 0; i < endIdx; i++) {
- ret.appendCodePoint(transformChar(value.charAt(i)));
- }
-
- for(int i = endIdx; i < value.length(); i++) {
- ret.appendCodePoint(value.charAt(i));
- }
-
- return ret.toString();
- }
-
- @Override
- Byte transform(final Byte value) {
- byte val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- byte ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i >= charCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { //retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Short transform(final Short value) {
- short val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- short ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i >= charCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Integer transform(final Integer value) {
- int val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- int ret = 0;
- int pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i >= charCount) { // mask this digit
- ret += maskedNumber * pos;
- } else { // retain this digit
- ret += (val % 10) * pos;
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-
- @Override
- Long transform(final Long value) {
- long val = value;
-
- if(value < 0) {
- val *= -1;
- }
-
- long ret = 0;
- long pos = 1;
- for(int i = 0; val != 0; i++) {
- if(i >= charCount) { // mask this digit
- ret += (maskedNumber * pos);
- } else { // retain this digit
- ret += ((val % 10) * pos);
- }
-
- val /= 10;
- pos *= 10;
- }
-
- if(value < 0) {
- ret *= -1;
- }
-
- return ret;
- }
-}
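
For reference, the UDFs removed above were invoked from HiveQL as documented in their @Description annotations. The following is a minimal, hypothetical usage sketch only: it assumes the UDFs were registered with Hive under the UDF_NAME constants shown above, and the table credit_cards and column ccn are illustrative, not taken from this commit.

    SELECT mask(ccn),                  -- defaults: upper->'X', lower->'x', digits->'n', other chars retained
           mask(ccn, 'X', 'x', '0'),   -- custom replacement characters, as in the @Description example
           mask_first_n(ccn, 8),       -- mask only the first 8 characters
           mask_show_last_n(ccn, 8),   -- mask everything except the last 8 characters
           mask_hash(ccn)              -- replace the value with its SHA-256 hex digest (DigestUtils.sha256Hex)
    FROM   credit_cards;
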
[2/2] ranger git commit: RANGER-1770 - Add tests for the Knox plugin
Posted by co...@apache.org.
RANGER-1770 - Add tests for the Knox plugin
Signed-off-by: Colm O hEigeartaigh <co...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/ranger/commit/dbc84c18
Tree: http://git-wip-us.apache.org/repos/asf/ranger/tree/dbc84c18
Diff: http://git-wip-us.apache.org/repos/asf/ranger/diff/dbc84c18
Branch: refs/heads/master
Commit: dbc84c18ade3c05c0a8f4fe39c495148a65dea03
Parents: e7d8dea
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Sep 8 17:13:56 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Mon Sep 18 12:59:56 2017 +0100
----------------------------------------------------------------------
knox-agent/pom.xml | 107 +++++
.../ranger/services/knox/KnoxRangerTest.java | 398 +++++++++++++++++++
.../services/knox/RangerAdminClientImpl.java | 83 ++++
.../services/knox/client/KnoxClientTest.java | 41 --
.../knox/client/TestRangerServiceKnox.java | 132 ------
.../test/resources/cluster-configuration.json | 141 +++++++
.../src/test/resources/knox-policies.json | 285 +++++++++++++
knox-agent/src/test/resources/log4j.properties | 2 +-
.../src/test/resources/query_response.xml | 20 +
.../src/test/resources/ranger-knox-security.xml | 52 +++
knox-agent/src/test/resources/users.ldif | 55 +++
.../src/test/resources/webhbase-table-list.xml | 26 ++
.../test/resources/webhdfs-liststatus-test.json | 3 +
pom.xml | 2 +-
14 files changed, 1172 insertions(+), 175 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/pom.xml
----------------------------------------------------------------------
diff --git a/knox-agent/pom.xml b/knox-agent/pom.xml
index 9874912..52f15cd 100644
--- a/knox-agent/pom.xml
+++ b/knox-agent/pom.xml
@@ -110,5 +110,112 @@
<artifactId>httpcore</artifactId>
<version>${httpcomponents.httpcore.version}</version>
</dependency>
+ <dependency>
+ <groupId>io.rest-assured</groupId>
+ <artifactId>rest-assured</artifactId>
+ <scope>test</scope>
+ <version>3.0.3</version>
+ </dependency>
+ <dependency>
+ <groupId>com.mycila.xmltool</groupId>
+ <artifactId>xmltool</artifactId>
+ <scope>test</scope>
+ <version>3.3</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway-server</artifactId>
+ <scope>test</scope>
+ <version>${knox.gateway.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway-test-utils</artifactId>
+ <scope>test</scope>
+ <version>${knox.gateway.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway-test-release-utils</artifactId>
+ <scope>test</scope>
+ <version>${knox.gateway.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway-provider-security-pac4j</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.ranger</groupId>
+ <artifactId>ranger-knox-plugin-shim</artifactId>
+ <scope>test</scope>
+ <version>${project.version}</version>
+ </dependency>
</dependencies>
+ <build>
+ <sourceDirectory>${basedir}/src/main/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
+ <resources>
+ <resource>
+ <directory>src/main/java</directory>
+ <excludes>
+ <exclude>**/*.java</exclude>
+ </excludes>
+ </resource>
+ <resource>
+ <directory>src/main/resources</directory>
+ <includes>
+ <include>**/*</include>
+ </includes>
+ </resource>
+ </resources>
+ <testResources>
+ <testResource>
+ <directory>src/test/java</directory>
+ <excludes>
+ <exclude>**/*.java</exclude>
+ </excludes>
+ </testResource>
+ <testResource>
+ <directory>src/test/resources</directory>
+ <includes>
+ <include>**/*</include>
+ </includes>
+ <filtering>true</filtering>
+ </testResource>
+ </testResources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-services</id>
+ <phase>generate-resources</phase>
+ <goals>
+ <goal>unpack</goal>
+ </goals>
+ <configuration>
+ <artifactItems>
+ <artifactItem>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway-service-definitions</artifactId>
+ <version>${knox.gateway.version}</version>
+ <type>jar</type>
+ <overWrite>true</overWrite>
+ <outputDirectory>target</outputDirectory>
+ <includes>**/services/**</includes>
+ </artifactItem>
+ </artifactItems>
+ <outputAbsoluteArtifactFilename>true</outputAbsoluteArtifactFilename>
+ <overWriteSnapshots>true</overWriteSnapshots>
+ <overWriteIfNewer>true</overWriteIfNewer>
+ <stripVersion>true</stripVersion>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
</project>
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/java/org/apache/ranger/services/knox/KnoxRangerTest.java
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/java/org/apache/ranger/services/knox/KnoxRangerTest.java b/knox-agent/src/test/java/org/apache/ranger/services/knox/KnoxRangerTest.java
new file mode 100644
index 0000000..deae684
--- /dev/null
+++ b/knox-agent/src/test/java/org/apache/ranger/services/knox/KnoxRangerTest.java
@@ -0,0 +1,398 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ranger.services.knox;
+
+import static io.restassured.RestAssured.given;
+import static org.hamcrest.CoreMatchers.is;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.file.FileSystems;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.hadoop.gateway.GatewayServer;
+import org.apache.hadoop.gateway.GatewayTestConfig;
+import org.apache.hadoop.gateway.security.ldap.SimpleLdapDirectoryServer;
+import org.apache.hadoop.gateway.services.DefaultGatewayServices;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.test.mock.MockServer;
+import org.apache.http.HttpStatus;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+
+import io.restassured.http.ContentType;
+import io.restassured.response.ValidatableResponse;
+
+/**
+ * Test Apache Knox secured by Apache Ranger.
+ */
+public class KnoxRangerTest {
+
+ private static GatewayTestConfig config;
+ private static GatewayServer gateway;
+ private static SimpleLdapDirectoryServer ldap;
+ private static TcpTransport ldapTransport;
+ private static MockServer hdfsServer;
+ private static MockServer stormServer;
+ private static MockServer hbaseServer;
+ private static MockServer kafkaServer;
+ private static MockServer solrServer;
+
+ @BeforeClass
+ public static void setupSuite() throws Exception {
+ setupLdap();
+ hdfsServer = new MockServer( "hdfs", true );
+ stormServer = new MockServer( "storm", true );
+ hbaseServer = new MockServer( "hbase", true );
+ kafkaServer = new MockServer( "kafka", true );
+ solrServer = new MockServer( "solr", true );
+
+ setupGateway();
+ }
+
+ @AfterClass
+ public static void cleanupSuite() throws Exception {
+ gateway.stop();
+
+ FileUtils.deleteQuietly( new File( config.getGatewayTopologyDir() ) );
+ FileUtils.deleteQuietly( new File( config.getGatewayConfDir() ) );
+ FileUtils.deleteQuietly( new File( config.getGatewaySecurityDir() ) );
+ FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
+ FileUtils.deleteQuietly( new File( config.getGatewayDataDir() ) );
+
+ hdfsServer.stop();
+ stormServer.stop();
+ hbaseServer.stop();
+ kafkaServer.stop();
+ solrServer.stop();
+
+ ldap.stop( true );
+ }
+
+
+ public static void setupLdap() throws Exception {
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+ Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/users.ldif");
+ ldapTransport = new TcpTransport( 0 );
+ ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", path.toFile(), ldapTransport );
+ ldap.start();
+ }
+
+ public static void setupGateway() throws Exception {
+
+ File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+ File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+ gatewayDir.mkdirs();
+
+ config = new GatewayTestConfig();
+ config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+ config.setGatewayServicesDir(targetDir.getPath() + File.separator + "services");
+
+ File topoDir = new File( config.getGatewayTopologyDir() );
+ topoDir.mkdirs();
+
+ File deployDir = new File( config.getGatewayDeploymentDir() );
+ deployDir.mkdirs();
+
+ File descriptor = new File( topoDir, "cluster.xml" );
+ FileOutputStream stream = new FileOutputStream( descriptor );
+ createTopology().toStream( stream );
+ stream.close();
+
+ DefaultGatewayServices srvcs = new DefaultGatewayServices();
+ Map<String,String> options = new HashMap<>();
+ options.put( "persist-master", "false" );
+ options.put( "master", "password" );
+ try {
+ srvcs.init( config, options );
+ } catch ( ServiceLifecycleException e ) {
+ e.printStackTrace(); // I18N not required.
+ }
+
+ gateway = GatewayServer.startGateway( config, srvcs );
+ }
+
+ /**
+ * Creates a topology that is deployed to the gateway instance for the test suite.
+ * Note that this topology is shared by all of the test methods in this suite.
+ * @return A populated XML structure for a topology file.
+ */
+ private static XMLTag createTopology() {
+ XMLTag xml = XMLDoc.newDocument( true )
+ .addRoot( "topology" )
+ .addTag( "gateway" )
+ .addTag( "provider" )
+ .addTag( "role" ).addText( "webappsec" )
+ .addTag("name").addText("WebAppSec")
+ .addTag("enabled").addText("true")
+ .addTag( "param" )
+ .addTag("name").addText("csrf.enabled")
+ .addTag("value").addText("true").gotoParent().gotoParent()
+ .addTag("provider")
+ .addTag("role").addText("authentication")
+ .addTag("name").addText("ShiroProvider")
+ .addTag("enabled").addText("true")
+ .addTag( "param" )
+ .addTag("name").addText("main.ldapRealm")
+ .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+ .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+ .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getAcceptor().getLocalAddress().getPort() ).gotoParent()
+ //.addTag( "value" ).addText(driver.getLdapUrl() ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+ .addTag( "value" ).addText( "simple" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "urls./**" )
+ .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+ .addTag("provider")
+ .addTag("role").addText("identity-assertion")
+ .addTag("enabled").addText("true")
+ .addTag("name").addText("Default").gotoParent()
+ .addTag("provider")
+ .addTag( "role" ).addText( "authorization" )
+ .addTag("name").addText("XASecurePDPKnox")
+ .addTag( "enabled" ).addText( "true" )
+ .gotoRoot()
+ .addTag("service")
+ .addTag("role").addText("WEBHDFS")
+ .addTag("url").addText("http://localhost:" + hdfsServer.getPort()).gotoParent()
+ .addTag("service")
+ .addTag("role").addText("STORM")
+ .addTag("url").addText("http://localhost:" + stormServer.getPort()).gotoParent()
+ .addTag("service")
+ .addTag("role").addText("WEBHBASE")
+ .addTag("url").addText("http://localhost:" + hbaseServer.getPort()).gotoParent()
+ .addTag("service")
+ .addTag("role").addText("KAFKA")
+ .addTag("url").addText("http://localhost:" + kafkaServer.getPort()).gotoParent()
+ .addTag("service")
+ .addTag("role").addText("SOLR")
+ .addTag("url").addText("http://localhost:" + solrServer.getPort() + "/solr").gotoParent()
+ .gotoRoot();
+ return xml;
+ }
+
+ @Test
+ public void testHDFSAllowed() throws IOException {
+ makeWebHDFSInvocation(HttpStatus.SC_OK, "alice", "password");
+ }
+
+ @Test
+ public void testHDFSNotAllowed() throws IOException {
+ makeWebHDFSInvocation(HttpStatus.SC_FORBIDDEN, "bob", "password");
+ }
+
+ @Test
+ public void testStormUiAllowed() throws Exception {
+ makeStormUIInvocation(HttpStatus.SC_OK, "bob", "password");
+ }
+
+ @Test
+ public void testStormNotUiAllowed() throws Exception {
+ makeStormUIInvocation(HttpStatus.SC_FORBIDDEN, "alice", "password");
+ }
+
+ @Test
+ public void testHBaseAllowed() throws Exception {
+ makeHBaseInvocation(HttpStatus.SC_OK, "alice", "password");
+ }
+
+ @Test
+ public void testHBaseNotAllowed() throws Exception {
+ makeHBaseInvocation(HttpStatus.SC_FORBIDDEN, "bob", "password");
+ }
+
+ @Test
+ public void testKafkaAllowed() throws IOException {
+ makeKafkaInvocation(HttpStatus.SC_OK, "alice", "password");
+ }
+
+ @Test
+ public void testKafkaNotAllowed() throws IOException {
+ makeKafkaInvocation(HttpStatus.SC_FORBIDDEN, "bob", "password");
+ }
+
+ @Test
+ public void testSolrAllowed() throws Exception {
+ makeSolrInvocation(HttpStatus.SC_OK, "alice", "password");
+ }
+
+ @Test
+ public void testSolrNotAllowed() throws Exception {
+ makeSolrInvocation(HttpStatus.SC_FORBIDDEN, "bob", "password");
+ }
+
+ private void makeWebHDFSInvocation(int statusCode, String user, String password) throws IOException {
+
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+ Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/webhdfs-liststatus-test.json");
+
+ hdfsServer
+ .expect()
+ .method( "GET" )
+ .pathInfo( "/v1/hdfstest" )
+ .queryParam( "op", "LISTSTATUS" )
+ .respond()
+ .status( HttpStatus.SC_OK )
+ .content( IOUtils.toByteArray( path.toUri() ) )
+ .contentType( "application/json" );
+
+ ValidatableResponse response = given()
+ .log().all()
+ .auth().preemptive().basic( user, password )
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .queryParam( "op", "LISTSTATUS" )
+ .when()
+ .get( "http://localhost:" + gateway.getAddresses()[0].getPort() + "/gateway/cluster/webhdfs" + "/v1/hdfstest" )
+ .then()
+ .statusCode(statusCode)
+ .log().body();
+
+ if (statusCode == HttpStatus.SC_OK) {
+ response.body( "FileStatuses.FileStatus[0].pathSuffix", is ("dir") );
+ }
+ }
+
+ private void makeStormUIInvocation(int statusCode, String user, String password) throws IOException {
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+ Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/cluster-configuration.json");
+
+ stormServer
+ .expect()
+ .method("GET")
+ .pathInfo("/api/v1/cluster/configuration")
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(IOUtils.toByteArray( path.toUri() ))
+ .contentType("application/json");
+
+ given()
+ .auth().preemptive().basic(user, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("Accept", "application/json")
+ .when().get( "http://localhost:" + gateway.getAddresses()[0].getPort() + "/gateway/cluster/storm" + "/api/v1/cluster/configuration")
+ .then()
+ .log().all()
+ .statusCode(statusCode);
+
+ }
+
+ private void makeHBaseInvocation(int statusCode, String user, String password) throws IOException {
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+ Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/webhbase-table-list.xml");
+
+
+ hbaseServer
+ .expect()
+ .method( "GET" )
+ .pathInfo( "/" )
+ .header( "Accept", ContentType.XML.toString() )
+ .respond()
+ .status( HttpStatus.SC_OK )
+ .content( IOUtils.toByteArray( path.toUri() ) )
+ .contentType( ContentType.XML.toString() );
+
+ given()
+ .log().all()
+ .auth().preemptive().basic( user, password )
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header( "Accept", ContentType.XML.toString() )
+ .when().get( "http://localhost:" + gateway.getAddresses()[0].getPort() + "/gateway/cluster/hbase" )
+ .then()
+ .statusCode( statusCode )
+ .log().body();
+ }
+
+ private void makeKafkaInvocation(int statusCode, String user, String password) throws IOException {
+
+ kafkaServer
+ .expect()
+ .method( "GET" )
+ .pathInfo( "/topics" )
+ .respond()
+ .status( HttpStatus.SC_OK );
+
+ given()
+ .log().all()
+ .auth().preemptive().basic( user, password )
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .when()
+ .get( "http://localhost:" + gateway.getAddresses()[0].getPort() + "/gateway/cluster/kafka" + "/topics" )
+ .then()
+ .statusCode(statusCode)
+ .log().body();
+
+ }
+
+ private void makeSolrInvocation(int statusCode, String user, String password) throws IOException {
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+ Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/query_response.xml");
+
+ solrServer
+ .expect()
+ .method("GET")
+ .pathInfo("/solr/gettingstarted/select")
+ .queryParam("q", "author_s:William+Shakespeare")
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(IOUtils.toByteArray( path.toUri() ))
+ .contentType("application/json");
+
+ given()
+ .auth().preemptive().basic(user, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("Accept", "application/json")
+ .when().get( "http://localhost:" + gateway.getAddresses()[0].getPort() + "/gateway/cluster/solr"
+ + "/gettingstarted/select?q=author_s:William+Shakespeare")
+ .then()
+ .log().all()
+ .statusCode(statusCode);
+
+ }
+}
\ No newline at end of file
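As a usage note, not part of the commit itself, the new suite should be runnable on its own with the standard Surefire test filter once the rest of the reactor has been built, for example:

    mvn -pl knox-agent -am test -Dtest=KnoxRangerTest

The module build first unpacks the Knox gateway-service-definitions jar into target/ (the copy-services execution added to knox-agent/pom.xml above), which the embedded gateway needs in order to deploy the WEBHDFS, STORM, WEBHBASE, KAFKA and SOLR services declared in the test topology.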
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/java/org/apache/ranger/services/knox/RangerAdminClientImpl.java
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/java/org/apache/ranger/services/knox/RangerAdminClientImpl.java b/knox-agent/src/test/java/org/apache/ranger/services/knox/RangerAdminClientImpl.java
new file mode 100644
index 0000000..6ce6979
--- /dev/null
+++ b/knox-agent/src/test/java/org/apache/ranger/services/knox/RangerAdminClientImpl.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ranger.services.knox;
+
+import java.io.File;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.util.List;
+
+import org.apache.ranger.admin.client.RangerAdminClient;
+import org.apache.ranger.plugin.util.GrantRevokeRequest;
+import org.apache.ranger.plugin.util.ServicePolicies;
+import org.apache.ranger.plugin.util.ServiceTags;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+/**
+ * A test implementation of the RangerAdminClient interface that just reads policies in from a file and returns them
+ */
+public class RangerAdminClientImpl implements RangerAdminClient {
+ private static final Logger LOG = LoggerFactory.getLogger(RangerAdminClientImpl.class);
+ private final static String cacheFilename = "knox-policies.json";
+ private Gson gson;
+
+ public void init(String serviceName, String appId, String configPropertyPrefix) {
+ Gson gson = null;
+ try {
+ gson = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").setPrettyPrinting().create();
+ } catch(Throwable excp) {
+ LOG.error("RangerAdminClientImpl: failed to create GsonBuilder object", excp);
+ }
+ this.gson = gson;
+ }
+
+ public ServicePolicies getServicePoliciesIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception {
+
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+
+ java.nio.file.Path cachePath = FileSystems.getDefault().getPath(basedir, "/src/test/resources/" + cacheFilename);
+ byte[] cacheBytes = Files.readAllBytes(cachePath);
+
+ return gson.fromJson(new String(cacheBytes), ServicePolicies.class);
+ }
+
+ public void grantAccess(GrantRevokeRequest request) throws Exception {
+
+ }
+
+ public void revokeAccess(GrantRevokeRequest request) throws Exception {
+
+ }
+
+ public ServiceTags getServiceTagsIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception {
+ return null;
+ }
+
+ public List<String> getTagTypes(String tagTypePattern) throws Exception {
+ return null;
+ }
+
+
+}
\ No newline at end of file
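For reference, a hypothetical standalone driver for this file-based client, assuming the working directory is knox-agent so that src/test/resources/knox-policies.json resolves; in the actual tests the plugin loads it via the ranger.plugin.knox.policy.source.impl property in ranger-knox-security.xml further below:

    import org.apache.ranger.plugin.util.ServicePolicies;
    import org.apache.ranger.services.knox.RangerAdminClientImpl;

    // Hypothetical driver, not part of the commit.
    public class PolicyCacheSmokeTest {
        public static void main(String[] args) throws Exception {
            RangerAdminClientImpl client = new RangerAdminClientImpl();
            client.init("cl1_knox", "knox", "ranger.plugin.knox");

            // Reads src/test/resources/knox-policies.json, which defines three policies for service cl1_knox.
            ServicePolicies policies = client.getServicePoliciesIfUpdated(-1L, 0L);
            System.out.println(policies.getPolicies().size());
        }
    }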
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/java/org/apache/ranger/services/knox/client/KnoxClientTest.java
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/java/org/apache/ranger/services/knox/client/KnoxClientTest.java b/knox-agent/src/test/java/org/apache/ranger/services/knox/client/KnoxClientTest.java
deleted file mode 100644
index dd13cbb..0000000
--- a/knox-agent/src/test/java/org/apache/ranger/services/knox/client/KnoxClientTest.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ranger.services.knox.client;
-
-public class KnoxClientTest {
-
-
- /*
- Sample curl calls to knox REST API to discover topologies
- curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies
- curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin
- */
-
- public static void main(String[] args) {
- System.out.println(System.getProperty("java.class.path"));
- System.setProperty("javax.net.ssl.trustStore", "/tmp/cacertswithknox)");
- String[] testArgs = {
- "https://localhost:8443/gateway/admin/api/v1/topologies",
- "admin",
- "admin-password"
- };
- KnoxClient.main(testArgs);
- }
-
-
-}
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/java/org/apache/ranger/services/knox/client/TestRangerServiceKnox.java
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/java/org/apache/ranger/services/knox/client/TestRangerServiceKnox.java b/knox-agent/src/test/java/org/apache/ranger/services/knox/client/TestRangerServiceKnox.java
deleted file mode 100644
index c3690d4..0000000
--- a/knox-agent/src/test/java/org/apache/ranger/services/knox/client/TestRangerServiceKnox.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.services.knox.client;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.ranger.plugin.model.RangerService;
-import org.apache.ranger.plugin.model.RangerServiceDef;
-import org.apache.ranger.plugin.service.ResourceLookupContext;
-import org.apache.ranger.services.knox.RangerServiceKnox;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-
-public class TestRangerServiceKnox {
-
- static final String sdName = "svcDef-Knox";
- static final String serviceName = "KnoxDef";
- HashMap<String, Object> responseData = null;
- Map<String, String> configs = null;
- RangerServiceKnox svcKnox = null;
- RangerServiceDef sd = null;
- RangerService svc = null;
- ResourceLookupContext lookupContext = null;
-
-
- @Before
- public void setup() {
- configs = new HashMap<String,String>();
- lookupContext = new ResourceLookupContext();
-
- buildHbaseConnectionConfig();
- buildLookupContext();
-
- sd = new RangerServiceDef(sdName, " org.apache.ranger.services.knox.RangerServiceKnox", "TestKnoxService", "test Knox servicedef description", null, null, null, null, null, null, null);
- svc = new RangerService(sdName, serviceName, "unit test Knox resource lookup and validateConfig", null, configs);
- svcKnox = new RangerServiceKnox();
- svcKnox.init(sd, svc);
- }
-
- @Test
- public void testValidateConfig() {
-
- /* TODO: does this test require a live Knox environment?
- *
- HashMap<String,Object> ret = null;
- String errorMessage = null;
-
- try {
- ret = svcKnox.validateConfig();
- }catch (Exception e) {
- errorMessage = e.getMessage();
- if ( e instanceof HadoopException) {
- errorMessage = "HadoopException";
- }
- }
-
- if ( errorMessage != null) {
- assertTrue(errorMessage.contains("HadoopException"));
- } else {
- assertNotNull(ret);
- }
- *
- */
- }
-
-
- @Test
- public void testLookUpResource() {
- /* TODO: does this test require a live Knox environment?
- *
- List<String> ret = new ArrayList<String>();
- String errorMessage = null;
- try {
- ret = svcKnox.lookupResource(lookupContext);
- }catch (Exception e) {
- errorMessage = e.getMessage();
- if ( e instanceof HadoopException) {
- errorMessage = "HadoopException";
- }
- }
-
- if ( errorMessage != null) {
- assertTrue(errorMessage.contains("HadoopException"));
- } else {
- assertNotNull(ret);
- }
- *
- */
- }
-
- public void buildHbaseConnectionConfig() {
- configs.put("username", "admin");
- configs.put("password", "admin-password");
- configs.put("knox.url", "https://localhost:8443/gateway/admin/api/v1/topologies");
- }
-
- public void buildLookupContext() {
- Map<String, List<String>> resourceMap = new HashMap<String,List<String>>();
- resourceMap.put("topology", null);
- lookupContext.setUserInput("a");
- lookupContext.setResourceName("topology");
- lookupContext.setResources(resourceMap);
- }
-
- @After
- public void tearDown() {
- sd = null;
- svc = null;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/resources/cluster-configuration.json
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/resources/cluster-configuration.json b/knox-agent/src/test/resources/cluster-configuration.json
new file mode 100644
index 0000000..c8261eb
--- /dev/null
+++ b/knox-agent/src/test/resources/cluster-configuration.json
@@ -0,0 +1,141 @@
+{
+ "dev.zookeeper.path": "/tmp/dev-storm-zookeeper",
+ "topology.tick.tuple.freq.secs": null,
+ "topology.builtin.metrics.bucket.size.secs": 60,
+ "topology.fall.back.on.java.serialization": true,
+ "supervisor.run.worker.as.user": false,
+ "topology.max.error.report.per.interval": 5,
+ "storm.group.mapping.service": "backtype.storm.security.auth.ShellBasedGroupsMapping",
+ "zmq.linger.millis": 5000,
+ "topology.skip.missing.kryo.registrations": false,
+ "storm.messaging.netty.client_worker_threads": 1,
+ "ui.childopts": "-Xmx220m",
+ "storm.zookeeper.session.timeout": 20000,
+ "ui.filter.params": null,
+ "nimbus.reassign": true,
+ "storm.auth.simple-acl.admins": [],
+ "storm.group.mapping.service.cache.duration.secs": 120,
+ "topology.trident.batch.emit.interval.millis": 500,
+ "drpc.authorizer.acl.filename": "drpc-auth-acl.yaml",
+ "storm.messaging.netty.flush.check.interval.ms": 10,
+ "ui.header.buffer.bytes": 4096,
+ "nimbus.monitor.freq.secs": 10,
+ "logviewer.childopts": "-Xmx128m ",
+ "java.library.path": "/usr/local/lib:/opt/local/lib:/usr/lib:/usr/hdp/current/storm-client/lib",
+ "supervisor.supervisors": [],
+ "topology.executor.send.buffer.size": 1024,
+ "storm.local.dir": "/hadoop/storm",
+ "storm.messaging.netty.buffer_size": 5242880,
+ "supervisor.worker.start.timeout.secs": 120,
+ "drpc.authorizer.acl.strict": false,
+ "storm.nimbus.retry.times": 5,
+ "topology.enable.message.timeouts": true,
+ "nimbus.cleanup.inbox.freq.secs": 600,
+ "nimbus.inbox.jar.expiration.secs": 3600,
+ "drpc.worker.threads": 64,
+ "storm.meta.serialization.delegate": "backtype.storm.serialization.DefaultSerializationDelegate",
+ "topology.worker.shared.thread.pool.size": 4,
+ "nimbus.host": "sandbox.hortonworks.com",
+ "storm.messaging.netty.min_wait_ms": 100,
+ "storm.zookeeper.port": 2181,
+ "transactional.zookeeper.port": null,
+ "ui.http.creds.plugin": "backtype.storm.security.auth.DefaultHttpCredentialsPlugin",
+ "topology.executor.receive.buffer.size": 1024,
+ "logs.users": null,
+ "transactional.zookeeper.servers": null,
+ "storm.zookeeper.root": "/storm",
+ "storm.zookeeper.retry.intervalceiling.millis": 30000,
+ "supervisor.enable": true,
+ "storm.messaging.netty.server_worker_threads": 1,
+ "storm.zookeeper.servers": ["sandbox.hortonworks.com"],
+ "transactional.zookeeper.root": "/transactional",
+ "topology.acker.executors": null,
+ "storm.auth.simple-acl.users": [],
+ "storm.zookeeper.auth.user": null,
+ "topology.testing.always.try.serialize": false,
+ "topology.transfer.buffer.size": 1024,
+ "storm.principal.tolocal": "backtype.storm.security.auth.DefaultPrincipalToLocal",
+ "topology.worker.childopts": null,
+ "drpc.queue.size": 128,
+ "worker.childopts": "-Xmx768m -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM",
+ "storm.auth.simple-acl.users.commands": [],
+ "supervisor.heartbeat.frequency.secs": 5,
+ "topology.error.throttle.interval.secs": 10,
+ "storm.nimbus.retry.interval.millis": 2000,
+ "ui.users": null,
+ "zmq.hwm": 0,
+ "drpc.port": 3772,
+ "supervisor.monitor.frequency.secs": 3,
+ "drpc.childopts": "-Xmx220m",
+ "topology.receiver.buffer.size": 8,
+ "task.heartbeat.frequency.secs": 3,
+ "topology.tasks": null,
+ "storm.messaging.netty.max_retries": 30,
+ "topology.spout.wait.strategy": "backtype.storm.spout.SleepSpoutWaitStrategy",
+ "nimbus.thrift.max_buffer_size": 1048576,
+ "drpc.invocations.threads": 64,
+ "drpc.https.port": -1,
+ "supervisor.supervisors.commands": [],
+ "topology.max.spout.pending": null,
+ "ui.filter": null,
+ "logviewer.cleanup.age.mins": 10080,
+ "storm.zookeeper.retry.interval": 1000,
+ "topology.sleep.spout.wait.strategy.time.ms": 1,
+ "nimbus.topology.validator": "backtype.storm.nimbus.DefaultTopologyValidator",
+ "supervisor.slots.ports": [
+ 6700,
+ 6701
+ ],
+ "storm.messaging.netty.authentication": false,
+ "topology.environment": null,
+ "topology.debug": false,
+ "nimbus.thrift.threads": 64,
+ "nimbus.task.launch.secs": 120,
+ "nimbus.supervisor.timeout.secs": 60,
+ "drpc.http.creds.plugin": "backtype.storm.security.auth.DefaultHttpCredentialsPlugin",
+ "topology.message.timeout.secs": 30,
+ "task.refresh.poll.secs": 10,
+ "topology.workers": 1,
+ "supervisor.childopts": "-Xmx256m -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=56431 -javaagent:/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM",
+ "storm.auth.simple-white-list.users": [],
+ "nimbus.thrift.port": 6627,
+ "drpc.https.keystore.type": "JKS",
+ "topology.stats.sample.rate": 0.05,
+ "task.credentials.poll.secs": 30,
+ "worker.heartbeat.frequency.secs": 1,
+ "ui.actions.enabled": true,
+ "topology.tuple.serializer": "backtype.storm.serialization.types.ListDelegateSerializer",
+ "drpc.https.keystore.password": "",
+ "topology.disruptor.wait.strategy": "com.lmax.disruptor.BlockingWaitStrategy",
+ "topology.multilang.serializer": "backtype.storm.multilang.JsonSerializer",
+ "drpc.max_buffer_size": 1048576,
+ "nimbus.task.timeout.secs": 30,
+ "storm.zookeeper.connection.timeout": 15000,
+ "topology.kryo.factory": "backtype.storm.serialization.DefaultKryoFactory",
+ "drpc.invocations.port": 3773,
+ "logviewer.port": 8005,
+ "zmq.threads": 1,
+ "storm.zookeeper.retry.times": 5,
+ "topology.worker.receiver.thread.count": 1,
+ "storm.thrift.transport": "backtype.storm.security.auth.SimpleTransportPlugin",
+ "topology.state.synchronization.timeout.secs": 60,
+ "supervisor.worker.timeout.secs": 30,
+ "nimbus.file.copy.expiration.secs": 600,
+ "nimbus.credential.renewers.freq.secs": 600,
+ "storm.messaging.transport": "backtype.storm.messaging.netty.Context",
+ "worker.gc.childopts": "",
+ "logviewer.appender.name": "A1",
+ "storm.messaging.netty.max_wait_ms": 1000,
+ "storm.zookeeper.auth.password": null,
+ "drpc.http.port": 3774,
+ "drpc.request.timeout.secs": 600,
+ "storm.local.mode.zmq": false,
+ "ui.port": 8744,
+ "nimbus.childopts": "-Xmx220m -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM",
+ "storm.cluster.mode": "distributed",
+ "topology.optimize": true,
+ "topology.max.task.parallelism": null,
+ "storm.messaging.netty.transfer.batch.size": 262144,
+ "storm.nimbus.retry.intervalceiling.millis": 60000,
+ "topology.classpath": null
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/resources/knox-policies.json
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/resources/knox-policies.json b/knox-agent/src/test/resources/knox-policies.json
new file mode 100644
index 0000000..0863d74
--- /dev/null
+++ b/knox-agent/src/test/resources/knox-policies.json
@@ -0,0 +1,285 @@
+{
+ "serviceName": "cl1_knox",
+ "serviceId": 1,
+ "policyVersion": 5,
+ "policyUpdateTime": "20170620-17:47:01.000-+0100",
+ "policies": [
+ {
+ "service": "cl1_knox",
+ "name": "all - topology, service",
+ "policyType": 0,
+ "description": "Policy for all - topology, service",
+ "isAuditEnabled": true,
+ "resources": {
+ "topology": {
+ "values": [
+ "*"
+ ],
+ "isExcludes": false,
+ "isRecursive": false
+ },
+ "service": {
+ "values": [
+ "*"
+ ],
+ "isExcludes": false,
+ "isRecursive": false
+ }
+ },
+ "policyItems": [
+ {
+ "accesses": [
+ {
+ "type": "allow",
+ "isAllowed": true
+ }
+ ],
+ "users": [
+ "admin"
+ ],
+ "groups": [],
+ "conditions": [],
+ "delegateAdmin": true
+ }
+ ],
+ "denyPolicyItems": [],
+ "allowExceptions": [],
+ "denyExceptions": [],
+ "dataMaskPolicyItems": [],
+ "rowFilterPolicyItems": [],
+ "id": 1,
+ "isEnabled": true,
+ "version": 1
+ },
+ {
+ "service": "cl1_knox",
+ "name": "ClusterPolicy",
+ "policyType": 0,
+ "description": "",
+ "isAuditEnabled": true,
+ "resources": {
+ "topology": {
+ "values": [
+ "cluster"
+ ],
+ "isExcludes": false,
+ "isRecursive": false
+ },
+ "service": {
+ "values": [
+ "KAFKA",
+ "WEBHDFS",
+ "WEBHBASE",
+ "SOLR"
+ ],
+ "isExcludes": false,
+ "isRecursive": false
+ }
+ },
+ "policyItems": [
+ {
+ "accesses": [
+ {
+ "type": "allow",
+ "isAllowed": true
+ }
+ ],
+ "users": [
+ "alice"
+ ],
+ "groups": [],
+ "conditions": [],
+ "delegateAdmin": false
+ }
+ ],
+ "denyPolicyItems": [],
+ "allowExceptions": [],
+ "denyExceptions": [],
+ "dataMaskPolicyItems": [],
+ "rowFilterPolicyItems": [],
+ "id": 2,
+ "isEnabled": true,
+ "version": 2
+ },
+ {
+ "service": "cl1_knox",
+ "name": "StormPolicy",
+ "policyType": 0,
+ "description": "",
+ "isAuditEnabled": true,
+ "resources": {
+ "topology": {
+ "values": [
+ "cluster"
+ ],
+ "isExcludes": false,
+ "isRecursive": false
+ },
+ "service": {
+ "values": [
+ "STORM"
+ ],
+ "isExcludes": false,
+ "isRecursive": false
+ }
+ },
+ "policyItems": [
+ {
+ "accesses": [
+ {
+ "type": "allow",
+ "isAllowed": true
+ }
+ ],
+ "users": [
+ "bob"
+ ],
+ "groups": [],
+ "conditions": [],
+ "delegateAdmin": false
+ }
+ ],
+ "denyPolicyItems": [],
+ "allowExceptions": [],
+ "denyExceptions": [],
+ "dataMaskPolicyItems": [],
+ "rowFilterPolicyItems": [],
+ "id": 3,
+ "isEnabled": true,
+ "version": 1
+ }
+ ],
+ "serviceDef": {
+ "name": "knox",
+ "implClass": "org.apache.ranger.services.knox.RangerServiceKnox",
+ "label": "Knox Gateway",
+ "description": "Knox Gateway",
+ "options": {
+ "enableDenyAndExceptionsInPolicies": "true"
+ },
+ "configs": [
+ {
+ "itemId": 1,
+ "name": "username",
+ "type": "string",
+ "mandatory": true,
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "Username"
+ },
+ {
+ "itemId": 2,
+ "name": "password",
+ "type": "password",
+ "mandatory": true,
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "Password"
+ },
+ {
+ "itemId": 3,
+ "name": "knox.url",
+ "type": "string",
+ "mandatory": true,
+ "defaultValue": "",
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": ""
+ },
+ {
+ "itemId": 4,
+ "name": "commonNameForCertificate",
+ "type": "string",
+ "mandatory": false,
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "Common Name for Certificate"
+ }
+ ],
+ "resources": [
+ {
+ "itemId": 1,
+ "name": "topology",
+ "type": "string",
+ "level": 10,
+ "mandatory": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": "true",
+ "ignoreCase": "false"
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "Knox Topology",
+ "description": "Knox Topology"
+ },
+ {
+ "itemId": 2,
+ "name": "service",
+ "type": "string",
+ "level": 20,
+ "parent": "topology",
+ "mandatory": true,
+ "lookupSupported": true,
+ "recursiveSupported": false,
+ "excludesSupported": true,
+ "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher",
+ "matcherOptions": {
+ "wildCard": "true",
+ "ignoreCase": "false"
+ },
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "Knox Service",
+ "description": "Knox Service"
+ }
+ ],
+ "accessTypes": [
+ {
+ "itemId": 1,
+ "name": "allow",
+ "label": "Allow",
+ "impliedGrants": []
+ }
+ ],
+ "policyConditions": [
+ {
+ "itemId": 1,
+ "name": "ip-range",
+ "evaluator": "org.apache.ranger.plugin.conditionevaluator.RangerIpMatcher",
+ "evaluatorOptions": {},
+ "validationRegEx": "",
+ "validationMessage": "",
+ "uiHint": "",
+ "label": "IP Address Range",
+ "description": "IP Address Range"
+ }
+ ],
+ "contextEnrichers": [],
+ "enums": [],
+ "dataMaskDef": {
+ "maskTypes": [],
+ "accessTypes": [],
+ "resources": []
+ },
+ "rowFilterDef": {
+ "accessTypes": [],
+ "resources": []
+ },
+ "id": 5,
+ "guid": "84b481b5-f23b-4f71-b8b6-ab33977149ca",
+ "isEnabled": true,
+ "createTime": "20170620-17:41:25.000-+0100",
+ "updateTime": "20170620-17:41:25.000-+0100",
+ "version": 1
+ },
+ "auditMode": "audit-default"
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/resources/log4j.properties b/knox-agent/src/test/resources/log4j.properties
index 7558996..2c9f1b2 100644
--- a/knox-agent/src/test/resources/log4j.properties
+++ b/knox-agent/src/test/resources/log4j.properties
@@ -14,7 +14,7 @@
# limitations under the License.
# Define some default values that can be overridden by system properties
-ranger.root.logger=DEBUG,console
+#ranger.root.logger=WARN,console
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${ranger.root.logger}
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/resources/query_response.xml
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/resources/query_response.xml b/knox-agent/src/test/resources/query_response.xml
new file mode 100644
index 0000000..dd79042
--- /dev/null
+++ b/knox-agent/src/test/resources/query_response.xml
@@ -0,0 +1,20 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<?xml version="1.0" encoding="UTF-8"?>
+<response>
+<lst name="responseHeader"><bool name="zkConnected">true</bool><int name="status">0</int><int name="QTime">9</int><lst name="params"><str name="q">author_s:William Shakespeare</str></lst></lst><result name="response" numFound="2" start="0" maxScore="0.62191015"><doc><str name="id">book2</str><arr name="title_t"><str>The Merchant of Venice</str></arr><str name="author_s">William Shakespeare</str><long name="_version_">1571258160735322112</long></doc><doc><str name="id">book1</str><arr name="title_t"><str>The Merchant of Venice</str></arr><str name="author_s">William Shakespeare</str><long name="_version_">1571258154399825920</long></doc></result>
+</response>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/resources/ranger-knox-security.xml
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/resources/ranger-knox-security.xml b/knox-agent/src/test/resources/ranger-knox-security.xml
new file mode 100644
index 0000000..5465270
--- /dev/null
+++ b/knox-agent/src/test/resources/ranger-knox-security.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
+ <property>
+ <name>ranger.plugin.knox.policy.rest.url</name>
+ <value>http://localhost:6080</value>
+ <description>
+ URL to Ranger Admin
+ </description>
+ </property>
+
+ <property>
+ <name>ranger.plugin.knox.service.name</name>
+ <value>cl1_knox</value>
+ <description>
+ Name of the Ranger service containing policies for this Knox plugin instance
+ </description>
+ </property>
+
+ <property>
+ <name>ranger.plugin.knox.policy.source.impl</name>
+ <value>org.apache.ranger.services.knox.RangerAdminClientImpl</value>
+ <description>
+ Policy source.
+ </description>
+ </property>
+
+ <property>
+ <name>ranger.plugin.knox.policy.cache.dir</name>
+ <value>${project.build.directory}</value>
+ <description>
+ Directory where Ranger policies are cached after successful retrieval from the source
+ </description>
+ </property>
+
+</configuration>
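Two details tie this file back to the knox-agent/pom.xml changes above: test-resource filtering substitutes ${project.build.directory} in the policy cache property at build time, and the policy.source.impl property points the plugin at the file-based RangerAdminClientImpl instead of a live Ranger Admin at the REST URL. A quick way to inspect the filtered copy, shown only as a hypothetical check with an assumed path:

    // Hypothetical check against the filtered copy under target/test-classes (path assumed), not part of the commit.
    public class FilteredConfigCheck {
        public static void main(String[] args) {
            org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration(false);
            conf.addResource(new org.apache.hadoop.fs.Path("knox-agent/target/test-classes/ranger-knox-security.xml"));
            // Expected: org.apache.ranger.services.knox.RangerAdminClientImpl
            System.out.println(conf.get("ranger.plugin.knox.policy.source.impl"));
        }
    }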
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/resources/users.ldif
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/resources/users.ldif b/knox-agent/src/test/resources/users.ldif
new file mode 100644
index 0000000..37d7ed9
--- /dev/null
+++ b/knox-agent/src/test/resources/users.ldif
@@ -0,0 +1,55 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+version: 1
+
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+
+dn: uid=alice,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: alice
+sn: alice
+uid: alice
+userPassword:password
+
+dn: uid=bob,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: bob
+sn: bob
+uid: bob
+userPassword:password
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/resources/webhbase-table-list.xml
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/resources/webhbase-table-list.xml b/knox-agent/src/test/resources/webhbase-table-list.xml
new file mode 100644
index 0000000..3872752
--- /dev/null
+++ b/knox-agent/src/test/resources/webhbase-table-list.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<TableList>
+ <table name="table1"/>
+ <table name="table2"/>
+ <br></br>
+ <br/>
+</TableList>
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/knox-agent/src/test/resources/webhdfs-liststatus-test.json
----------------------------------------------------------------------
diff --git a/knox-agent/src/test/resources/webhdfs-liststatus-test.json b/knox-agent/src/test/resources/webhdfs-liststatus-test.json
new file mode 100644
index 0000000..a4f83fd
--- /dev/null
+++ b/knox-agent/src/test/resources/webhdfs-liststatus-test.json
@@ -0,0 +1,3 @@
+{ "FileStatuses":{"FileStatus":[
+{"accessTime":0,"blockSize":0,"group":"hdfs","length":0,"modificationTime":1350595857178,"owner":"hdfs","pathSuffix":"dir","permission":"755","replication":0,"type":"DIRECTORY"}
+]}}
http://git-wip-us.apache.org/repos/asf/ranger/blob/dbc84c18/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f7d690f..3958014 100644
--- a/pom.xml
+++ b/pom.xml
@@ -183,7 +183,7 @@
<junit.version>4.12</junit.version>
<kafka.version>0.10.0.0</kafka.version>
<kerby.version>1.0.0</kerby.version>
- <knox.gateway.version>0.6.0</knox.gateway.version>
+ <knox.gateway.version>0.13.0</knox.gateway.version>
<libpam4j.version>1.8</libpam4j.version>
<local.lib.dir>${project.basedir}/../lib/local</local.lib.dir>
<log4j.version>1.2.17</log4j.version>