Posted to commits@ranger.apache.org by rm...@apache.org on 2021/03/30 17:00:34 UTC

[ranger] branch master updated: RANGER-3226: Ranger Audit framework to handle UnsupportedOperationException while writing to S3AFileSystem with the hflush API

This is an automated email from the ASF dual-hosted git repository.

rmani pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ranger.git


The following commit(s) were added to refs/heads/master by this push:
     new 2f05ff6  RANGER-3226: Ranger Audit framework to handle UnsupportedOperationException while writing to S3AFileSystem with the hflush API
2f05ff6 is described below

commit 2f05ff6745ac209783067a735f111fa66600849a
Author: Ramesh Mani <rm...@cloudera.com>
AuthorDate: Tue Mar 30 00:13:46 2021 -0700

    RANGER-3226: Ranger Audit framework to handle UnsupportedOperationException while writing to S3AFileSystem with the hflush API
    
    Change-Id: I3904609af718b2f9372b5d4569507fc6e71ededa
    Signed-off-by: Ramesh Mani <rm...@cloudera.com>
---
 .../ranger/audit/destination/HDFSAuditDestination.java       | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/agents-audit/src/main/java/org/apache/ranger/audit/destination/HDFSAuditDestination.java b/agents-audit/src/main/java/org/apache/ranger/audit/destination/HDFSAuditDestination.java
index 906ff34..5e6f402 100644
--- a/agents-audit/src/main/java/org/apache/ranger/audit/destination/HDFSAuditDestination.java
+++ b/agents-audit/src/main/java/org/apache/ranger/audit/destination/HDFSAuditDestination.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.StreamCapabilities;
 import org.apache.ranger.audit.model.AuditEventBase;
 import org.apache.ranger.audit.provider.MiscUtil;
 import org.apache.ranger.audit.utils.RollingTimeUtil;
@@ -74,6 +75,8 @@ public class HDFSAuditDestination extends AuditDestination {
 
 	private boolean rollOverByDuration  = false;
 
+	private boolean isHFlushCapableStream    = false;
+
 	@Override
 	public void init(Properties prop, String propPrefix) {
 		super.init(prop, propPrefix);
@@ -299,6 +302,7 @@ public class HDFSAuditDestination extends AuditDestination {
 			ostream = fileSystem.create(hdfPath);
 			logWriter = new PrintWriter(ostream);
 			currentFileName = fullPath;
+			isHFlushCapableStream = ostream.hasCapability(StreamCapabilities.HFLUSH);
 		}
 		return logWriter;
 	}
@@ -374,7 +378,13 @@ public class HDFSAuditDestination extends AuditDestination {
 						// 1) PrintWriter does not have buffering of its own so
 						// we need to flush its underlying stream
 						// 2) HDFS flush() does not really flush all the way to disk.
-						ostream.hflush();
+						if (isHFlushCapableStream) {
+							// Checking the HFLUSH capability of the stream because of HADOOP-13327.
+							// For the S3A filesystem, hflush() throws UnsupportedOperationException, so we call flush() instead.
+							ostream.hflush();
+						} else {
+							ostream.flush();
+						}
 					logger.info("Flush HDFS audit logs completed.....");
 				}
 			} catch (IOException e) {
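
For reference, below is a minimal, self-contained sketch of the capability probe this commit introduces: ask the output stream whether it actually supports hflush() before calling it, and fall back to a plain flush() otherwise. This is illustrative code, not part of the commit; the class name and the s3a:// path are hypothetical placeholders, and it assumes hadoop-common (plus hadoop-aws for the s3a:// scheme) is on the classpath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StreamCapabilities;

public class CapabilityAwareFlush {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder destination; any FileSystem scheme (hdfs://, s3a://, file://) works here.
        Path path = new Path("s3a://example-bucket/ranger/audit/sample.log");
        FileSystem fs = FileSystem.get(path.toUri(), conf);

        try (FSDataOutputStream out = fs.create(path)) {
            out.writeBytes("sample audit record\n");

            // FSDataOutputStream implements StreamCapabilities, so the stream
            // can be asked whether hflush() is supported before invoking it.
            if (out.hasCapability(StreamCapabilities.HFLUSH)) {
                out.hflush(); // HDFS: pushes buffered data to the datanode pipeline
            } else {
                out.flush();  // S3A and similar stores: best-effort flush only
            }
        }
    }
}

On HDFS the stream reports the HFLUSH capability and hflush() pushes the buffered data to the datanode pipeline; an S3A stream does not report it (see HADOOP-13327), so the fallback flush() avoids the UnsupportedOperationException that motivated RANGER-3226.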