Posted to common-issues@hadoop.apache.org by "ASF GitHub Bot (Jira)" <ji...@apache.org> on 2022/12/08 08:41:00 UTC

[jira] [Commented] (HADOOP-18258) Merging of S3A Audit Logs

    [ https://issues.apache.org/jira/browse/HADOOP-18258?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17644669#comment-17644669 ] 

ASF GitHub Bot commented on HADOOP-18258:
-----------------------------------------

sravanigadey commented on code in PR #4383:
URL: https://github.com/apache/hadoop/pull/4383#discussion_r1043069282


##########
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditTool.java:
##########
@@ -0,0 +1,256 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.audit;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.audit.mapreduce.S3AAuditLogMergerAndParser;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+
+import static org.apache.hadoop.service.launcher.LauncherExitCodes.EXIT_COMMAND_ARGUMENT_ERROR;
+import static org.apache.hadoop.service.launcher.LauncherExitCodes.EXIT_FAIL;
+import static org.apache.hadoop.service.launcher.LauncherExitCodes.EXIT_SUCCESS;
+
+/**
+ * AuditTool is a command line interface
+ * which merges and parses the audit log files
+ * and generates an Avro file.
+ */
+public class AuditTool extends Configured implements Tool, Closeable {
+
+  private static final Logger LOG = LoggerFactory.getLogger(AuditTool.class);
+
+  private final S3AAuditLogMergerAndParser s3AAuditLogMergerAndParser =
+      new S3AAuditLogMergerAndParser();
+
+  /**
+   * Name of this tool: {@value}.
+   */
+  public static final String AUDITTOOL =
+      "org.apache.hadoop.fs.s3a.audit.AuditTool";
+
+  /**
+   * Purpose of this tool: {@value}.
+   */
+  public static final String PURPOSE =
+      "\n\nUSAGE:\nMerge and parse audit log files, and convert them "
+          + "into an avro file for better visualization";
+
+  // Exit codes
+  private static final int SUCCESS = EXIT_SUCCESS;
+  private static final int FAILURE = EXIT_FAIL;
+  private static final int INVALID_ARGUMENT = EXIT_COMMAND_ARGUMENT_ERROR;
+
+  private static final String USAGE =
+      "bin/hadoop Class DestinationPath SourcePath\n"
+          + "bin/hadoop " + AUDITTOOL + " s3a://BUCKET s3a://BUCKET\n";
+
+  private PrintWriter out;
+
+  public AuditTool() {
+    super();
+  }
+
+  /**
+   * Gets the usage of the AuditTool.
+   *
+   * @return the usage string followed by the purpose text
+   */
+  public String getUsage() {
+    return USAGE + PURPOSE;
+  }
+
+  public String getName() {
+    return AUDITTOOL;
+  }
+
+  /**
+   * The run method takes the source and destination paths of the bucket,
+   * checks whether they are directories, and passes these paths on to
+   * merge and parse the audit log files.
+   *
+   * @param args argument list
+   * @return SUCCESS, i.e. '0', which is an exit code
+   * @throws Exception on any failure.
+   */
+  @Override
+  public int run(String[] args) throws Exception {
+    List<String> paths = Arrays.asList(args);
+    // both a destination path and a source path are required
+    if (paths.size() < 2) {
+      errorln(getUsage());
+      throw invalidArgs("Destination and source paths are required");
+    }
+
+    // Path of the source audit log files
+    Path logsPath = new Path(paths.get(1));

Review Comment:
   modified
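For readers of this thread, here is a minimal sketch (not part of the PR) of how a Tool such as AuditTool is typically driven through Hadoop's ToolRunner, which the class already imports. The class name AuditToolInvocationExample and both bucket paths are made up for illustration.

// Minimal sketch, not part of the PR: driving AuditTool through Hadoop's
// ToolRunner, which parses the generic options before delegating to run().
// The two bucket paths below are placeholders, not real buckets.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.audit.AuditTool;
import org.apache.hadoop.util.ToolRunner;

public final class AuditToolInvocationExample {
  public static void main(String[] args) throws Exception {
    String[] toolArgs = {
        "s3a://dest-bucket/merged-audit",  // destination path (placeholder)
        "s3a://log-bucket/audit-logs"      // source path of audit log files (placeholder)
    };
    int exitCode = ToolRunner.run(new Configuration(), new AuditTool(), toolArgs);
    System.exit(exitCode);
  }
}

On the command line this corresponds to the "bin/hadoop Class DestinationPath SourcePath" form shown in the USAGE string above.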





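The PURPOSE string says the merged and parsed logs are converted into an avro file. Purely as an illustration of that final step, and assuming a made-up three-field schema rather than whatever schema the PR actually defines, a single parsed audit entry could be written with the Avro generic API roughly like this:

// Illustrative sketch only; the record schema and field names are assumptions,
// not the schema used by the PR.
import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public final class AvroAuditRecordExample {

  // hypothetical schema for one parsed audit log entry
  private static final String SCHEMA_JSON =
      "{\"type\":\"record\",\"name\":\"AuditLogEntry\",\"fields\":["
          + "{\"name\":\"verb\",\"type\":\"string\"},"
          + "{\"name\":\"key\",\"type\":\"string\"},"
          + "{\"name\":\"turnaroundtime\",\"type\":\"long\"}]}";

  public static void main(String[] args) throws IOException {
    Schema schema = new Schema.Parser().parse(SCHEMA_JSON);

    // one parsed audit entry with placeholder values
    GenericRecord entry = new GenericData.Record(schema);
    entry.put("verb", "REST.GET.OBJECT");
    entry.put("key", "datasets/part-0000.parquet");
    entry.put("turnaroundtime", 12L);

    // write the record into a local avro container file
    try (DataFileWriter<GenericRecord> writer =
        new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(schema))) {
      writer.create(schema, new File("audit-logs.avro"));
      writer.append(entry);
    }
  }
}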
> Merging of S3A Audit Logs
> -------------------------
>
>                 Key: HADOOP-18258
>                 URL: https://issues.apache.org/jira/browse/HADOOP-18258
>             Project: Hadoop Common
>          Issue Type: Sub-task
>          Components: fs/s3
>            Reporter: Sravani Gadey
>            Assignee: Sravani Gadey
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 12.5h
>  Remaining Estimate: 0h
>
> Merging audit log files that contain a huge number of audit logs collected from a job, such as a Hive or Spark job, which issues various S3 requests such as list, head, get and put requests.
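A naive sketch of the merge step described above, assuming plain-text audit log files sitting under a single source directory and simply being concatenated into one output file with the Hadoop FileSystem API; the S3AAuditLogMergerAndParser used by the PR may do this quite differently:

// Rough illustration only, not the PR's implementation: concatenate every
// file under srcDir into destFile using the generic FileSystem API, so the
// same code works against s3a:// or local paths.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public final class NaiveAuditLogMerger {

  public static void merge(Configuration conf, Path srcDir, Path destFile)
      throws IOException {
    FileSystem srcFs = srcDir.getFileSystem(conf);
    FileSystem destFs = destFile.getFileSystem(conf);
    try (FSDataOutputStream out = destFs.create(destFile, true)) {
      for (FileStatus status : srcFs.listStatus(srcDir)) {
        if (status.isFile()) {
          try (FSDataInputStream in = srcFs.open(status.getPath())) {
            // append this audit log file's bytes to the merged output
            IOUtils.copyBytes(in, out, conf, false);
          }
        }
      }
    }
  }
}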



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: common-issues-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-issues-help@hadoop.apache.org