Posted to commits@druid.apache.org by gi...@apache.org on 2021/11/09 19:11:10 UTC

[druid] branch master updated: Migrate File.mkdirs to FileUtils.mkdirp. (#11879)

This is an automated email from the ASF dual-hosted git repository.

gian pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new babf00f  Migrate File.mkdirs to FileUtils.mkdirp. (#11879)
babf00f is described below

commit babf00f8e3a368e44afdd3eddf7356dad36a67b1
Author: Gian Merlino <gi...@gmail.com>
AuthorDate: Tue Nov 9 11:10:49 2021 -0800

    Migrate File.mkdirs to FileUtils.mkdirp. (#11879)
    
    * Migrate File.mkdirs to FileUtils.mkdirp.
    
    * Remove unused imports.
    
    * Fix LookupReferencesManager.
    
    * Simplify.
    
    * Also migrate usages of forceMkdir.
    
    * Fix var name.
    
    * Fix incorrect call.
    
    * Update test.
---
 .../BaseColumnarLongsFromGeneratorBenchmark.java   |  6 ++-
 .../BaseColumnarLongsFromSegmentsBenchmark.java    |  6 ++-
 .../benchmark/indexing/IndexMergeBenchmark.java    |  2 +-
 codestyle/druid-forbidden-apis.txt                 |  2 +
 .../apache/druid/java/util/common/FileUtils.java   | 19 +++++++++
 .../apache/druid/java/util/common/StreamUtils.java |  2 +-
 .../druid/java/util/common/FileUtilsTest.java      | 49 ++++++++++++++++++++++
 .../java/util/metrics/CgroupCpuMonitorTest.java    |  3 +-
 .../java/util/metrics/CgroupCpuSetMonitorTest.java |  3 +-
 .../java/util/metrics/CgroupMemoryMonitorTest.java |  4 +-
 .../java/util/metrics/CpuAcctDeltaMonitorTest.java |  4 +-
 .../druid/java/util/metrics/ProcFsReaderTest.java  |  4 +-
 .../java/util/metrics/cgroups/CpuAcctTest.java     |  4 +-
 .../java/util/metrics/cgroups/CpuSetTest.java      |  3 +-
 .../druid/java/util/metrics/cgroups/CpuTest.java   |  4 +-
 .../java/util/metrics/cgroups/MemoryTest.java      |  4 +-
 .../druid/java/util/metrics/cgroups/TestUtils.java |  9 ++--
 .../druid/storage/aliyun/OssDataSegmentPuller.java |  2 +-
 .../cassandra/CassandraDataSegmentPuller.java      |  2 +-
 .../cloudfiles/CloudFilesDataSegmentPuller.java    | 13 ------
 .../storage/azure/AzureDataSegmentPuller.java      |  2 +-
 ...natorPollingBasicAuthenticatorCacheManager.java |  2 +-
 ...rdinatorPollingBasicAuthorizerCacheManager.java |  4 +-
 .../storage/google/GoogleDataSegmentPuller.java    |  2 +-
 .../druid/storage/hdfs/HdfsDataSegmentPuller.java  |  2 +-
 .../druid/storage/s3/S3DataSegmentPuller.java      |  2 +-
 .../indexer/hadoop/FSSpideringIteratorTest.java    | 11 +++--
 .../common/MultipleFileTaskReportFileWriter.java   |  4 +-
 .../common/SingleFileTaskReportFileWriter.java     |  4 +-
 .../apache/druid/indexing/common/TaskToolbox.java  |  4 +-
 .../batch/parallel/DeepStorageShuffleClient.java   |  4 +-
 .../task/batch/parallel/HttpShuffleClient.java     |  2 +-
 .../batch/parallel/PartialSegmentMergeTask.java    | 14 +++----
 .../indexing/common/tasklogs/FileTaskLogs.java     | 22 ++++------
 .../druid/indexing/overlord/ForkingTaskRunner.java |  5 +--
 .../indexing/overlord/ThreadingTaskRunner.java     |  5 +--
 .../druid/indexing/worker/WorkerTaskManager.java   | 20 +++------
 .../worker/executor/ExecutorLifecycle.java         |  4 +-
 .../shuffle/LocalIntermediaryDataManager.java      | 18 ++++----
 .../AbstractParallelIndexSupervisorTaskTest.java   |  2 +-
 .../indexing/common/tasklogs/FileTaskLogsTest.java |  2 +-
 .../firehose/IngestSegmentFirehoseFactoryTest.java |  5 +--
 .../druid/indexing/overlord/TaskLifecycleTest.java |  4 +-
 .../indexing/worker/WorkerTaskManagerTest.java     |  4 +-
 .../epinephelinae/LimitedTemporaryStorage.java     |  4 +-
 .../druid/query/lookup/LookupSnapshotTaker.java    |  6 ---
 .../org/apache/druid/segment/IndexMergerV9.java    |  8 ++--
 .../writeout/TmpFileSegmentWriteOutMedium.java     |  2 +-
 .../org/apache/druid/query/DoubleStorageTest.java  |  3 +-
 .../segment/IndexMergerV9CompatibilityTest.java    |  2 +-
 .../segment/IndexMergerV9WithSpatialIndexTest.java | 10 ++---
 .../apache/druid/segment/SchemalessIndexTest.java  | 19 +++++----
 .../java/org/apache/druid/segment/TestIndex.java   |  9 ++--
 .../segment/filter/SpatialFilterBonusTest.java     | 11 ++---
 .../druid/segment/filter/SpatialFilterTest.java    | 11 ++---
 .../query/lookup/LookupReferencesManager.java      |  7 +++-
 .../segment/loading/LocalDataSegmentPusher.java    |  4 +-
 .../segment/loading/SegmentLocalCacheManager.java  |  5 +--
 .../realtime/appenderator/AppenderatorImpl.java    |  8 ++--
 .../realtime/appenderator/BatchAppenderator.java   |  5 ++-
 .../realtime/appenderator/StreamAppenderator.java  |  5 ++-
 .../segment/realtime/plumber/FlushingPlumber.java  | 10 ++++-
 .../segment/realtime/plumber/RealtimePlumber.java  |  8 +++-
 .../coordination/SegmentLoadDropHandler.java       | 11 ++---
 .../apache/druid/server/log/FileRequestLogger.java |  3 +-
 .../druid/metadata/input/SqlInputSourceTest.java   |  8 ++--
 .../loading/LocalDataSegmentKillerTest.java        |  3 +-
 .../loading/LocalDataSegmentPusherTest.java        |  4 +-
 .../SegmentLocalCacheManagerConcurrencyTest.java   |  3 +-
 .../loading/SegmentLocalCacheManagerTest.java      | 22 +++++-----
 .../realtime/firehose/SqlFirehoseFactoryTest.java  | 14 +++----
 .../segment/realtime/firehose/SqlFirehoseTest.java |  8 ++--
 ...egmentManagerBroadcastJoinIndexedTableTest.java |  2 +-
 .../server/SegmentManagerThreadSafetyTest.java     |  3 +-
 .../SegmentLoadDropHandlerCacheTest.java           |  3 +-
 .../coordination/SegmentLoadDropHandlerTest.java   |  2 +-
 .../server/coordination/TestStorageLocation.java   | 13 +++---
 .../org/apache/druid/cli/PullDependencies.java     |  4 +-
 78 files changed, 302 insertions(+), 231 deletions(-)
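
The call-site pattern is the same throughout the diff below: a raw File#mkdirs() call, whose boolean
return value is easy to ignore, becomes a FileUtils.mkdirp() call that throws on failure. A minimal
before/after sketch, with an illustrative class and path that are not taken from the patch, assuming
druid-core is on the classpath:

    import org.apache.druid.java.util.common.FileUtils;

    import java.io.File;
    import java.io.IOException;

    class CacheDirExample
    {
      static File prepareCacheDir(String path) throws IOException
      {
        final File cacheDir = new File(path);
        // Previously: cacheDir.mkdirs() -- a false return could go unnoticed until a later
        // write into the missing directory failed with a less helpful error.
        // Now: mkdirp() throws IOException on failure and returns quietly if the directory
        // already exists, so the caller simply declares or propagates IOException.
        FileUtils.mkdirp(cacheDir);
        return cacheDir;
      }
    }

Because mkdirp() is declared to throw IOException, several previously silent helpers in the diff pick
up a "throws IOException" clause (for example the private init methods in WorkerTaskManager).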

diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/compression/BaseColumnarLongsFromGeneratorBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/compression/BaseColumnarLongsFromGeneratorBenchmark.java
index 5e1c85e..882b5e3 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/compression/BaseColumnarLongsFromGeneratorBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/compression/BaseColumnarLongsFromGeneratorBenchmark.java
@@ -20,6 +20,7 @@
 package org.apache.druid.benchmark.compression;
 
 import com.google.common.collect.ImmutableList;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.segment.column.ValueType;
 import org.apache.druid.segment.generator.ColumnValueGenerator;
@@ -29,6 +30,7 @@ import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.State;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.List;
 
 @State(Scope.Benchmark)
@@ -385,11 +387,11 @@ public class BaseColumnarLongsFromGeneratorBenchmark extends BaseColumnarLongsBe
     return StringUtils.format("%s-%s-%s-%s.bin", encoding, distribution, rows, nullProbability);
   }
 
-  static File getTmpDir()
+  static File getTmpDir() throws IOException
   {
     final String dirPath = "tmp/encoding/longs/";
     File dir = new File(dirPath);
-    dir.mkdirs();
+    FileUtils.mkdirp(dir);
     return dir;
   }
 }
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/compression/BaseColumnarLongsFromSegmentsBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/compression/BaseColumnarLongsFromSegmentsBenchmark.java
index 0c0fd2d..2882616 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/compression/BaseColumnarLongsFromSegmentsBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/compression/BaseColumnarLongsFromSegmentsBenchmark.java
@@ -19,12 +19,14 @@
 
 package org.apache.druid.benchmark.compression;
 
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.State;
 
 import java.io.File;
+import java.io.IOException;
 
 @State(Scope.Benchmark)
 public class BaseColumnarLongsFromSegmentsBenchmark extends BaseColumnarLongsBenchmark
@@ -78,11 +80,11 @@ public class BaseColumnarLongsFromSegmentsBenchmark extends BaseColumnarLongsBen
     return StringUtils.format("%s-%s-longs-%s.bin", encoding, segmentName, columnName);
   }
 
-  File getTmpDir()
+  File getTmpDir() throws IOException
   {
     final String dirPath = StringUtils.format("tmp/encoding/%s", segmentName);
     File dir = new File(dirPath);
-    dir.mkdirs();
+    FileUtils.mkdirp(dir);
     return dir;
   }
 }
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java
index 4bf9c12..86d735e 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/indexing/IndexMergeBenchmark.java
@@ -157,7 +157,7 @@ public class IndexMergeBenchmark
   {
     File tmpFile = File.createTempFile("IndexMergeBenchmark-MERGEDFILE-V9-" + System.currentTimeMillis(), ".TEMPFILE");
     tmpFile.delete();
-    tmpFile.mkdirs();
+    FileUtils.mkdirp(tmpFile);
     try {
       log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());
 
diff --git a/codestyle/druid-forbidden-apis.txt b/codestyle/druid-forbidden-apis.txt
index 77f2adb..909039e 100644
--- a/codestyle/druid-forbidden-apis.txt
+++ b/codestyle/druid-forbidden-apis.txt
@@ -22,6 +22,7 @@ com.google.common.collect.Sets#newTreeSet() @ Create java.util.TreeSet directly
 com.google.common.collect.Sets#newTreeSet(java.util.Comparator) @ Create java.util.TreeSet directly
 com.google.common.io.Files#createTempDir() @ Use org.apache.druid.java.util.common.FileUtils.createTempDir()
 com.google.common.util.concurrent.MoreExecutors#sameThreadExecutor() @ Use org.apache.druid.java.util.common.concurrent.Execs#directExecutor()
+java.io.File#mkdirs() @ Use org.apache.druid.java.util.common.FileUtils.mkdirp instead
 java.io.File#toURL() @ Use java.io.File#toURI() and java.net.URI#toURL() instead
 java.lang.String#matches(java.lang.String) @ Use startsWith(), endsWith(), contains(), or compile and cache a Pattern explicitly
 java.lang.String#replace(java.lang.CharSequence,java.lang.CharSequence) @ Use one of the appropriate methods in StringUtils instead
@@ -43,6 +44,7 @@ java.lang.Math#random() @ Use ThreadLocalRandom.current()
 java.util.regex.Pattern#matches(java.lang.String,java.lang.CharSequence) @ Use String.startsWith(), endsWith(), contains(), or compile and cache a Pattern explicitly
 org.apache.commons.io.FileUtils#getTempDirectory() @ Use org.junit.rules.TemporaryFolder for tests instead
 org.apache.commons.io.FileUtils#deleteDirectory(java.io.File) @ Use org.apache.druid.java.util.common.FileUtils#deleteDirectory()
+org.apache.commons.io.FileUtils#forceMkdir(java.io.File) @ Use org.apache.druid.java.util.common.FileUtils.mkdirp instead
 java.lang.Class#getCanonicalName() @ Class.getCanonicalName can return null for anonymous types, use Class.getName instead.
 com.google.common.base.Objects#firstNonNull(java.lang.Object, java.lang.Object) @ Use org.apache.druid.common.guava.GuavaUtils#firstNonNull(java.lang.Object, java.lang.Object) instead (probably... the GuavaUtils method return object is nullable)
 
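The two new forbidden-apis entries above ban java.io.File#mkdirs() and commons-io forceMkdir()
project-wide, so any remaining direct use is rejected by the forbidden-apis check unless it carries a
@SuppressForbidden annotation (as FileUtils.mkdirp itself does, since it wraps the raw call). A hedged
sketch of the forceMkdir half of the migration, with an illustrative method name not taken from the
patch:

    import org.apache.druid.java.util.common.FileUtils;

    import java.io.File;
    import java.io.IOException;

    class SegmentPullerExample
    {
      static void prepareOutDir(File outDir) throws IOException
      {
        // Previously: org.apache.commons.io.FileUtils.forceMkdir(outDir).
        // Both methods throw IOException on failure, so surrounding error handling is unchanged.
        FileUtils.mkdirp(outDir);
      }
    }

Where other commons-io helpers (deleteQuietly, listFiles, forceDelete, getFile) remain in use next to
the Druid class, the diff switches them to fully qualified org.apache.commons.io.FileUtils calls so
that the single unqualified FileUtils import can refer to org.apache.druid.java.util.common.FileUtils.
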
diff --git a/core/src/main/java/org/apache/druid/java/util/common/FileUtils.java b/core/src/main/java/org/apache/druid/java/util/common/FileUtils.java
index 73d04bc..10990c7 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/FileUtils.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/FileUtils.java
@@ -380,6 +380,25 @@ public class FileUtils
   }
 
   /**
+   * Create "directory" and all intermediate directories as needed. If the directory is successfully created, or already
+   * exists, returns quietly. Otherwise, throws an IOException.
+   *
+   * Simpler to use than {@link File#mkdirs()}, and more reliable since it is safe from races where two threads try
+   * to create the same directory at the same time.
+   *
+   * The name is inspired by UNIX {@code mkdir -p}, which has the same behavior.
+   */
+  @SuppressForbidden(reason = "File#mkdirs")
+  public static void mkdirp(final File directory) throws IOException
+  {
+    // isDirectory check after mkdirs is necessary in case of concurrent calls to mkdirp, because two concurrent
+    // calls to mkdirs cannot both succeed.
+    if (!directory.mkdirs() && !directory.isDirectory()) {
+      throw new IOE("Cannot create directory [%s]", directory);
+    }
+  }
+
+  /**
    * Equivalent to {@link org.apache.commons.io.FileUtils#deleteDirectory(File)}. Exists here mostly so callers
    * can avoid dealing with our FileUtils and the Commons FileUtils having the same name.
    */
diff --git a/core/src/main/java/org/apache/druid/java/util/common/StreamUtils.java b/core/src/main/java/org/apache/druid/java/util/common/StreamUtils.java
index 4bb138b..ffc79e2 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/StreamUtils.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/StreamUtils.java
@@ -49,7 +49,7 @@ public class StreamUtils
    */
   public static long copyToFileAndClose(InputStream is, File file) throws IOException
   {
-    file.getParentFile().mkdirs();
+    FileUtils.mkdirp(file.getParentFile());
     try (OutputStream os = new BufferedOutputStream(new FileOutputStream(file))) {
       final long result = ByteStreams.copy(is, os);
       // Workarround for http://hg.openjdk.java.net/jdk8/jdk8/jdk/rev/759aa847dcaf
diff --git a/core/src/test/java/org/apache/druid/java/util/common/FileUtilsTest.java b/core/src/test/java/org/apache/druid/java/util/common/FileUtilsTest.java
index 3250089..39c7258 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/FileUtilsTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/FileUtilsTest.java
@@ -19,9 +19,12 @@
 
 package org.apache.druid.java.util.common;
 
+import org.hamcrest.CoreMatchers;
+import org.hamcrest.MatcherAssert;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
+import org.junit.internal.matchers.ThrowableMessageMatcher;
 import org.junit.rules.ExpectedException;
 import org.junit.rules.TemporaryFolder;
 
@@ -141,4 +144,50 @@ public class FileUtilsTest
       Files.delete(baseDir.toPath());
     }
   }
+
+  @Test
+  public void testMkdirp() throws IOException
+  {
+    final File tmpDir = folder.newFolder();
+    final File testDirectory = new File(tmpDir, "test");
+
+    FileUtils.mkdirp(testDirectory);
+    Assert.assertTrue(testDirectory.isDirectory());
+
+    FileUtils.mkdirp(testDirectory);
+    Assert.assertTrue(testDirectory.isDirectory());
+  }
+
+  @Test
+  public void testMkdirpCannotCreateOverExistingFile() throws IOException
+  {
+    final File tmpFile = folder.newFile();
+
+    expectedException.expect(IOException.class);
+    expectedException.expectMessage("Cannot create directory");
+    FileUtils.mkdirp(tmpFile);
+  }
+
+  @Test
+  public void testMkdirpCannotCreateInNonWritableDirectory() throws IOException
+  {
+    final File tmpDir = folder.newFolder();
+    final File testDirectory = new File(tmpDir, "test");
+    tmpDir.setWritable(false);
+    try {
+      final IOException e = Assert.assertThrows(IOException.class, () -> FileUtils.mkdirp(testDirectory));
+
+      MatcherAssert.assertThat(
+          e,
+          ThrowableMessageMatcher.hasMessage(CoreMatchers.containsString("Cannot create directory"))
+      );
+    }
+    finally {
+      tmpDir.setWritable(true);
+    }
+
+    // Now it should work.
+    FileUtils.mkdirp(testDirectory);
+    Assert.assertTrue(testDirectory.isDirectory());
+  }
 }
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/CgroupCpuMonitorTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/CgroupCpuMonitorTest.java
index 67c03d2..d637984 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/CgroupCpuMonitorTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/CgroupCpuMonitorTest.java
@@ -20,6 +20,7 @@
 package org.apache.druid.java.util.metrics;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.emitter.core.Event;
 import org.apache.druid.java.util.metrics.cgroups.CgroupDiscoverer;
 import org.apache.druid.java.util.metrics.cgroups.ProcCgroupDiscoverer;
@@ -58,7 +59,7 @@ public class CgroupCpuMonitorTest
         "cpu,cpuacct/system.slice/some.service/f12ba7e0-fa16-462e-bb9d-652ccc27f0ee"
     );
 
-    Assert.assertTrue((cpuDir.isDirectory() && cpuDir.exists()) || cpuDir.mkdirs());
+    FileUtils.mkdirp(cpuDir);
     TestUtils.copyOrReplaceResource("/cpu.shares", new File(cpuDir, "cpu.shares"));
     TestUtils.copyOrReplaceResource("/cpu.cfs_quota_us", new File(cpuDir, "cpu.cfs_quota_us"));
     TestUtils.copyOrReplaceResource("/cpu.cfs_period_us", new File(cpuDir, "cpu.cfs_period_us"));
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/CgroupCpuSetMonitorTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/CgroupCpuSetMonitorTest.java
index c661355..027bd99 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/CgroupCpuSetMonitorTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/CgroupCpuSetMonitorTest.java
@@ -20,6 +20,7 @@
 package org.apache.druid.java.util.metrics;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.emitter.core.Event;
 import org.apache.druid.java.util.metrics.cgroups.CgroupDiscoverer;
 import org.apache.druid.java.util.metrics.cgroups.ProcCgroupDiscoverer;
@@ -57,8 +58,8 @@ public class CgroupCpuSetMonitorTest
         cgroupDir,
         "cpuset/system.slice/some.service/f12ba7e0-fa16-462e-bb9d-652ccc27f0ee"
     );
-    Assert.assertTrue((cpusetDir.isDirectory() && cpusetDir.exists()) || cpusetDir.mkdirs());
 
+    FileUtils.mkdirp(cpusetDir);
     TestUtils.copyOrReplaceResource("/cpuset.cpus", new File(cpusetDir, "cpuset.cpus"));
     TestUtils.copyOrReplaceResource("/cpuset.effective_cpus.complex", new File(cpusetDir, "cpuset.effective_cpus"));
     TestUtils.copyOrReplaceResource("/cpuset.mems", new File(cpusetDir, "cpuset.mems"));
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/CgroupMemoryMonitorTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/CgroupMemoryMonitorTest.java
index 058e094..39ff532 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/CgroupMemoryMonitorTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/CgroupMemoryMonitorTest.java
@@ -20,6 +20,7 @@
 package org.apache.druid.java.util.metrics;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.emitter.core.Event;
 import org.apache.druid.java.util.metrics.cgroups.CgroupDiscoverer;
 import org.apache.druid.java.util.metrics.cgroups.ProcCgroupDiscoverer;
@@ -56,7 +57,8 @@ public class CgroupMemoryMonitorTest
         cgroupDir,
         "memory/system.slice/some.service"
     );
-    Assert.assertTrue((memoryDir.isDirectory() && memoryDir.exists()) || memoryDir.mkdirs());
+
+    FileUtils.mkdirp(memoryDir);
     TestUtils.copyResource("/memory.stat", new File(memoryDir, "memory.stat"));
     TestUtils.copyResource("/memory.numa_stat", new File(memoryDir, "memory.numa_stat"));
   }
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/CpuAcctDeltaMonitorTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/CpuAcctDeltaMonitorTest.java
index 9db54a4..97dd707 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/CpuAcctDeltaMonitorTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/CpuAcctDeltaMonitorTest.java
@@ -20,6 +20,7 @@
 package org.apache.druid.java.util.metrics;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.metrics.cgroups.TestUtils;
 import org.junit.Assert;
@@ -53,7 +54,8 @@ public class CpuAcctDeltaMonitorTest
         cgroupDir,
         "cpu,cpuacct/system.slice/some.service/f12ba7e0-fa16-462e-bb9d-652ccc27f0ee"
     );
-    Assert.assertTrue((cpuacctDir.isDirectory() && cpuacctDir.exists()) || cpuacctDir.mkdirs());
+
+    FileUtils.mkdirp(cpuacctDir);
     TestUtils.copyResource("/cpuacct.usage_all", new File(cpuacctDir, "cpuacct.usage_all"));
   }
 
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/ProcFsReaderTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/ProcFsReaderTest.java
index 0cd68d1..02d8626 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/ProcFsReaderTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/ProcFsReaderTest.java
@@ -19,6 +19,7 @@
 
 package org.apache.druid.java.util.metrics;
 
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.metrics.cgroups.TestUtils;
 import org.junit.Assert;
 import org.junit.Before;
@@ -44,7 +45,8 @@ public class ProcFsReaderTest
         procDir,
         "sys/kernel/random"
     );
-    Assert.assertTrue(kernelDir.mkdirs());
+
+    FileUtils.mkdirp(kernelDir);
     TestUtils.copyResource("/cpuinfo", new File(procDir, "cpuinfo"));
     TestUtils.copyResource("/boot_id", new File(kernelDir, "boot_id"));
   }
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuAcctTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuAcctTest.java
index 76f1922..4f80460 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuAcctTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuAcctTest.java
@@ -19,6 +19,7 @@
 
 package org.apache.druid.java.util.metrics.cgroups;
 
+import org.apache.druid.java.util.common.FileUtils;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -54,7 +55,8 @@ public class CpuAcctTest
         cgroupDir,
         "cpu,cpuacct/system.slice/some.service/f12ba7e0-fa16-462e-bb9d-652ccc27f0ee"
     );
-    Assert.assertTrue((cpuacctDir.isDirectory() && cpuacctDir.exists()) || cpuacctDir.mkdirs());
+
+    FileUtils.mkdirp(cpuacctDir);
     TestUtils.copyResource("/cpuacct.usage_all", new File(cpuacctDir, "cpuacct.usage_all"));
   }
 
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuSetTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuSetTest.java
index 516af9e..4e30c32 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuSetTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuSetTest.java
@@ -19,6 +19,7 @@
 
 package org.apache.druid.java.util.metrics.cgroups;
 
+import org.apache.druid.java.util.common.FileUtils;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -47,8 +48,8 @@ public class CpuSetTest
         cgroupDir,
         "cpuset/system.slice/some.service/f12ba7e0-fa16-462e-bb9d-652ccc27f0ee"
     );
-    Assert.assertTrue((cpusetDir.isDirectory() && cpusetDir.exists()) || cpusetDir.mkdirs());
 
+    FileUtils.mkdirp(cpusetDir);
     TestUtils.copyOrReplaceResource("/cpuset.cpus", new File(cpusetDir, "cpuset.cpus"));
     TestUtils.copyOrReplaceResource("/cpuset.mems", new File(cpusetDir, "cpuset.mems"));
     TestUtils.copyOrReplaceResource("/cpuset.effective_mems", new File(cpusetDir, "cpuset.effective_mems"));
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuTest.java
index c2a023f..817b466 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/CpuTest.java
@@ -19,6 +19,7 @@
 
 package org.apache.druid.java.util.metrics.cgroups;
 
+import org.apache.druid.java.util.common.FileUtils;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -46,7 +47,8 @@ public class CpuTest
         cgroupDir,
         "cpu,cpuacct/system.slice/some.service/f12ba7e0-fa16-462e-bb9d-652ccc27f0ee"
     );
-    Assert.assertTrue((cpuDir.isDirectory() && cpuDir.exists()) || cpuDir.mkdirs());
+
+    FileUtils.mkdirp(cpuDir);
     TestUtils.copyOrReplaceResource("/cpu.shares", new File(cpuDir, "cpu.shares"));
     TestUtils.copyOrReplaceResource("/cpu.cfs_quota_us", new File(cpuDir, "cpu.cfs_quota_us"));
     TestUtils.copyOrReplaceResource("/cpu.cfs_period_us", new File(cpuDir, "cpu.cfs_period_us"));
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/MemoryTest.java b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/MemoryTest.java
index 029ef5d..87e68d5 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/MemoryTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/MemoryTest.java
@@ -20,6 +20,7 @@
 package org.apache.druid.java.util.metrics.cgroups;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.druid.java.util.common.FileUtils;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -52,7 +53,8 @@ public class MemoryTest
         cgroupDir,
         "memory/system.slice/some.service"
     );
-    Assert.assertTrue((memoryDir.isDirectory() && memoryDir.exists()) || memoryDir.mkdirs());
+
+    FileUtils.mkdirp(memoryDir);
     TestUtils.copyResource("/memory.stat", new File(memoryDir, "memory.stat"));
     TestUtils.copyResource("/memory.numa_stat", new File(memoryDir, "memory.numa_stat"));
   }
diff --git a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/TestUtils.java b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/TestUtils.java
index c24b60d..e522ff6 100644
--- a/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/TestUtils.java
+++ b/core/src/test/java/org/apache/druid/java/util/metrics/cgroups/TestUtils.java
@@ -19,6 +19,7 @@
 
 package org.apache.druid.java.util.metrics.cgroups;
 
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.junit.Assert;
 
@@ -43,15 +44,15 @@ public class TestUtils
         StringUtils.toUtf8(StringUtils.replace(procMountsString, "/sys/fs/cgroup", cgroupDir.getAbsolutePath()))
     );
 
-    Assert.assertTrue(new File(
+    FileUtils.mkdirp(new File(
         cgroupDir,
         "cpu,cpuacct/system.slice/some.service/f12ba7e0-fa16-462e-bb9d-652ccc27f0ee"
-    ).mkdirs());
+    ));
 
-    Assert.assertTrue(new File(
+    FileUtils.mkdirp(new File(
         cgroupDir,
         "cpuset/system.slice/some.service/f12ba7e0-fa16-462e-bb9d-652ccc27f0ee"
-    ).mkdirs());
+    ));
     copyResource("/proc.pid.cgroup", new File(procDir, "cgroup"));
   }
 
diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPuller.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPuller.java
index 82c0f2e..08fe23e 100644
--- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPuller.java
+++ b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPuller.java
@@ -80,7 +80,7 @@ public class OssDataSegmentPuller implements URIDataPuller
     }
 
     try {
-      org.apache.commons.io.FileUtils.forceMkdir(outDir);
+      FileUtils.mkdirp(outDir);
 
       final URI uri = ossCoords.toUri(OssStorageDruidModule.SCHEME);
       final ByteSource byteSource = new ByteSource()
diff --git a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java
index 076a330..2e43cca 100644
--- a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java
+++ b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java
@@ -52,7 +52,7 @@ public class CassandraDataSegmentPuller extends CassandraStorage
   {
     log.info("Pulling index from C* at path[%s] to outDir[%s]", key, outDir);
     try {
-      org.apache.commons.io.FileUtils.forceMkdir(outDir);
+      FileUtils.mkdirp(outDir);
     }
     catch (IOException e) {
       throw new SegmentLoadingException(e, "");
diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java
index 55399fa..4357c77 100644
--- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java
+++ b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java
@@ -21,7 +21,6 @@ package org.apache.druid.storage.cloudfiles;
 
 import com.google.inject.Inject;
 import org.apache.druid.java.util.common.FileUtils;
-import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.segment.loading.SegmentLoadingException;
 import org.apache.druid.utils.CompressionUtils;
@@ -81,16 +80,4 @@ public class CloudFilesDataSegmentPuller
       }
     }
   }
-
-  private void prepareOutDir(final File outDir) throws ISE
-  {
-    if (!outDir.exists()) {
-      outDir.mkdirs();
-    }
-
-    if (!outDir.isDirectory()) {
-      throw new ISE("outDir[%s] must be a directory.", outDir);
-    }
-  }
-
 }
diff --git a/extensions-core/azure-extensions/src/main/java/org/apache/druid/storage/azure/AzureDataSegmentPuller.java b/extensions-core/azure-extensions/src/main/java/org/apache/druid/storage/azure/AzureDataSegmentPuller.java
index 727c28a..571ecc6 100644
--- a/extensions-core/azure-extensions/src/main/java/org/apache/druid/storage/azure/AzureDataSegmentPuller.java
+++ b/extensions-core/azure-extensions/src/main/java/org/apache/druid/storage/azure/AzureDataSegmentPuller.java
@@ -53,7 +53,7 @@ public class AzureDataSegmentPuller
       throws SegmentLoadingException
   {
     try {
-      org.apache.commons.io.FileUtils.forceMkdir(outDir);
+      FileUtils.mkdirp(outDir);
 
       log.info(
           "Loading container: [%s], with blobPath: [%s] and outDir: [%s]", containerName, blobPath, outDir
diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManager.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManager.java
index c197adf..5c87fa5 100644
--- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManager.java
+++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManager.java
@@ -235,7 +235,7 @@ public class CoordinatorPollingBasicAuthenticatorCacheManager implements BasicAu
   private void writeUserMapToDisk(String prefix, byte[] userMapBytes) throws IOException
   {
     File cacheDir = new File(commonCacheConfig.getCacheDirectory());
-    cacheDir.mkdirs();
+    FileUtils.mkdirp(cacheDir);
     File userMapFile = new File(commonCacheConfig.getCacheDirectory(), getUserMapFilename(prefix));
     FileUtils.writeAtomically(
         userMapFile,
diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManager.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManager.java
index 5ccd04b..32e69d9 100644
--- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManager.java
+++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManager.java
@@ -296,7 +296,7 @@ public class CoordinatorPollingBasicAuthorizerCacheManager implements BasicAutho
   private void writeUserMapToDisk(String prefix, byte[] userMapBytes) throws IOException
   {
     File cacheDir = new File(commonCacheConfig.getCacheDirectory());
-    cacheDir.mkdirs();
+    FileUtils.mkdirp(cacheDir);
     File userMapFile = new File(commonCacheConfig.getCacheDirectory(), getUserRoleMapFilename(prefix));
     FileUtils.writeAtomically(
         userMapFile,
@@ -310,7 +310,7 @@ public class CoordinatorPollingBasicAuthorizerCacheManager implements BasicAutho
   private void writeGroupMappingMapToDisk(String prefix, byte[] groupMappingBytes) throws IOException
   {
     File cacheDir = new File(commonCacheConfig.getCacheDirectory());
-    cacheDir.mkdirs();
+    FileUtils.mkdirp(cacheDir);
     File groupMapFile = new File(commonCacheConfig.getCacheDirectory(), getGroupMappingRoleMapFilename(prefix));
     FileUtils.writeAtomically(
         groupMapFile,
diff --git a/extensions-core/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentPuller.java b/extensions-core/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentPuller.java
index 7ed270a..40bd926 100644
--- a/extensions-core/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentPuller.java
+++ b/extensions-core/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentPuller.java
@@ -51,7 +51,7 @@ public class GoogleDataSegmentPuller implements URIDataPuller
     LOG.info("Pulling index at bucket[%s] path[%s] to outDir[%s]", bucket, path, outDir.getAbsolutePath());
 
     try {
-      org.apache.commons.io.FileUtils.forceMkdir(outDir);
+      FileUtils.mkdirp(outDir);
 
       final GoogleByteSource byteSource = new GoogleByteSource(storage, bucket, path);
       final FileUtils.FileCopyResult result = CompressionUtils.unzip(
diff --git a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPuller.java b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPuller.java
index 342b82a..1c0adbd 100644
--- a/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPuller.java
+++ b/extensions-core/hdfs-storage/src/main/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPuller.java
@@ -187,7 +187,7 @@ public class HdfsDataSegmentPuller implements URIDataPuller
   FileUtils.FileCopyResult getSegmentFiles(final Path path, final File outDir) throws SegmentLoadingException
   {
     try {
-      org.apache.commons.io.FileUtils.forceMkdir(outDir);
+      FileUtils.mkdirp(outDir);
     }
     catch (IOException e) {
       throw new SegmentLoadingException(e, "");
diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3DataSegmentPuller.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3DataSegmentPuller.java
index 73df48a..368a50f 100644
--- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3DataSegmentPuller.java
+++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3DataSegmentPuller.java
@@ -81,7 +81,7 @@ public class S3DataSegmentPuller implements URIDataPuller
     }
 
     try {
-      org.apache.commons.io.FileUtils.forceMkdir(outDir);
+      FileUtils.mkdirp(outDir);
 
       final URI uri = s3Coords.toUri(S3StorageDruidModule.SCHEME);
       final ByteSource byteSource = new ByteSource()
diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/FSSpideringIteratorTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/FSSpideringIteratorTest.java
index 7df958d..896ee84 100644
--- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/FSSpideringIteratorTest.java
+++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/FSSpideringIteratorTest.java
@@ -51,9 +51,9 @@ public class FSSpideringIteratorTest
       new File(baseDir, "dir1/file1").createNewFile();
       new File(baseDir, "dir1/file2").createNewFile();
 
-      new File(baseDir, "dir2/subDir1").mkdirs();
+      FileUtils.mkdirp(new File(baseDir, "dir2/subDir1"));
       new File(baseDir, "dir2/subDir1/file3").createNewFile();
-      new File(baseDir, "dir2/subDir2").mkdirs();
+      FileUtils.mkdirp(new File(baseDir, "dir2/subDir2"));
       new File(baseDir, "dir2/subDir2/file4").createNewFile();
       new File(baseDir, "dir2/subDir2/file5").createNewFile();
 
@@ -101,10 +101,9 @@ public class FSSpideringIteratorTest
     try {
       new File(baseDir, "dir1").mkdir();
 
-      new File(baseDir, "dir2/subDir1").mkdirs();
-      new File(baseDir, "dir2/subDir2").mkdirs();
-
-      new File(baseDir, "dir3/subDir1").mkdirs();
+      FileUtils.mkdirp(new File(baseDir, "dir2/subDir1"));
+      FileUtils.mkdirp(new File(baseDir, "dir2/subDir2"));
+      FileUtils.mkdirp(new File(baseDir, "dir3/subDir1"));
 
       List<String> files = Lists.newArrayList(
           Iterables.transform(
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/MultipleFileTaskReportFileWriter.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/MultipleFileTaskReportFileWriter.java
index 3e77f1f..5119b98 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/MultipleFileTaskReportFileWriter.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/MultipleFileTaskReportFileWriter.java
@@ -20,7 +20,7 @@
 package org.apache.druid.indexing.common;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.io.FileUtils;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.logger.Logger;
 
 import java.io.File;
@@ -47,7 +47,7 @@ public class MultipleFileTaskReportFileWriter implements TaskReportFileWriter
     try {
       final File reportsFileParent = reportsFile.getParentFile();
       if (reportsFileParent != null) {
-        FileUtils.forceMkdir(reportsFileParent);
+        FileUtils.mkdirp(reportsFileParent);
       }
       objectMapper.writeValue(reportsFile, reports);
     }
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/SingleFileTaskReportFileWriter.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/SingleFileTaskReportFileWriter.java
index adf6ad5..2e79fe0 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/SingleFileTaskReportFileWriter.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/SingleFileTaskReportFileWriter.java
@@ -20,7 +20,7 @@
 package org.apache.druid.indexing.common;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.io.FileUtils;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.logger.Logger;
 
 import java.io.File;
@@ -44,7 +44,7 @@ public class SingleFileTaskReportFileWriter implements TaskReportFileWriter
     try {
       final File reportsFileParent = reportsFile.getParentFile();
       if (reportsFileParent != null) {
-        FileUtils.forceMkdir(reportsFileParent);
+        FileUtils.mkdirp(reportsFileParent);
       }
       objectMapper.writeValue(reportsFile, reports);
     }
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/TaskToolbox.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/TaskToolbox.java
index 979de5f..7c32750 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/TaskToolbox.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/TaskToolbox.java
@@ -27,7 +27,6 @@ import com.google.common.collect.Maps;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.Multimaps;
 import com.google.inject.Provider;
-import org.apache.commons.io.FileUtils;
 import org.apache.druid.client.cache.Cache;
 import org.apache.druid.client.cache.CacheConfig;
 import org.apache.druid.client.cache.CachePopulatorStats;
@@ -43,6 +42,7 @@ import org.apache.druid.indexing.common.task.IndexTaskClientFactory;
 import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexSupervisorTaskClient;
 import org.apache.druid.indexing.common.task.batch.parallel.ShuffleClient;
 import org.apache.druid.indexing.worker.shuffle.IntermediaryDataManager;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.emitter.service.ServiceEmitter;
 import org.apache.druid.java.util.metrics.Monitor;
 import org.apache.druid.java.util.metrics.MonitorScheduler;
@@ -372,7 +372,7 @@ public class TaskToolbox
   {
     final File tmpDir = new File(taskWorkDir, "indexing-tmp");
     try {
-      FileUtils.forceMkdir(tmpDir);
+      FileUtils.mkdirp(tmpDir);
     }
     catch (IOException e) {
       throw new RuntimeException(e);
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/DeepStorageShuffleClient.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/DeepStorageShuffleClient.java
index ee906ba..6227cc3 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/DeepStorageShuffleClient.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/DeepStorageShuffleClient.java
@@ -21,7 +21,7 @@ package org.apache.druid.indexing.common.task.batch.parallel;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.inject.Inject;
-import org.apache.commons.io.FileUtils;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.segment.loading.LoadSpec;
@@ -47,7 +47,7 @@ public class DeepStorageShuffleClient implements ShuffleClient<DeepStoragePartit
   {
     final LoadSpec loadSpec = objectMapper.convertValue(location.getLoadSpec(), LoadSpec.class);
     final File unzippedDir = new File(partitionDir, StringUtils.format("unzipped_%s", location.getSubTaskId()));
-    FileUtils.forceMkdir(unzippedDir);
+    FileUtils.mkdirp(unzippedDir);
     try {
       loadSpec.loadSegment(unzippedDir);
     }
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/HttpShuffleClient.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/HttpShuffleClient.java
index 13abcae..1946978 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/HttpShuffleClient.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/HttpShuffleClient.java
@@ -88,7 +88,7 @@ public class HttpShuffleClient implements ShuffleClient<GenericPartitionLocation
     );
     final File unzippedDir = new File(partitionDir, StringUtils.format("unzipped_%s", location.getSubTaskId()));
     try {
-      org.apache.commons.io.FileUtils.forceMkdir(unzippedDir);
+      FileUtils.mkdirp(unzippedDir);
       CompressionUtils.unzip(zippedFile, unzippedDir);
     }
     finally {
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialSegmentMergeTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialSegmentMergeTask.java
index 60814fd..76928c2 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialSegmentMergeTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/PartialSegmentMergeTask.java
@@ -26,7 +26,6 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
 import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
-import org.apache.commons.io.FileUtils;
 import org.apache.druid.indexer.TaskStatus;
 import org.apache.druid.indexing.common.TaskLock;
 import org.apache.druid.indexing.common.TaskToolbox;
@@ -35,6 +34,7 @@ import org.apache.druid.indexing.common.actions.SurrogateAction;
 import org.apache.druid.indexing.common.actions.TaskActionClient;
 import org.apache.druid.indexing.common.task.ClientBasedTaskInfoProvider;
 import org.apache.druid.indexing.common.task.TaskResource;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.Pair;
 import org.apache.druid.java.util.common.RetryUtils;
@@ -186,8 +186,8 @@ abstract class PartialSegmentMergeTask<S extends ShardSpec> extends PerfectRollu
     );
 
     final File persistDir = toolbox.getPersistDir();
-    FileUtils.deleteQuietly(persistDir);
-    FileUtils.forceMkdir(persistDir);
+    org.apache.commons.io.FileUtils.deleteQuietly(persistDir);
+    FileUtils.mkdirp(persistDir);
 
     final Set<DataSegment> pushedSegments = mergeAndPushSegments(
         toolbox,
@@ -212,8 +212,8 @@ abstract class PartialSegmentMergeTask<S extends ShardSpec> extends PerfectRollu
   ) throws IOException
   {
     final File tempDir = toolbox.getIndexingTmpDir();
-    FileUtils.deleteQuietly(tempDir);
-    FileUtils.forceMkdir(tempDir);
+    org.apache.commons.io.FileUtils.deleteQuietly(tempDir);
+    FileUtils.mkdirp(tempDir);
 
     final Map<Interval, Int2ObjectMap<List<File>>> intervalToUnzippedFiles = new HashMap<>();
     // Fetch partition files
@@ -221,13 +221,13 @@ abstract class PartialSegmentMergeTask<S extends ShardSpec> extends PerfectRollu
       final Interval interval = entryPerInterval.getKey();
       for (Int2ObjectMap.Entry<List<PartitionLocation>> entryPerBucketId : entryPerInterval.getValue().int2ObjectEntrySet()) {
         final int bucketId = entryPerBucketId.getIntKey();
-        final File partitionDir = FileUtils.getFile(
+        final File partitionDir = org.apache.commons.io.FileUtils.getFile(
             tempDir,
             interval.getStart().toString(),
             interval.getEnd().toString(),
             Integer.toString(bucketId)
         );
-        FileUtils.forceMkdir(partitionDir);
+        FileUtils.mkdirp(partitionDir);
         for (PartitionLocation location : entryPerBucketId.getValue()) {
           final File unzippedDir = toolbox.getShuffleClient().fetchSegmentFile(partitionDir, supervisorTaskId, location);
           intervalToUnzippedFiles.computeIfAbsent(interval, k -> new Int2ObjectOpenHashMap<>())
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/tasklogs/FileTaskLogs.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/tasklogs/FileTaskLogs.java
index c97c885..857d4c6 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/tasklogs/FileTaskLogs.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/tasklogs/FileTaskLogs.java
@@ -51,25 +51,19 @@ public class FileTaskLogs implements TaskLogs
   @Override
   public void pushTaskLog(final String taskid, File file) throws IOException
   {
-    if (config.getDirectory().exists() || config.getDirectory().mkdirs()) {
-      final File outputFile = fileForTask(taskid, file.getName());
-      Files.copy(file, outputFile);
-      log.info("Wrote task log to: %s", outputFile);
-    } else {
-      throw new IOE("Unable to create task log dir[%s]", config.getDirectory());
-    }
+    FileUtils.mkdirp(config.getDirectory());
+    final File outputFile = fileForTask(taskid, file.getName());
+    Files.copy(file, outputFile);
+    log.info("Wrote task log to: %s", outputFile);
   }
 
   @Override
   public void pushTaskReports(String taskid, File reportFile) throws IOException
   {
-    if (config.getDirectory().exists() || config.getDirectory().mkdirs()) {
-      final File outputFile = fileForTask(taskid, reportFile.getName());
-      Files.copy(reportFile, outputFile);
-      log.info("Wrote task report to: %s", outputFile);
-    } else {
-      throw new IOE("Unable to create task report dir[%s]", config.getDirectory());
-    }
+    FileUtils.mkdirp(config.getDirectory());
+    final File outputFile = fileForTask(taskid, reportFile.getName());
+    Files.copy(reportFile, outputFile);
+    log.info("Wrote task report to: %s", outputFile);
   }
 
   @Override
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java
index 49cae23..212c039 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java
@@ -51,7 +51,6 @@ import org.apache.druid.indexing.overlord.config.ForkingTaskRunnerConfig;
 import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.FileUtils;
-import org.apache.druid.java.util.common.IOE;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.concurrent.Execs;
@@ -161,9 +160,7 @@ public class ForkingTaskRunner
                   try {
                     final Closer closer = Closer.create();
                     try {
-                      if (!attemptDir.mkdirs()) {
-                        throw new IOE("Could not create directories: %s", attemptDir);
-                      }
+                      FileUtils.mkdirp(attemptDir);
 
                       final File taskFile = new File(taskDir, "task.json");
                       final File statusFile = new File(attemptDir, "status.json");
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ThreadingTaskRunner.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ThreadingTaskRunner.java
index 84a414c..bff6d61 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ThreadingTaskRunner.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ThreadingTaskRunner.java
@@ -43,7 +43,6 @@ import org.apache.druid.indexing.overlord.autoscaling.ScalingStats;
 import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.FileUtils;
-import org.apache.druid.java.util.common.IOE;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.concurrent.Execs;
@@ -165,9 +164,7 @@ public class ThreadingTaskRunner
                           final ThreadingTaskRunnerWorkItem taskWorkItem;
 
                           try {
-                            if (!attemptDir.mkdirs()) {
-                              throw new IOE("Could not create directories: %s", attemptDir);
-                            }
+                            FileUtils.mkdirp(attemptDir);
 
                             final File taskFile = new File(taskDir, "task.json");
                             final File reportsFile = new File(attemptDir, "report.json");
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerTaskManager.java b/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerTaskManager.java
index d0f2a3b..b2b65ec 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerTaskManager.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerTaskManager.java
@@ -312,10 +312,10 @@ public class WorkerTaskManager
     return new File(taskConfig.getBaseTaskDir(), "workerTaskManagerTmp");
   }
 
-  private void cleanupAndMakeTmpTaskDir()
+  private void cleanupAndMakeTmpTaskDir() throws IOException
   {
     File tmpDir = getTmpTaskDir();
-    tmpDir.mkdirs();
+    FileUtils.mkdirp(tmpDir);
     if (!tmpDir.isDirectory()) {
       throw new ISE("Tmp Tasks Dir [%s] does not exist/not-a-directory.", tmpDir);
     }
@@ -334,17 +334,13 @@ public class WorkerTaskManager
     return new File(taskConfig.getBaseTaskDir(), "assignedTasks");
   }
 
-  private void initAssignedTasks()
+  private void initAssignedTasks() throws IOException
   {
     File assignedTaskDir = getAssignedTaskDir();
 
     log.debug("Looking for any previously assigned tasks on disk[%s].", assignedTaskDir);
 
-    assignedTaskDir.mkdirs();
-
-    if (!assignedTaskDir.isDirectory()) {
-      throw new ISE("Assigned Tasks Dir [%s] does not exist/not-a-directory.", assignedTaskDir);
-    }
+    FileUtils.mkdirp(assignedTaskDir);
 
     for (File taskFile : assignedTaskDir.listFiles()) {
       try {
@@ -461,16 +457,12 @@ public class WorkerTaskManager
     }
   }
 
-  private void initCompletedTasks()
+  private void initCompletedTasks() throws IOException
   {
     File completedTaskDir = getCompletedTaskDir();
     log.debug("Looking for any previously completed tasks on disk[%s].", completedTaskDir);
 
-    completedTaskDir.mkdirs();
-
-    if (!completedTaskDir.isDirectory()) {
-      throw new ISE("Completed Tasks Dir [%s] does not exist/not-a-directory.", completedTaskDir);
-    }
+    FileUtils.mkdirp(completedTaskDir);
 
     for (File taskFile : completedTaskDir.listFiles()) {
       try {
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/worker/executor/ExecutorLifecycle.java b/indexing-service/src/main/java/org/apache/druid/indexing/worker/executor/ExecutorLifecycle.java
index 3568e79..7d5d565 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/worker/executor/ExecutorLifecycle.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/worker/executor/ExecutorLifecycle.java
@@ -25,13 +25,13 @@ import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.inject.Inject;
-import org.apache.commons.io.FileUtils;
 import org.apache.druid.indexer.TaskStatus;
 import org.apache.druid.indexing.common.actions.TaskActionClientFactory;
 import org.apache.druid.indexing.common.config.TaskConfig;
 import org.apache.druid.indexing.common.task.Task;
 import org.apache.druid.indexing.overlord.TaskRunner;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.concurrent.Execs;
 import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
@@ -192,7 +192,7 @@ public class ExecutorLifecycle
 
               final File statusFileParent = statusFile.getParentFile();
               if (statusFileParent != null) {
-                FileUtils.forceMkdir(statusFileParent);
+                FileUtils.mkdirp(statusFileParent);
               }
               jsonMapper.writeValue(statusFile, taskStatus);
 
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManager.java b/indexing-service/src/main/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManager.java
index feaa867..4c56b00 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManager.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManager.java
@@ -24,7 +24,6 @@ import com.google.common.collect.Iterators;
 import com.google.common.io.ByteSource;
 import com.google.common.io.Files;
 import com.google.inject.Inject;
-import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.druid.client.indexing.IndexingServiceClient;
 import org.apache.druid.client.indexing.TaskStatus;
@@ -35,6 +34,7 @@ import org.apache.druid.indexing.common.config.TaskConfig;
 import org.apache.druid.indexing.common.task.batch.parallel.GenericPartitionStat;
 import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.IOE;
 import org.apache.druid.java.util.common.ISE;
@@ -197,7 +197,7 @@ public class LocalIntermediaryDataManager implements IntermediaryDataManager
           supervisorTaskCheckTimes.computeIfAbsent(
               supervisorTaskId,
               k -> {
-                for (File eachFile : FileUtils.listFiles(supervisorTaskDir, null, true)) {
+                for (File eachFile : org.apache.commons.io.FileUtils.listFiles(supervisorTaskDir, null, true)) {
                   final String relativeSegmentPath = locationPath
                       .relativize(eachFile.toPath().toAbsolutePath())
                       .toString();
@@ -298,7 +298,7 @@ public class LocalIntermediaryDataManager implements IntermediaryDataManager
     final Closer closer = Closer.create();
     closer.register(() -> {
       try {
-        FileUtils.forceDelete(taskTempDir);
+        org.apache.commons.io.FileUtils.forceDelete(taskTempDir);
       }
       catch (IOException e) {
         LOG.warn(e, "Failed to delete directory[%s]", taskTempDir.getAbsolutePath());
@@ -316,7 +316,7 @@ public class LocalIntermediaryDataManager implements IntermediaryDataManager
 
     //noinspection unused
     try (final Closer resourceCloser = closer) {
-      FileUtils.forceMkdir(taskTempDir);
+      FileUtils.mkdirp(taskTempDir);
 
      // Temporary compressed file. Will be removed when taskTempDir is deleted.
       final File tempZippedFile = new File(taskTempDir, segment.getId().toString());
@@ -340,8 +340,8 @@ public class LocalIntermediaryDataManager implements IntermediaryDataManager
         final File destFile = location.reserve(partitionFilePath, segment.getId().toString(), tempZippedFile.length());
         if (destFile != null) {
           try {
-            FileUtils.forceMkdirParent(destFile);
-            org.apache.druid.java.util.common.FileUtils.writeAtomically(
+            FileUtils.mkdirp(destFile.getParentFile());
+            FileUtils.writeAtomically(
                 destFile,
                 out -> Files.asByteSource(tempZippedFile).copyTo(out)
             );
@@ -355,7 +355,7 @@ public class LocalIntermediaryDataManager implements IntermediaryDataManager
           }
           catch (Exception e) {
             location.release(partitionFilePath, tempZippedFile.length());
-            FileUtils.deleteQuietly(destFile);
+            org.apache.commons.io.FileUtils.deleteQuietly(destFile);
             LOG.warn(
                 e,
                 "Failed to write segment[%s] at [%s]. Trying again with the next location",
@@ -421,10 +421,10 @@ public class LocalIntermediaryDataManager implements IntermediaryDataManager
       final File supervisorTaskPath = new File(location.getPath(), supervisorTaskId);
       if (supervisorTaskPath.exists()) {
         LOG.info("Cleaning up [%s]", supervisorTaskPath);
-        for (File eachFile : FileUtils.listFiles(supervisorTaskPath, null, true)) {
+        for (File eachFile : org.apache.commons.io.FileUtils.listFiles(supervisorTaskPath, null, true)) {
           location.removeFile(eachFile);
         }
-        FileUtils.forceDelete(supervisorTaskPath);
+        org.apache.commons.io.FileUtils.forceDelete(supervisorTaskPath);
       }
     }
     supervisorTaskCheckTimes.remove(supervisorTaskId);
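
    [Editor's note] In this file the unqualified FileUtils import now refers to the
    Druid class, so the remaining commons-io calls (forceDelete, listFiles,
    deleteQuietly) are fully qualified to avoid a name clash. A small stand-alone
    sketch of that convention, with illustrative class and variable names:

        import java.io.File;
        import java.io.IOException;
        import org.apache.druid.java.util.common.FileUtils;

        class TempDirExample
        {
          void recreate(File taskTempDir) throws IOException
          {
            if (taskTempDir.exists()) {
              // commons-io helper with no Druid equivalent: fully qualified
              org.apache.commons.io.FileUtils.forceDelete(taskTempDir);
            }
            // Druid helper for directory creation: unqualified import
            FileUtils.mkdirp(taskTempDir);
          }
        }
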
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java
index 9f5e4d6..42703aa 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java
@@ -777,7 +777,7 @@ public class AbstractParallelIndexSupervisorTaskTest extends IngestionTestBase
       );
       final File unzippedDir = new File(partitionDir, StringUtils.format("unzipped_%s", location.getSubTaskId()));
       try {
-        org.apache.commons.io.FileUtils.forceMkdir(unzippedDir);
+        FileUtils.mkdirp(unzippedDir);
         CompressionUtils.unzip(fetchedFile, unzippedDir);
       }
       finally {
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/tasklogs/FileTaskLogsTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/tasklogs/FileTaskLogsTest.java
index 88d676f..e905587 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/tasklogs/FileTaskLogsTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/tasklogs/FileTaskLogsTest.java
@@ -84,7 +84,7 @@ public class FileTaskLogsTest
     final TaskLogs taskLogs = new FileTaskLogs(new FileTaskLogsConfig(logDir));
 
     expectedException.expect(IOException.class);
-    expectedException.expectMessage("Unable to create task log dir");
+    expectedException.expectMessage("Cannot create directory");
     taskLogs.pushTaskLog("foo", logFile);
   }
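
    [Editor's note] The expected message changes because the IOException is now
    raised by FileUtils.mkdirp rather than by FileTaskLogs itself. An illustrative
    way to observe the failure mode the updated test relies on is to point mkdirp at
    a path that already exists as a regular file (the message prefix is assumed from
    the expectation above):

        import java.io.File;
        import java.io.IOException;
        import org.apache.druid.java.util.common.FileUtils;

        class MkdirpFailureExample
        {
          public static void main(String[] args) throws IOException
          {
            File notADir = File.createTempFile("mkdirp", "example"); // a plain file
            try {
              FileUtils.mkdirp(notADir);
            }
            catch (IOException e) {
              // Assumed to start with "Cannot create directory", per the test above.
              System.out.println(e.getMessage());
            }
          }
        }
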
 
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java
index fa13c8e..b4cefda 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java
@@ -54,7 +54,6 @@ import org.apache.druid.indexing.overlord.Segments;
 import org.apache.druid.indexing.overlord.TaskLockbox;
 import org.apache.druid.indexing.overlord.TaskStorage;
 import org.apache.druid.java.util.common.FileUtils;
-import org.apache.druid.java.util.common.IOE;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.JodaUtils;
 import org.apache.druid.java.util.common.StringUtils;
@@ -206,9 +205,7 @@ public class IngestSegmentFirehoseFactoryTest
       index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0));
     }
 
-    if (!PERSIST_DIR.mkdirs() && !PERSIST_DIR.exists()) {
-      throw new IOE("Could not create directory at [%s]", PERSIST_DIR.getAbsolutePath());
-    }
+    FileUtils.mkdirp(PERSIST_DIR);
     INDEX_MERGER_V9.persist(index, PERSIST_DIR, indexSpec, null);
 
     final CoordinatorClient cc = new CoordinatorClient(null, null)
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java
index 3e48fcb..f7826e9 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java
@@ -89,6 +89,7 @@ import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.CloseableIterators;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.Pair;
@@ -157,7 +158,6 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import javax.annotation.Nullable;
-
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
@@ -925,7 +925,7 @@ public class TaskLifecycleTest extends InitializedNullHandlingTest
     List<File> segmentFiles = new ArrayList<>();
     for (DataSegment segment : mdc.retrieveUnusedSegmentsForInterval("test_kill_task", Intervals.of("2011-04-01/P4D"))) {
       File file = new File((String) segment.getLoadSpec().get("path"));
-      file.mkdirs();
+      FileUtils.mkdirp(file);
       segmentFiles.add(file);
     }
 
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskManagerTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskManagerTest.java
index 1fe3acb..c4c1636 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskManagerTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskManagerTest.java
@@ -181,8 +181,8 @@ public class WorkerTaskManagerTest
     Task task2 = createNoopTask("task2-completed-already");
     Task task3 = createNoopTask("task3-assigned-explicitly");
 
-    workerTaskManager.getAssignedTaskDir().mkdirs();
-    workerTaskManager.getCompletedTaskDir().mkdirs();
+    FileUtils.mkdirp(workerTaskManager.getAssignedTaskDir());
+    FileUtils.mkdirp(workerTaskManager.getCompletedTaskDir());
 
     // create a task in assigned task directory, to simulate MM shutdown right after a task was assigned.
     jsonMapper.writeValue(new File(workerTaskManager.getAssignedTaskDir(), task1.getId()), task1);
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java
index 3269d47..b40e574 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java
@@ -20,7 +20,7 @@
 package org.apache.druid.query.groupby.epinephelinae;
 
 import com.google.common.collect.ImmutableSet;
-import org.apache.commons.io.FileUtils;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.logger.Logger;
@@ -82,7 +82,7 @@ public class LimitedTemporaryStorage implements Closeable
         throw new ISE("Closed");
       }
 
-      FileUtils.forceMkdir(storageDirectory);
+      FileUtils.mkdirp(storageDirectory);
       if (!createdStorageDirectory) {
         createdStorageDirectory = true;
       }
diff --git a/processing/src/main/java/org/apache/druid/query/lookup/LookupSnapshotTaker.java b/processing/src/main/java/org/apache/druid/query/lookup/LookupSnapshotTaker.java
index b2f2132..6111d99 100644
--- a/processing/src/main/java/org/apache/druid/query/lookup/LookupSnapshotTaker.java
+++ b/processing/src/main/java/org/apache/druid/query/lookup/LookupSnapshotTaker.java
@@ -54,12 +54,6 @@ public class LookupSnapshotTaker
         "can not work without specifying persistDirectory"
     );
     this.persistDirectory = new File(persistDirectory);
-    if (!this.persistDirectory.exists()) {
-      Preconditions.checkArgument(this.persistDirectory.mkdirs(), "Oups was not able to create persist directory");
-    }
-    if (!this.persistDirectory.isDirectory()) {
-      throw new ISE("Can only persist to directories, [%s] wasn't a directory", persistDirectory);
-    }
   }
 
   public synchronized List<LookupBean> pullExistingSnapshot(final String tier)
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java b/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
index c5191d3..b433fb9 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
@@ -209,7 +209,7 @@ public class IndexMergerV9 implements IndexMerger
     Closer closer = Closer.create();
     try {
       final FileSmoosher v9Smoosher = new FileSmoosher(outDir);
-      org.apache.commons.io.FileUtils.forceMkdir(outDir);
+      FileUtils.mkdirp(outDir);
 
       SegmentWriteOutMediumFactory omf = segmentWriteOutMediumFactory != null ? segmentWriteOutMediumFactory
                                                                               : defaultSegmentWriteOutMediumFactory;
@@ -859,7 +859,7 @@ public class IndexMergerV9 implements IndexMerger
       );
     }
 
-    org.apache.commons.io.FileUtils.forceMkdir(outDir);
+    FileUtils.mkdirp(outDir);
 
     log.debug("Starting persist for interval[%s], rows[%,d]", dataInterval, index.size());
     return multiphaseMerge(
@@ -985,7 +985,7 @@ public class IndexMergerV9 implements IndexMerger
   ) throws IOException
   {
     FileUtils.deleteDirectory(outDir);
-    org.apache.commons.io.FileUtils.forceMkdir(outDir);
+    FileUtils.mkdirp(outDir);
 
     List<File> tempDirs = new ArrayList<>();
 
@@ -1227,7 +1227,7 @@ public class IndexMergerV9 implements IndexMerger
   ) throws IOException
   {
     FileUtils.deleteDirectory(outDir);
-    org.apache.commons.io.FileUtils.forceMkdir(outDir);
+    FileUtils.mkdirp(outDir);
 
     final List<String> mergedDimensions = IndexMerger.getMergedDimensions(indexes, null);
 
diff --git a/processing/src/main/java/org/apache/druid/segment/writeout/TmpFileSegmentWriteOutMedium.java b/processing/src/main/java/org/apache/druid/segment/writeout/TmpFileSegmentWriteOutMedium.java
index 3baf728..b075a67 100644
--- a/processing/src/main/java/org/apache/druid/segment/writeout/TmpFileSegmentWriteOutMedium.java
+++ b/processing/src/main/java/org/apache/druid/segment/writeout/TmpFileSegmentWriteOutMedium.java
@@ -35,7 +35,7 @@ public final class TmpFileSegmentWriteOutMedium implements SegmentWriteOutMedium
   TmpFileSegmentWriteOutMedium(File outDir) throws IOException
   {
     File tmpOutputFilesDir = new File(outDir, "tmpOutputFiles");
-    org.apache.commons.io.FileUtils.forceMkdir(tmpOutputFilesDir);
+    FileUtils.mkdirp(tmpOutputFilesDir);
     closer.register(() -> FileUtils.deleteDirectory(tmpOutputFilesDir));
     this.dir = tmpOutputFilesDir;
   }
diff --git a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java
index ee58422..db7c57d 100644
--- a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java
+++ b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java
@@ -27,6 +27,7 @@ import org.apache.druid.data.input.impl.JSONParseSpec;
 import org.apache.druid.data.input.impl.MapInputRowParser;
 import org.apache.druid.data.input.impl.TimestampSpec;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
 import org.apache.druid.query.metadata.SegmentMetadataQueryConfig;
@@ -344,7 +345,7 @@ public class DoubleStorageTest
     }
     File someTmpFile = File.createTempFile("billy", "yay");
     someTmpFile.delete();
-    someTmpFile.mkdirs();
+    FileUtils.mkdirp(someTmpFile);
     INDEX_MERGER_V9.persist(index, someTmpFile, new IndexSpec(), null);
     someTmpFile.delete();
     return INDEX_IO.loadIndex(someTmpFile);
diff --git a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9CompatibilityTest.java b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9CompatibilityTest.java
index b1866ff..4f61646 100644
--- a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9CompatibilityTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9CompatibilityTest.java
@@ -150,7 +150,7 @@ public class IndexMergerV9CompatibilityTest
     }
     tmpDir = FileUtils.createTempDir();
     persistTmpDir = new File(tmpDir, "persistDir");
-    org.apache.commons.io.FileUtils.forceMkdir(persistTmpDir);
+    FileUtils.mkdirp(persistTmpDir);
     String[] files = new String[] {"00000.smoosh", "meta.smoosh", "version.bin"};
     for (String file : files) {
       new ByteSource()
diff --git a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java
index 8e5da94..1f0ae6f 100644
--- a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java
@@ -259,7 +259,7 @@ public class IndexMergerV9WithSpatialIndexTest extends InitializedNullHandlingTe
     IncrementalIndex theIndex = makeIncrementalIndex();
     File tmpFile = File.createTempFile("billy", "yay");
     tmpFile.delete();
-    tmpFile.mkdirs();
+    FileUtils.mkdirp(tmpFile);
 
     try {
       indexMergerV9.persist(theIndex, tmpFile, indexSpec, null);
@@ -488,10 +488,10 @@ public class IndexMergerV9WithSpatialIndexTest extends InitializedNullHandlingTe
       File thirdFile = new File(tmpFile, "third");
       File mergedFile = new File(tmpFile, "merged");
 
-      firstFile.mkdirs();
-      secondFile.mkdirs();
-      thirdFile.mkdirs();
-      mergedFile.mkdirs();
+      FileUtils.mkdirp(firstFile);
+      FileUtils.mkdirp(secondFile);
+      FileUtils.mkdirp(thirdFile);
+      FileUtils.mkdirp(mergedFile);
 
       indexMergerV9.persist(first, DATA_INTERVAL, firstFile, indexSpec, null);
       indexMergerV9.persist(second, DATA_INTERVAL, secondFile, indexSpec, null);
diff --git a/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java b/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java
index 7328061..1352a11 100644
--- a/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java
@@ -26,6 +26,7 @@ import com.google.common.collect.Lists;
 import org.apache.druid.data.input.MapBasedInputRow;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.Pair;
 import org.apache.druid.java.util.common.granularity.Granularities;
@@ -202,11 +203,11 @@ public class SchemalessIndexTest
         File bottomFile = new File(tmpFile, "bottom");
         File mergedFile = new File(tmpFile, "merged");
 
-        topFile.mkdirs();
+        FileUtils.mkdirp(topFile);
+        FileUtils.mkdirp(bottomFile);
+        FileUtils.mkdirp(mergedFile);
         topFile.deleteOnExit();
-        bottomFile.mkdirs();
         bottomFile.deleteOnExit();
-        mergedFile.mkdirs();
         mergedFile.deleteOnExit();
 
         indexMerger.persist(top, topFile, INDEX_SPEC, null);
@@ -257,7 +258,7 @@ public class SchemalessIndexTest
 
         File mergedFile = new File(tmpFile, "merged");
 
-        mergedFile.mkdirs();
+        FileUtils.mkdirp(mergedFile);
         mergedFile.deleteOnExit();
 
         QueryableIndex index = indexIO.loadIndex(
@@ -295,7 +296,7 @@ public class SchemalessIndexTest
 
         File mergedFile = new File(tmpFile, "merged");
 
-        mergedFile.mkdirs();
+        FileUtils.mkdirp(mergedFile);
         mergedFile.deleteOnExit();
 
         List<QueryableIndex> indexesToMerge = new ArrayList<>();
@@ -387,7 +388,7 @@ public class SchemalessIndexTest
 
           File tmpFile = File.createTempFile("billy", "yay");
           tmpFile.delete();
-          tmpFile.mkdirs();
+          FileUtils.mkdirp(tmpFile);
           tmpFile.deleteOnExit();
 
           indexMerger.persist(rowIndex, tmpFile, INDEX_SPEC, null);
@@ -453,7 +454,7 @@ public class SchemalessIndexTest
     for (Pair<String, AggregatorFactory[]> file : files) {
       IncrementalIndex index = makeIncrementalIndex(file.lhs, file.rhs);
       File theFile = new File(tmpFile, file.lhs);
-      theFile.mkdirs();
+      FileUtils.mkdirp(theFile);
       theFile.deleteOnExit();
       filesToMap.add(theFile);
       indexMerger.persist(index, theFile, INDEX_SPEC, null);
@@ -471,7 +472,7 @@ public class SchemalessIndexTest
       File tmpFile = File.createTempFile("yay", "boo");
       tmpFile.delete();
       File mergedFile = new File(tmpFile, "merged");
-      mergedFile.mkdirs();
+      FileUtils.mkdirp(mergedFile);
       mergedFile.deleteOnExit();
 
       List<File> filesToMap = makeFilesToMap(tmpFile, files);
@@ -536,7 +537,7 @@ public class SchemalessIndexTest
       File tmpFile = File.createTempFile("yay", "who");
       tmpFile.delete();
       File mergedFile = new File(tmpFile, "merged");
-      mergedFile.mkdirs();
+      FileUtils.mkdirp(mergedFile);
       mergedFile.deleteOnExit();
 
       List<File> filesToMap = makeFilesToMap(tmpFile, files);
diff --git a/processing/src/test/java/org/apache/druid/segment/TestIndex.java b/processing/src/test/java/org/apache/druid/segment/TestIndex.java
index ad97940..d519ecd 100644
--- a/processing/src/test/java/org/apache/druid/segment/TestIndex.java
+++ b/processing/src/test/java/org/apache/druid/segment/TestIndex.java
@@ -34,6 +34,7 @@ import org.apache.druid.data.input.impl.StringDimensionSchema;
 import org.apache.druid.data.input.impl.StringInputRowParser;
 import org.apache.druid.data.input.impl.TimestampSpec;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.query.aggregation.AggregatorFactory;
@@ -191,11 +192,11 @@ public class TestIndex
       File bottomFile = new File(tmpFile, "bottom");
       File mergedFile = new File(tmpFile, "merged");
 
-      topFile.mkdirs();
+      FileUtils.mkdirp(topFile);
+      FileUtils.mkdirp(bottomFile);
+      FileUtils.mkdirp(mergedFile);
       topFile.deleteOnExit();
-      bottomFile.mkdirs();
       bottomFile.deleteOnExit();
-      mergedFile.mkdirs();
       mergedFile.deleteOnExit();
 
       INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, INDEX_SPEC, null);
@@ -377,7 +378,7 @@ public class TestIndex
     try {
       File someTmpFile = File.createTempFile("billy", "yay");
       someTmpFile.delete();
-      someTmpFile.mkdirs();
+      FileUtils.mkdirp(someTmpFile);
       someTmpFile.deleteOnExit();
 
       INDEX_MERGER.persist(index, someTmpFile, INDEX_SPEC, null);
diff --git a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java
index 94e413c..64971cb 100644
--- a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java
@@ -27,6 +27,7 @@ import org.apache.druid.data.input.MapBasedInputRow;
 import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.data.input.impl.SpatialDimensionSchema;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.granularity.Granularities;
@@ -248,7 +249,7 @@ public class SpatialFilterBonusTest
     IncrementalIndex theIndex = makeIncrementalIndex();
     File tmpFile = File.createTempFile("billy", "yay");
     tmpFile.delete();
-    tmpFile.mkdirs();
+    FileUtils.mkdirp(tmpFile);
     tmpFile.deleteOnExit();
 
     indexMerger.persist(theIndex, tmpFile, indexSpec, null);
@@ -433,13 +434,13 @@ public class SpatialFilterBonusTest
       File thirdFile = new File(tmpFile, "third");
       File mergedFile = new File(tmpFile, "merged");
 
-      firstFile.mkdirs();
+      FileUtils.mkdirp(firstFile);
+      FileUtils.mkdirp(secondFile);
+      FileUtils.mkdirp(thirdFile);
+      FileUtils.mkdirp(mergedFile);
       firstFile.deleteOnExit();
-      secondFile.mkdirs();
       secondFile.deleteOnExit();
-      thirdFile.mkdirs();
       thirdFile.deleteOnExit();
-      mergedFile.mkdirs();
       mergedFile.deleteOnExit();
 
       indexMerger.persist(first, DATA_INTERVAL, firstFile, indexSpec, null);
diff --git a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java
index 47835bf..45de06f 100644
--- a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java
@@ -28,6 +28,7 @@ import org.apache.druid.data.input.MapBasedInputRow;
 import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.data.input.impl.SpatialDimensionSchema;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.query.Druids;
@@ -270,7 +271,7 @@ public class SpatialFilterTest extends InitializedNullHandlingTest
     IncrementalIndex theIndex = makeIncrementalIndex();
     File tmpFile = File.createTempFile("billy", "yay");
     tmpFile.delete();
-    tmpFile.mkdirs();
+    FileUtils.mkdirp(tmpFile);
     tmpFile.deleteOnExit();
 
     INDEX_MERGER.persist(theIndex, tmpFile, indexSpec, null);
@@ -488,13 +489,13 @@ public class SpatialFilterTest extends InitializedNullHandlingTest
       File thirdFile = new File(tmpFile, "third");
       File mergedFile = new File(tmpFile, "merged");
 
-      firstFile.mkdirs();
+      FileUtils.mkdirp(firstFile);
+      FileUtils.mkdirp(secondFile);
+      FileUtils.mkdirp(thirdFile);
+      FileUtils.mkdirp(mergedFile);
       firstFile.deleteOnExit();
-      secondFile.mkdirs();
       secondFile.deleteOnExit();
-      thirdFile.mkdirs();
       thirdFile.deleteOnExit();
-      mergedFile.mkdirs();
       mergedFile.deleteOnExit();
 
       INDEX_MERGER.persist(first, DATA_INTERVAL, firstFile, indexSpec, null);
diff --git a/server/src/main/java/org/apache/druid/query/lookup/LookupReferencesManager.java b/server/src/main/java/org/apache/druid/query/lookup/LookupReferencesManager.java
index ccc5d0b..8c5872e 100644
--- a/server/src/main/java/org/apache/druid/query/lookup/LookupReferencesManager.java
+++ b/server/src/main/java/org/apache/druid/query/lookup/LookupReferencesManager.java
@@ -34,6 +34,7 @@ import org.apache.druid.concurrent.LifecycleLock;
 import org.apache.druid.discovery.DruidLeaderClient;
 import org.apache.druid.guice.ManageLifecycle;
 import org.apache.druid.guice.annotations.Json;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.IOE;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.RE;
@@ -48,6 +49,7 @@ import org.jboss.netty.handler.codec.http.HttpMethod;
 import org.jboss.netty.handler.codec.http.HttpResponseStatus;
 
 import javax.annotation.Nullable;
+import java.io.File;
 import java.io.IOException;
 import java.util.AbstractMap;
 import java.util.ArrayList;
@@ -147,13 +149,16 @@ public class LookupReferencesManager implements LookupExtractorFactoryContainerP
   }
 
   @LifecycleStart
-  public void start()
+  public void start() throws IOException
   {
     if (!lifecycleLock.canStart()) {
       throw new ISE("can't start.");
     }
     try {
       LOG.debug("LookupExtractorFactoryContainerProvider starting.");
+      if (!Strings.isNullOrEmpty(lookupConfig.getSnapshotWorkingDir())) {
+        FileUtils.mkdirp(new File(lookupConfig.getSnapshotWorkingDir()));
+      }
       loadAllLookupsAndInitStateRef();
       if (!testMode) {
         mainThread = Execs.makeThread(
diff --git a/server/src/main/java/org/apache/druid/segment/loading/LocalDataSegmentPusher.java b/server/src/main/java/org/apache/druid/segment/loading/LocalDataSegmentPusher.java
index fc8aeb8..07222ff 100644
--- a/server/src/main/java/org/apache/druid/segment/loading/LocalDataSegmentPusher.java
+++ b/server/src/main/java/org/apache/druid/segment/loading/LocalDataSegmentPusher.java
@@ -89,7 +89,7 @@ public class LocalDataSegmentPusher implements DataSegmentPusher
 
     final File tmpOutDir = new File(config.getStorageDirectory(), makeIntermediateDir());
     log.debug("Creating intermediate directory[%s] for segment[%s].", tmpOutDir.toString(), segment.getId());
-    org.apache.commons.io.FileUtils.forceMkdir(tmpOutDir);
+    FileUtils.mkdirp(tmpOutDir);
 
     try {
       final File tmpIndexFile = new File(tmpOutDir, INDEX_FILENAME);
@@ -99,7 +99,7 @@ public class LocalDataSegmentPusher implements DataSegmentPusher
                                              .withSize(size)
                                              .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile));
 
-      org.apache.commons.io.FileUtils.forceMkdir(outDir);
+      FileUtils.mkdirp(outDir);
       final File indexFileTarget = new File(outDir, tmpIndexFile.getName());
 
       if (!tmpIndexFile.renameTo(indexFileTarget)) {
diff --git a/server/src/main/java/org/apache/druid/segment/loading/SegmentLocalCacheManager.java b/server/src/main/java/org/apache/druid/segment/loading/SegmentLocalCacheManager.java
index 25c9cae..0ad2901 100644
--- a/server/src/main/java/org/apache/druid/segment/loading/SegmentLocalCacheManager.java
+++ b/server/src/main/java/org/apache/druid/segment/loading/SegmentLocalCacheManager.java
@@ -300,10 +300,9 @@ public class SegmentLocalCacheManager implements SegmentCacheManager
     // the parent directories of the segment are removed
     final File downloadStartMarker = new File(storageDir, DOWNLOAD_START_MARKER_FILE_NAME);
     synchronized (directoryWriteRemoveLock) {
-      if (!storageDir.mkdirs()) {
-        log.debug("Unable to make parent file[%s]", storageDir);
-      }
       try {
+        FileUtils.mkdirp(storageDir);
+
         if (!downloadStartMarker.createNewFile()) {
           throw new SegmentLoadingException("Was not able to create new download marker for [%s]", storageDir);
         }
diff --git a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorImpl.java b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorImpl.java
index c79b57e..4b599ee 100644
--- a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorImpl.java
+++ b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorImpl.java
@@ -260,7 +260,6 @@ public class AppenderatorImpl implements Appenderator
   @Override
   public Object startJob()
   {
-    tuningConfig.getBasePersistDirectory().mkdirs();
     lockBasePersistDirectory();
     final Object retVal = bootstrapSinksFromDisk();
     initializeExecutors();
@@ -381,7 +380,8 @@ public class AppenderatorImpl implements Appenderator
           }
         }
 
-        if (!skipBytesInMemoryOverheadCheck && bytesCurrentlyInMemory.get() - bytesToBePersisted > maxBytesTuningConfig) {
+        if (!skipBytesInMemoryOverheadCheck
+            && bytesCurrentlyInMemory.get() - bytesToBePersisted > maxBytesTuningConfig) {
           // We are still over maxBytesTuningConfig even after persisting.
           // This means that we ran out of all available memory to ingest (due to overheads created as part of ingestion)
           final String alertMessage = StringUtils.format(
@@ -1099,6 +1099,8 @@ public class AppenderatorImpl implements Appenderator
   {
     if (basePersistDirLock == null) {
       try {
+        FileUtils.mkdirp(tuningConfig.getBasePersistDirectory());
+
         basePersistDirLockChannel = FileChannel.open(
             computeLockFile().toPath(),
             StandardOpenOption.CREATE,
@@ -1476,7 +1478,7 @@ public class AppenderatorImpl implements Appenderator
   private File createPersistDirIfNeeded(SegmentIdWithShardSpec identifier) throws IOException
   {
     final File persistDir = computePersistDir(identifier);
-    org.apache.commons.io.FileUtils.forceMkdir(persistDir);
+    FileUtils.mkdirp(persistDir);
 
     objectMapper.writeValue(computeIdentifierFile(identifier), identifier);
 
diff --git a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/BatchAppenderator.java b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/BatchAppenderator.java
index f4a212d..3912db5 100644
--- a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/BatchAppenderator.java
+++ b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/BatchAppenderator.java
@@ -193,7 +193,6 @@ public class BatchAppenderator implements Appenderator
   @Override
   public Object startJob()
   {
-    tuningConfig.getBasePersistDirectory().mkdirs();
     lockBasePersistDirectory();
     initializeExecutors();
     return null;
@@ -927,6 +926,8 @@ public class BatchAppenderator implements Appenderator
   {
     if (basePersistDirLock == null) {
       try {
+        FileUtils.mkdirp(tuningConfig.getBasePersistDirectory());
+
         basePersistDirLockChannel = FileChannel.open(
             computeLockFile().toPath(),
             StandardOpenOption.CREATE,
@@ -1099,7 +1100,7 @@ public class BatchAppenderator implements Appenderator
   private File createPersistDirIfNeeded(SegmentIdWithShardSpec identifier) throws IOException
   {
     final File persistDir = computePersistDir(identifier);
-    org.apache.commons.io.FileUtils.forceMkdir(persistDir);
+    FileUtils.mkdirp(persistDir);
 
     objectMapper.writeValue(computeIdentifierFile(identifier), identifier);
 
diff --git a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/StreamAppenderator.java b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/StreamAppenderator.java
index 1ee36f5..dbdddf7 100644
--- a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/StreamAppenderator.java
+++ b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/StreamAppenderator.java
@@ -228,7 +228,6 @@ public class StreamAppenderator implements Appenderator
   @Override
   public Object startJob()
   {
-    tuningConfig.getBasePersistDirectory().mkdirs();
     lockBasePersistDirectory();
     final Object retVal = bootstrapSinksFromDisk();
     initializeExecutors();
@@ -1022,6 +1021,8 @@ public class StreamAppenderator implements Appenderator
   {
     if (basePersistDirLock == null) {
       try {
+        FileUtils.mkdirp(tuningConfig.getBasePersistDirectory());
+
         basePersistDirLockChannel = FileChannel.open(
             computeLockFile().toPath(),
             StandardOpenOption.CREATE,
@@ -1397,7 +1398,7 @@ public class StreamAppenderator implements Appenderator
   private File createPersistDirIfNeeded(SegmentIdWithShardSpec identifier) throws IOException
   {
     final File persistDir = computePersistDir(identifier);
-    org.apache.commons.io.FileUtils.forceMkdir(persistDir);
+    FileUtils.mkdirp(persistDir);
 
     objectMapper.writeValue(computeIdentifierFile(identifier), identifier);
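
    [Editor's note] In the three appenderators above, directory creation moves out of
    startJob() and into the lock method, so the base persist directory is created
    right before its lock file is opened and any failure surfaces through the
    existing try/catch. An illustrative reduction of that flow (the ".lock" file name
    and variable names here are hypothetical):

        import java.io.File;
        import java.io.IOException;
        import java.nio.channels.FileChannel;
        import java.nio.file.StandardOpenOption;
        import org.apache.druid.java.util.common.FileUtils;

        class BasePersistLockSketch
        {
          FileChannel lock(File basePersistDirectory) throws IOException
          {
            // Create the directory lazily, where it is first needed.
            FileUtils.mkdirp(basePersistDirectory);
            return FileChannel.open(
                new File(basePersistDirectory, ".lock").toPath(),
                StandardOpenOption.CREATE,
                StandardOpenOption.WRITE
            );
          }
        }
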
 
diff --git a/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumber.java b/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumber.java
index 0b2ce80..2194ff7 100644
--- a/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumber.java
+++ b/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumber.java
@@ -25,6 +25,7 @@ import org.apache.druid.client.cache.CacheConfig;
 import org.apache.druid.client.cache.CachePopulatorStats;
 import org.apache.druid.common.guava.ThreadRenamingCallable;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.concurrent.Execs;
 import org.apache.druid.java.util.common.concurrent.ScheduledExecutors;
@@ -43,6 +44,7 @@ import org.apache.druid.server.coordination.DataSegmentAnnouncer;
 import org.joda.time.DateTime;
 import org.joda.time.Duration;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -111,7 +113,13 @@ public class FlushingPlumber extends RealtimePlumber
   {
     log.info("Starting job for %s", getSchema().getDataSource());
 
-    computeBaseDir(getSchema()).mkdirs();
+    try {
+      FileUtils.mkdirp(computeBaseDir(getSchema()));
+    }
+    catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+
     initializeExecutors();
 
     if (flushScheduledExec == null) {
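
    [Editor's note] Here (and in RealtimePlumber below) the checked IOException from
    mkdirp is wrapped in a RuntimeException, presumably because the overridden
    startJob() signature cannot add a throws clause. A stand-alone sketch of that
    pattern, with hypothetical names:

        import java.io.File;
        import java.io.IOException;
        import org.apache.druid.java.util.common.FileUtils;

        class StartJobSketch
        {
          Object startJob(File baseDir)
          {
            try {
              FileUtils.mkdirp(baseDir);
            }
            catch (IOException e) {
              // Wrap: the interface method does not declare checked exceptions.
              throw new RuntimeException(e);
            }
            return null;
          }
        }
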
diff --git a/server/src/main/java/org/apache/druid/segment/realtime/plumber/RealtimePlumber.java b/server/src/main/java/org/apache/druid/segment/realtime/plumber/RealtimePlumber.java
index ca0484c..34daf3a 100644
--- a/server/src/main/java/org/apache/druid/segment/realtime/plumber/RealtimePlumber.java
+++ b/server/src/main/java/org/apache/druid/segment/realtime/plumber/RealtimePlumber.java
@@ -202,7 +202,13 @@ public class RealtimePlumber implements Plumber
   @Override
   public Object startJob()
   {
-    computeBaseDir(schema).mkdirs();
+    try {
+      FileUtils.mkdirp(computeBaseDir(schema));
+    }
+    catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+
     initializeExecutors();
     handoffNotifier.start();
     Object retVal = bootstrapSinksFromDisk();
diff --git a/server/src/main/java/org/apache/druid/server/coordination/SegmentLoadDropHandler.java b/server/src/main/java/org/apache/druid/server/coordination/SegmentLoadDropHandler.java
index a9d972f..53013d1 100644
--- a/server/src/main/java/org/apache/druid/server/coordination/SegmentLoadDropHandler.java
+++ b/server/src/main/java/org/apache/druid/server/coordination/SegmentLoadDropHandler.java
@@ -35,6 +35,7 @@ import com.google.common.util.concurrent.SettableFuture;
 import com.google.inject.Inject;
 import org.apache.druid.guice.ManageLifecycle;
 import org.apache.druid.guice.ServerTypeConfig;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.concurrent.Execs;
 import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
@@ -210,17 +211,11 @@ public class SegmentLoadDropHandler implements DataSegmentChangeHandler
     return started;
   }
 
-  private void loadLocalCache()
+  private void loadLocalCache() throws IOException
   {
     final long start = System.currentTimeMillis();
     File baseDir = config.getInfoDir();
-    if (!baseDir.isDirectory()) {
-      if (baseDir.exists()) {
-        throw new ISE("[%s] exists but not a directory.", baseDir);
-      } else if (!baseDir.mkdirs()) {
-        throw new ISE("Failed to create directory[%s].", baseDir);
-      }
-    }
+    FileUtils.mkdirp(baseDir);
 
     List<DataSegment> cachedSegments = new ArrayList<>();
     File[] segmentsToLoad = baseDir.listFiles();
diff --git a/server/src/main/java/org/apache/druid/server/log/FileRequestLogger.java b/server/src/main/java/org/apache/druid/server/log/FileRequestLogger.java
index e8c1361..9ca4516 100644
--- a/server/src/main/java/org/apache/druid/server/log/FileRequestLogger.java
+++ b/server/src/main/java/org/apache/druid/server/log/FileRequestLogger.java
@@ -21,6 +21,7 @@ package org.apache.druid.server.log;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.concurrent.ScheduledExecutors;
 import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
 import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
@@ -73,7 +74,7 @@ public class FileRequestLogger implements RequestLogger
   public void start()
   {
     try {
-      baseDir.mkdirs();
+      FileUtils.mkdirp(baseDir);
 
       MutableDateTime mutableDateTime = DateTimes.nowUtc().toMutableDateTime(ISOChronology.getInstanceUTC());
       mutableDateTime.setMillisOfDay(0);
diff --git a/server/src/test/java/org/apache/druid/metadata/input/SqlInputSourceTest.java b/server/src/test/java/org/apache/druid/metadata/input/SqlInputSourceTest.java
index 7edf954..14391a6 100644
--- a/server/src/test/java/org/apache/druid/metadata/input/SqlInputSourceTest.java
+++ b/server/src/test/java/org/apache/druid/metadata/input/SqlInputSourceTest.java
@@ -27,7 +27,6 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import nl.jqno.equalsverifier.EqualsVerifier;
 import org.apache.commons.dbcp2.BasicDataSource;
-import org.apache.commons.io.FileUtils;
 import org.apache.druid.data.input.ColumnsFilter;
 import org.apache.druid.data.input.InputFormat;
 import org.apache.druid.data.input.InputRow;
@@ -38,6 +37,7 @@ import org.apache.druid.data.input.InputSplit;
 import org.apache.druid.data.input.Row;
 import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.data.input.impl.TimestampSpec;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.parsers.CloseableIterator;
 import org.apache.druid.metadata.MetadataStorageConnectorConfig;
@@ -102,7 +102,7 @@ public class SqlInputSourceTest
   public static void teardown() throws IOException
   {
     for (File dir : FIREHOSE_TMP_DIRS) {
-      FileUtils.forceDelete(dir);
+      org.apache.commons.io.FileUtils.forceDelete(dir);
     }
   }
 
@@ -131,8 +131,8 @@ public class SqlInputSourceTest
         SqlInputSourceTest.class.getSimpleName(),
         dirSuffix
     );
-    FileUtils.forceDelete(firehoseTempDir);
-    FileUtils.forceMkdir(firehoseTempDir);
+    org.apache.commons.io.FileUtils.forceDelete(firehoseTempDir);
+    FileUtils.mkdirp(firehoseTempDir);
     FIREHOSE_TMP_DIRS.add(firehoseTempDir);
     return firehoseTempDir;
   }
diff --git a/server/src/test/java/org/apache/druid/segment/loading/LocalDataSegmentKillerTest.java b/server/src/test/java/org/apache/druid/segment/loading/LocalDataSegmentKillerTest.java
index 01f62a8..bc17677 100644
--- a/server/src/test/java/org/apache/druid/segment/loading/LocalDataSegmentKillerTest.java
+++ b/server/src/test/java/org/apache/druid/segment/loading/LocalDataSegmentKillerTest.java
@@ -21,6 +21,7 @@ package org.apache.druid.segment.loading;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.NoneShardSpec;
@@ -118,7 +119,7 @@ public class LocalDataSegmentKillerTest
 
   private void makePartitionDirWithIndex(File path) throws IOException
   {
-    Assert.assertTrue(path.mkdirs());
+    FileUtils.mkdirp(path);
     Assert.assertTrue(new File(path, "index.zip").createNewFile());
   }
 
diff --git a/server/src/test/java/org/apache/druid/segment/loading/LocalDataSegmentPusherTest.java b/server/src/test/java/org/apache/druid/segment/loading/LocalDataSegmentPusherTest.java
index 5ecca15..739075e 100644
--- a/server/src/test/java/org/apache/druid/segment/loading/LocalDataSegmentPusherTest.java
+++ b/server/src/test/java/org/apache/druid/segment/loading/LocalDataSegmentPusherTest.java
@@ -22,7 +22,7 @@ package org.apache.druid.segment.loading;
 import com.google.common.collect.ImmutableList;
 import com.google.common.io.Files;
 import com.google.common.primitives.Ints;
-import org.apache.commons.io.FileUtils;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.timeline.DataSegment;
@@ -140,7 +140,7 @@ public class LocalDataSegmentPusherTest
     Assert.assertEquals(dataSegment2.getDimensions(), returnSegment2.getDimensions());
 
     File unzipDir = new File(config.storageDirectory, "unzip");
-    FileUtils.forceMkdir(unzipDir);
+    FileUtils.mkdirp(unzipDir);
     CompressionUtils.unzip(
         new File(config.storageDirectory, "/ds/1970-01-01T00:00:00.000Z_1970-01-01T00:00:00.001Z/v1/0/index.zip"),
         unzipDir
diff --git a/server/src/test/java/org/apache/druid/segment/loading/SegmentLocalCacheManagerConcurrencyTest.java b/server/src/test/java/org/apache/druid/segment/loading/SegmentLocalCacheManagerConcurrencyTest.java
index e812ff7..061cf8a 100644
--- a/server/src/test/java/org/apache/druid/segment/loading/SegmentLocalCacheManagerConcurrencyTest.java
+++ b/server/src/test/java/org/apache/druid/segment/loading/SegmentLocalCacheManagerConcurrencyTest.java
@@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.concurrent.Execs;
@@ -125,7 +126,7 @@ public class SegmentLocalCacheManagerConcurrencyTest
           localStorageFolder,
           segmentPath
       );
-      localSegmentFile.mkdirs();
+      FileUtils.mkdirp(localSegmentFile);
       final File indexZip = new File(localSegmentFile, "index.zip");
       indexZip.createNewFile();
 
diff --git a/server/src/test/java/org/apache/druid/segment/loading/SegmentLocalCacheManagerTest.java b/server/src/test/java/org/apache/druid/segment/loading/SegmentLocalCacheManagerTest.java
index 5d116d1..7facb3f 100644
--- a/server/src/test/java/org/apache/druid/segment/loading/SegmentLocalCacheManagerTest.java
+++ b/server/src/test/java/org/apache/druid/segment/loading/SegmentLocalCacheManagerTest.java
@@ -25,6 +25,7 @@ import com.fasterxml.jackson.databind.jsontype.NamedType;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import org.apache.druid.jackson.DefaultObjectMapper;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.emitter.EmittingLogger;
 import org.apache.druid.server.metrics.NoopServiceEmitter;
@@ -37,6 +38,7 @@ import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -80,14 +82,14 @@ public class SegmentLocalCacheManagerTest
   }
 
   @Test
-  public void testIfSegmentIsLoaded()
+  public void testIfSegmentIsLoaded() throws IOException
   {
     final DataSegment cachedSegment = dataSegmentWithInterval("2014-10-20T00:00:00Z/P1D");
     final File cachedSegmentFile = new File(
         localSegmentCacheFolder,
         "test_segment_loader/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
     );
-    cachedSegmentFile.mkdirs();
+    FileUtils.mkdirp(cachedSegmentFile);
 
     Assert.assertTrue("Expect cache hit", manager.isSegmentCached(cachedSegment));
 
@@ -117,7 +119,7 @@ public class SegmentLocalCacheManagerTest
         localStorageFolder,
         "test_segment_loader/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
     );
-    localSegmentFile.mkdirs();
+    FileUtils.mkdirp(localSegmentFile);
     final File indexZip = new File(localSegmentFile, "index.zip");
     indexZip.createNewFile();
 
@@ -163,7 +165,7 @@ public class SegmentLocalCacheManagerTest
         segmentSrcFolder,
         "test_segment_loader/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
     );
-    localSegmentFile.mkdirs();
+    FileUtils.mkdirp(localSegmentFile);
     final File indexZip = new File(localSegmentFile, "index.zip");
     indexZip.createNewFile();
 
@@ -211,7 +213,7 @@ public class SegmentLocalCacheManagerTest
         segmentSrcFolder,
         "test_segment_loader/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
     );
-    localSegmentFile.mkdirs();
+    FileUtils.mkdirp(localSegmentFile);
     final File indexZip = new File(localSegmentFile, "index.zip");
     indexZip.createNewFile();
 
@@ -261,7 +263,7 @@ public class SegmentLocalCacheManagerTest
         segmentSrcFolder,
         "test_segment_loader/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
     );
-    localSegmentFile.mkdirs();
+    FileUtils.mkdirp(localSegmentFile);
     final File indexZip = new File(localSegmentFile, "index.zip");
     indexZip.createNewFile();
 
@@ -310,7 +312,7 @@ public class SegmentLocalCacheManagerTest
         segmentSrcFolder,
         "test_segment_loader/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
     );
-    localSegmentFile.mkdirs();
+    FileUtils.mkdirp(localSegmentFile);
     final File indexZip = new File(localSegmentFile, "index.zip");
     indexZip.createNewFile();
 
@@ -336,7 +338,7 @@ public class SegmentLocalCacheManagerTest
         segmentSrcFolder,
         "test_segment_loader/2014-11-20T00:00:00.000Z_2014-11-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
     );
-    localSegmentFile2.mkdirs();
+    FileUtils.mkdirp(localSegmentFile2);
     final File indexZip2 = new File(localSegmentFile2, "index.zip");
     indexZip2.createNewFile();
 
@@ -504,7 +506,7 @@ public class SegmentLocalCacheManagerTest
   {
     // manually create a local segment under segmentSrcFolder
     final File localSegmentFile = new File(segmentSrcFolder, localSegmentPath);
-    localSegmentFile.mkdirs();
+    FileUtils.mkdirp(localSegmentFile);
     final File indexZip = new File(localSegmentFile, "index.zip");
     indexZip.createNewFile();
   }
@@ -746,7 +748,7 @@ public class SegmentLocalCacheManagerTest
         localStorageFolder,
         "test_segment_loader/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
     );
-    Assert.assertTrue(localSegmentFile.mkdirs());
+    FileUtils.mkdirp(localSegmentFile);
     final File indexZip = new File(localSegmentFile, "index.zip");
     Assert.assertTrue(indexZip.createNewFile());
 
diff --git a/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseFactoryTest.java b/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseFactoryTest.java
index 0f3b0c5..769c077 100644
--- a/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseFactoryTest.java
+++ b/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseFactoryTest.java
@@ -21,7 +21,6 @@ package org.apache.druid.segment.realtime.firehose;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
-import org.apache.commons.io.FileUtils;
 import org.apache.druid.data.input.Firehose;
 import org.apache.druid.data.input.Row;
 import org.apache.druid.data.input.impl.DimensionsSpec;
@@ -29,6 +28,7 @@ import org.apache.druid.data.input.impl.InputRowParser;
 import org.apache.druid.data.input.impl.MapInputRowParser;
 import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
 import org.apache.druid.data.input.impl.TimestampSpec;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.metadata.TestDerbyConnector;
 import org.apache.druid.metadata.input.SqlTestUtils;
@@ -82,16 +82,16 @@ public class SqlFirehoseFactoryTest
   public static void setup() throws IOException
   {
     TEST_DIR = File.createTempFile(SqlFirehoseFactoryTest.class.getSimpleName(), "testDir");
-    FileUtils.forceDelete(TEST_DIR);
-    FileUtils.forceMkdir(TEST_DIR);
+    org.apache.commons.io.FileUtils.forceDelete(TEST_DIR);
+    FileUtils.mkdirp(TEST_DIR);
   }
 
   @AfterClass
   public static void teardown() throws IOException
   {
-    FileUtils.forceDelete(TEST_DIR);
+    org.apache.commons.io.FileUtils.forceDelete(TEST_DIR);
     for (File dir : FIREHOSE_TMP_DIRS) {
-      FileUtils.forceDelete(dir);
+      org.apache.commons.io.FileUtils.forceDelete(dir);
     }
   }
 
@@ -127,8 +127,8 @@ public class SqlFirehoseFactoryTest
         SqlFirehoseFactoryTest.class.getSimpleName(),
         dirSuffix
     );
-    FileUtils.forceDelete(firehoseTempDir);
-    FileUtils.forceMkdir(firehoseTempDir);
+    org.apache.commons.io.FileUtils.forceDelete(firehoseTempDir);
+    FileUtils.mkdirp(firehoseTempDir);
     FIREHOSE_TMP_DIRS.add(firehoseTempDir);
     return firehoseTempDir;
   }
diff --git a/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseTest.java b/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseTest.java
index 3a58903..63deabc 100644
--- a/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseTest.java
+++ b/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseTest.java
@@ -25,7 +25,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.dataformat.smile.SmileFactory;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import org.apache.commons.io.FileUtils;
 import org.apache.druid.data.input.InputRow;
 import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.data.input.impl.InputRowParser;
@@ -34,6 +33,7 @@ import org.apache.druid.data.input.impl.StringInputRowParser;
 import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
 import org.apache.druid.data.input.impl.TimestampSpec;
 import org.apache.druid.data.input.impl.prefetch.JsonIterator;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.math.expr.ExprMacroTable;
 import org.apache.druid.segment.transform.ExpressionTransform;
 import org.apache.druid.segment.transform.TransformSpec;
@@ -70,8 +70,8 @@ public class SqlFirehoseTest
   public void setup() throws IOException
   {
     TEST_DIR = File.createTempFile(SqlFirehose.class.getSimpleName(), "testDir");
-    FileUtils.forceDelete(TEST_DIR);
-    FileUtils.forceMkdir(TEST_DIR);
+    org.apache.commons.io.FileUtils.forceDelete(TEST_DIR);
+    FileUtils.mkdirp(TEST_DIR);
 
     final List<Map<String, Object>> inputTexts = ImmutableList.of(
         ImmutableMap.of("x", "foostring1", "timestamp", 2000),
@@ -249,7 +249,7 @@ public class SqlFirehoseTest
   @After
   public void teardown() throws IOException
   {
-    FileUtils.forceDelete(TEST_DIR);
+    org.apache.commons.io.FileUtils.forceDelete(TEST_DIR);
   }
 
   private static final class TestCloseable implements Closeable
diff --git a/server/src/test/java/org/apache/druid/server/SegmentManagerBroadcastJoinIndexedTableTest.java b/server/src/test/java/org/apache/druid/server/SegmentManagerBroadcastJoinIndexedTableTest.java
index b62d453..a84a98d 100644
--- a/server/src/test/java/org/apache/druid/server/SegmentManagerBroadcastJoinIndexedTableTest.java
+++ b/server/src/test/java/org/apache/druid/server/SegmentManagerBroadcastJoinIndexedTableTest.java
@@ -338,7 +338,7 @@ public class SegmentManagerBroadcastJoinIndexedTableTest extends InitializedNull
     );
     final String storageDir = DataSegmentPusher.getDefaultStorageDir(tmpSegment, false);
     final File segmentDir = new File(segmentDeepStorageDir, storageDir);
-    org.apache.commons.io.FileUtils.forceMkdir(segmentDir);
+    FileUtils.mkdirp(segmentDir);
 
     IndexMerger indexMerger =
         new IndexMergerV9(objectMapper, indexIO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
diff --git a/server/src/test/java/org/apache/druid/server/SegmentManagerThreadSafetyTest.java b/server/src/test/java/org/apache/druid/server/SegmentManagerThreadSafetyTest.java
index 5295b9d..b1de3a2 100644
--- a/server/src/test/java/org/apache/druid/server/SegmentManagerThreadSafetyTest.java
+++ b/server/src/test/java/org/apache/druid/server/SegmentManagerThreadSafetyTest.java
@@ -58,7 +58,6 @@ import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
 import javax.annotation.Nullable;
-
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -187,7 +186,7 @@ public class SegmentManagerThreadSafetyTest
     );
     final String storageDir = DataSegmentPusher.getDefaultStorageDir(tmpSegment, false);
     final File segmentDir = new File(segmentDeepStorageDir, storageDir);
-    org.apache.commons.io.FileUtils.forceMkdir(segmentDir);
+    FileUtils.mkdirp(segmentDir);
 
     final File factoryJson = new File(segmentDir, "factory.json");
     objectMapper.writeValue(factoryJson, new TestSegmentizerFactory());
diff --git a/server/src/test/java/org/apache/druid/server/coordination/SegmentLoadDropHandlerCacheTest.java b/server/src/test/java/org/apache/druid/server/coordination/SegmentLoadDropHandlerCacheTest.java
index 00164fc..ff22f00 100644
--- a/server/src/test/java/org/apache/druid/server/coordination/SegmentLoadDropHandlerCacheTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordination/SegmentLoadDropHandlerCacheTest.java
@@ -26,6 +26,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.io.Files;
 import org.apache.druid.guice.ServerTypeConfig;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.emitter.EmittingLogger;
 import org.apache.druid.segment.IndexIO;
@@ -188,7 +189,7 @@ public class SegmentLoadDropHandlerCacheTest
       File segmentFile = new File(destDir, "segment");
       File factoryJson = new File(destDir, "factory.json");
       try {
-        destDir.mkdirs();
+        FileUtils.mkdirp(destDir);
         segmentFile.createNewFile();
         factoryJson.createNewFile();
       }
diff --git a/server/src/test/java/org/apache/druid/server/coordination/SegmentLoadDropHandlerTest.java b/server/src/test/java/org/apache/druid/server/coordination/SegmentLoadDropHandlerTest.java
index 02a6db1..9e41197 100644
--- a/server/src/test/java/org/apache/druid/server/coordination/SegmentLoadDropHandlerTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordination/SegmentLoadDropHandlerTest.java
@@ -103,7 +103,7 @@ public class SegmentLoadDropHandlerTest
   }
 
   @Before
-  public void setUp()
+  public void setUp() throws IOException
   {
     try {
       testStorageLocation = new TestStorageLocation(temporaryFolder);
diff --git a/server/src/test/java/org/apache/druid/server/coordination/TestStorageLocation.java b/server/src/test/java/org/apache/druid/server/coordination/TestStorageLocation.java
index d2b55f4..d738147 100644
--- a/server/src/test/java/org/apache/druid/server/coordination/TestStorageLocation.java
+++ b/server/src/test/java/org/apache/druid/server/coordination/TestStorageLocation.java
@@ -20,6 +20,7 @@
 package org.apache.druid.server.coordination;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.loading.StorageLocationConfig;
@@ -104,19 +105,15 @@ public class TestStorageLocation
     Assert.assertEquals(expectedSegments, segmentsInFiles);
   }
 
-  public StorageLocationConfig toStorageLocationConfig()
+  public StorageLocationConfig toStorageLocationConfig() throws IOException
   {
-    if (!cacheDir.exists()) {
-      cacheDir.mkdirs();
-    }
+    FileUtils.mkdirp(cacheDir);
     return new StorageLocationConfig(cacheDir, 100L, 100d);
   }
 
-  public StorageLocationConfig toStorageLocationConfig(long maxSize, Double freeSpacePercent)
+  public StorageLocationConfig toStorageLocationConfig(long maxSize, Double freeSpacePercent) throws IOException
   {
-    if (!cacheDir.exists()) {
-      cacheDir.mkdirs();
-    }
+    FileUtils.mkdirp(cacheDir);
     return new StorageLocationConfig(cacheDir, maxSize, freeSpacePercent);
   }
 }
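
The TestStorageLocation change shows the main ripple effect of the migration: because mkdirp reports failure as a checked IOException, the throws clause propagates to callers such as toStorageLocationConfig() and the test setUp() methods above. An illustrative before/after call site (hypothetical class, not part of the patch):

    import java.io.File;
    import java.io.IOException;
    import org.apache.druid.java.util.common.FileUtils;

    class CacheDirExample
    {
      private final File cacheDir = new File("cache");

      void before()
      {
        // Pre-patch pattern: File.mkdirs() signals failure only via its boolean
        // return value, which this code silently drops.
        if (!cacheDir.exists()) {
          cacheDir.mkdirs();
        }
      }

      void after() throws IOException
      {
        // Post-patch pattern: FileUtils.mkdirp() throws on failure, so the checked
        // exception must be declared here and in every caller up the chain.
        FileUtils.mkdirp(cacheDir);
      }
    }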
diff --git a/services/src/main/java/org/apache/druid/cli/PullDependencies.java b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
index bbb91f2..936ff5b 100644
--- a/services/src/main/java/org/apache/druid/cli/PullDependencies.java
+++ b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
@@ -285,8 +285,8 @@ public class PullDependencies implements Runnable
         FileUtils.deleteDirectory(extensionsDir);
         FileUtils.deleteDirectory(hadoopDependenciesDir);
       }
-      org.apache.commons.io.FileUtils.forceMkdir(extensionsDir);
-      org.apache.commons.io.FileUtils.forceMkdir(hadoopDependenciesDir);
+      FileUtils.mkdirp(extensionsDir);
+      FileUtils.mkdirp(hadoopDependenciesDir);
     }
     catch (IOException e) {
       log.error(e, "Unable to clear or create extension directory at [%s]", extensionsDir);
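
Enforcement side note: a migration like this is usually locked in with a forbidden-apis rule so that new File.mkdirs() or commons-io forceMkdir() calls fail the build. The exact entries are not shown here; illustrative signatures (assumed, following the forbidden-apis signature-file format) might read:

    java.io.File#mkdirs() @ Use org.apache.druid.java.util.common.FileUtils#mkdirp(File) instead
    org.apache.commons.io.FileUtils#forceMkdir(java.io.File) @ Use org.apache.druid.java.util.common.FileUtils#mkdirp(File) instead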
