You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@avro.apache.org by dk...@apache.org on 2020/05/21 15:49:40 UTC
[avro] branch master updated: AVRO-2763:Resource leak:'<unassigned Closeable value>' is never closed
This is an automated email from the ASF dual-hosted git repository.
dkulp pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/avro.git
The following commit(s) were added to refs/heads/master by this push:
new b534b8b AVRO-2763:Resource leak:'<unassigned Closeable value>' is never closed
b534b8b is described below
commit b534b8ba924cd7515ec71bd0c0898153952c6eba
Author: zeshuai007 <51...@qq.com>
AuthorDate: Sat Feb 29 15:45:23 2020 +0800
AVRO-2763:Resource leak:'<unassigned Closeable value>' is never closed
---
.../src/main/java/example/GenericMain.java | 16 ++-
.../src/main/java/example/SpecificMain.java | 16 +--
.../test/java/org/apache/avro/TestDataFile.java | 55 ++++----
.../org/apache/avro/TestDataFileCorruption.java | 18 +--
.../java/org/apache/avro/TestDataFileDeflate.java | 15 ++-
.../java/org/apache/avro/TestDataFileMeta.java | 30 +++--
.../java/org/apache/avro/TestSchemaBuilder.java | 34 ++---
.../apache/avro/reflect/TestNonStringMapKeys.java | 34 ++---
.../org/apache/avro/ipc/netty/TestNettyServer.java | 18 +--
.../java/org/apache/avro/TestProtocolSpecific.java | 30 +++--
.../avro/hadoop/io/TestAvroSequenceFile.java | 144 ++++++++++-----------
.../avro/mapred/TestAvroTextOutputFormat.java | 1 +
.../avro/mapred/tether/TestWordCountTether.java | 7 +-
.../mapreduce/TestAvroKeyValueRecordWriter.java | 1 +
.../TestCombineAvroKeyValueFileInputFormat.java | 1 +
.../org/apache/avro/tool/DataFileGetMetaTool.java | 1 +
.../apache/avro/tool/DataFileGetSchemaTool.java | 1 +
.../java/org/apache/avro/tool/RecodecTool.java | 1 +
.../apache/avro/tool/TestDataFileRepairTool.java | 1 +
.../java/org/apache/avro/tool/TestRecodecTool.java | 9 +-
.../apache/avro/tool/TestSpecificCompilerTool.java | 1 +
.../java/org/apache/avro/tool/TestTetherTool.java | 4 +-
.../org/apache/avro/tool/TestTextFileTools.java | 1 +
.../org/apache/trevni/avro/TestEvolvedSchema.java | 18 +--
.../test/java/org/apache/trevni/TestIOBuffers.java | 22 ++++
.../java/org/apache/trevni/TestInputBytes.java | 1 +
26 files changed, 270 insertions(+), 210 deletions(-)
diff --git a/doc/examples/java-example/src/main/java/example/GenericMain.java b/doc/examples/java-example/src/main/java/example/GenericMain.java
index 8d051e0..6e08a61 100644
--- a/doc/examples/java-example/src/main/java/example/GenericMain.java
+++ b/doc/examples/java-example/src/main/java/example/GenericMain.java
@@ -57,15 +57,17 @@ public class GenericMain {
// Deserialize users from disk
DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
- DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(file, datumReader);
GenericRecord user = null;
- while (dataFileReader.hasNext()) {
- // Reuse user object by passing it to next(). This saves us from
- // allocating and garbage collecting many objects for files with
- // many items.
- user = dataFileReader.next(user);
- System.out.println(user);
+ try(DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(file, datumReader)){
+ while (dataFileReader.hasNext()) {
+ // Reuse user object by passing it to next(). This saves us from
+ // allocating and garbage collecting many objects for files with
+ // many items.
+ user = dataFileReader.next(user);
+ System.out.println(user);
+ }
}
+
}
}
diff --git a/doc/examples/java-example/src/main/java/example/SpecificMain.java b/doc/examples/java-example/src/main/java/example/SpecificMain.java
index 188c2c0..7750077 100644
--- a/doc/examples/java-example/src/main/java/example/SpecificMain.java
+++ b/doc/examples/java-example/src/main/java/example/SpecificMain.java
@@ -59,15 +59,15 @@ public class SpecificMain {
// Deserialize Users from disk
DatumReader<User> userDatumReader = new SpecificDatumReader<User>(User.class);
- DataFileReader<User> dataFileReader = new DataFileReader<User>(file, userDatumReader);
User user = null;
- while (dataFileReader.hasNext()) {
- // Reuse user object by passing it to next(). This saves us from
- // allocating and garbage collecting many objects for files with
- // many items.
- user = dataFileReader.next(user);
- System.out.println(user);
+ try(DataFileReader<User> dataFileReader = new DataFileReader<User>(file, userDatumReader)){
+ while (dataFileReader.hasNext()) {
+ // Reuse user object by passing it to next(). This saves us from
+ // allocating and garbage collecting many objects for files with
+ // many items.
+ user = dataFileReader.next(user);
+ System.out.println(user);
+ }
}
-
}
}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
index 0e65ecd..81eb012 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
@@ -233,29 +233,39 @@ public class TestDataFile {
private void testReadWithHeader() throws IOException {
File file = makeFile();
- DataFileReader<Object> reader = new DataFileReader<>(file, new GenericDatumReader<>());
- // get a header for this file
- DataFileStream.Header header = reader.getHeader();
- // re-open to an arbitrary position near the middle, with sync == true
- SeekableFileInput sin = new SeekableFileInput(file);
- sin.seek(sin.length() / 2);
- reader = DataFileReader.openReader(sin, new GenericDatumReader<>(), header, true);
- assertNotNull("Should be able to reopen from arbitrary point", reader.next());
- long validPos = reader.previousSync();
- // post sync, we know of a valid sync point: re-open with seek (sync == false)
- sin.seek(validPos);
- reader = DataFileReader.openReader(sin, new GenericDatumReader<>(), header, false);
- assertEquals("Should not move from sync point on reopen", validPos, sin.tell());
- assertNotNull("Should be able to reopen at sync point", reader.next());
+ try (DataFileReader<Object> reader = new DataFileReader<>(file, new GenericDatumReader<>())) {
+ // get a header for this file
+ DataFileStream.Header header = reader.getHeader();
+ // re-open to an arbitrary position near the middle, with sync == true
+ SeekableFileInput sin = new SeekableFileInput(file);
+ sin.seek(sin.length() / 2);
+ try (DataFileReader<Object> readerTrue = DataFileReader.openReader(sin, new GenericDatumReader<>(), header,
+ true);) {
+
+ assertNotNull("Should be able to reopen from arbitrary point", readerTrue.next());
+ long validPos = readerTrue.previousSync();
+ // post sync, we know of a valid sync point: re-open with seek (sync == false)
+ sin.seek(validPos);
+ try (DataFileReader<Object> readerFalse = DataFileReader.openReader(sin, new GenericDatumReader<>(), header,
+ false)) {
+ assertEquals("Should not move from sync point on reopen", validPos, sin.tell());
+ assertNotNull("Should be able to reopen at sync point", readerFalse.next());
+ }
+
+ }
+
+ }
+
}
@Test
public void testSyncInHeader() throws IOException {
- DataFileReader<Object> reader = new DataFileReader<>(new File("../../../share/test/data/syncInMeta.avro"),
- new GenericDatumReader<>());
- reader.sync(0);
- for (Object datum : reader)
- assertNotNull(datum);
+ try (DataFileReader<Object> reader = new DataFileReader<>(new File("../../../share/test/data/syncInMeta.avro"),
+ new GenericDatumReader<>())) {
+ reader.sync(0);
+ for (Object datum : reader)
+ assertNotNull(datum);
+ }
}
@Test
@@ -321,9 +331,10 @@ public class TestDataFile {
}
static void readFile(File f, DatumReader<? extends Object> datumReader) throws IOException {
- FileReader<? extends Object> reader = DataFileReader.openReader(f, datumReader);
- for (Object datum : reader) {
- assertNotNull(datum);
+ try (FileReader<? extends Object> reader = DataFileReader.openReader(f, datumReader)) {
+ for (Object datum : reader) {
+ assertNotNull(datum);
+ }
}
}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCorruption.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCorruption.java
index 7de6820..0dc615f 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCorruption.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCorruption.java
@@ -76,19 +76,19 @@ public class TestDataFileCorruption {
out.close();
// Read the data file
- DataFileReader r = new DataFileReader<>(file, new GenericDatumReader<>(schema));
- assertEquals("apple", r.next().toString());
- assertEquals("banana", r.next().toString());
- long prevSync = r.previousSync();
- try {
+ try (DataFileReader r = new DataFileReader<>(file, new GenericDatumReader<>(schema))) {
+ assertEquals("apple", r.next().toString());
+ assertEquals("banana", r.next().toString());
+ long prevSync = r.previousSync();
r.next();
fail("Corrupt block should throw exception");
+ r.sync(prevSync); // go to sync point after previous successful one
+ assertEquals("endive", r.next().toString());
+ assertEquals("fig", r.next().toString());
+ assertFalse(r.hasNext());
} catch (AvroRuntimeException e) {
assertEquals("Invalid sync!", e.getCause().getMessage());
}
- r.sync(prevSync); // go to sync point after previous successful one
- assertEquals("endive", r.next().toString());
- assertEquals("fig", r.next().toString());
- assertFalse(r.hasNext());
+
}
}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileDeflate.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileDeflate.java
index 3586492..1eb5993 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileDeflate.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileDeflate.java
@@ -52,12 +52,13 @@ public class TestDataFileDeflate {
w.close();
// Read it
- DataFileStream<Utf8> r = new DataFileStream<>(new ByteArrayInputStream(baos.toByteArray()),
- new GenericDatumReader<>(schema));
- assertEquals("hello world", r.next().toString());
- assertEquals("hello moon", r.next().toString());
- assertEquals("bye bye world", r.next().toString());
- assertEquals("bye bye moon", r.next().toString());
- assertFalse(r.hasNext());
+ try (DataFileStream<Utf8> r = new DataFileStream<>(new ByteArrayInputStream(baos.toByteArray()),
+ new GenericDatumReader<>(schema))) {
+ assertEquals("hello world", r.next().toString());
+ assertEquals("hello moon", r.next().toString());
+ assertEquals("bye bye world", r.next().toString());
+ assertEquals("bye bye moon", r.next().toString());
+ assertFalse(r.hasNext());
+ }
}
}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java
index 3c96a14..3a70df4 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java
@@ -40,31 +40,35 @@ public class TestDataFileMeta {
public TemporaryFolder DIR = new TemporaryFolder();
@Test(expected = AvroRuntimeException.class)
- public void testUseReservedMeta() {
- DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>());
- w.setMeta("avro.foo", "bar");
+ public void testUseReservedMeta() throws IOException {
+ try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
+ w.setMeta("avro.foo", "bar");
+ }
}
@Test()
public void testUseMeta() throws IOException {
- DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>());
File f = new File(DIR.getRoot().getPath(), "testDataFileMeta.avro");
- w.setMeta("hello", "bar");
- w.create(Schema.create(Type.NULL), f);
- w.close();
+ try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
+ w.setMeta("hello", "bar");
+ w.create(Schema.create(Type.NULL), f);
+ }
- DataFileStream<Void> r = new DataFileStream<>(new FileInputStream(f), new GenericDatumReader<>());
+ try (DataFileStream<Void> r = new DataFileStream<>(new FileInputStream(f), new GenericDatumReader<>())) {
+ assertTrue(r.getMetaKeys().contains("hello"));
- assertTrue(r.getMetaKeys().contains("hello"));
+ assertEquals("bar", r.getMetaString("hello"));
+ }
- assertEquals("bar", r.getMetaString("hello"));
}
@Test(expected = AvroRuntimeException.class)
public void testUseMetaAfterCreate() throws IOException {
- DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>());
- w.create(Schema.create(Type.NULL), new ByteArrayOutputStream());
- w.setMeta("foo", "bar");
+ try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
+ w.create(Schema.create(Type.NULL), new ByteArrayOutputStream());
+ w.setMeta("foo", "bar");
+ }
+
}
@Test
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
index 2679c5d..afc3026 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
@@ -776,22 +776,24 @@ public class TestSchemaBuilder {
.intDefault(3).name("newOptionalInt").type().optional().intType().name("newNullableIntWithDefault").type()
.nullable().intType().intDefault(5).endRecord();
- DataFileReader<GenericData.Record> reader = new DataFileReader<>(file,
- new GenericDatumReader<>(writeSchema, readSchema));
-
- GenericData.Record rec1read = reader.iterator().next();
- Assert.assertEquals(1, rec1read.get("requiredInt"));
- Assert.assertNull(rec1read.get("optionalInt"));
- Assert.assertEquals(3, rec1read.get("nullableIntWithDefault"));
- Assert.assertNull(rec1read.get("newOptionalInt"));
- Assert.assertEquals(5, rec1read.get("newNullableIntWithDefault"));
-
- GenericData.Record rec2read = reader.iterator().next();
- Assert.assertEquals(1, rec2read.get("requiredInt"));
- Assert.assertEquals(2, rec2read.get("optionalInt"));
- Assert.assertEquals(13, rec2read.get("nullableIntWithDefault"));
- Assert.assertNull(rec2read.get("newOptionalInt"));
- Assert.assertEquals(5, rec2read.get("newNullableIntWithDefault"));
+ try (DataFileReader<GenericData.Record> reader = new DataFileReader<>(file,
+ new GenericDatumReader<>(writeSchema, readSchema))) {
+
+ GenericData.Record rec1read = reader.iterator().next();
+ Assert.assertEquals(1, rec1read.get("requiredInt"));
+ Assert.assertNull(rec1read.get("optionalInt"));
+ Assert.assertEquals(3, rec1read.get("nullableIntWithDefault"));
+ Assert.assertNull(rec1read.get("newOptionalInt"));
+ Assert.assertEquals(5, rec1read.get("newNullableIntWithDefault"));
+
+ GenericData.Record rec2read = reader.iterator().next();
+ Assert.assertEquals(1, rec2read.get("requiredInt"));
+ Assert.assertEquals(2, rec2read.get("optionalInt"));
+ Assert.assertEquals(13, rec2read.get("nullableIntWithDefault"));
+ Assert.assertNull(rec2read.get("newOptionalInt"));
+ Assert.assertEquals(5, rec2read.get("newNullableIntWithDefault"));
+ }
+
}
@Test
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
index 7fcb1a3..0b33a28 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
@@ -241,17 +241,18 @@ public class TestNonStringMapKeys {
GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
SeekableByteArrayInput avroInputStream = new SeekableByteArrayInput(bytes);
- DataFileReader<GenericRecord> fileReader = new DataFileReader<>(avroInputStream, datumReader);
-
- Schema schema = fileReader.getSchema();
- assertNotNull("Unable to get schema for " + testType, schema);
- GenericRecord record = null;
List<GenericRecord> records = new ArrayList<>();
- while (fileReader.hasNext()) {
- try {
- records.add(fileReader.next(record));
- } catch (Exception e) {
- fail("Fail with schema: " + schema);
+ try (DataFileReader<GenericRecord> fileReader = new DataFileReader<>(avroInputStream, datumReader)) {
+
+ Schema schema = fileReader.getSchema();
+ assertNotNull("Unable to get schema for " + testType, schema);
+ GenericRecord record = null;
+ while (fileReader.hasNext()) {
+ try {
+ records.add(fileReader.next(record));
+ } catch (Exception e) {
+ fail("Fail with schema: " + schema);
+ }
}
}
return records;
@@ -266,13 +267,14 @@ public class TestNonStringMapKeys {
ReflectDatumReader<T> datumReader = new ReflectDatumReader<>();
SeekableByteArrayInput avroInputStream = new SeekableByteArrayInput(bytes);
- DataFileReader<T> fileReader = new DataFileReader<>(avroInputStream, datumReader);
-
- Schema schema = fileReader.getSchema();
- T record = null;
List<T> records = new ArrayList<>();
- while (fileReader.hasNext()) {
- records.add(fileReader.next(record));
+ try (DataFileReader<T> fileReader = new DataFileReader<>(avroInputStream, datumReader)) {
+
+ Schema schema = fileReader.getSchema();
+ T record = null;
+ while (fileReader.hasNext()) {
+ records.add(fileReader.next(record));
+ }
}
return records;
}
diff --git a/lang/java/ipc-netty/src/test/java/org/apache/avro/ipc/netty/TestNettyServer.java b/lang/java/ipc-netty/src/test/java/org/apache/avro/ipc/netty/TestNettyServer.java
index 35a4084..c5cfe4d 100644
--- a/lang/java/ipc-netty/src/test/java/org/apache/avro/ipc/netty/TestNettyServer.java
+++ b/lang/java/ipc-netty/src/test/java/org/apache/avro/ipc/netty/TestNettyServer.java
@@ -168,14 +168,16 @@ public class TestNettyServer {
int port = server.getPort();
String msg = "GET /status HTTP/1.1\n\n";
InetSocketAddress sockAddr = new InetSocketAddress("127.0.0.1", port);
- Socket sock = new Socket();
- sock.connect(sockAddr);
- OutputStream out = sock.getOutputStream();
- out.write(msg.getBytes(StandardCharsets.UTF_8));
- out.flush();
- byte[] buf = new byte[2048];
- int bytesRead = sock.getInputStream().read(buf);
- Assert.assertTrue("Connection should have been closed", bytesRead == -1);
+
+ try (Socket sock = new Socket()) {
+ sock.connect(sockAddr);
+ OutputStream out = sock.getOutputStream();
+ out.write(msg.getBytes(StandardCharsets.UTF_8));
+ out.flush();
+ byte[] buf = new byte[2048];
+ int bytesRead = sock.getInputStream().read(buf);
+ Assert.assertTrue("Connection should have been closed", bytesRead == -1);
+ }
}
}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
index f53106f..9ef96ec 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
@@ -331,17 +331,18 @@ public class TestProtocolSpecific {
@Test
public void testClient() throws Exception {
for (File f : Objects.requireNonNull(SERVER_PORTS_DIR.listFiles())) {
- LineNumberReader reader = new LineNumberReader(new FileReader(f));
- int port = Integer.parseInt(reader.readLine());
- System.out.println("Validating java client to " + f.getName() + " - " + port);
- Transceiver client = new SocketTransceiver(new InetSocketAddress("localhost", port));
- proxy = SpecificRequestor.getClient(Simple.class, client);
- TestProtocolSpecific proto = new TestProtocolSpecific();
- proto.testHello();
- proto.testEcho();
- proto.testEchoBytes();
- proto.testError();
- System.out.println("Done! Validation java client to " + f.getName() + " - " + port);
+ try (LineNumberReader reader = new LineNumberReader(new FileReader(f))) {
+ int port = Integer.parseInt(reader.readLine());
+ System.out.println("Validating java client to " + f.getName() + " - " + port);
+ Transceiver client = new SocketTransceiver(new InetSocketAddress("localhost", port));
+ proxy = SpecificRequestor.getClient(Simple.class, client);
+ TestProtocolSpecific proto = new TestProtocolSpecific();
+ proto.testHello();
+ proto.testEcho();
+ proto.testEchoBytes();
+ proto.testError();
+ System.out.println("Done! Validation java client to " + f.getName() + " - " + port);
+ }
}
}
@@ -353,9 +354,10 @@ public class TestProtocolSpecific {
new InetSocketAddress(0));
server.start();
File portFile = new File(SERVER_PORTS_DIR, "java-port");
- FileWriter w = new FileWriter(portFile);
- w.write(Integer.toString(server.getPort()));
- w.close();
+ try (FileWriter w = new FileWriter(portFile)) {
+ w.write(Integer.toString(server.getPort()));
+ }
+
}
}
}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java
index c857ac7..f3f3b4e 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java
@@ -59,28 +59,28 @@ public class TestAvroSequenceFile {
AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs)
.withInputPath(sequenceFilePath).withKeySchema(Schema.create(Schema.Type.STRING))
.withValueSchema(Schema.create(Schema.Type.INT)).withConfiguration(conf);
- SequenceFile.Reader reader = new AvroSequenceFile.Reader(options);
-
- AvroKey<CharSequence> key = new AvroKey<>();
- AvroValue<Integer> value = new AvroValue<>();
-
- // Read the first record.
- key = (AvroKey<CharSequence>) reader.next(key);
- assertNotNull(key);
- assertEquals("one", key.datum().toString());
- value = (AvroValue<Integer>) reader.getCurrentValue(value);
- assertNotNull(value);
- assertEquals(1, value.datum().intValue());
-
- // Read the second record.
- key = (AvroKey<CharSequence>) reader.next(key);
- assertNotNull(key);
- assertEquals("two", key.datum().toString());
- value = (AvroValue<Integer>) reader.getCurrentValue(value);
- assertNotNull(value);
- assertEquals(2, value.datum().intValue());
-
- assertNull("Should be no more records.", reader.next(key));
+ try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) {
+ AvroKey<CharSequence> key = new AvroKey<>();
+ AvroValue<Integer> value = new AvroValue<>();
+
+ // Read the first record.
+ key = (AvroKey<CharSequence>) reader.next(key);
+ assertNotNull(key);
+ assertEquals("one", key.datum().toString());
+ value = (AvroValue<Integer>) reader.getCurrentValue(value);
+ assertNotNull(value);
+ assertEquals(1, value.datum().intValue());
+
+ // Read the second record.
+ key = (AvroKey<CharSequence>) reader.next(key);
+ assertNotNull(key);
+ assertEquals("two", key.datum().toString());
+ value = (AvroValue<Integer>) reader.getCurrentValue(value);
+ assertNotNull(value);
+ assertEquals(2, value.datum().intValue());
+
+ assertNull("Should be no more records.", reader.next(key));
+ }
}
/**
@@ -99,28 +99,29 @@ public class TestAvroSequenceFile {
FileSystem fs = FileSystem.get(conf);
AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs)
.withInputPath(sequenceFilePath).withConfiguration(conf);
- SequenceFile.Reader reader = new AvroSequenceFile.Reader(options);
-
- AvroKey<CharSequence> key = new AvroKey<>();
- AvroValue<Integer> value = new AvroValue<>();
-
- // Read the first record.
- key = (AvroKey<CharSequence>) reader.next(key);
- assertNotNull(key);
- assertEquals("one", key.datum().toString());
- value = (AvroValue<Integer>) reader.getCurrentValue(value);
- assertNotNull(value);
- assertEquals(1, value.datum().intValue());
-
- // Read the second record.
- key = (AvroKey<CharSequence>) reader.next(key);
- assertNotNull(key);
- assertEquals("two", key.datum().toString());
- value = (AvroValue<Integer>) reader.getCurrentValue(value);
- assertNotNull(value);
- assertEquals(2, value.datum().intValue());
-
- assertNull("Should be no more records.", reader.next(key));
+
+ try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) {
+ AvroKey<CharSequence> key = new AvroKey<>();
+ AvroValue<Integer> value = new AvroValue<>();
+
+ // Read the first record.
+ key = (AvroKey<CharSequence>) reader.next(key);
+ assertNotNull(key);
+ assertEquals("one", key.datum().toString());
+ value = (AvroValue<Integer>) reader.getCurrentValue(value);
+ assertNotNull(value);
+ assertEquals(1, value.datum().intValue());
+
+ // Read the second record.
+ key = (AvroKey<CharSequence>) reader.next(key);
+ assertNotNull(key);
+ assertEquals("two", key.datum().toString());
+ value = (AvroValue<Integer>) reader.getCurrentValue(value);
+ assertNotNull(value);
+ assertEquals(2, value.datum().intValue());
+
+ assertNull("Should be no more records.", reader.next(key));
+ }
}
/** Tests that reading and writing ordinary Writables still works. */
@@ -135,26 +136,28 @@ public class TestAvroSequenceFile {
FileSystem fs = FileSystem.get(conf);
AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs)
.withInputPath(sequenceFilePath).withConfiguration(conf);
- SequenceFile.Reader reader = new AvroSequenceFile.Reader(options);
-
- Text key = new Text();
- IntWritable value = new IntWritable();
-
- // Read the first record.
- assertTrue(reader.next(key));
- assertEquals("one", key.toString());
- reader.getCurrentValue(value);
- assertNotNull(value);
- assertEquals(1, value.get());
-
- // Read the second record.
- assertTrue(reader.next(key));
- assertEquals("two", key.toString());
- reader.getCurrentValue(value);
- assertNotNull(value);
- assertEquals(2, value.get());
-
- assertFalse("Should be no more records.", reader.next(key));
+
+ try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) {
+ Text key = new Text();
+ IntWritable value = new IntWritable();
+
+ // Read the first record.
+ assertTrue(reader.next(key));
+ assertEquals("one", key.toString());
+ reader.getCurrentValue(value);
+ assertNotNull(value);
+ assertEquals(1, value.get());
+
+ // Read the second record.
+ assertTrue(reader.next(key));
+ assertEquals("two", key.toString());
+ reader.getCurrentValue(value);
+ assertNotNull(value);
+ assertEquals(2, value.get());
+
+ assertFalse("Should be no more records.", reader.next(key));
+
+ }
}
/**
@@ -188,14 +191,11 @@ public class TestAvroSequenceFile {
} else {
options.withValueClass(valueClass);
}
- SequenceFile.Writer writer = new AvroSequenceFile.Writer(options);
-
- // Write some records.
- for (int i = 0; i < records.length; i += 2) {
- writer.append(records[i], records[i + 1]);
+ try (SequenceFile.Writer writer = new AvroSequenceFile.Writer(options)) {
+ // Write some records.
+ for (int i = 0; i < records.length; i += 2) {
+ writer.append(records[i], records[i + 1]);
+ }
}
-
- // Close the file.
- writer.close();
}
}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
index 0df8adc..2daf88f 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
@@ -78,6 +78,7 @@ public class TestAvroTextOutputFormat {
assertEquals("k3\tv3", asString(fileReader.next()));
assertEquals("k4\tv4", asString(fileReader.next()));
assertFalse("End", fileReader.hasNext());
+ fileReader.close();
}
private String asString(ByteBuffer buf) {
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
index 89db4e8..3af0771 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertEquals;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
-import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
@@ -95,15 +94,15 @@ public class TestWordCountTether {
// validate the output
DatumReader<Pair<Utf8, Long>> reader = new SpecificDatumReader<>();
- InputStream cin = new BufferedInputStream(new FileInputStream(outputPath + "/part-00000.avro"));
- DataFileStream<Pair<Utf8, Long>> counts = new DataFileStream<>(cin, reader);
+ DataFileStream<Pair<Utf8, Long>> counts = new DataFileStream<>(
+ new BufferedInputStream(new FileInputStream(outputPath + "/part-00000.avro")), reader);
int numWords = 0;
for (Pair<Utf8, Long> wc : counts) {
assertEquals(wc.key().toString(), WordCountUtil.COUNTS.get(wc.key().toString()), wc.value());
numWords++;
}
- cin.close();
+ counts.close();
assertEquals(WordCountUtil.COUNTS.size(), numWords);
}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
index 902d7be..af2d4df 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
@@ -156,6 +156,7 @@ public class TestAvroKeyValueRecordWriter {
assertNotNull(firstRecord.get());
assertEquals("reflectionData", firstRecord.getKey().toString());
assertEquals(record.attribute, firstRecord.getValue().attribute);
+ avroFileReader.close();
}
@Test
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestCombineAvroKeyValueFileInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestCombineAvroKeyValueFileInputFormat.java
index 539834f..d46a489 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestCombineAvroKeyValueFileInputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestCombineAvroKeyValueFileInputFormat.java
@@ -110,5 +110,6 @@ public class TestCombineAvroKeyValueFileInputFormat {
fail("Unknown key " + mapRecord1.getKey().intValue());
}
}
+ avroFileReader.close();
}
}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
index 7f9f75e..29317d7 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
@@ -74,6 +74,7 @@ public class DataFileGetMetaTool implements Tool {
out.println();
}
}
+ reader.close();
return 0;
}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetSchemaTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetSchemaTool.java
index 1275b94..5f1b601 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetSchemaTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetSchemaTool.java
@@ -46,6 +46,7 @@ public class DataFileGetSchemaTool implements Tool {
DataFileReader<Void> reader = new DataFileReader<>(Util.openSeekableFromFS(args.get(0)),
new GenericDatumReader<>());
out.println(reader.getSchema().toString(true));
+ reader.close();
return 0;
}
}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/RecodecTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/RecodecTool.java
index c851891..73e30c0 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/RecodecTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/RecodecTool.java
@@ -86,6 +86,7 @@ public class RecodecTool implements Tool {
if (outputNeedsClosing) {
output.close();
}
+ writer.close();
return 0;
}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileRepairTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileRepairTool.java
index 577012b..d4e6f3d 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileRepairTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileRepairTool.java
@@ -184,6 +184,7 @@ public class TestDataFileRepairTool {
assertEquals(line, r.next().toString());
}
assertFalse(r.hasNext());
+ r.close();
}
}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestRecodecTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRecodecTool.java
index 58fb860..2ceb8cd 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestRecodecTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRecodecTool.java
@@ -48,8 +48,8 @@ public class TestRecodecTool {
File inputFile = new File(DIR.getRoot(), "input.avro");
Schema schema = Schema.create(Type.STRING);
- DataFileWriter<String> writer = new DataFileWriter<>(new GenericDatumWriter<String>(schema))
- .setMeta(metaKey, metaValue).create(schema, inputFile);
+ DataFileWriter<String> writer = new DataFileWriter<>(new GenericDatumWriter<String>(schema));
+ writer.setMeta(metaKey, metaValue).create(schema, inputFile);
// We write some garbage which should be quite compressible by deflate,
// but is complicated enough that deflate-9 will work better than deflate-1.
// These values were plucked from thin air and worked on the first try, so
@@ -77,8 +77,9 @@ public class TestRecodecTool {
// We assume that metadata copying is orthogonal to codec selection, and
// so only test it for a single file.
- Assert.assertEquals(metaValue,
- new DataFileReader<Void>(defaultOutputFile, new GenericDatumReader<>()).getMetaString(metaKey));
+ try (DataFileReader<Void> reader = new DataFileReader<Void>(defaultOutputFile, new GenericDatumReader<>())) {
+ Assert.assertEquals(metaValue, reader.getMetaString(metaKey));
+ }
// The "default" codec should be the same as null.
Assert.assertEquals(defaultOutputFile.length(), nullOutputFile.length());
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestSpecificCompilerTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestSpecificCompilerTool.java
index 646bdb2..c511d66 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestSpecificCompilerTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestSpecificCompilerTool.java
@@ -163,6 +163,7 @@ public class TestSpecificCompilerTool {
}
sb.append(line);
}
+ reader.close();
return sb.toString();
}
}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java
index bb47f1b..d453069 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java
@@ -98,8 +98,8 @@ public class TestTetherTool {
// validate the output
int numWords = 0;
DatumReader<Pair<Utf8, Long>> reader = new SpecificDatumReader<>();
- try (InputStream cin = new BufferedInputStream(new FileInputStream(outputPathStr + "/part-00000.avro"))) {
- DataFileStream<Pair<Utf8, Long>> counts = new DataFileStream<>(cin, reader);
+ try (InputStream cin = new BufferedInputStream(new FileInputStream(outputPathStr + "/part-00000.avro"));
+ DataFileStream<Pair<Utf8, Long>> counts = new DataFileStream<>(cin, reader)) {
for (Pair<Utf8, Long> wc : counts) {
assertEquals(wc.key().toString(), WordCountUtil.COUNTS.get(wc.key().toString()), wc.value());
numWords++;
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
index 770b812..0532fbe 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
@@ -97,6 +97,7 @@ public class TestTextFileTools {
i++;
}
assertEquals(COUNT, i);
+ file.close();
}
@Test
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java
index d7ed39a..59b3a11 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java
@@ -81,10 +81,11 @@ public class TestEvolvedSchema {
AvroColumnReader.Params params = new Params(serializedTrevni);
params.setSchema(evolved);
- AvroColumnReader<GenericRecord> acr = new AvroColumnReader<>(params);
- GenericRecord readRecord = acr.next();
- Assert.assertEquals(evolvedRecord, readRecord);
- Assert.assertFalse(acr.hasNext());
+ try (AvroColumnReader<GenericRecord> acr = new AvroColumnReader<>(params)) {
+ GenericRecord readRecord = acr.next();
+ Assert.assertEquals(evolvedRecord, readRecord);
+ Assert.assertFalse(acr.hasNext());
+ }
}
@Test
@@ -99,10 +100,11 @@ public class TestEvolvedSchema {
GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(writer);
reader.setExpected(evolved);
- DataFileReader<GenericRecord> dfr = new DataFileReader<>(serializedAvro, reader);
- GenericRecord readRecord = dfr.next();
- Assert.assertEquals(evolvedRecord, readRecord);
- Assert.assertFalse(dfr.hasNext());
+ try (DataFileReader<GenericRecord> dfr = new DataFileReader<>(serializedAvro, reader)) {
+ GenericRecord readRecord = dfr.next();
+ Assert.assertEquals(evolvedRecord, readRecord);
+ Assert.assertFalse(dfr.hasNext());
+ }
}
}
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
index f217fd0..d68d0e0 100644
--- a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
@@ -37,6 +37,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
Assert.assertEquals(0, in.tell());
Assert.assertEquals(0, in.length());
+ out.close();
}
@Test
@@ -49,6 +50,7 @@ public class TestIOBuffers {
Assert.assertEquals(0, bytes[0]);
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
Assert.assertEquals(0, in.readInt());
+ out.close();
}
@Test
@@ -62,6 +64,7 @@ public class TestIOBuffers {
random = TestUtil.createRandom();
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(random.nextBoolean(), in.readValue(ValueType.BOOLEAN));
+ out.close();
}
@Test
@@ -75,6 +78,7 @@ public class TestIOBuffers {
random = TestUtil.createRandom();
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(random.nextInt(), in.readInt());
+ out.close();
}
@Test
@@ -88,6 +92,7 @@ public class TestIOBuffers {
random = TestUtil.createRandom();
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(random.nextLong(), in.readLong());
+ out.close();
}
@Test
@@ -101,6 +106,7 @@ public class TestIOBuffers {
random = TestUtil.createRandom();
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(random.nextInt(), in.readFixed32());
+ out.close();
}
@Test
@@ -114,6 +120,7 @@ public class TestIOBuffers {
random = TestUtil.createRandom();
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(random.nextLong(), in.readFixed64());
+ out.close();
}
@Test
@@ -127,6 +134,7 @@ public class TestIOBuffers {
random = TestUtil.createRandom();
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(random.nextFloat(), in.readFloat(), 0);
+ out.close();
}
@Test
@@ -138,6 +146,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(Double.MIN_VALUE, in.readDouble(), 0);
+ out.close();
}
@Test
@@ -151,6 +160,7 @@ public class TestIOBuffers {
random = TestUtil.createRandom();
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(TestUtil.randomBytes(random), in.readBytes(null));
+ out.close();
}
@Test
@@ -164,6 +174,7 @@ public class TestIOBuffers {
random = TestUtil.createRandom();
for (int i = 0; i < COUNT; i++)
Assert.assertEquals(TestUtil.randomString(random), in.readString());
+ out.close();
}
@Test
@@ -176,6 +187,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.NULL);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -188,6 +200,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.BOOLEAN);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -200,6 +213,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.INT);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -212,6 +226,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.LONG);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -224,6 +239,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.LONG);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -236,6 +252,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.LONG);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -248,6 +265,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.FLOAT);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -260,6 +278,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.DOUBLE);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -272,6 +291,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.STRING);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -284,6 +304,7 @@ public class TestIOBuffers {
InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
in.skipValue(ValueType.BYTES);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
@Test
@@ -297,5 +318,6 @@ public class TestIOBuffers {
long pos = in.tell();
in = new InputBuffer(new InputBytes(out.toByteArray()), pos);
Assert.assertEquals(sentinel, in.readLong());
+ out.close();
}
}
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java
index 5c39e7b..74b7cef 100644
--- a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java
@@ -44,5 +44,6 @@ public class TestInputBytes {
in.read(p, buffer, 0, l);
Assert.assertArrayEquals(Arrays.copyOfRange(data, p, p + l), buffer);
}
+ in.close();
}
}