You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ozone.apache.org by av...@apache.org on 2020/04/04 03:53:01 UTC
[hadoop-ozone] branch master updated: HDDS-3237. Recon should provide the list of datanodes that a missing … (#753)
This is an automated email from the ASF dual-hosted git repository.
avijayan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git
The following commit(s) were added to refs/heads/master by this push:
new e54d0b5 HDDS-3237. Recon should provide the list of datanodes that a missing … (#753)
e54d0b5 is described below
commit e54d0b5681a2ae7491c90ed782c151b6d9288646
Author: Vivek Ratnavel Subramanian <vi...@gmail.com>
AuthorDate: Fri Apr 3 20:52:55 2020 -0700
HDDS-3237. Recon should provide the list of datanodes that a missing … (#753)
---
.../dist/src/main/smoketest/recon/recon-api.robot | 3 +
.../ozone/recon/codegen/JooqCodeGenerator.java | 2 +-
.../recon/codegen/ReconSchemaGenerationModule.java | 2 +
.../recon/schema/ContainerSchemaDefinition.java | 87 +++++++++++++++++
.../recon/schema/ReconTaskSchemaDefinition.java | 2 +
.../ozone/recon/schema/StatsSchemaDefinition.java | 2 +
.../recon/schema/UtilizationSchemaDefinition.java | 19 +---
.../hadoop/ozone/recon/ReconControllerModule.java | 8 ++
.../hadoop/ozone/recon/api/ContainerEndpoint.java | 29 +++++-
.../recon/api/types/MissingContainerMetadata.java | 14 +--
.../ozone/recon/fsck/MissingContainerTask.java | 22 ++---
.../recon/persistence/ContainerSchemaManager.java | 104 +++++++++++++++++++++
.../ozone/recon/scm/ReconContainerManager.java | 26 +++++-
.../scm/ReconStorageContainerManagerFacade.java | 14 +--
.../spi/impl/ContainerDBServiceProviderImpl.java | 10 +-
.../webapps/recon/ozone-recon-web/api/db.json | 46 +++++++--
.../recon/ozone-recon-web/src/utils/common.tsx | 9 +-
.../views/MissingContainers/MissingContainers.less | 4 +
.../views/MissingContainers/MissingContainers.tsx | 44 +++++++--
.../ozone/recon/api/TestContainerEndpoint.java | 71 ++++++++++----
.../hadoop/ozone/recon/api/TestEndpoints.java | 9 +-
.../ozone/recon/fsck/TestMissingContainerTask.java | 25 +++--
.../TestUtilizationSchemaDefinition.java | 2 -
.../scm/AbstractReconContainerManagerTest.java | 3 +-
.../recon/types/GuiceInjectorUtilsForTests.java | 6 ++
25 files changed, 461 insertions(+), 102 deletions(-)
diff --git a/hadoop-ozone/dist/src/main/smoketest/recon/recon-api.robot b/hadoop-ozone/dist/src/main/smoketest/recon/recon-api.robot
index 621bbd0..7073849 100644
--- a/hadoop-ozone/dist/src/main/smoketest/recon/recon-api.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/recon/recon-api.robot
@@ -62,6 +62,9 @@ Check if Recon picks up DN heartbeats
Should contain ${result} \"healthyDatanodes\":3
Should contain ${result} \"pipelines\":4
+ ${result} = Execute curl --negotiate -u : -v ${API_ENDPOINT_URL}/containers/1/replicaHistory
+ Should contain ${result} \"containerId\":1
+
Check if Recon Web UI is up
Run Keyword if '${SECURITY_ENABLED}' == 'true' Kinit HTTP user
${result} = Execute curl --negotiate -u : -v ${ENDPOINT_URL}
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java
index fce4e0b..ad9b819 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java
@@ -129,7 +129,7 @@ public class JooqCodeGenerator {
public static void main(String[] args) {
if (args.length < 1) {
throw new IllegalArgumentException("Missing required arguments: " +
- "Need a ouput directory for generated code.\nUsage: " +
+ "Need an output directory for generated code.\nUsage: " +
"org.apache.hadoop.ozone.recon.persistence.JooqCodeGenerator " +
"<outputDirectory>.");
}
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSchemaGenerationModule.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSchemaGenerationModule.java
index 2de6da8..8272c2b 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSchemaGenerationModule.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSchemaGenerationModule.java
@@ -17,6 +17,7 @@
*/
package org.hadoop.ozone.recon.codegen;
+import org.hadoop.ozone.recon.schema.ContainerSchemaDefinition;
import org.hadoop.ozone.recon.schema.ReconTaskSchemaDefinition;
import org.hadoop.ozone.recon.schema.ReconSchemaDefinition;
import org.hadoop.ozone.recon.schema.StatsSchemaDefinition;
@@ -36,6 +37,7 @@ public class ReconSchemaGenerationModule extends AbstractModule {
Multibinder<ReconSchemaDefinition> schemaBinder =
Multibinder.newSetBinder(binder(), ReconSchemaDefinition.class);
schemaBinder.addBinding().to(UtilizationSchemaDefinition.class);
+ schemaBinder.addBinding().to(ContainerSchemaDefinition.class);
schemaBinder.addBinding().to(ReconTaskSchemaDefinition.class);
schemaBinder.addBinding().to(StatsSchemaDefinition.class);
}
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java
new file mode 100644
index 0000000..243cb24
--- /dev/null
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.hadoop.ozone.recon.schema;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import org.jooq.DSLContext;
+import org.jooq.impl.DSL;
+import org.jooq.impl.SQLDataType;
+
+import javax.sql.DataSource;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+/**
+ * Class used to create tables that are required for tracking containers.
+ */
+@Singleton
+public class ContainerSchemaDefinition implements ReconSchemaDefinition {
+
+ public static final String CONTAINER_HISTORY_TABLE_NAME =
+ "container_history";
+ public static final String MISSING_CONTAINERS_TABLE_NAME =
+ "missing_containers";
+ private static final String CONTAINER_ID = "container_id";
+ private final DataSource dataSource;
+ private DSLContext dslContext;
+
+ @Inject
+ ContainerSchemaDefinition(DataSource dataSource) {
+ this.dataSource = dataSource;
+ }
+
+ @Override
+ public void initializeSchema() throws SQLException {
+ Connection conn = dataSource.getConnection();
+ dslContext = DSL.using(conn);
+ createContainerHistoryTable();
+ createMissingContainersTable();
+ }
+
+ /**
+ * Create the Container History table.
+ */
+ private void createContainerHistoryTable() {
+ dslContext.createTableIfNotExists(CONTAINER_HISTORY_TABLE_NAME)
+ .column(CONTAINER_ID, SQLDataType.BIGINT)
+ .column("datanode_host", SQLDataType.VARCHAR(1024))
+ .column("first_report_timestamp", SQLDataType.BIGINT)
+ .column("last_report_timestamp", SQLDataType.BIGINT)
+ .constraint(DSL.constraint("pk_container_id_datanode_host")
+ .primaryKey(CONTAINER_ID, "datanode_host"))
+ .execute();
+ }
+
+ /**
+ * Create the Missing Containers table.
+ */
+ private void createMissingContainersTable() {
+ dslContext.createTableIfNotExists(MISSING_CONTAINERS_TABLE_NAME)
+ .column(CONTAINER_ID, SQLDataType.BIGINT)
+ .column("missing_since", SQLDataType.BIGINT)
+ .constraint(DSL.constraint("pk_container_id")
+ .primaryKey(CONTAINER_ID))
+ .execute();
+ }
+
+ public DSLContext getDSLContext() {
+ return dslContext;
+ }
+}
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java
index 1856cc2..eec3cd5 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java
@@ -23,6 +23,7 @@ import java.sql.SQLException;
import javax.sql.DataSource;
+import com.google.inject.Singleton;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;
@@ -32,6 +33,7 @@ import com.google.inject.Inject;
* Class used to create tables that are required for Recon's task
* management.
*/
+@Singleton
public class ReconTaskSchemaDefinition implements ReconSchemaDefinition {
public static final String RECON_TASK_STATUS_TABLE_NAME =
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java
index 6763bc8..406585d 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java
@@ -19,6 +19,7 @@
package org.hadoop.ozone.recon.schema;
import com.google.inject.Inject;
+import com.google.inject.Singleton;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;
@@ -29,6 +30,7 @@ import java.sql.SQLException;
/**
* Class used to create tables that are required for storing Ozone statistics.
*/
+@Singleton
public class StatsSchemaDefinition implements ReconSchemaDefinition {
public static final String GLOBAL_STATS_TABLE_NAME = "global_stats";
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java
index bc48c38..95df8f7 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java
@@ -22,6 +22,7 @@ import java.sql.SQLException;
import javax.sql.DataSource;
+import com.google.inject.Singleton;
import org.jooq.impl.DSL;
import org.jooq.impl.SQLDataType;
import org.springframework.transaction.annotation.Transactional;
@@ -31,6 +32,7 @@ import com.google.inject.Inject;
/**
* Programmatic definition of Recon DDL.
*/
+@Singleton
public class UtilizationSchemaDefinition implements ReconSchemaDefinition {
private final DataSource dataSource;
@@ -41,9 +43,6 @@ public class UtilizationSchemaDefinition implements ReconSchemaDefinition {
public static final String FILE_COUNT_BY_SIZE_TABLE_NAME =
"file_count_by_size";
- public static final String MISSING_CONTAINERS_TABLE_NAME =
- "missing_containers";
-
@Inject
UtilizationSchemaDefinition(DataSource dataSource) {
this.dataSource = dataSource;
@@ -55,10 +54,9 @@ public class UtilizationSchemaDefinition implements ReconSchemaDefinition {
Connection conn = dataSource.getConnection();
createClusterGrowthTable(conn);
createFileSizeCount(conn);
- createMissingContainersTable(conn);
}
- void createClusterGrowthTable(Connection conn) {
+ private void createClusterGrowthTable(Connection conn) {
DSL.using(conn).createTableIfNotExists(CLUSTER_GROWTH_DAILY_TABLE_NAME)
.column("timestamp", SQLDataType.TIMESTAMP)
.column("datanode_id", SQLDataType.INTEGER)
@@ -73,7 +71,7 @@ public class UtilizationSchemaDefinition implements ReconSchemaDefinition {
.execute();
}
- void createFileSizeCount(Connection conn) {
+ private void createFileSizeCount(Connection conn) {
DSL.using(conn).createTableIfNotExists(FILE_COUNT_BY_SIZE_TABLE_NAME)
.column("file_size", SQLDataType.BIGINT)
.column("count", SQLDataType.BIGINT)
@@ -81,13 +79,4 @@ public class UtilizationSchemaDefinition implements ReconSchemaDefinition {
.primaryKey("file_size"))
.execute();
}
-
- void createMissingContainersTable(Connection conn) {
- DSL.using(conn).createTableIfNotExists(MISSING_CONTAINERS_TABLE_NAME)
- .column("container_id", SQLDataType.BIGINT)
- .column("missing_since", SQLDataType.BIGINT)
- .constraint(DSL.constraint("pk_container_id")
- .primaryKey("container_id"))
- .execute();
- }
}
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java
index 8ceb1e0..5992a92 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager;
import org.apache.hadoop.ozone.om.OMMetadataManager;
import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol;
import org.apache.hadoop.ozone.om.protocolPB.OzoneManagerProtocolClientSideTranslatorPB;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.persistence.DataSourceConfiguration;
import org.apache.hadoop.ozone.recon.persistence.JooqPersistenceModule;
import org.apache.hadoop.ozone.recon.recovery.ReconOMMetadataManager;
@@ -59,6 +60,7 @@ import org.apache.hadoop.ozone.recon.tasks.ReconTaskControllerImpl;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.hdds.utils.db.DBStore;
import org.apache.ratis.protocol.ClientId;
+import org.hadoop.ozone.recon.schema.tables.daos.ContainerHistoryDao;
import org.hadoop.ozone.recon.schema.tables.daos.FileCountBySizeDao;
import org.hadoop.ozone.recon.schema.tables.daos.MissingContainersDao;
import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao;
@@ -88,6 +90,7 @@ public class ReconControllerModule extends AbstractModule {
.to(ReconOmMetadataManagerImpl.class);
bind(OMMetadataManager.class).to(ReconOmMetadataManagerImpl.class);
+ bind(ContainerSchemaManager.class).in(Singleton.class);
bind(ContainerDBServiceProvider.class)
.to(ContainerDBServiceProviderImpl.class).in(Singleton.class);
bind(OzoneManagerServiceProvider.class)
@@ -118,6 +121,11 @@ public class ReconControllerModule extends AbstractModule {
}
@Provides
+ ContainerHistoryDao getContainerHistoryDao(final Configuration sqlConfig) {
+ return new ContainerHistoryDao(sqlConfig);
+ }
+
+ @Provides
FileCountBySizeDao getFileCountBySizeDao(final Configuration sqlConfig) {
return new FileCountBySizeDao(sqlConfig);
}
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerEndpoint.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerEndpoint.java
index b33db8d..325b0b9 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerEndpoint.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerEndpoint.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.ozone.recon.api;
import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -53,9 +52,11 @@ import org.apache.hadoop.ozone.recon.api.types.KeyMetadata.ContainerBlockMetadat
import org.apache.hadoop.ozone.recon.api.types.KeysResponse;
import org.apache.hadoop.ozone.recon.api.types.MissingContainerMetadata;
import org.apache.hadoop.ozone.recon.api.types.MissingContainersResponse;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.recovery.ReconOMMetadataManager;
import org.apache.hadoop.ozone.recon.scm.ReconContainerManager;
import org.apache.hadoop.ozone.recon.spi.ContainerDBServiceProvider;
+import org.hadoop.ozone.recon.schema.tables.pojos.ContainerHistory;
import static org.apache.hadoop.ozone.recon.ReconConstants.DEFAULT_FETCH_COUNT;
import static org.apache.hadoop.ozone.recon.ReconConstants.PREV_CONTAINER_ID_DEFAULT_VALUE;
@@ -77,11 +78,14 @@ public class ContainerEndpoint {
private ReconOMMetadataManager omMetadataManager;
private ReconContainerManager containerManager;
+ private ContainerSchemaManager containerSchemaManager;
@Inject
- public ContainerEndpoint(OzoneStorageContainerManager reconSCM) {
+ public ContainerEndpoint(OzoneStorageContainerManager reconSCM,
+ ContainerSchemaManager containerSchemaManager) {
this.containerManager =
(ReconContainerManager) reconSCM.getContainerManager();
+ this.containerSchemaManager = containerSchemaManager;
}
/**
@@ -204,6 +208,21 @@ public class ContainerEndpoint {
}
/**
+ * Return Container replica history for the container identified by the id
+ * param.
+ *
+ * @param containerID the given containerID.
+ * @return {@link Response}
+ */
+ @GET
+ @Path("/{id}/replicaHistory")
+ public Response getReplicaHistoryForContainer(
+ @PathParam("id") Long containerID) {
+ return Response.ok(
+ containerSchemaManager.getAllContainerHistory(containerID)).build();
+ }
+
+ /**
* Return
* {@link org.apache.hadoop.ozone.recon.api.types.MissingContainerMetadata}
* for all missing containers.
@@ -222,9 +241,9 @@ public class ContainerEndpoint {
long keyCount = containerInfo.getNumberOfKeys();
UUID pipelineID = containerInfo.getPipelineID().getId();
- // TODO: Find out which datanodes had replicas of this container
- // and populate this list
- List datanodes = Collections.emptyList();
+ List<ContainerHistory> datanodes =
+ containerSchemaManager.getLatestContainerHistory(
+ containerID, containerInfo.getReplicationFactor().getNumber());
missingContainers.add(new MissingContainerMetadata(containerID,
container.getMissingSince(), keyCount, pipelineID, datanodes));
} catch (IOException ioEx) {
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/MissingContainerMetadata.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/MissingContainerMetadata.java
index f24bc57..3eff647 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/MissingContainerMetadata.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/MissingContainerMetadata.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.ozone.recon.api.types;
+import org.hadoop.ozone.recon.schema.tables.pojos.ContainerHistory;
+
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
@@ -41,17 +43,17 @@ public class MissingContainerMetadata {
@XmlElement(name = "pipelineID")
private UUID pipelineID;
- @XmlElement(name = "datanodes")
- private List<String> datanodes;
+ @XmlElement(name = "replicas")
+ private List<ContainerHistory> replicas;
public MissingContainerMetadata(long containerID, long missingSince,
long keys, UUID pipelineID,
- List<String> datanodes) {
+ List<ContainerHistory> replicas) {
this.containerID = containerID;
this.missingSince = missingSince;
this.keys = keys;
this.pipelineID = pipelineID;
- this.datanodes = datanodes;
+ this.replicas = replicas;
}
public long getContainerID() {
@@ -62,8 +64,8 @@ public class MissingContainerMetadata {
return keys;
}
- public List<String> getDatanodes() {
- return datanodes;
+ public List<ContainerHistory> getReplicas() {
+ return replicas;
}
public long getMissingSince() {
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/fsck/MissingContainerTask.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/fsck/MissingContainerTask.java
index 6db2025..ca4da17 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/fsck/MissingContainerTask.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/fsck/MissingContainerTask.java
@@ -28,11 +28,10 @@ import org.apache.hadoop.hdds.scm.container.ContainerManager;
import org.apache.hadoop.hdds.scm.container.ContainerNotFoundException;
import org.apache.hadoop.hdds.scm.container.ContainerReplica;
import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.scm.ReconScmTask;
import org.apache.hadoop.util.Time;
-import org.hadoop.ozone.recon.schema.tables.daos.MissingContainersDao;
import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao;
-import org.hadoop.ozone.recon.schema.tables.pojos.MissingContainers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.CollectionUtils;
@@ -47,16 +46,16 @@ public class MissingContainerTask extends ReconScmTask {
LoggerFactory.getLogger(MissingContainerTask.class);
private ContainerManager containerManager;
- private MissingContainersDao missingContainersDao;
+ private ContainerSchemaManager containerSchemaManager;
private static final long INTERVAL = 5 * 60 * 1000L;
@Inject
public MissingContainerTask(
OzoneStorageContainerManager ozoneStorageContainerManager,
ReconTaskStatusDao reconTaskStatusDao,
- MissingContainersDao missingContainersDao) {
+ ContainerSchemaManager containerSchemaManager) {
super(reconTaskStatusDao);
- this.missingContainersDao = missingContainersDao;
+ this.containerSchemaManager = containerSchemaManager;
this.containerManager = ozoneStorageContainerManager.getContainerManager();
}
@@ -89,17 +88,18 @@ public class MissingContainerTask extends ReconScmTask {
boolean isAllUnhealthy =
containerReplicas.stream().allMatch(replica ->
replica.getState().equals(State.UNHEALTHY));
+ boolean isMissingContainer =
+ containerSchemaManager.isMissingContainer(containerID.getId());
if (CollectionUtils.isEmpty(containerReplicas) || isAllUnhealthy) {
- if (!missingContainersDao.existsById(containerID.getId())) {
+ if (!isMissingContainer) {
LOG.info("Found a missing container with ID {}. Adding it to the " +
"database", containerID.getId());
- MissingContainers newRecord =
- new MissingContainers(containerID.getId(), currentTime);
- missingContainersDao.insert(newRecord);
+ containerSchemaManager.addMissingContainer(containerID.getId(),
+ currentTime);
}
} else {
- if (missingContainersDao.existsById(containerID.getId())) {
- missingContainersDao.deleteById(containerID.getId());
+ if (isMissingContainer) {
+ containerSchemaManager.deleteMissingContainer(containerID.getId());
}
}
} catch (ContainerNotFoundException e) {
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/ContainerSchemaManager.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/ContainerSchemaManager.java
new file mode 100644
index 0000000..6dc70a2
--- /dev/null
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/ContainerSchemaManager.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.recon.persistence;
+
+import static org.hadoop.ozone.recon.schema.tables.ContainerHistoryTable.CONTAINER_HISTORY;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import org.hadoop.ozone.recon.schema.ContainerSchemaDefinition;
+import org.hadoop.ozone.recon.schema.tables.daos.ContainerHistoryDao;
+import org.hadoop.ozone.recon.schema.tables.daos.MissingContainersDao;
+import org.hadoop.ozone.recon.schema.tables.pojos.ContainerHistory;
+import org.hadoop.ozone.recon.schema.tables.pojos.MissingContainers;
+import org.jooq.DSLContext;
+import org.jooq.Record2;
+import java.util.List;
+
+/**
+ * Provide a high level API to access the Container Schema.
+ */
+@Singleton
+public class ContainerSchemaManager {
+ private ContainerHistoryDao containerHistoryDao;
+ private MissingContainersDao missingContainersDao;
+ private ContainerSchemaDefinition containerSchemaDefinition;
+
+ @Inject
+ public ContainerSchemaManager(ContainerHistoryDao containerHistoryDao,
+ ContainerSchemaDefinition containerSchemaDefinition,
+ MissingContainersDao missingContainersDao) {
+ this.containerHistoryDao = containerHistoryDao;
+ this.missingContainersDao = missingContainersDao;
+ this.containerSchemaDefinition = containerSchemaDefinition;
+ }
+
+ public void addMissingContainer(long containerID, long time) {
+ MissingContainers record = new MissingContainers(containerID, time);
+ missingContainersDao.insert(record);
+ }
+
+ public List<MissingContainers> getAllMissingContainers() {
+ return missingContainersDao.findAll();
+ }
+
+ public boolean isMissingContainer(long containerID) {
+ return missingContainersDao.existsById(containerID);
+ }
+
+ public void deleteMissingContainer(long containerID) {
+ missingContainersDao.deleteById(containerID);
+ }
+
+ public void upsertContainerHistory(long containerID, String datanode,
+ long time) {
+ DSLContext dslContext = containerSchemaDefinition.getDSLContext();
+ Record2<Long, String> recordToFind =
+ dslContext.newRecord(
+ CONTAINER_HISTORY.CONTAINER_ID,
+ CONTAINER_HISTORY.DATANODE_HOST).value1(containerID).value2(datanode);
+ ContainerHistory newRecord = new ContainerHistory();
+ newRecord.setContainerId(containerID);
+ newRecord.setDatanodeHost(datanode);
+ newRecord.setLastReportTimestamp(time);
+ ContainerHistory record = containerHistoryDao.findById(recordToFind);
+ if (record != null) {
+ newRecord.setFirstReportTimestamp(record.getFirstReportTimestamp());
+ containerHistoryDao.update(newRecord);
+ } else {
+ newRecord.setFirstReportTimestamp(time);
+ containerHistoryDao.insert(newRecord);
+ }
+ }
+
+ public List<ContainerHistory> getAllContainerHistory(long containerID) {
+ return containerHistoryDao.fetchByContainerId(containerID);
+ }
+
+ public List<ContainerHistory> getLatestContainerHistory(long containerID,
+ int limit) {
+ DSLContext dslContext = containerSchemaDefinition.getDSLContext();
+ // Get container history sorted in descending order of last report timestamp
+ return dslContext.select()
+ .from(CONTAINER_HISTORY)
+ .where(CONTAINER_HISTORY.CONTAINER_ID.eq(containerID))
+ .orderBy(CONTAINER_HISTORY.LAST_REPORT_TIMESTAMP.desc())
+ .limit(limit)
+ .fetchInto(ContainerHistory.class);
+ }
+}
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconContainerManager.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconContainerManager.java
index 06ee5ce..754b525 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconContainerManager.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconContainerManager.java
@@ -27,10 +27,13 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.ContainerNotFoundException;
+import org.apache.hadoop.hdds.scm.container.ContainerReplica;
import org.apache.hadoop.hdds.scm.container.SCMContainerManager;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineManager;
import org.apache.hadoop.ozone.recon.ReconUtils;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.spi.StorageContainerServiceProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -43,6 +46,7 @@ public class ReconContainerManager extends SCMContainerManager {
private static final Logger LOG =
LoggerFactory.getLogger(ReconContainerManager.class);
private StorageContainerServiceProvider scmClient;
+ private ContainerSchemaManager containerSchemaManager;
/**
* Constructs a mapping class that creates mapping between container names
@@ -58,9 +62,11 @@ public class ReconContainerManager extends SCMContainerManager {
*/
public ReconContainerManager(
Configuration conf, PipelineManager pipelineManager,
- StorageContainerServiceProvider scm) throws IOException {
+ StorageContainerServiceProvider scm,
+ ContainerSchemaManager containerSchemaManager) throws IOException {
super(conf, pipelineManager);
this.scmClient = scm;
+ this.containerSchemaManager = containerSchemaManager;
}
@Override
@@ -128,4 +134,22 @@ public class ReconContainerManager extends SCMContainerManager {
getLock().unlock();
}
}
+
+ /**
+ * Add a container Replica for given DataNode.
+ *
+ * @param containerID
+ * @param replica
+ */
+ @Override
+ public void updateContainerReplica(ContainerID containerID,
+ ContainerReplica replica)
+ throws ContainerNotFoundException {
+ super.updateContainerReplica(containerID, replica);
+ // Update container_history table
+ long currentTime = System.currentTimeMillis();
+ String datanodeHost = replica.getDatanodeDetails().getHostName();
+ containerSchemaManager.upsertContainerHistory(containerID.getId(),
+ datanodeHost, currentTime);
+ }
}
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconStorageContainerManagerFacade.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconStorageContainerManagerFacade.java
index 9cb6a31..61de428 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconStorageContainerManagerFacade.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/scm/ReconStorageContainerManagerFacade.java
@@ -54,8 +54,8 @@ import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager;
import org.apache.hadoop.hdds.server.events.EventQueue;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ozone.recon.fsck.MissingContainerTask;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.spi.StorageContainerServiceProvider;
-import org.hadoop.ozone.recon.schema.tables.daos.MissingContainersDao;
import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -83,9 +83,9 @@ public class ReconStorageContainerManagerFacade
@Inject
public ReconStorageContainerManagerFacade(OzoneConfiguration conf,
- StorageContainerServiceProvider scmServiceProvider,
- MissingContainersDao missingContainersDao,
- ReconTaskStatusDao reconTaskStatusDao)
+ StorageContainerServiceProvider scmServiceProvider,
+ ReconTaskStatusDao reconTaskStatusDao,
+ ContainerSchemaManager containerSchemaManager)
throws IOException {
this.eventQueue = new EventQueue();
eventQueue.setSilent(true);
@@ -99,7 +99,7 @@ public class ReconStorageContainerManagerFacade
this.pipelineManager =
new ReconPipelineManager(conf, nodeManager, eventQueue);
this.containerManager = new ReconContainerManager(conf, pipelineManager,
- scmServiceProvider);
+ scmServiceProvider, containerSchemaManager);
this.scmServiceProvider = scmServiceProvider;
NodeReportHandler nodeReportHandler =
@@ -148,12 +148,12 @@ public class ReconStorageContainerManagerFacade
reconScmTasks.add(new MissingContainerTask(
this,
reconTaskStatusDao,
- missingContainersDao));
+ containerSchemaManager));
reconScmTasks.forEach(ReconScmTask::register);
}
/**
- * For every config key which is prefixed by 'recon.scm', create a new
+ * For every config key which is prefixed by 'recon.scm', create a new
* config key without the prefix keeping the same value.
* For example, if recon.scm.a.b.c = xyz, we add a new config like
* a.b.c = xyz. This is done to override Recon's passive SCM configs if
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/ContainerDBServiceProviderImpl.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/ContainerDBServiceProviderImpl.java
index 11f8bfe..30c397b 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/ContainerDBServiceProviderImpl.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/ContainerDBServiceProviderImpl.java
@@ -42,13 +42,13 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.recon.ReconUtils;
import org.apache.hadoop.ozone.recon.api.types.ContainerKeyPrefix;
import org.apache.hadoop.ozone.recon.api.types.ContainerMetadata;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.spi.ContainerDBServiceProvider;
import org.apache.hadoop.hdds.utils.db.DBStore;
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
import org.apache.hadoop.hdds.utils.db.TableIterator;
import org.hadoop.ozone.recon.schema.tables.daos.GlobalStatsDao;
-import org.hadoop.ozone.recon.schema.tables.daos.MissingContainersDao;
import org.hadoop.ozone.recon.schema.tables.pojos.GlobalStats;
import org.hadoop.ozone.recon.schema.tables.pojos.MissingContainers;
import org.jooq.Configuration;
@@ -70,6 +70,9 @@ public class ContainerDBServiceProviderImpl
private GlobalStatsDao globalStatsDao;
@Inject
+ private ContainerSchemaManager containerSchemaManager;
+
+ @Inject
private OzoneConfiguration configuration;
@Inject
@@ -82,9 +85,6 @@ public class ContainerDBServiceProviderImpl
private ReconUtils reconUtils;
@Inject
- private MissingContainersDao missingContainersDao;
-
- @Inject
public ContainerDBServiceProviderImpl(DBStore dbStore,
Configuration sqlConfiguration) {
containerDbStore = dbStore;
@@ -359,7 +359,7 @@ public class ContainerDBServiceProviderImpl
}
public List<MissingContainers> getMissingContainers() {
- return missingContainersDao.findAll();
+ return containerSchemaManager.getAllMissingContainers();
}
@Override
diff --git a/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/api/db.json b/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/api/db.json
index ab807df..b4f181c 100644
--- a/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/api/db.json
+++ b/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/api/db.json
@@ -357,10 +357,25 @@
{
"id": 1,
"keys": 3876,
- "datanodes": [
- "localhost1.storage.enterprise.com",
- "localhost3.storage.enterprise.com",
- "localhost5.storage.enterprise.com"
+ "replicas": [
+ {
+ "containerId": 1,
+ "datanodeHost": "localhost1.storage.enterprise.com",
+ "firstReportTimestamp": 1578491371100,
+ "lastReportTimestamp": 1578491371528
+ },
+ {
+ "containerId": 1,
+ "datanodeHost": "localhost3.storage.enterprise.com",
+ "firstReportTimestamp": 1578491370100,
+ "lastReportTimestamp": 1578491371328
+ },
+ {
+ "containerId": 1,
+ "datanodeHost": "localhost5.storage.enterprise.com",
+ "firstReportTimestamp": 1578491371200,
+ "lastReportTimestamp": 1578491371528
+ }
],
"missingSince": 1578491371528,
"pipelineId": "05e3d908-ff01-4ce6-ad75-f3ec79bcc798"
@@ -368,10 +383,25 @@
{
"id": 2,
"keys": 5943,
- "datanodes": [
- "localhost1.storage.enterprise.com",
- "localhost3.storage.enterprise.com",
- "localhost5.storage.enterprise.com"
+ "replicas": [
+ {
+ "containerId": 2,
+ "datanodeHost": "localhost1.storage.enterprise.com",
+ "firstReportTimestamp": 1578491371100,
+ "lastReportTimestamp": 1578491371528
+ },
+ {
+ "containerId": 2,
+ "datanodeHost": "localhost3.storage.enterprise.com",
+ "firstReportTimestamp": 1578491370100,
+ "lastReportTimestamp": 1578491371328
+ },
+ {
+ "containerId": 2,
+ "datanodeHost": "localhost5.storage.enterprise.com",
+ "firstReportTimestamp": 1578491371200,
+ "lastReportTimestamp": 1578491371528
+ }
],
"missingSince": 1578491471528,
"pipelineId": "04a5d908-ff01-4ce6-ad75-f3ec73dfc8a2"
diff --git a/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/utils/common.tsx b/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/utils/common.tsx
index 97b68d8..e96b175 100644
--- a/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/utils/common.tsx
+++ b/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/utils/common.tsx
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -16,4 +16,9 @@
* limitations under the License.
*/
-export const getCapacityPercent = (used: number, total: number) => Math.round((used / total) * 100);
\ No newline at end of file
+import moment from "moment";
+
+export const getCapacityPercent = (used: number, total: number) => Math.round((used / total) * 100);
+
+export const timeFormat = (time: number) => time > 0 ?
+ moment(time).format('lll') : 'NA';
diff --git a/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/views/MissingContainers/MissingContainers.less b/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/views/MissingContainers/MissingContainers.less
index 5d5f1e3..bb05b09 100644
--- a/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/views/MissingContainers/MissingContainers.less
+++ b/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/views/MissingContainers/MissingContainers.less
@@ -15,3 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
+.pl-5 {
+ padding-left: 5px;
+}
\ No newline at end of file
diff --git a/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/views/MissingContainers/MissingContainers.tsx b/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/views/MissingContainers/MissingContainers.tsx
index d8b4b11..9c9d4ff 100644
--- a/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/views/MissingContainers/MissingContainers.tsx
+++ b/hadoop-ozone/recon/src/main/resources/webapps/recon/ozone-recon-web/src/views/MissingContainers/MissingContainers.tsx
@@ -18,20 +18,28 @@
import React from 'react';
import axios from 'axios';
-import {Table} from 'antd';
+import {Icon, Table, Tooltip} from 'antd';
import './MissingContainers.less';
import {PaginationConfig} from "antd/lib/pagination";
import prettyBytes from "pretty-bytes";
import moment from "moment";
+import {timeFormat} from "../../utils/common";
interface MissingContainerResponse {
containerID: number;
keys: number;
- datanodes: string[];
+ replicas: ContainerReplica[];
missingSince: number;
pipelineID: string;
}
+export interface ContainerReplica {
+ containerId: number;
+ datanodeHost: string;
+ firstReportTimestamp: number;
+ lastReportTimestamp: number;
+}
+
export interface MissingContainersResponse {
totalCount: number;
containers: MissingContainerResponse[];
@@ -68,9 +76,28 @@ const COLUMNS = [
},
{
title: 'Datanodes',
- dataIndex: 'datanodes',
- key: 'datanodes',
- render: (datanodes: string[]) => <div>{datanodes.map(datanode => <div key={datanode}>{datanode}</div>)}</div>
+ dataIndex: 'replicas',
+ key: 'replicas',
+ render: (replicas: ContainerReplica[]) => <div>
+ {replicas.map(replica => {
+ const tooltip = <div>
+ <div>First Report Time: {timeFormat(replica.firstReportTimestamp)}</div>
+ <div>Last Report Time: {timeFormat(replica.lastReportTimestamp)}</div>
+ </div>;
+ return(
+ <div key={replica.datanodeHost}>
+ <Tooltip placement="left"
+ title={tooltip}>
+ <Icon type="info-circle" className="icon-small"/>
+ </Tooltip>
+ <span className="pl-5">
+ {replica.datanodeHost}
+ </span>
+ </div>
+ );
+ }
+ )}
+ </div>
},
{
title: 'Pipeline ID',
@@ -82,8 +109,7 @@ const COLUMNS = [
title: 'Missing Since',
dataIndex: 'missingSince',
key: 'missingSince',
- render: (missingSince: number) => missingSince > 0 ?
- moment(missingSince).format('lll') : 'NA',
+ render: (missingSince: number) => timeFormat(missingSince),
sorter: (a: MissingContainerResponse, b: MissingContainerResponse) => a.missingSince - b.missingSince
},
];
@@ -114,13 +140,13 @@ const KEY_TABLE_COLUMNS = [
title: 'Date Created',
dataIndex: 'CreationTime',
key: 'CreationTime',
- render: (date: number) => moment(date).format('lll')
+ render: (date: string) => moment(date).format('lll')
},
{
title: 'Date Modified',
dataIndex: 'ModificationTime',
key: 'ModificationTime',
- render: (date: number) => moment(date).format('lll')
+ render: (date: string) => moment(date).format('lll')
}
];
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestContainerEndpoint.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestContainerEndpoint.java
index 29ddcdf..daee217 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestContainerEndpoint.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestContainerEndpoint.java
@@ -24,15 +24,19 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import javax.ws.rs.core.Response;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hdds.client.BlockID;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.ContainerManager;
@@ -50,6 +54,7 @@ import org.apache.hadoop.ozone.recon.api.types.KeyMetadata;
import org.apache.hadoop.ozone.recon.api.types.KeysResponse;
import org.apache.hadoop.ozone.recon.api.types.MissingContainerMetadata;
import org.apache.hadoop.ozone.recon.api.types.MissingContainersResponse;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.recovery.ReconOMMetadataManager;
import org.apache.hadoop.ozone.recon.scm.ReconContainerManager;
import org.apache.hadoop.ozone.recon.scm.ReconStorageContainerManagerFacade;
@@ -59,12 +64,11 @@ import org.apache.hadoop.ozone.recon.spi.impl.OzoneManagerServiceProviderImpl;
import org.apache.hadoop.ozone.recon.spi.impl.StorageContainerServiceProviderImpl;
import org.apache.hadoop.ozone.recon.tasks.ContainerKeyMapperTask;
import org.apache.hadoop.hdds.utils.db.Table;
+import org.hadoop.ozone.recon.schema.ContainerSchemaDefinition;
import org.hadoop.ozone.recon.schema.ReconTaskSchemaDefinition;
import org.hadoop.ozone.recon.schema.StatsSchemaDefinition;
-import org.hadoop.ozone.recon.schema.UtilizationSchemaDefinition;
-import org.hadoop.ozone.recon.schema.tables.daos.MissingContainersDao;
import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao;
-import org.hadoop.ozone.recon.schema.tables.pojos.MissingContainers;
+import org.hadoop.ozone.recon.schema.tables.pojos.ContainerHistory;
import org.jooq.Configuration;
import org.junit.Assert;
import org.junit.Before;
@@ -83,8 +87,8 @@ public class TestContainerEndpoint extends AbstractOMMetadataManagerTest {
private GuiceInjectorUtilsForTestsImpl guiceInjectorTest =
new GuiceInjectorUtilsForTestsImpl();
private boolean isSetupDone = false;
+ private ContainerSchemaManager containerSchemaManager;
private ReconOMMetadataManager reconOMMetadataManager;
- private MissingContainersDao missingContainersDao;
private ContainerID containerID = new ContainerID(1L);
private PipelineID pipelineID;
private long keyCount = 5L;
@@ -110,19 +114,19 @@ public class TestContainerEndpoint extends AbstractOMMetadataManagerTest {
new ContainerInfo.Builder()
.setContainerID(containerID.getId())
.setNumberOfKeys(keyCount)
+ .setReplicationFactor(ReplicationFactor.THREE)
.setPipelineID(pipelineID)
.build()
);
when(mockReconSCM.getContainerManager())
.thenReturn(mockContainerManager);
+ Configuration sqlConfiguration =
+ parentInjector.getInstance((Configuration.class));
Injector injector = parentInjector.createChildInjector(
new AbstractModule() {
@Override
protected void configure() {
- Configuration sqlConfiguration =
- parentInjector.getInstance((Configuration.class));
-
try {
ReconTaskSchemaDefinition taskSchemaDefinition = parentInjector
.getInstance(ReconTaskSchemaDefinition.class);
@@ -133,7 +137,6 @@ public class TestContainerEndpoint extends AbstractOMMetadataManagerTest {
ReconTaskStatusDao reconTaskStatusDao =
new ReconTaskStatusDao(sqlConfiguration);
-
bind(ReconTaskStatusDao.class).toInstance(reconTaskStatusDao);
StorageContainerServiceProvider mockScmServiceProvider = mock(
@@ -145,16 +148,17 @@ public class TestContainerEndpoint extends AbstractOMMetadataManagerTest {
bind(ContainerEndpoint.class);
}
});
+
containerEndpoint = injector.getInstance(ContainerEndpoint.class);
containerDbServiceProvider = injector.getInstance(
ContainerDBServiceProvider.class);
StatsSchemaDefinition schemaDefinition = injector.getInstance(
StatsSchemaDefinition.class);
schemaDefinition.initializeSchema();
- UtilizationSchemaDefinition utilizationSchemaDefinition =
- injector.getInstance(UtilizationSchemaDefinition.class);
- utilizationSchemaDefinition.initializeSchema();
- missingContainersDao = injector.getInstance(MissingContainersDao.class);
+ ContainerSchemaDefinition containerSchemaDefinition =
+ injector.getInstance(ContainerSchemaDefinition.class);
+ containerSchemaDefinition.initializeSchema();
+ containerSchemaManager = injector.getInstance(ContainerSchemaManager.class);
}
@Before
@@ -426,9 +430,13 @@ public class TestContainerEndpoint extends AbstractOMMetadataManagerTest {
// Add missing containers to the database
long missingSince = System.currentTimeMillis();
- MissingContainers newRecord =
- new MissingContainers(1L, missingSince);
- missingContainersDao.insert(newRecord);
+ containerSchemaManager.addMissingContainer(1L, missingSince);
+
+ // Add container history for id 1
+ containerSchemaManager.upsertContainerHistory(1L, "host1", 1L);
+ containerSchemaManager.upsertContainerHistory(1L, "host2", 2L);
+ containerSchemaManager.upsertContainerHistory(1L, "host3", 3L);
+ containerSchemaManager.upsertContainerHistory(1L, "host4", 4L);
response = containerEndpoint.getMissingContainers();
responseObject = (MissingContainersResponse) response.getEntity();
@@ -440,7 +448,38 @@ public class TestContainerEndpoint extends AbstractOMMetadataManagerTest {
assertEquals(containerID.getId(), container.getContainerID());
assertEquals(keyCount, container.getKeys());
assertEquals(pipelineID.getId(), container.getPipelineID());
- assertEquals(0, container.getDatanodes().size());
+ assertEquals(3, container.getReplicas().size());
assertEquals(missingSince, container.getMissingSince());
+
+ Set<String> datanodes = Collections.unmodifiableSet(
+ new HashSet<>(Arrays.asList("host2", "host3", "host4")));
+ List<ContainerHistory> containerReplicas = container.getReplicas();
+ containerReplicas.forEach(history -> {
+ Assert.assertTrue(datanodes.contains(history.getDatanodeHost()));
+ });
+ }
+
+ @Test
+ public void testGetReplicaHistoryForContainer() {
+ // Add container history for id 1
+ containerSchemaManager.upsertContainerHistory(1L, "host1", 1L);
+ containerSchemaManager.upsertContainerHistory(1L, "host2", 2L);
+ containerSchemaManager.upsertContainerHistory(1L, "host3", 3L);
+ containerSchemaManager.upsertContainerHistory(1L, "host4", 4L);
+ containerSchemaManager.upsertContainerHistory(1L, "host1", 5L);
+
+ Response response = containerEndpoint.getReplicaHistoryForContainer(1L);
+ List<ContainerHistory> histories =
+ (List<ContainerHistory>) response.getEntity();
+ Set<String> datanodes = Collections.unmodifiableSet(
+ new HashSet<>(Arrays.asList("host1", "host2", "host3", "host4")));
+ Assert.assertEquals(4, histories.size());
+ histories.forEach(history -> {
+ Assert.assertTrue(datanodes.contains(history.getDatanodeHost()));
+ if (history.getDatanodeHost().equals("host1")) {
+ Assert.assertEquals(1L, (long) history.getFirstReportTimestamp());
+ Assert.assertEquals(5L, (long) history.getLastReportTimestamp());
+ }
+ });
}
}
\ No newline at end of file
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java
index 6d5ea50..07d5052 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.ozone.recon.spi.StorageContainerServiceProvider;
import org.apache.hadoop.ozone.recon.spi.impl.OzoneManagerServiceProviderImpl;
import org.apache.hadoop.ozone.recon.spi.impl.StorageContainerServiceProviderImpl;
import org.apache.hadoop.test.LambdaTestUtils;
+import org.hadoop.ozone.recon.schema.ContainerSchemaDefinition;
import org.hadoop.ozone.recon.schema.ReconTaskSchemaDefinition;
import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao;
import org.jooq.Configuration;
@@ -67,7 +68,6 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import javax.ws.rs.core.Response;
-import java.io.IOException;
import java.util.UUID;
import java.util.concurrent.Callable;
@@ -91,7 +91,7 @@ public class TestEndpoints extends AbstractOMMetadataManagerTest {
private DatanodeDetailsProto datanodeDetailsProto;
private Pipeline pipeline;
- private void initializeInjector() throws IOException {
+ private void initializeInjector() throws Exception {
reconOMMetadataManager = getTestMetadataManager(
initializeNewOmMetadataManager());
OzoneManagerServiceProviderImpl omServiceProviderMock =
@@ -159,10 +159,13 @@ public class TestEndpoints extends AbstractOMMetadataManagerTest {
clusterStateEndpoint = injector.getInstance(ClusterStateEndpoint.class);
reconScm = (ReconStorageContainerManagerFacade)
injector.getInstance(OzoneStorageContainerManager.class);
+ ContainerSchemaDefinition containerSchemaDefinition =
+ injector.getInstance(ContainerSchemaDefinition.class);
+ containerSchemaDefinition.initializeSchema();
}
@Before
- public void setUp() throws IOException {
+ public void setUp() throws Exception {
// The following setup runs only once
if (!isSetupDone) {
initializeInjector();
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/fsck/TestMissingContainerTask.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/fsck/TestMissingContainerTask.java
index 639373c..ba3357b 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/fsck/TestMissingContainerTask.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/fsck/TestMissingContainerTask.java
@@ -32,10 +32,12 @@ import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.container.ContainerManager;
import org.apache.hadoop.hdds.scm.container.ContainerReplica;
import org.apache.hadoop.ozone.recon.persistence.AbstractSqlDatabaseTest;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.scm.ReconStorageContainerManagerFacade;
import org.apache.hadoop.test.LambdaTestUtils;
+import org.hadoop.ozone.recon.schema.ContainerSchemaDefinition;
import org.hadoop.ozone.recon.schema.ReconTaskSchemaDefinition;
-import org.hadoop.ozone.recon.schema.UtilizationSchemaDefinition;
+import org.hadoop.ozone.recon.schema.tables.daos.ContainerHistoryDao;
import org.hadoop.ozone.recon.schema.tables.daos.MissingContainersDao;
import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao;
import org.hadoop.ozone.recon.schema.tables.pojos.MissingContainers;
@@ -58,10 +60,16 @@ public class TestMissingContainerTask extends AbstractSqlDatabaseTest {
ReconTaskSchemaDefinition.class);
taskSchemaDefinition.initializeSchema();
- UtilizationSchemaDefinition schemaDefinition =
- getInjector().getInstance(UtilizationSchemaDefinition.class);
+ ContainerSchemaDefinition schemaDefinition =
+ getInjector().getInstance(ContainerSchemaDefinition.class);
schemaDefinition.initializeSchema();
+ MissingContainersDao missingContainersTableHandle =
+ new MissingContainersDao(sqlConfiguration);
+
+ ContainerSchemaManager containerSchemaManager =
+ new ContainerSchemaManager(mock(ContainerHistoryDao.class),
+ schemaDefinition, missingContainersTableHandle);
ReconStorageContainerManagerFacade scmMock =
mock(ReconStorageContainerManagerFacade.class);
ContainerManager containerManagerMock = mock(ContainerManager.class);
@@ -85,25 +93,22 @@ public class TestMissingContainerTask extends AbstractSqlDatabaseTest {
when(containerManagerMock.getContainerReplicas(new ContainerID(3L)))
.thenReturn(Collections.emptySet());
- MissingContainersDao missingContainersTableHandle =
- new MissingContainersDao(sqlConfiguration);
List<MissingContainers> all = missingContainersTableHandle.findAll();
Assert.assertTrue(all.isEmpty());
long currentTime = System.currentTimeMillis();
ReconTaskStatusDao reconTaskStatusDao =
new ReconTaskStatusDao(sqlConfiguration);
- MissingContainersDao missingContainersDao =
- new MissingContainersDao(sqlConfiguration);
MissingContainerTask missingContainerTask =
new MissingContainerTask(scmMock, reconTaskStatusDao,
- missingContainersDao);
+ containerSchemaManager);
missingContainerTask.register();
missingContainerTask.start();
LambdaTestUtils.await(6000, 1000, () ->
- (missingContainersTableHandle.findAll().size() == 2));
- all = missingContainersTableHandle.findAll();
+ (containerSchemaManager.getAllMissingContainers().size() == 2));
+
+ all = containerSchemaManager.getAllMissingContainers();
// Container IDs 2 and 3 should be present in the missing containers table
Set<Long> missingContainerIDs = Collections.unmodifiableSet(
new HashSet<>(Arrays.asList(2L, 3L))
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java
index 22cc55b..0d7d102 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java
@@ -221,8 +221,6 @@ public class TestUtilizationSchemaDefinition extends AbstractSqlDatabaseTest {
dbRecord = fileCountBySizeDao.findById(1024L);
assertEquals(Long.valueOf(2), dbRecord.getCount());
-
-
Table<FileCountBySizeRecord> fileCountBySizeRecordTable =
fileCountBySizeDao.getTable();
List<UniqueKey<FileCountBySizeRecord>> tableKeys =
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/scm/AbstractReconContainerManagerTest.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/scm/AbstractReconContainerManagerTest.java
index fce22b2..b0ecfe2 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/scm/AbstractReconContainerManagerTest.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/scm/AbstractReconContainerManagerTest.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hdds.scm.node.SCMNodeManager;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.server.SCMStorageConfig;
import org.apache.hadoop.hdds.server.events.EventQueue;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.spi.StorageContainerServiceProvider;
import org.junit.After;
import org.junit.Before;
@@ -72,7 +73,7 @@ public class AbstractReconContainerManagerTest {
new SCMNodeManager(conf, scmStorageConfig, eventQueue, clusterMap);
pipelineManager = new ReconPipelineManager(conf, nodeManager, eventQueue);
containerManager = new ReconContainerManager(conf, pipelineManager,
- getScmServiceProvider());
+ getScmServiceProvider(), mock(ContainerSchemaManager.class));
}
@After
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/types/GuiceInjectorUtilsForTests.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/types/GuiceInjectorUtilsForTests.java
index d147e58..71218c0 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/types/GuiceInjectorUtilsForTests.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/types/GuiceInjectorUtilsForTests.java
@@ -24,6 +24,7 @@ import com.google.inject.Injector;
import com.google.inject.Singleton;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.recon.persistence.AbstractSqlDatabaseTest;
+import org.apache.hadoop.ozone.recon.persistence.ContainerSchemaManager;
import org.apache.hadoop.ozone.recon.persistence.DataSourceConfiguration;
import org.apache.hadoop.ozone.recon.persistence.JooqPersistenceModule;
import org.apache.hadoop.ozone.recon.recovery.ReconOMMetadataManager;
@@ -33,6 +34,7 @@ import org.apache.hadoop.ozone.recon.spi.impl.ContainerDBServiceProviderImpl;
import org.apache.hadoop.ozone.recon.spi.impl.OzoneManagerServiceProviderImpl;
import org.apache.hadoop.ozone.recon.spi.impl.ReconContainerDBProvider;
import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.hadoop.ozone.recon.schema.tables.daos.ContainerHistoryDao;
import org.hadoop.ozone.recon.schema.tables.daos.MissingContainersDao;
import org.jooq.Configuration;
import org.junit.Assert;
@@ -108,7 +110,11 @@ public interface GuiceInjectorUtilsForTests {
baseInjector.getInstance((Configuration.class));
MissingContainersDao missingContainersDao =
new MissingContainersDao(sqlConfiguration);
+ ContainerHistoryDao containerHistoryDao =
+ new ContainerHistoryDao(sqlConfiguration);
bind(MissingContainersDao.class).toInstance(missingContainersDao);
+ bind(ContainerHistoryDao.class).toInstance(containerHistoryDao);
+ bind(ContainerSchemaManager.class).in(Singleton.class);
bind(ContainerDBServiceProvider.class).to(
ContainerDBServiceProviderImpl.class).in(Singleton.class);
}
---------------------------------------------------------------------
To unsubscribe, e-mail: ozone-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: ozone-commits-help@hadoop.apache.org